#  File src/library/tools/R/admin.R
#  Part of the R package, https://www.R-project.org
#
#  Copyright (C) 1995-2025 The R Core Team
#
#  This program is free software; you can redistribute it and/or modify
#  it under the terms of the GNU General Public License as published by
#  the Free Software Foundation; either version 2 of the License, or
#  (at your option) any later version.
#
#  This program is distributed in the hope that it will be useful,
#  but WITHOUT ANY WARRANTY; without even the implied warranty of
#  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
#  GNU General Public License for more details.
#
#  A copy of the GNU General Public License is available at
#  https://www.R-project.org/Licenses/


### * .install_package_description

## called from basepkg.mk and .install_packages
.install_package_description <-
function(dir, outDir, builtStamp=character())
{
    ## Function for taking the DESCRIPTION package meta-information,
    ## checking/validating it, and installing it with the 'Built:'
    ## field added.  Note that from 1.7.0 on, packages without
    ## compiled code are not marked as being from any platform.
    ##
    ## Arguments:
    ##   dir        package source directory holding DESCRIPTION
    ##   outDir     installation directory: receives the rewritten
    ##              DESCRIPTION plus Meta/package.rds and Meta/features.rds
    ##   builtStamp optional build timestamp (character); when empty, the
    ##              current time in UTC is used

    ## Check first.  Note that this also calls .read_description(), but
    ## .check_package_description() currently really needs to know the
    ## path to the DESCRIPTION file, and returns an object with check
    ## results and not the package metadata ...
    ok <- .check_package_description(file.path(dir, "DESCRIPTION"))
    ## Any non-empty component of the check results indicates a problem.
    if(any(as.integer(lengths(ok)) > 0L)) {
        stop(paste(gettext("Invalid DESCRIPTION file") ,
                   paste(format(ok), collapse = "\n\n"),
                   sep = "\n\n"),
             domain = NA,
             call. = FALSE)
    }

    ## This reads (in C locale) byte-by-byte, declares latin1 or UTF-8.
    ## Maybe it would be better to re-encode others (there are none at
    ## present, at least in a UTF-8 locale?)
    db <- .read_description(file.path(dir, "DESCRIPTION"))

    ## should not have a Built: field, so ignore it if it is there
    nm <- names(db)
    if("Built" %in% nm) {
        db <- db[-match("Built", nm)]
        warning(gettextf("*** someone has corrupted the Built field in package '%s' ***",
                         db["Package"]),
                domain = NA,
                call. = FALSE)
    }

    OStype <- R.version$platform
    ## On macOS with a sub-architecture set, record a 'universal' platform.
    if (grepl("-apple-darwin", OStype) && nzchar(Sys.getenv("R_ARCH")))
        OStype <- sub(".*-apple-darwin", "universal-apple-darwin", OStype)
    ## Some build systems want to supply a package-build timestamp for
    ## reproducibility
    if (length(builtStamp) == 0L) {
        ## Prefer date in ISO 8601 format, UTC, avoid sub-seconds.
        builtStamp <- format(Sys.time(), "%Y-%m-%d %H:%M:%S",
                             tz = "UTC", usetz = TRUE)
    }
    ## Built has the form "R x.y; platform; timestamp; OStype", with an
    ## empty platform when there is no src directory (no compiled code).
    Built <-
	paste0("R ",
	       paste(R.version[c("major", "minor")], collapse = "."),
	       "; ",
	       if(dir.exists(file.path(dir, "src"))) OStype else "",
	       "; ",
               builtStamp,
	       "; ",
	       .OStype())

    ## At some point of time, we had:
    ##   We must not split the Built: field across lines.
    ## Not sure if this is still true.  If not, the following could be
    ## simplified to
    ##   db["Built"] <- Built
    ##   write.dcf(rbind(db), file.path(outDir, "DESCRIPTION"))
    ## But in any case, it is true for fields obtained from expanding R
    ## fields (Authors@R): these should not be reformatted.

    db <- c(db,
            .expand_package_description_db_R_fields(db),
            Built = Built)

    .write_description(db, file.path(outDir, "DESCRIPTION"))

    outMetaDir <- file.path(outDir, "Meta")
    if(!dir.exists(outMetaDir) && !dir.create(outMetaDir))
         stop(gettextf("cannot open directory '%s'",
                       outMetaDir),
              domain = NA)
    ## Cache the parsed metadata for fast access (e.g. by library()).
    saveInfo <- .split_description(db)
    saveRDS(saveInfo, file.path(outMetaDir, "package.rds"))

    ## Record the internals ID of the R that installed the package.
    features <- list(internalsID = .Internal(internalsID()))
    saveRDS(features, file.path(outMetaDir, "features.rds"))

    invisible()
}

### * .split_description

## also used in .getRequiredPackages
.split_description <-
function(db, verbose = FALSE)
{
    ## Split the named character vector 'db' (as returned by
    ## .read_description()) into a structured "packageDescription2" list
    ## with the parsed Built info and split dependency fields.
    if(!is.na(Built <- db["Built"])) {
        ## Built has the form "R x.y.z; platform; date; OStype".
        Built <- as.list(strsplit(Built, "; ")[[1L]])
        if(length(Built) != 4L) {
            warning(gettextf("*** someone has corrupted the Built field in package '%s' ***",
                             db["Package"]),
                    domain = NA,
                    call. = FALSE)
            Built <- NULL
        } else {
            names(Built) <- c("R", "Platform", "Date", "OStype")
            ## Strip the leading "R " and parse the version number.
            Built[["R"]] <- R_system_version(sub("^R ([0-9.]+)", "\\1",
                                                 Built[["R"]]))
        }
    } else Built <- NULL
    ## might perhaps have multiple entries
    Depends <- .split_dependencies(db[names(db) %in% "Depends"])
    ## several packages 'Depends' on base!
    ind <- match("base", names(Depends), 0L)
    if(ind) Depends <- Depends[-ind]
    ## We only need Rdepends for R < 2.7.0, but we still need to be
    ## able to check that someone is not trying to load this into a
    ## very old version of R.
    if("R" %in% names(Depends)) {
        ## Rdeps2 keeps all 'R (...)' entries; Rdeps only the first one.
        Rdeps2 <- Depends["R" == names(Depends)]
        names(Rdeps2) <- NULL
        Rdeps <- Depends[["R", exact = TRUE]] # the first one
        Depends <- Depends[names(Depends) != "R"]
        ## several packages have 'Depends: R', which is a noop.
        if(verbose && length(Rdeps) == 1L)
             message("WARNING: omitting pointless dependence on 'R' without a version requirement")
        ## Length 1 means just the name, i.e. no version requirement.
        if(length(Rdeps) <= 1L)
            Rdeps2 <- Rdeps <- NULL
    } else Rdeps2 <- Rdeps <- NULL
    Rdeps <- as.vector(Rdeps)
    Suggests <- .split_dependencies(db[names(db) %in% "Suggests"])
    Imports <- .split_dependencies(db[names(db) %in% "Imports"])
    LinkingTo <- .split_dependencies(db[names(db) %in% "LinkingTo"])
    structure(list(DESCRIPTION = db, Built = Built,
                   Rdepends = Rdeps, Rdepends2 = Rdeps2,
                   Depends = Depends, Suggests = Suggests,
                   Imports = Imports, LinkingTo = LinkingTo),
              class = "packageDescription2")
}

### * .vinstall_package_descriptions_as_RDS

## called from src/library/Makefile
.vinstall_package_descriptions_as_RDS <-
function(dir, packages)
{
    ## For the given packages installed in @file{dir}, install their
    ## DESCRIPTION package metadata as R metadata (Meta/package.rds).
    ## Really only useful for base packages under Unix.
    ## See @file{src/library/Makefile.in}.
    ##
    ## 'packages' is a single whitespace-separated string of package names.

    for(p in unlist(strsplit(packages, "[[:space:]]+"))) {
        meta_dir <- file.path(dir, p, "Meta")
        ## domain = NA added for consistency with every other
        ## stop(gettextf(...)) call in this file (the message is already
        ## assembled via gettextf and must not be translated again).
        if(!dir.exists(meta_dir) && !dir.create(meta_dir))
            stop(gettextf("cannot open directory '%s'", meta_dir),
                 domain = NA)
        package_info_dcf_file <- file.path(dir, p, "DESCRIPTION")
        package_info_rds_file <- file.path(meta_dir, "package.rds")
        ## Skip packages whose RDS metadata is already newer than their
        ## DESCRIPTION file.
        if(file_test("-nt",
                     package_info_rds_file,
                     package_info_dcf_file))
            next
        saveRDS(.split_description(.read_description(package_info_dcf_file)),
                package_info_rds_file)
    }
    invisible()
}

### * .update_package_rds

## not used
.update_package_rds <-
function(lib.loc = NULL)
{
    ## Rebuild the dumped package descriptions (Meta/package.rds) for
    ## every package found in the given library paths (default: all of
    ## .libPaths()).
    if (is.null(lib.loc)) lib.loc <- .libPaths()
    for (lib in lib.loc[file.exists(lib.loc)]) {
        pkg_dirs <- list.files(lib, all.files = FALSE, full.names = TRUE)
        for (pkg_dir in pkg_dirs) {
            ## Only directories that actually look like packages.
            if (!file.exists(file.path(pkg_dir, "DESCRIPTION")))
                next
            print(pkg_dir)
            .install_package_description(pkg_dir, pkg_dir)
        }
    }
}

### * .install_package_code_files

.install_package_code_files <-
function(dir, outDir)
{
    ## Concatenate the R code files of the package with sources in 'dir'
    ## (in collation order if a Collate field is given) into the single
    ## file <outDir>/R/<package>, re-encoding when the DESCRIPTION
    ## declares an Encoding and the current locale is not C/POSIX.
    if(!dir.exists(dir))
        stop(gettextf("directory '%s' does not exist", dir),
             domain = NA)
    dir <- file_path_as_absolute(dir)

    ## Attempt to set the LC_COLLATE locale to 'C' to turn off locale
    ## specific sorting.
    curLocale <- Sys.getlocale("LC_COLLATE")
    on.exit(Sys.setlocale("LC_COLLATE", curLocale), add = TRUE)
    ## (Guaranteed to work as per the Sys.setlocale() docs.)
    lccollate <- "C"
    if(Sys.setlocale("LC_COLLATE", lccollate) != lccollate) {
        ## <NOTE>
        ## I don't think we can give an error here.
        ## It may be the case that Sys.setlocale() fails because the "OS
        ## reports request cannot be honored" (src/main/platform.c), in
        ## which case we should still proceed ...
        warning("cannot turn off locale-specific sorting via LC_COLLATE")
        ## </NOTE>
    }

    ## We definitely need a valid DESCRIPTION file.
    db <- .read_description(file.path(dir, "DESCRIPTION"))

    codeDir <- file.path(dir, "R")
    if(!dir.exists(codeDir)) return(invisible())

    codeFiles <- list_files_with_type(codeDir, "code", full.names = FALSE)

    ## An OS-specific collation field takes precedence over plain Collate.
    collationField <-
        c(paste0("Collate.", .OStype()), "Collate")
    if(any(i <- collationField %in% names(db))) {
        collationField <- collationField[i][1L]
        codeFilesInCspec <- .read_collate_field(db[collationField])
        ## Duplicated entries in the collation spec?
        badFiles <-
            unique(codeFilesInCspec[duplicated(codeFilesInCspec)])
        if(length(badFiles)) {
            out <- gettextf("\nduplicated files in '%s' field:",
                            collationField)
            out <- paste(out,
                         paste0("  ", badFiles, collapse = "\n"),
                         sep = "\n")
            stop(out, domain = NA)
        }
        ## See which files are listed in the collation spec but don't
        ## exist.
        badFiles <- setdiff(codeFilesInCspec, codeFiles)
        if(length(badFiles)) {
            out <- gettextf("\nfiles in '%s' field missing from '%s':",
                            collationField,
                            codeDir)
            out <- paste(out,
                         paste0("  ", badFiles, collapse = "\n"),
                         sep = "\n")
            stop(out, domain = NA)
        }
        ## See which files exist but are missing from the collation
        ## spec.  Note that we do not want the collation spec to use
        ## only a subset of the available code files.
        badFiles <- setdiff(codeFiles, codeFilesInCspec)
        if(length(badFiles)) {
            out <- gettextf("\nfiles in '%s' missing from '%s' field:",
                            codeDir,
                            collationField)
            out <- paste(out,
                         paste0("  ", badFiles, collapse = "\n"),
                         sep = "\n")
            stop(out, domain = NA)
        }
        ## Everything's groovy ...
        codeFiles <- codeFilesInCspec
    }

    codeFiles <- file.path(codeDir, codeFiles)

    if(!dir.exists(outDir) && !dir.create(outDir))
        stop(gettextf("cannot open directory '%s'", outDir),
             domain = NA)
    outCodeDir <- file.path(outDir, "R")
    if(!dir.exists(outCodeDir) && !dir.create(outCodeDir))
        stop(gettextf("cannot open directory '%s'", outCodeDir),
             domain = NA)
    outFile <- file.path(outCodeDir, db["Package"])
    if(!file.create(outFile))
        stop(gettextf("unable to create '%s'", outFile), domain = NA)
    ## The concatenated file starts by recording the package name.
    writeLines(paste0(".packageName <- \"", db["Package"], "\""),
               outFile)
    enc <- as.vector(db["Encoding"])
    need_enc <- !is.na(enc) # Encoding was specified
    testParse <- function(...) { # parse only to detect errors
        op <- options(showErrorCalls=FALSE)
        on.exit(options(op))
        parse(...)
        invisible()
    }
    ## assume that if locale is 'C' we can use 8-bit encodings unchanged.
    if(need_enc && (Sys.getlocale("LC_CTYPE") %notin% c("C", "POSIX"))) {
        con <- file(outFile, "a")
        ## NOTE(review): no add = TRUE here, so this call replaces the
        ## LC_COLLATE restore registered above -- confirm this is intended.
        on.exit(close(con))  # Windows does not like files left open
        badfiles <- c()
        for(f in codeFiles) {
            ## We needed more care here: iconv() in macOS 14.1 throws
            ## Aborts on some incorrectly encoded inputs.
            lines <- readLines(f, warn = FALSE)
            if (enc == "UTF-8") {
                valid <- validUTF8(lines)
                if (any(!valid)) {
                    warning(sprintf("file %s is invalid UTF-8",
                                    sQuote(basename(f))),
                            domain = NA, call. = FALSE)
                    badfiles <- c(badfiles, basename(f))
                }
            }
            ## Re-encode to the native encoding; on failure retry with
            ## <xx> byte escapes so something gets installed.
            tmp <- iconv(lines, from = enc, to = "")
            bad <- which(is.na(tmp))
            if(length(bad))
                tmp <- iconv(lines, from = enc, to = "", sub = "byte")
            ## do not report purely comment lines,
            ## nor trailing comments not after quotes
            comm <- grep("^[^#'\"]*#", lines[bad],
                         invert = TRUE, useBytes = TRUE)
            bad2 <- bad[comm]
            if(length(bad2)) {
                warning(sprintf(ngettext(length(bad2),
                                         "unable to re-encode %s line %s",
                                         "unable to re-encode %s lines %s"),
                                sQuote(basename(f)),
                                paste(bad2, collapse = ", ")),
                        domain = NA, call. = FALSE)
            }
            ## A #line directive keeps error messages pointing at the
            ## original source file.
            line1 <- paste0("#line 1 \"", f, "\"")
            testParse(text = c(line1, tmp))
            writeLines(line1, con)
            writeLines(tmp, con)
        }
        if(length(badfiles)) {
            validate <- config_val_to_logical(Sys.getenv("_R_CHECK_VALIDATE_UTF8_",
                                                         "FALSE"))
            if (validate)
                stop("invalidly encoded .R file(s)", domain = NA, call. = FALSE)
            else warning("invalidly encoded .R file(s)",
                         domain = NA, call. = FALSE)
        }
	close(con); on.exit()
    } else {
        ## <NOTE>
        ## It may be safer to do
        ##   writeLines(sapply(codeFiles, readLines), outFile)
        ## instead, but this would be much slower ...
        ## use fast version of file.append that ensures LF between files
        lapply(codeFiles, testParse)
        if(!all(.file_append_ensuring_LFs(outFile, codeFiles)))
            stop("unable to write code files")
        ## </NOTE>
    }
    invisible()
}


### * .install_package_indices
## called from R CMD INSTALL

.install_package_indices <-
function(dir, outDir)
{
    ## Install the INDEX file and the Rd/demo indices for the package
    ## with sources in 'dir' into its installation directory 'outDir'.
    options(warn = 1)                   # to ensure warnings get seen
    for(d in c(dir, outDir))
        if(!dir.exists(d))
            stop(gettextf("directory '%s' does not exist", d),
                 domain = NA)

    ## An @file{INDEX} file present in the package sources is installed
    ## as is, never rebuilt.
    srcIndex <- file.path(dir, "INDEX")
    if(file_test("-f", srcIndex)) {
        dstIndex <- file.path(outDir, "INDEX")
        if(!file.copy(srcIndex, dstIndex, overwrite = TRUE))
            stop(gettextf("unable to copy INDEX to '%s'", dstIndex),
                 domain = NA)
    }

    outMetaDir <- file.path(outDir, "Meta")
    if(!dir.exists(outMetaDir) && !dir.create(outMetaDir))
        stop(gettextf("cannot open directory '%s'", outMetaDir),
             domain = NA)
    .install_package_Rd_indices(dir, outDir)
    .install_package_demo_index(dir, outDir)
    invisible()
}

### * .install_package_Rd_indices

.install_package_Rd_indices <-
function(dir, outDir)
{
    ## Build and install the Rd-based indices for a package:
    ## Meta/Rd.rds, Meta/hsearch.rds, Meta/links.rds, Meta/data.rds (if
    ## there is a data directory) and a plain-text INDEX (if the sources
    ## do not ship one).  'dir' is the package source directory, 'outDir'
    ## the installation directory.
    dir <- file_path_as_absolute(dir)
    docsDir <- file.path(dir, "man")
    dataDir <- file.path(outDir, "data")
    outDir <- file_path_as_absolute(outDir)

    ## <FIXME>
    ## Not clear whether we should use the basename of the directory we
    ## install to, or the package name as obtained from the DESCRIPTION
    ## file in the directory we install from (different for versioned
    ## installs).  We definitely do not want the basename of the dir we
    ## install from.
    packageName <- basename(outDir)
    ## </FIXME>

    allRd <- if(dir.exists(docsDir))
        list_files_with_type(docsDir, "docs") else character()
    ## some people have man dirs without any valid .Rd files
    if(length(allRd)) {
        ## we want the date of the newest .Rd file we will install
        newestRd <- max(file.mtime(allRd))
        ## these files need not exist, which gives NA.
        indices <- c(file.path("Meta", "Rd.rds"),
                     file.path("Meta", "hsearch.rds"),
                     file.path("Meta", "links.rds"),
                     "INDEX")
        upToDate <- file.mtime(file.path(outDir, indices)) >= newestRd
        ## Need full paths here: file.mtime() on the bare file names
        ## would resolve against the current directory and give NA,
        ## forcing a rebuild of the data index every time.
        if(dir.exists(dataDir)
           && length(dataFiles <- list.files(dataDir, full.names = TRUE))) {
            ## Note that the data index is computed from both the package's
            ## Rd files and the data sets actually available.
            newestData <- max(file.mtime(dataFiles))
            upToDate <- c(upToDate,
                          file.mtime(file.path(outDir, "Meta", "data.rds")) >=
                          max(newestRd, newestData))
        }
        ## Note that this is not quite good enough: an Rd file or data file
        ## might have been removed since the indices were made.
        ## The Rd.rds file lives in the installation directory, so look
        ## for it there (a bare relative path would be resolved against
        ## the current directory).
        RdsFile <- file.path(outDir, "Meta", "Rd.rds")
        if(file.exists(RdsFile)) { ## for Rd files
            ## this has file names without path
            files <- readRDS(RdsFile)$File
            if(!identical(basename(allRd), files)) upToDate <- FALSE
        }
        ## we want to proceed if any is NA.
        if(all(upToDate %in% TRUE)) return(invisible())

        ## Rd objects should already have been installed.
        db <- tryCatch(Rd_db(basename(outDir), lib.loc = dirname(outDir)),
                       error = function(e) NULL)
        ## If not, we build the Rd db from the sources:
        if(is.null(db)) db <- .build_Rd_db(dir, allRd)
        contents <- Rd_contents(db)

        .write_Rd_contents_as_RDS(contents,
                                  file.path(outDir, "Meta", "Rd.rds"))

        ## Default encoding for the help search index comes from the
        ## installed DESCRIPTION metadata; NA means none was declared.
        defaultEncoding <- as.vector(readRDS(file.path(outDir, "Meta", "package.rds"))$DESCRIPTION["Encoding"])
        if(is.na(defaultEncoding)) defaultEncoding <- NULL
        saveRDS(.build_hsearch_index(contents, packageName, defaultEncoding),
                 file.path(outDir, "Meta", "hsearch.rds"))

        saveRDS(.build_links_index(contents, packageName),
                 file.path(outDir, "Meta", "links.rds"))

        ## If there is no @file{INDEX} file in the package sources, we
        ## build one.
        ## <NOTE>
        ## We currently do not also save this in RDS format, as we can
        ## always do
        ##   .build_Rd_index(readRDS(file.path(outDir, "Meta", "Rd.rds"))
        if(!file_test("-f", file.path(dir, "INDEX")))
            writeLines(formatDL(.build_Rd_index(contents)),
                       file.path(outDir, "INDEX"))
        ## </NOTE>
    } else {
        contents <- NULL
        ## No Rd files, hence no default encoding to pass on (this was
        ## previously undefined here, giving an error for packages
        ## without Rd files).
        defaultEncoding <- NULL
        saveRDS(.build_hsearch_index(contents, packageName, defaultEncoding),
                 file.path(outDir, "Meta", "hsearch.rds"))

        saveRDS(.build_links_index(contents, packageName),
                 file.path(outDir, "Meta", "links.rds"))

    }
    if(dir.exists(dataDir))
        saveRDS(.build_data_index(outDir, contents),
                 file.path(outDir, "Meta", "data.rds"))
    invisible()
}

### * .install_package_vignettes2
## called from R CMD INSTALL for pre 3.0.2-built tarballs
## and for installation from package sources (missing build/vignette.rds),
## including for the temporary package installation during R CMD build,
## and when building base packages (where we need to tangle vignettes)

.install_package_vignettes2 <-
function(dir, outDir, encoding = "", tangle = FALSE)
{
    ## Build and install the vignette index (Meta/vignette.rds) and HTML
    ## vignette index for a package without a pre-built
    ## build/vignette.rds, optionally tangling the vignettes to produce
    ## their .R files.  'encoding' is unused here (kept for callers).
    dir <- file_path_as_absolute(dir)
    subdirs <- c("vignettes", file.path("inst", "doc"))
    ok <- dir.exists(file.path(dir, subdirs))
    ## Create a vignette index only if the vignette dir exists.
    if (!any(ok))
       return(invisible())

    ## 'vignettes' takes precedence over 'inst/doc'.
    subdir <- subdirs[ok][1L]
    vignetteDir <- file.path(dir, subdir)

    outDir <- file_path_as_absolute(outDir)
    packageName <- basename(outDir)
    outVignetteDir <- file.path(outDir, "doc")
    ## --no-inst installs do not have a outVignetteDir.
    if(!dir.exists(outVignetteDir)) return(invisible())

    ## If there is an HTML index in the @file{inst/doc} subdirectory of
    ## the package source directory (@code{dir}), we do not overwrite it
    ## (similar to top-level @file{INDEX} files).  Installation already
    ## copied this over.
    hasHtmlIndex <- file_test("-f", file.path(vignetteDir, "index.html"))
    htmlIndex <- file.path(outDir, "doc", "index.html")

    vigns <- pkgVignettes(dir = dir, subdirs = subdir, check = TRUE)

    ## Write dummy HTML index if no vignettes are found and exit.
    if(length(vigns$docs) == 0L) {
        ## we don't want to write an index if the directory is in fact empty
        files <- list.files(vignetteDir, all.files = TRUE, no.. = TRUE)
        if((length(files) > 0L) && !hasHtmlIndex)
            .writeVignetteHtmlIndex(packageName, htmlIndex)
        return(invisible())
    }

    if (subdir == "vignettes") {
        ## copy vignette sources over.
        file.copy(vigns$docs, outVignetteDir)
    }

    ## Re-scan from the installed 'doc' dir; fall back to a plain scan
    ## (without outputs/sources) if that fails.
    vigns <- tryCatch({
        pkgVignettes(dir=outDir, subdirs="doc", output=TRUE, source=TRUE)
    }, error = function(ex) {
        pkgVignettes(dir=outDir, subdirs="doc")
    })

    vignetteIndex <- .build_vignette_index(vigns)
    if(tangle && NROW(vignetteIndex) > 0L) {
        cwd <- getwd()
        if (is.null(cwd))
            stop("current working directory cannot be ascertained")
        setwd(outVignetteDir)

	loadVignetteBuilder(dir, mustwork = FALSE)

        ## install tangled versions of Sweave vignettes.
        for(i in seq_along(vigns$docs)) {
            file <- vigns$docs[i]
            ## Skip vignettes whose sources are already installed.
            if (!is.null(vigns$sources) && !is.null(vigns$sources[file][[1]]))
            	next
            file <- basename(file)
            enc <- vigns$encodings[i]

            cat("  ", sQuote(basename(file)),
                if(nzchar(enc)) paste("using", sQuote(enc)), "\n")

	    engine <- try(vignetteEngine(vigns$engines[i]), silent = TRUE)
	    ## tangling in outVignetteDir would fail if the vignette relied
	    ## on SweaveInput/child documents (not copied over),
	    ## but base packages currently don't do that
	    if (!inherits(engine, "try-error"))
            	engine$tangle(file, quiet = TRUE, encoding = enc)
            setwd(outVignetteDir) # just in case some strange tangle function changed it
        }
        setwd(cwd)

        # Update - now from the output directory
        vigns <- pkgVignettes(dir=outDir, subdirs="doc", source=TRUE)

        ## remove any files with no R code (they will have header comments).
        ## if not correctly declared they might not be in the current encoding
        sources <- unlist(vigns$sources)
        for(i in seq_along(sources)) {
            file <- sources[i]
            if (!file_test("-f", file)) next
            bfr <- readLines(file, warn = FALSE)
            if(all(grepl("(^###|^[[:space:]]*$)", bfr, useBytes = TRUE)))
                unlink(file)
        }

        # Update
        vigns <- pkgVignettes(dir=outDir, subdirs="doc", source=TRUE)

        # Add tangle source files (*.R) to the vignette index
        # Only the "main" R file, because tangle may also split
        # output into multiple files
        sources <- character(length(vigns$docs))
        for (i in seq_along(vigns$docs)) {
           name <- vigns$names[i]
           ## NOTE(review): 'engine' here is whatever the tangle loop
           ## above set last -- confirm this is intended for all vignettes.
           source <- find_vignette_product(name, by = "tangle", main = TRUE, dir = vigns$dir, engine = engine)
           if (length(source) > 0L)
              sources[i] <- basename(source)
        }
        vignetteIndex$R <- sources
    }

    if(!hasHtmlIndex)
        .writeVignetteHtmlIndex(packageName, htmlIndex, vignetteIndex)

    saveRDS(vignetteIndex,
             file = file.path(outDir, "Meta", "vignette.rds"))

    invisible()
}

### * .install_package_vignettes3
## called from R CMD INSTALL for 3.0.2 or later tarballs

.install_package_vignettes3 <-
function(dir, outDir, encoding = "")
{
    ## Install the pre-built vignette index (build/vignette.rds, present
    ## in tarballs built by R >= 3.0.2) into Meta, and write an HTML
    ## vignette index unless the package shipped its own.
    ## 'encoding' is unused here (kept for the callers).
    packageName <- basename(outDir)
    dir <- file_path_as_absolute(dir)
    indexname <- file.path(dir, "build", "vignette.rds")
    ## Nothing to do for packages without a pre-built vignette index.
    if (!file_test("-f", indexname))
       return(invisible())

    ## Copy the index to Meta.
    file.copy(indexname, file.path(outDir, "Meta"))

    ## An index.html shipped in @file{inst/doc} (already copied over by
    ## installation) is not overwritten, similar to top-level
    ## @file{INDEX} files.
    htmlIndex <- file.path(outDir, "doc", "index.html")
    hasHtmlIndex <- file_test("-f", htmlIndex)

    vignetteIndex <- readRDS(indexname)

    if(!hasHtmlIndex)
        .writeVignetteHtmlIndex(packageName, htmlIndex, vignetteIndex)

    invisible()
}

### * .install_package_demo_index

.install_package_demo_index <-
function(dir, outDir)
{
    ## Build the demo index for the package sources in 'dir' and save it
    ## as Meta/demo.rds under 'outDir'.  A no-op for packages without a
    ## demo directory.
    demoDir <- file.path(dir, "demo")
    if(!dir.exists(demoDir)) return(invisible())
    saveRDS(.build_demo_index(demoDir),
            file = file.path(outDir, "Meta", "demo.rds"))
    invisible()
}

### * .vinstall_package_indices

## called from src/library/Makefile
.vinstall_package_indices <-
function(src_dir, out_dir, packages)
{
    ## For the given packages with sources rooted at @file{src_dir} and
    ## installations rooted at @file{out_dir}, install the package
    ## indices, then refresh the HTML package listing.
    ## Really only useful for base packages under Unix.
    ## See @file{src/library/Makefile.in}.

    pkgs <- unlist(strsplit(packages, "[[:space:]]+"))
    for(pkg in pkgs)
        .install_package_indices(file.path(src_dir, pkg),
                                 file.path(out_dir, pkg))
    utils::make.packages.html(.Library, verbose = FALSE)
    invisible()
}

### * .install_package_vignettes

## called from src/library/Makefile[.win]
## this is only used when building R
.install_package_vignettes <-
function(dir, outDir, keep.source = TRUE)
{
    ## Weave (and, for TeX output, run texi2pdf on) the vignettes of the
    ## package with sources in 'dir', installing the resulting PDFs under
    ## <outDir>/doc.  Builds happen in a '.vignettes' subdirectory of the
    ## current directory; vignettes whose PDF is up to date are skipped.
    dir <- file_path_as_absolute(dir)
    vigns <- pkgVignettes(dir = dir)
    if(is.null(vigns) || !length(vigns$docs)) return(invisible())

    outDir <- file_path_as_absolute(outDir)
    outVignetteDir <- file.path(outDir, "doc")
    if(!dir.exists(outVignetteDir) && !dir.create(outVignetteDir))
        stop(gettextf("cannot open directory '%s'", outVignetteDir),
             domain = NA)

    ## We have to be careful to avoid repeated rebuilding.
    vignettePDFs <- file.path(outVignetteDir, paste0(vigns$names, ".pdf"))
    upToDate <- file_test("-nt", vignettePDFs, vigns$docs)

    ## The primary use of this function is to build and install PDF
    ## vignettes in base packages.
    ## Hence, we build in a subdir of the current directory rather
    ## than a temp dir: this allows inspection of problems and
    ## automatic cleanup via Make.
    cwd <- getwd()
    if (is.null(cwd))
        stop("current working directory cannot be ascertained")
    buildDir <- file.path(cwd, ".vignettes")
    if(!dir.exists(buildDir) && !dir.create(buildDir))
        stop(gettextf("cannot create directory '%s'", buildDir), domain = NA)
    on.exit(setwd(cwd))
    setwd(buildDir)

    loadVignetteBuilder(vigns$pkgdir)

    ## Process only the vignettes whose PDF is out of date.
    for(i in seq_along(vigns$docs)[!upToDate]) {
        file <- vigns$docs[i]
        name <- vigns$names[i]
        engine <- vignetteEngine(vigns$engines[i])

        message(gettextf("processing %s", sQuote(basename(file))),
                domain = NA)

        ## Note that contrary to all other weave/tangle calls, here
        ## 'file' is not a file in the current directory [hence no
        ## file <- basename(file) above]. However, weave should/must
        ## always create a file ('output') in the current directory.
        output <- tryCatch({
            engine$weave(file, pdf = TRUE, eps = FALSE, quiet = TRUE,
                        keep.source = keep.source, stylepath = FALSE)
            setwd(buildDir)
            find_vignette_product(name, by = "weave", engine = engine)
        }, error = function(e) {
            stop(gettextf("running %s on vignette '%s' failed with message:\n%s",
                 engine[["name"]], file, conditionMessage(e)),
                 domain = NA, call. = FALSE)
        })
        ## In case of an error, do not clean up: should we point to
        ## buildDir for possible inspection of results/problems?
        ## We need to ensure that the src vignettes dir is in (TEX|BIB)INPUTS
        ## and this R's texmf is found (system TEXINPUTS could list another R).
        if (vignette_is_tex(output)) {
            tryCatch({
                texi2pdf(file = output, quiet = TRUE,
                         texinputs = c(vigns$dir, paste0(R.home("share"), "/texmf//")))
                output <- find_vignette_product(name, by = "texi2pdf", engine = engine)
            }, error = function(e) {
                stop(gettextf("compiling TeX file %s failed with message:\n%s",
                 sQuote(output), conditionMessage(e)),
                 domain = NA, call. = FALSE)
            })
	}

        if(!file.copy(output, outVignetteDir, overwrite = TRUE))
            stop(gettextf("cannot copy '%s' to '%s'",
                          output,
                          outVignetteDir),
                 domain = NA)
    }

    ## Compact the installed PDFs only when something was rebuilt.
    if (any(!upToDate))
    compactPDF(outVignetteDir, gs_quality = "ebook")

    ## Need to change out of this dir before we delete it,
    ## at least on Windows.
    setwd(cwd)
    unlink(buildDir, recursive = TRUE)
    ## Now you need to update the HTML index!
    ## This also creates the .R files
    .install_package_vignettes2(dir, outDir, tangle = TRUE)
    invisible()
}

### * .install_package_namespace_info

.install_package_namespace_info <-
function(dir, outDir)
{
    ## Parse the NAMESPACE file of the package with sources in 'dir' and
    ## cache the result as Meta/nsInfo.rds under 'outDir', skipping work
    ## when the cached copy is already up to date.
    dir <- file_path_as_absolute(dir)
    nsFile <- file.path(dir, "NAMESPACE")
    if(!file_test("-f", nsFile)) return(invisible())
    outMetaDir <- file.path(outDir, "Meta")
    nsInfoFilePath <- file.path(outMetaDir, "nsInfo.rds")
    ## Nothing to do if the cached info is newer than the NAMESPACE file.
    if(file_test("-nt", nsInfoFilePath, nsFile)) return(invisible())
    nsInfo <- parseNamespaceFile(basename(dir), dirname(dir))
    if(!dir.exists(outMetaDir) && !dir.create(outMetaDir))
        stop(gettextf("cannot open directory '%s'", outMetaDir),
             domain = NA)
    saveRDS(nsInfo, nsInfoFilePath)
    invisible()
}

### * .vinstall_package_namespaces_as_RDS

## called from src/library/Makefile
.vinstall_package_namespaces_as_RDS <-
function(dir, packages)
{
    ## For the given packages installed in @file{dir} which have a
    ## NAMESPACE file, install the namespace info as R metadata.
    ## Really only useful for base packages under Unix.
    ## See @file{src/library/Makefile.in}.

    for(pkg in unlist(strsplit(packages, "[[:space:]]+"))) {
        pkg_dir <- file.path(dir, pkg)
        ## Source and installation directories coincide here.
        .install_package_namespace_info(pkg_dir, pkg_dir)
    }
    invisible()
}

### * .install_package_Rd_objects

## called from src/library/Makefile
.install_package_Rd_objects <-
function(dir, outDir, encoding = "unknown")
{
    ## Build the Rd database of the package with sources in 'dir' and
    ## install it as a lazy-load DB under <outDir>/help, together with
    ## paths.rds (the source paths of the Rd files) and any Rd macros.
    dir <- file_path_as_absolute(dir)
    mandir <- file.path(dir, "man")
    manfiles <- if(!dir.exists(mandir)) character()
    else list_files_with_type(mandir, "docs")
    manOutDir <- file.path(outDir, "help")
    dir.create(manOutDir, FALSE)
    db_file <- file.path(manOutDir,
                         paste0(basename(outDir), ".rdx"))
    ## build/partial.rdb (if present) holds pre-built Rd fragments.
    built_file <- file.path(dir, "build", "partial.rdb")
    ## Install any user-defined Rd macros alongside the help DB.
    macro_files <- list.files(file.path(dir, "man", "macros"), pattern = "\\.Rd$", full.names = TRUE)
    if (length(macro_files)) {
    	macroDir <- file.path(manOutDir, "macros")
    	dir.create(macroDir, FALSE)
    	file.copy(macro_files, macroDir, overwrite = TRUE)
    }
    ## Avoid (costly) rebuilding if not needed.
    ## Actually, it seems no more costly than these tests, which it also does
    pathsFile <- file.path(manOutDir, "paths.rds")
    ## Rebuild when the DB or path list is missing, the set of Rd files
    ## changed, or any Rd file is newer than the DB.
    if(!file_test("-f", db_file) || !file.exists(pathsFile) ||
       !identical(sort(manfiles), sort(readRDS(pathsFile))) ||
       !all(file_test("-nt", db_file, manfiles))) {
        db <- .build_Rd_db(dir, manfiles, db_file = db_file,
                           encoding = encoding, built_file = built_file)
        nm <- as.character(names(db)) # Might be NULL
        ## 'first' marks where the mandir-relative part of each path starts.
        saveRDS(structure(nm,
                          first = nchar(file.path(mandir)) + 2L),
                pathsFile)
        names(db) <- sub("\\.[Rr]d$", "", basename(nm))
        makeLazyLoadDB(db, file.path(manOutDir, basename(outDir)))
    }
    invisible()
}

### * .install_package_demos

## called from basepkg.mk and .install_packages
.install_package_demos <-
function(dir, outDir)
{
    ## Copy a source package's demo files into the installed package.
    ## NB: we no longer install 00Index
    srcDir <- file.path(dir, "demo")
    if(!dir.exists(srcDir)) return()
    demos <- list_files_with_type(srcDir, "demo", full.names = FALSE)
    if(!length(demos)) return()
    dstDir <- file.path(outDir, "demo")
    if(!dir.exists(dstDir)) dir.create(dstDir)
    file.copy(file.path(srcDir, demos), dstDir, overwrite = TRUE)
}


### * .find_cinclude_paths

.find_cinclude_paths <-
function(pkgs, lib.loc = NULL, file = NULL)
{
    ## Resolve a comma-separated string of package names (or, if
    ## 'file' is given, the 'LinkingTo' field of that DESCRIPTION
    ## file) to installed package locations, and print the
    ## corresponding -I"/path/to/package/include" compiler flags.
    if(!is.null(file)) {
        linkingTo <- read.dcf(file, "LinkingTo")[1L, 1L]
        if(is.na(linkingTo)) return(invisible())
        pkgs <- linkingTo
    }
    wanted <- strsplit(pkgs[1L], ",[[:blank:]]*")[[1L]]
    found <- find.package(wanted, lib.loc, quiet = TRUE)
    if(length(found))
	cat(paste(paste0('-I"', found, '/include"'), collapse=" "))
    return(invisible())
}

### * .Rtest_package_depends_R_version

.Rtest_package_depends_R_version <-
function(dir)
{
    ## Check whether the running R satisfies the R version
    ## requirement(s) in a package's 'Depends' field.  Returns 0 when
    ## satisfied, non-zero otherwise (after printing an ERROR message).
    if(missing(dir)) dir <- "."
    meta <- .read_description(file.path(dir, "DESCRIPTION"))
    deps <- .split_description(meta, verbose = TRUE)$Rdepends2
    status <- 0
    current <- getRversion()
    for(depends in deps) {
        ## .split_description will have ensured that this is NULL or
        ## of length 3.
        if(length(depends) > 1L) {
            ## .check_package_description will insist on these operators
            if(depends$op %notin% c("<=", ">=", "<", ">", "==", "!="))
                message("WARNING: malformed 'Depends' field in 'DESCRIPTION'")
            else {
                status <- if(inherits(depends$version, "numeric_version"))
                    !do.call(depends$op, list(current, depends$version))
                else {
                    ## Requirement given as an svn revision ("r12345"):
                    ## only meaningful for non-release (Patched/devel)
                    ## builds, so release builds always pass here.
                    ver <- R.version
                    if (ver$status %in% c("", "Patched")) FALSE
                    else !do.call(depends$op,
                                 list(ver[["svn rev"]],
                                      as.numeric(sub("^r", "", depends$version))))
                }
            }
            if(status != 0) {
                ## Prefer the package name exported by the installer,
                ## falling back to the DESCRIPTION metadata.
                package <- Sys.getenv("R_PACKAGE_NAME")
                if(!nzchar(package))
                    package <- meta["Package"]
                msg <- if(nzchar(package))
                    gettextf("ERROR: this R is version %s, package '%s' requires R %s %s",
                                    current, package,
                                    depends$op, depends$version)
                else
                    gettextf("ERROR: this R is version %s, required is R %s %s",
                                    current, depends$op, depends$version)
                message(strwrap(msg, exdent = 2L))
                break
            }
        }
    }
    status
}

## no longer used
.test_package_depends_R_version <-
function(dir)
    ## Quit R, using the dependency-check result as the exit status.
    q(status = .Rtest_package_depends_R_version(dir))


### * .test_load_package

.test_load_package <- function(pkg_name, lib)
{
    ## Try to attach an installed package, printing warnings as they
    ## occur, and signal a plain error if attaching fails.  The 'warn'
    ## option is deliberately left set: this runs in a throw-away
    ## child process during installation/checking.
    options(warn = 1)
    loaded <- try(suppressPackageStartupMessages(
        library(pkg_name, lib.loc = lib, character.only = TRUE,
                logical.return = TRUE)))
    if (inherits(loaded, "try-error") || !loaded)
        stop("loading failed", call. = FALSE)
}


### * checkRdaFiles

checkRdaFiles <- function(paths)
{
    ## Report size, ASCII-ness, compression container and
    ## serialization version for a set of .rda/.RData files (or all
    ## such files in a single directory).
    if(length(paths) == 1L && dir.exists(paths)) {
        paths <- Sys.glob(c(file.path(paths, "*.rda"),
                            file.path(paths, "*.RData")))
        ## Exclude .RData, which this may or may not match
        paths <- paths[!endsWith(paths, "/.RData")]
    }
    template <- data.frame(size = NA_real_, ASCII = NA,
                           compress = NA_character_, version = NA_integer_,
                           stringsAsFactors = FALSE)
    res <- template[rep_len(1L, length(paths)), ]
    row.names(res) <- paths
    existing <- file.exists(paths)
    res$size[existing] <- file.size(paths)[existing]
    for(path in paths[existing]) {
        ## The first raw bytes identify the compression container.
        magic <- readBin(path, "raw", n = 5)
        res[path, "compress"] <-
            if(all(magic[1:2] == c(0x1f, 0x8b))) "gzip"
            else if(rawToChar(magic[1:3]) == "BZh") "bzip2"
            else if(magic[1L] == 0xFD && rawToChar(magic[2:5]) == "7zXZ") "xz"
            else if(grepl("RD[ABX][1-9]", rawToChar(magic), useBytes = TRUE)) "none"
            else "unknown"
        ## Decompress and inspect the serialization header proper
        ## (e.g. "RDX3" => binary XDR, version 3).
        con <- gzfile(path)
        header <- readChar(con, 5L, useBytes = TRUE)
        close(con)
        if (grepl("RD[ABX][1-9]", header, useBytes = TRUE)) {
            res[path, "ASCII"] <- substr(header, 3, 3) == "A"
            res[path, "version"] <-
                as.integer(sub("(RD[ABX])([1-9])", "\\2", header,
                               useBytes = TRUE))
        }
    }
    res
}

### * resaveRdaFiles

resaveRdaFiles <- function(paths,
                           compress = c("auto", "gzip", "bzip2", "xz"),
                           compression_level, version = NULL)
{
    ## Re-save .rda/.RData files (or all such files in a single
    ## directory) with the requested compression.  For "auto", gzip
    ## and bzip2 (and, for larger files, xz) are all tried and the
    ## smallest result wins.  The serialization version used is never
    ## lower than what the file already has.
    if(length(paths) == 1L && dir.exists(paths))
        paths <- Sys.glob(c(file.path(paths, "*.rda"),
                            file.path(paths, "*.RData")))
    compress <- match.arg(compress)
    if (missing(compression_level))
        compression_level <- switch(compress, "gzip" = 6L, 9L)

    ## Serialization version of an existing save file.
    getVerLoad <- function(file)
    {
        con <- gzfile(file, "rb"); on.exit(close(con))
        ## The .Internal gives an error on version-1 files
        tryCatch(.Internal(loadInfoFromConn2(con))$version,
                 error = function(e) 1L)
    }
    if(is.null(version)) version <- 2L # for maximal back-compatibility

    for(p in paths) {
        ver <- max(version, getVerLoad(p))  # to avoid losing features
        env <- new.env(hash = TRUE) # probably small, need not be
        suppressPackageStartupMessages(load(p, envir = env))
        if(compress == "auto") {
            ## Candidate 1: default (gzip) compression.
            f1 <- tempfile()
            save(file = f1, list = ls(env, all.names = TRUE), envir = env,
                 version = ver)
            ## Candidate 2: bzip2.
            f2 <- tempfile()
            save(file = f2, list = ls(env, all.names = TRUE), envir = env,
                 compress = "bzip2", version = ver)
            ## gzip's size is scaled by 0.9, so an alternative is
            ## chosen only when it is clearly (> 10%) smaller.
            ss <- file.size(c(f1, f2)) * c(0.9, 1.0)
            names(ss) <- c(f1, f2)
            if(ss[1L] > 10240) {
                ## Candidate 3: xz, only worth trying beyond ~10Kb.
                f3 <- tempfile()
                save(file = f3, list = ls(env, all.names = TRUE), envir = env,
                     compress = "xz", version = ver)
                ss <- c(ss, file.size(f3))
		names(ss) <- c(f1, f2, f3)
            }
            ## Keep the smallest candidate, then drop the temp files.
            nm <- names(ss)
            ind <- which.min(ss)
            file.copy(nm[ind], p, overwrite = TRUE)
            unlink(nm)
        } else
            save(file = p, list = ls(env, all.names = TRUE), envir = env,
                 compress = compress, compression_level = compression_level,
                 version = ver)
    }
}

### * compactPDF

compactPDF <-
    function(paths, qpdf = Sys.which(Sys.getenv("R_QPDF", "qpdf")),
             gs_cmd = Sys.getenv("R_GSCMD", ""),
             gs_quality = Sys.getenv("GS_QUALITY", "none"),
             gs_extras = character(), verbose = FALSE)
{
    ## Compact PDF files in place via GhostScript (when gs_quality is
    ## not "none") and/or qpdf.  A compacted file replaces the
    ## original only when it is both >10% and >10000 bytes smaller.
    ## Returns a data frame of old/new sizes (class "compactPDF") for
    ## the files actually replaced.
    use_qpdf <- nzchar(qpdf)
    qpdf_flags <- "--object-streams=generate"
    if(use_qpdf) {
        ## <NOTE>
        ## Before 2018-09, we passed
        ##   --stream-data=compress
        ## to qpdf: but this is now deprecated, corresponds to
        ## the default since at least qpdf 6.0.0, and it at
        ## least one case made less compression when given.
        ## OTOH, people were using versions as old as 2.2.2.
        ## </NOTE>
        ver <- system2(qpdf, "--version", TRUE)[1L]
        ver <- as.numeric_version(sub("qpdf version ", "", ver, fixed=TRUE))
        if(!is.na(ver) && ver < "6.0.0")
            qpdf_flags <- c("--stream-data=compress", qpdf_flags)
    }
    gs_quality <- match.arg(gs_quality, c("none", "printer", "ebook", "screen"))
    use_gs <- if(gs_quality != "none") nzchar(gs_cmd <- find_gs_cmd(gs_cmd)) else FALSE
    ## BUG FIX: the diagnostic label previously read "qs_quality".
    if(verbose) cat(sprintf("gs_quality=\"%s\" : use_gs=%s, use_qpdf=%s\n",
                            gs_quality, use_gs, use_qpdf))
    if (!use_gs && !use_qpdf) return()
    if(length(paths) == 1L && dir.exists(paths))
        paths <- Sys.glob(file.path(paths, "*.pdf"))
    if(verbose) cat(sprintf("#{pdf}s = length(paths) = %d\n", length(paths)))
    dummy <- rep.int(NA_real_, length(paths))
    ans <- data.frame(old = dummy, new = dummy, row.names = paths)
    ## These should not have spaces, but quote below to be safe.
    tf <- tempfile("pdf"); tf2 <- tempfile("pdf")
    verb2 <- verbose >= 2
    for (p in paths) {
        res <- 0
        if(verbose) cat(sprintf("- %s:%s", p, if(verb2)"\n" else " "))
        if (use_gs) {
            ## First pass: GhostScript re-distills to the requested quality.
            res <- system2(gs_cmd,
                           c("-q -dNOPAUSE -dBATCH -sDEVICE=pdfwrite",
                             sprintf("-dPDFSETTINGS=/%s", gs_quality),
                             "-dCompatibilityLevel=1.5",
                             "-dAutoRotatePages=/None",
                             "-dPrinted=false",
                             sprintf("-sOutputFile=%s", shQuote(tf)),
                             gs_extras, shQuote(p)), verb2, verb2)
            if(verbose) {
                res0 <- (verb2 && !length(res))
                cat(sprintf("   gs: res=%s; ", if(res0) "<>" else res))
                if(verb2) res <- if(res0) 0 else attr(res, "status")
            }
            if(!res && use_qpdf) {
                ## Second pass: qpdf recompresses the gs output.
                unlink(tf2) # precaution
                file.rename(tf, tf2)
                res <- system2(qpdf, c(qpdf_flags, shQuote(tf2), shQuote(tf)),
                               verb2, verb2)
                if(verbose) {
                    res0 <- (verb2 && !length(res))
                    cat(sprintf(" + qpdf: res=%s; ", if(res0) "<>" else res))
                    if(verb2) res <- if(res0) 0 else attr(res, "status")
                }
                unlink(tf2)
            }
        } else if(use_qpdf) {
            res <- system2(qpdf, c(qpdf_flags, shQuote(p), shQuote(tf)),
                           verb2, verb2)
            if(verbose) {
                res0 <- (verb2 && !length(res))
                cat(sprintf(" only qpdf: res=%s; ", if(res0) "<>" else res))
                if(verb2) res <- if(res0) 0 else attr(res, "status")
            }
        }
        if(!res && file.exists(tf)) {
            ## Keep the compacted file only when clearly worthwhile.
            old <- file.size(p); new <-  file.size(tf)
            if(verbose)
                cat(sprintf("\n    ==> (new=%g)/(old=%g) = %g", new,old, new/old))
            if(new/old < 0.9 && new < old - 1e4) {
                if(verbose) cat(" =====> using it !!\n")
                file.copy(tf, p, overwrite = TRUE)
                ans[p, ] <- c(old, new)
            } else if(verbose) cat(" .. not worth using\n")
        } else if(verbose) cat("\n")
        unlink(tf)
    }
    structure(stats::na.omit(ans), class = c("compactPDF", "data.frame"))
}

find_gs_cmd <- function(gs_cmd = "")
{
    ## Locate a GhostScript executable.  An explicitly given command
    ## wins; otherwise consult R_GSCMD (and, on Windows, GSC plus the
    ## standard console binary names).  Returns "" when not found.
    if(nzchar(gs_cmd)) return(Sys.which(gs_cmd))
    if(.Platform$OS.type == "windows") {
        candidate <- Sys.getenv("R_GSCMD")
        if (!nzchar(candidate)) candidate <- Sys.getenv("GSC")
        found <- Sys.which(candidate)
        if (!nzchar(found)) found <- Sys.which("gswin64c")
        if (!nzchar(found)) found <- Sys.which("gswin32c")
        found
    } else Sys.which(Sys.getenv("R_GSCMD", "gs"))
}

format.compactPDF <- function(x, ratio = 0.9, diff = 1e4, ...)
{
    ## Human-readable summary of a compactPDF result: one line per
    ## file whose compaction was worthwhile (relative gain below
    ## 'ratio' AND absolute gain above 'diff' bytes).
    if(!nrow(x)) return(character())
    worthwhile <- with(x, new/old < ratio & new < old - diff)
    kept <- x[worthwhile, ]
    if(!nrow(kept)) return(character())
    fmt <- kept
    fmt[] <- lapply(kept, function(sz) sprintf("%.0fKb", sz/1024))
    ## Switch to Mb formatting for files whose new size is >= 1Mb.
    big <- kept$new >= 1024^2
    fmt[big, ] <- lapply(kept[big, ], function(sz) sprintf("%.1fMb", sz/1024^2))
    paste('  compacted', sQuote(basename(row.names(kept))),
          'from', fmt[, 1L], 'to', fmt[, 2L])
}

### * add_datalist

add_datalist <- function(pkgpath, force = FALSE, small.size = 1024^2)
{
    ## Write a data/datalist index for a source package whose data
    ## directory exceeds 'small.size' bytes; an existing datalist is
    ## kept unless 'force'.  Each line is either a dataset name, or
    ## "name: obj1 obj2 ..." when the dataset provides other objects.
    dlist <- file.path(pkgpath, "data", "datalist")
    if (!force && file.exists(dlist)) return()
    total <- sum(file.size(Sys.glob(file.path(pkgpath, "data", "*"))))
    if (total <= small.size) return()
    datasets <- list_data_in_pkg(dir = pkgpath, use_datalist = FALSE)
    if (!length(datasets)) return()
    con <- file(dlist, "w")
    for (dataset in names(datasets)) {
        objs <- datasets[[dataset]]
        if (length(objs) == 1L && objs == dataset)
            writeLines(dataset, con)
        else
            cat(dataset, ": ", paste(objs, collapse = " "), "\n",
                sep = "", file = con)
    }
    close(con)
    invisible()
}


### Local variables: ***
### mode: outline-minor ***
### outline-regexp: "### [*]+" ***
### End: ***
#  File src/library/tools/R/apitools.R
#  Part of the R package, https://www.R-project.org
#
#  Copyright (C) 2024 The R Core Team
#
#  This program is free software; you can redistribute it and/or modify
#  it under the terms of the GNU General Public License as published by
#  the Free Software Foundation; either version 2 of the License, or
#  (at your option) any later version.
#
#  This program is distributed in the hope that it will be useful,
#  but WITHOUT ANY WARRANTY; without even the implied warranty of
#  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
#  GNU General Public License for more details.
#
#  A copy of the GNU General Public License is available at
#  https://www.R-project.org/Licenses/

##
## Work out the function API from information in WRE
##

## WRE data is now installed in system.file(package = "tools", "wre.txt")
## WRE(newpath) forces a new load with the new path.

## Mutable package-level cache for the WRE/API data computed below.
## An environment (rather than a list) so the helper functions can
## update individual fields in place.
apidata <-
    list2env(
        list(
            wrelines = NULL,  # cached lines of the WRE document
            wreloc = NULL,    # location (file or URL) wrelines came from
            fapi = NULL,      # cached function API table, see funAPI()
            rfuns = NULL))    # cached core R entry points, see Rfuns()

resetAPI <- function(newloc = "") {
    ## Drop the cached WRE lines and API table so they get recomputed
    ## on next use; a non-empty 'newloc' also redirects where WRE()
    ## reads from.
    apidata$wrelines <- NULL
    apidata$fapi <- NULL
    if (newloc != "")
        apidata$wreloc <- newloc
    ## reset rfuns also?
}

WRE <- function() {
    ## Return the (cached) text lines of Writing R Extensions,
    ## preferring the installed copy and falling back to the
    ## R-devel texi sources online.
    if (is.null(apidata$wrelines)) {
        if (is.null(apidata$wreloc)) {
            loc <- system.file(package = "tools", "wre.txt")
            if (loc == "")
                loc <-
                    "https://svn.r-project.org/R/trunk/doc/manual/R-exts.texi"
            apidata$wreloc <- loc
        }
        apidata$wrelines <- readLines(apidata$wreloc)
    }
    apidata$wrelines
}

unmap <- function(x) {
    ## Strip surrounding whitespace, one leading/trailing '_', and an
    ## 'Rf_' remapping prefix from symbol names.
    stripped <- gsub("^_|_$", "", trimws(x))
    sub("^Rf_", "", stripped)
}

getOneFunAPI <- function(apitype) {
    ## Collect the function API of one type ('api', 'eapi', 'emb')
    ## from the WRE sources: names listed directly via @<type>fun
    ## entries, plus declarations found in any header files named via
    ## @<type>hdr entries.
    wrelines <- WRE()
    fpat <- sprintf("^@(%s)fun +", apitype)
    hpat <- sprintf("^@(%s)hdr +", apitype)
    funs <- sub(fpat, "", grep(fpat, wrelines, value = TRUE))
    hdrs <- sub(hpat, "", grep(hpat, wrelines, value = TRUE))
    ## BUG FIX: was rep("WRE", length(names)) -- 'names' there is the
    ## base function, so the length was always 1; that relied on
    ## data.frame() recycling and would fail for an empty 'funs'.
    wAPI <- data.frame(name = funs, loc = rep("WRE", length(funs)))
    getHdrAPI <- function(hdr) {
        hfuns <- getFunsHdr(file.path(R.home("include"), hdr))
        data.frame(name = hfuns, loc = rep(hdr, length(hfuns)))
    }
    hAPI <- lapply(hdrs, getHdrAPI)
    val <- rbind(wAPI, do.call(rbind, hAPI))
    val$apitype <- rep(apitype, nrow(val))
    val$unmapped <- unmap(val$name)
    rownames(val) <- NULL
    val
}

getFunAPI <- function() {
    ## Combined function API table across all API types, deduplicated
    ## so that each (unmapped) symbol name appears exactly once.
    apitypes <- c("api", "eapi", "emb")
    val <- do.call(rbind, lapply(apitypes, getOneFunAPI))
    val <- unique(val)
    ## Group by unmapped name and keep the first row of each group;
    ## row order within groups makes this:
    ## picks max WRE > api > eapi > emb
    ## picks first if unmapped and mapped are in WRE
    val <- by(val,
              list(val$unmapped),
              function(x) if (nrow(x) > 1) x[1, ] else x,
              simplify = FALSE)
    val <- do.call(rbind, val)
    val$unmapped <- NULL ## not needed in final output
    rownames(val) <- NULL
    val
}

funAPI <- function() {
    ## Lazily compute and cache the function API table.
    if (is.null(apidata$fapi)) apidata$fapi <- getFunAPI()
    apidata$fapi
}

## getFunsHdr tries to get the functions declared in a header file
## without additional tools beyond cc -E. Using a proper
## header-parsing tool would be more accurate, but this seems adequate
## for now.
getFunsHdr <- function(fpath, lines) {
    ## Extract the names of functions declared in a C header, using
    ## only 'cc -E' plus regex heuristics (no real header parser).
    ## Either 'fpath' (a file) or 'lines' (its contents) is supplied.
    if (missing(lines)) {
        lines <- readLines(fpath)
        name <- basename(fpath)
    }
    else name <- NULL

    ## NORET has to be handled before ccE since what it expands into varies
    lines <- ifelse(grepl("^#", lines),
                    lines,
                    gsub(r"{.*\s*NORET\s*}", " ", lines))

    lines <- lines[! grepl("^#\\s*error", lines)] ## for GraphicsDevice.h

    ## Preprocess, then strip function bodies so only declarations remain.
    lines <- ccE(lines)
    lines <- dropBraces(lines)

    ## these could be incorporated into the regex
    lines <- gsub(r"{\s*(const|extern|long|unsigned)\s*}", "", lines)
    lines <- sub(r"{^\s*(\w*[(])}", "void \\1", lines)
    lines <- gsub(r"{\(\s*\*\s*(\w+)\s*\)}", "(\\1)", lines)

    ## original from SO: https://stackoverflow.com/questions/476173/regex-to-pull-out-c-function-prototype-declarations
    ## funcRegexp <- r"{^\s*(?:(?:inline|static)\s+){0,2}(?!else|typedef|return)\w+\s+\*?\s*(\w+)\s*\([^0]+\)\s*;?}"
    ## allow for parens around function name
    ## make closing paren for arguments optional
    funcRegexp <- r"{^\s*(?:(?:inline|static)\s+){0,2}(?!else|typedef|return)\w+\s*\*?\s*\(?(\w+)\)?\s*\([^0]+\)?\s*;?}"

    ## capture group 1 holds the function name
    m <- gregexec(funcRegexp, lines, perl = TRUE)
    v <- regmatches(lines, m)
    val <- sapply(v[lengths(v) > 0], `[[`, 2)
    val <- unique(as.character(val))

    ## drop hallucinations (false matches from the crude regex)
    val <- val[! (val %in% letters | val %in% LETTERS)]
    val <- val[! grepl("_t$", val)]
    val <- val[! grepl("user_(unif|norm)", val)]
    val <- val[! grepl("Quartz|Win32", val)]

    val
}

ccE <- function(lines, include = R.home("include"), clean = TRUE) {
    ## Run the C preprocessor ('cc -E') over the given header lines,
    ## optionally cleaning the output down to the lines originating
    ## from our own temporary file.
    if (Sys.which("cc") == "")
        stop("'cc' is not on the path")
    hfile <- tempfile(fileext = ".h")
    on.exit(unlink(hfile))
    writeLines(lines, hfile)
    out <- system(sprintf("cc -E -I%s %s", include, hfile), intern = TRUE)
    if (clean)
        ccEclean(out, hfile)
    else out
}

ccEclean <- function(lines, pattern = "Rtmp") {
    ## Given 'cc -E' output, keep only the code lines whose preceding
    ## '#' line marker matches 'pattern' (i.e. code that came from our
    ## own temporary file rather than from included system headers).
    marker <- grepl("^#", lines)
    fromTemp <- grepl(pattern, lines[marker])
    ## Length of each run of lines governed by one marker.
    runLengths <- diff(c(which(marker), length(lines) + 1))
    wanted <- rep(fromTemp, runLengths)
    lines[wanted & !marker]
}

dropBraces <- function(lines) {
    ## Remove brace-delimited bodies so that only declarations remain.

    ## drop {...} fully within a line
    lines <- sub("[{].*[}]", " ", lines)

    ## drop {...} crossing several lines
    opens <- grepl("[{]", lines)
    closes <- grepl("[}]", lines)
    ## could check for balance
    depth <- cumsum(opens - closes)
    lines <- lines[depth == 0 | opens | closes]
    lines <- sub("[{].*", "", lines)      ## keep stuff before {
    lines[! grepl(".*[}]", lines)]        ## don't keep stuff after }
}


##
## Check a shared library's use of R entry points
##

checkLibAPI <- function(lpath) {
    ## Cross-reference the undefined (imported) R symbols of a shared
    ## library against the documented function API; rows with an NA
    ## apitype are uses of non-API entry points.
    symtab <- readFileSyms(lpath)
    undef <- symtab[symtab$type == "U", ]$name
    undef <- inRfuns(undef)
    lsyms <- data.frame(name = undef, unmapped = unmap(undef))
    api <- funAPI()
    api$unmapped <- unmap(api$name)
    api$name <- NULL
    api$loc <- NULL
    val <- merge(lsyms, api, all.x = TRUE)
    val <- val[order(val$apitype), ]
    val$unmapped <- NULL ## not needed in final output
    rownames(val) <- NULL
    val
}

readFileSyms <- function(fpath) {
    ## Symbol table (name/type) of an object file, or an empty data
    ## frame when it cannot be read.
    ## this uses nm
    ## could try objdump if nm doesn't work
    syms <- read_symbols_from_object_file(fpath)
    if (is.null(syms))
        data.frame(name = character(0), type = character(0))
    else
        as.data.frame(syms)[c("name", "type")]
}

## crude approach based on string matching
## **** this is too crude -- needs to allow more
inRfunsCrude <- function(syms) {
    ## Guess which symbols are R entry points by name: all-caps names
    ## (macros) plus anything with an R_/Rf_ style prefix, minus a
    ## list of known non-function entry points (variables etc.).
    syms <- union(syms[syms == toupper(syms)],
                  grep("^_?Rf?_", syms, value = TRUE))
    exclude <- c(
        "R_MB_CUR_MAX|R_BaseNamespace|R_BlankScalarString|R_BlankString",
        "R_CStackDir|R_CStackLimit|R_CStackStart",
        "R_Consolefile|R_CurrentExpression|R_Interactive",
        "R_Outputfile|R_Srcref|R_TempDir",
        "R_compact_.*_class|R_ignore_SIGPIPE",
        "R_interrupts_pending|R_interrupts_suspended",
        "R_isForkedChild",
        "R_NilValue|R_MissingArg|R_Visible",
        "R_.*Symbol$|R_dot_|R_Na",
        "R_NilValue|R_GlobalEnv|R_BaseEnv|R_EmptyEnv",
        "R_(Pos|Neg)Inf|R_.*Value$|R_.*Handlers$")
    syms[! grepl(paste(exclude, collapse = "|"), syms)]
}

## approach based on computing the entry points in the executable and core libs
## fall back to the crude approach if entry points can't be found
inRfuns <- function(syms) {
    ## Restrict 'syms' to R entry points, using the computed set of
    ## core entry points when available and falling back to the crude
    ## name-based heuristic otherwise.
    rfuns <- Rfuns()
    if (length(rfuns))
        syms[unmap(syms) %in% unmap(rfuns)]
    else
        inRfunsCrude(syms)
}

cleanRfuns <- function(val) {
    ## if Rf_XLENGTH and XLENGTH are both there then keep Rf_XLENGTH
    hasMapped <- any(grepl("^_*Rf_XLENGTH_*$", val))
    hasPlain <- any(grepl("^_*XLENGTH_*$", val))
    if (hasMapped && hasPlain)
        val <- val[! grepl("^_*XLENGTH_*$", val)]

    ## drop tre_ stuff if it is there and some others
    val[! grepl("tre_|^_*(main|MAIN|start)_*$|yyparse", val)]
}

getRfuns <- function() {
    ## All "T" (text/function) symbols exported by the R executable
    ## and the core shared libraries/modules, cleaned of known noise.
    pat <- sprintf("(\\.dylib|%s)$", .Platform$dynlib.ext)
    objects <- c(file.path(R.home("bin"), "exec", "R"),
                 dir(R.home("lib"), pattern = pat, full.names = TRUE),
                 dir(R.home("modules"), pattern = pat, full.names = TRUE))
    symtab <- do.call(rbind, lapply(objects, readFileSyms))
    cleanRfuns(symtab[symtab$type == "T", ]$name)
}

Rfuns <- function() {
    ## Lazily compute and cache the set of core R entry points.
    if (is.null(apidata$rfuns)) apidata$rfuns <- getRfuns()
    apidata$rfuns
}


##
## Check an installed package's use of R entry points
##

checkPkgAPI <- function(pkg, lib.loc = NULL, all = FALSE) {
    ## Check every shared library of an installed package against the
    ## documented function API; by default only the uses that are NOT
    ## part of the API are reported.  NULL if the package has no
    ## shared libraries.
    libdir <- system.file("libs", package = pkg, lib.loc = lib.loc)
    libs <- Sys.glob(file.path(libdir, sprintf("*%s", .Platform$dynlib.ext)))
    if (length(libs) == 0) return(NULL)
    val <- do.call(rbind, lapply(libs, checkLibAPI))
    if (! all)
        val <- val[is.na(val$apitype), ]
    val <- unique(val)
    rownames(val) <- NULL
    val
}

checkAllPkgsAPI <- function(lib.loc = NULL, priority = NULL, all = FALSE,
                            Ncpus = getOption("Ncpus", 1L),
                            verbose = getOption("verbose")) {
    ## Run checkPkgAPI over every installed package (optionally
    ## filtered by priority), tagging each result row with the
    ## package name and stacking them into one data frame.
    pkgs <- rownames(utils::installed.packages(lib.loc = lib.loc,
                                               priority = priority))
    checkOne <- function(pkg) {
        res <- checkPkgAPI(pkg, lib.loc = lib.loc, all = all)
        if (! is.null(res))
            res$pkg <- rep(pkg, nrow(res))
        res
    }
    val <- do.call(rbind, .package_apply(pkgs, checkOne,
                                         Ncpus = Ncpus, verbose = verbose))
    rownames(val) <- NULL
    val
}


##
## Find R entry points and variables used in installed packages
##


clear_rownames <- function(val) {
    ## Reset row names to the default sequence.
    `rownames<-`(val, NULL)
}

rbind_list <- function(args) {
    ## rbind a list of data frames and drop the resulting row names.
    stacked <- do.call(rbind, args)
    rownames(stacked) <- NULL
    stacked
}

ofile_syms <- function(fname, keep = c("F", "V", "U")) {
    ## Symbol table of an object file as a name/type data frame, with
    ## type simplified to "F" (function, from nm's "T"), "U"
    ## (undefined) or "V" (variable, from "B"/"D"), restricted to the
    ## requested 'keep' types.
    ## this uses nm on Linux/macOS; probably doesn't work on Windows, so bail
    stopifnot(isFALSE(.Platform$OS.type == "windows"))
    v <- read_symbols_from_object_file(fname)
    if (is.character(v) && nrow(v) == 0) 
        ## nm produced nothing useful: fall back to objdump
        ofile_syms_od(fname, keep)
    else if (is.null(v))
        data.frame(name = character(0), type = character(0))
    else {
        match_type <-function(type)
            ifelse(type == "T", "F", ifelse(type == "U", "U", "V"))
        val <- as.data.frame(v)[c("name", "type")]
        val <- val[val$type %in% c("U", "B", "D", "T"), ]
        val$type <- match_type(val$type)
        val <- val[val$type %in% keep, ]
        val
    }    
}

ofile_syms_od <- function(fpath, keep = c("F", "V", "U")) {
    ## Fallback symbol reader using the dynamic symbol table printed
    ## by 'objdump -T', mapped onto the same F/V/U type codes as
    ## ofile_syms (unknown sections map to "w" and are dropped).
    if (Sys.which("objdump") == "")
        stop("'objdump' is not on the path")
    out <- system(sprintf("objdump -T %s", fpath), intern = TRUE)
    out <- grep("\t", out, value = TRUE)          ## data lines contain a \t
    symname <- sub(".*\t.* (.*$)", "\\1", out)    ## the name is at the end after the \t
    section <- sub(".* (.*)\t.*", "\\1", out)     ## the type is right before the \t
    ttbl <-
        c("*UND*" = "U", ".text" = "F", ".bss" = "V", ".data" = "V", w = "w")
    val <- data.frame(name = symname,
                      type = ttbl[match(section, names(ttbl), length(ttbl))])
    val <- val[val$type %in% keep, ]
    clear_rownames(val[order(val$name), ])
}

Rsyms <- function(keep = c("F", "V")) {
    ## Cached symbol table (by default functions and variables) of the
    ## R executable plus all core libraries and modules.
    cached <- apidata$rsyms
    if (is.null(cached)) {
        objects <- c(file.path(R.home("bin"), "exec", "R"),
                     dir(R.home("lib"), full.names = TRUE),
                     dir(R.home("modules"), full.names = TRUE))
        cached <- rbind_list(lapply(objects, ofile_syms, keep))
        apidata$rsyms <- cached
    }
    cached
}

pkgRsyms <- function(pkg, lib.loc = NULL) {
    ## Undefined symbols of a package's shared libraries that resolve
    ## to core R symbols (the merge keeps the intersection); NULL when
    ## the package has no shared libraries.
    libdir <- system.file("libs", package = pkg, lib.loc = lib.loc)
    libs <- Sys.glob(file.path(libdir, "*.so"))
    if (length(libs) == 0) return(NULL)
    val <- rbind_list(lapply(libs, ofile_syms, keep = "U"))
    val$package <- rep(pkg, nrow(val))
    val$type <- NULL
    merge(val, Rsyms())
}

allPkgsRsyms <- function(lib.loc = NULL,
                           Ncpus = getOption("Ncpus", 1L),
                           verbose = getOption("verbose")) {
    ## pkgRsyms() over every installed package, stacked into one
    ## data frame.
    pkgs <- rownames(utils::installed.packages(lib.loc = lib.loc))
    rbind_list(.package_apply(pkgs, pkgRsyms, Ncpus = Ncpus, verbose = verbose))
}

#  File src/library/tools/R/assertCondition.R
#  Part of the R package, https://www.R-project.org
#
#  Copyright (C) 2013-2023 The R Core Team
#
#  This program is free software; you can redistribute it and/or modify
#  it under the terms of the GNU General Public License as published by
#  the Free Software Foundation; either version 2 of the License, or
#  (at your option) any later version.
#
#  This program is distributed in the hope that it will be useful,
#  but WITHOUT ANY WARRANTY; without even the implied warranty of
#  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.	 See the
#  GNU General Public License for more details.
#
#  A copy of the GNU General Public License is available at
#  https://www.R-project.org/Licenses/

assertCondition <-
    function(expr, ...,
             .exprString = .deparseTrim(substitute(expr), cutoff = 30L),
             verbose = FALSE)
{
    ## Assert that evaluating 'expr' signals a condition; when
    ## condition classes are given in '...', at least one signaled
    ## condition must inherit from one of them.  Returns (invisibly)
    ## the list of conditions signaled, or throws an error when the
    ## assertion fails.
    getConds <- function(expr) {
        ## Collect every condition signaled during evaluation: errors
        ## end evaluation (via tryCatch), warnings are recorded and
        ## muffled so evaluation continues, and any other condition is
        ## recorded by the calling handler without interference.
	conds <- list()
        withCallingHandlers(
            tryCatch(expr, error = function(e) conds <<- c(conds, list(e))),
            warning = function(w) {
                conds <<- c(conds, list(w))
                tryInvokeRestart("muffleWarning")
            },
            condition = function(cond)
                conds <<- c(conds, list(cond)))
	conds
    }
    conds <- c(...)
    .Wanted <- if(length(conds)) paste(conds, collapse = " or ")
               else "any condition"
    res <- getConds(expr)
    if(length(res)) {
	if(is.null(conds)) {
            if(verbose)
                ## BUG FIX: message previously read "assertConditon"
                message("assertCondition: successfully caught a condition",
                        domain = NA)
	    invisible(res)
        }
	else {
	    ii <- vapply(res,
                         function(cond) any(class(cond) %in% conds),
                         NA)
	    if(any(ii)) {
                if(verbose) {
                    found <- unique(unlist(lapply(res[ii], function(cond)
                        class(cond)[class(cond) %in% conds])))
                    message(sprintf("assertCondition: caught %s",
                                    paste(dQuote(found), collapse =", ")), domain = NA)
                }
		invisible(res)
            }
	    else {
                .got <- unique(unlist(lapply(res, function(obj) class(obj)[[1L]])))
		stop(gettextf("Got %s in evaluating %s; wanted %s",
			      paste(.got, collapse = ", "), .exprString, .Wanted),
                     domain = NA)
            }
	}
    }
    else
	stop(gettextf("Failed to get %s in evaluating %s",
		      .Wanted, .exprString), domain = NA)
}

assertError <- function(expr, classes = "error", verbose = FALSE) {
    ## Assert that evaluating 'expr' signals an error (of one of the
    ## given classes); returns the signaled conditions invisibly.
    d.expr <- .deparseTrim(substitute(expr), cutoff = 30L)
    res <- tryCatch(assertCondition(expr, classes, .exprString = d.expr),
                    error = function(e)
                        stop(gettextf("Failed to get error in evaluating %s",
                                      d.expr),
                             call. = FALSE, domain = NA))
    if(verbose) {
        matched <- res[vapply(res,
                              function(cond) any(match(classes, class(cond), 0L) > 0L),
                              NA)]
        message(sprintf("Asserted error: %s", matched[[1L]]$message),
                domain = NA)
    }
    invisible(res)
}

assertWarning <- function(expr, classes = "warning", verbose = FALSE) {
    ## Assert that evaluating 'expr' signals a warning (of one of the
    ## given classes) without also signaling an error; returns the
    ## signaled conditions invisibly.
    d.expr <- .deparseTrim(substitute(expr), cutoff = 30L)
    res <- assertCondition(expr, classes, .exprString = d.expr)
    gotError <- vapply(res,
                       function(cond) "error" %in% class(cond),
                       NA)
    if(any(gotError))
        stop(gettextf("Got warning in evaluating %s, but also an error", d.expr),
             domain = NA)
    if(verbose) {
        matched <- res[vapply(res,
                              function(cond) any(match(classes, class(cond), 0L) > 0L),
                              NA)]
        message(sprintf("Asserted warning: %s", matched[[1L]]$message),
                domain = NA)
    }
    invisible(res)
}

.deparseTrim <- function(expr, cutoff = 30L) {
    ## Deparse 'expr' to a single line: braced blocks are compressed
    ## to "{a; b}" form, other multi-line results keep only their
    ## first line, and anything longer than 'cutoff' characters is
    ## truncated with a trailing " ...".
    txt <- deparse(expr)
    if(length(txt) > 1) {
        if(txt[[1]] == "{") {
            stmts <- sub("^[ \t]*", "", txt[c(-1L, -length(txt))])
            txt <- paste0("{", paste(stmts, collapse = "; "), "}")
        } else {
            txt <- paste(txt[[1]], " ...")
        }
    }
    if(nchar(txt) > cutoff)
        paste(substr(txt, 1, cutoff), " ...")
    else
        txt
}
#  File src/library/tools/R/bibstyle.R
#  Part of the R package, https://www.R-project.org
#
#  Copyright (C) 1995-2019 The R Core Team
#
#  This program is free software; you can redistribute it and/or modify
#  it under the terms of the GNU General Public License as published by
#  the Free Software Foundation; either version 2 of the License, or
#  (at your option) any later version.
#
#  This program is distributed in the hope that it will be useful,
#  but WITHOUT ANY WARRANTY; without even the implied warranty of
#  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
#  GNU General Public License for more details.
#
#  A copy of the GNU General Public License is available at
#  https://www.R-project.org/Licenses/

# Functions for making Rd and human readable versions of bibentry records.

# Clean up LaTeX accents and braces
# Clean up LaTeX accents and braces: parse the LaTeX fragment, convert
# it to UTF-8 text and deparse dropping braces.  If the fragment does
# not parse, the input is returned unchanged.
cleanupLatex <- function(x) {
    if (!length(x))
        return(x)
    parsed <- tryCatch(parseLatex(x), error = identity)
    if (inherits(parsed, "error"))
        x
    else
        deparseLatex(latexToUtf8(parsed), dropBraces = TRUE)
}

makeJSS <- function() {
    ## Create the "JSS" (Journal of Statistical Software) bibliography
    ## style.  The return value is this function's own evaluation
    ## environment; toRd.bibentry() evaluates formatArticle(),
    ## formatBook() etc. from it, so all local names below are part of
    ## the style's interface.

    # First, some utilities

    ## Join a character vector into one newline-separated string.
    collapse <- function(strings)
        paste(strings, collapse="\n")

    # Add a period if there's no sentence punctuation already
    addPeriod <- function(string)
        sub("([^.?!])$", "\\1.", string)

    # Separate args by sep, add a period at the end.
    ## Returns NULL (no sentence) when all the arguments are empty.
    sentence <- function(..., sep = ", ") {
        strings <- c(...)
        if (length(strings)) {
            addPeriod(paste(strings, collapse = sep))
        }
    }

    # Now some simple markup

    plain <- function(pages)
        if (length(pages)) collapse(pages)

    plainclean <- function(s) plain(cleanupLatex(s))

    emph <- function(s)
        if (length(s)) paste0("\\emph{", collapse(s), "}")

    emphclean <- function(s) emph(cleanupLatex(s))

    # This creates a function to label a field by adding a prefix or
    # suffix (or both)

    label <- function(prefix=NULL, suffix=NULL, style=plain) {
        force(prefix); force(suffix); force(style)
        function(s)
            if (length(s)) style(paste0(prefix, collapse(s), suffix))
    }

    ## As label(), but cleaning LaTeX markup from the field first.
    labelclean <- function(prefix=NULL, suffix=NULL, style=plain) {
        f <- label(prefix, suffix, style)
        function(s) f(cleanupLatex(s))
    }

    # Now the formatters for each particular field.  These take
    # a character vector; if length zero, they return NULL, otherwise
    # a single element character vector putting everything together

    fmtAddress <- plainclean
    fmtBook <- emphclean
    fmtBtitle <- emphclean
    fmtChapter <- labelclean(prefix="chapter ")
    fmtDOI <- label(prefix="\\doi{", suffix="}")
    fmtEdition <- labelclean(suffix=" edition")
    fmtEprint <- plain
    fmtHowpublished <- plainclean
    fmtISBN <- label(prefix = "ISBN ")
    fmtISSN <- label(prefix="ISSN ")
    fmtInstitution <- plainclean
    fmtNote <- plainclean
    fmtPages <- plain
    fmtSchool <- plainclean
    ## fmtTechreportnumber <- labelclean(prefix="Technical Report ")
    fmtUrl <- label(prefix="\\url{", suffix="}")
    ## Title in \dQuote{}; % must be escaped for Rd.
    fmtTitle <- function(title) 
        if (length(title)) {
            title <- gsub("%", "\\\\\\%", title)
            paste0("\\dQuote{",
                   addPeriod(collapse(cleanupLatex(title))), "}")
        }
    ## Year in parentheses; "(????)" when the year is missing.
    fmtYear <- function(year) {
        if (!length(year)) year <- "????"
        paste0("(", collapse(year), ")")
    }

    ## Use the entry's own type field when non-empty, else 'default'.
    fmtType <- function(type, default) {
        if(length(type) && any(nzchar(type)))
            plainclean(type)
        else
            default
    }

    # Now some more complicated ones that look at multiple fields
    ## "\bold{vol}(num)" -- number only shown when a volume exists.
    volNum <- function(paper) {
        if (length(paper$volume)) {
            result <- paste0("\\bold{", collapse(paper$volume), "}")
            if (length(paper$number))
                result <- paste0(result, "(", collapse(paper$number), ")")
            result
        }
    }

    ## Format one person object in short "Murdoch DJ" format
    shortName <- function(person) {
        if (length(person$family)) {
            result <- cleanupLatex(person$family)
            if (length(person$given))
                paste(result,
                      paste(substr(sapply(person$given, cleanupLatex),
                                   1, 1), collapse=""))
            else result
        }
        else
            paste(cleanupLatex(person$given), collapse=" ")
    }

    # Format all authors for one paper
    authorList <- function(paper) {
        names <- sapply(paper$author, shortName)
        if (length(names) > 1L)
            result <- paste(names, collapse = ", ")
        else
            result <- names
        result
    }

    # Format all editors for one paper
    ## Adds "(ed.)"/"(eds.)"; NULL when there are no editors.
    editorList <- function(paper) {
        names <- sapply(paper$editor, shortName)
        if (length(names) > 1L)
            result <- paste(paste(names, collapse = ", "), "(eds.)")
        else if (length(names))
            result <- paste(names, "(ed.)")
        else
            result <- NULL
        result
    }

    ## DOI, note, eprint and URL joined into one comma-separated chunk;
    ## NULL when all of them are empty.
    extraInfo <- function(paper) {
    	# PR#17725:  DOIs can contain % signs, and need multiple 
    	#            levels of escaping when translated to Rd.
    	escapeDOIPercent <- function(s) gsub("%", 
    					  paste0(strrep("\\", 11L), "%"),
    					  fixed = TRUE,
    					  s)
        result <- paste(c(fmtDOI(escapeDOIPercent(paper$doi)), fmtNote(paper$note),
                          fmtEprint(paper$eprint), fmtUrl(paper$url)),
                        collapse=", ")
        if (nzchar(result)) result
    }

    ## "volume V number N series S", whichever fields are present.
    bookVolume <- function(book) {
        result <- ""
        if (length(book$volume))
            result <- paste("volume", collapse(book$volume))
        if (length(book$number))
            result <- paste(result, "number", collapse(book$number))
        if (length(book$series))
            result <- paste(result, "series", collapse(book$series))
        if (nzchar(result)) result
    }

    ## "publisher, address" -- address only shown with a publisher.
    bookPublisher <- function(book) {
        if (length(book$publisher)) {
            result <- collapse(book$publisher)
            if (length(book$address))
                result <- paste(result, collapse(book$address), sep = ", ")
            result
        }
    }

    ## "organization, address" for proceedings/manual-like entries.
    procOrganization <- function(paper) {
        if (length(paper$organization)) {
            result <- collapse(cleanupLatex(paper$organization))
            if (length(paper$address))
                result <- paste(result, collapse(cleanupLatex(paper$address)), sep =", ")
            result
        }
    }

    ## "Technical Report N" (or the entry's own type instead of the
    ## default label); NULL when there is no report number.
    fmtTechreportnumber <- function(paper) {
        if(length(paper$number)) {
            paste(fmtType(paper$type, "Technical Report"),
                  plainclean(paper$number))
        }
    }

    # Formatters for whole entries, one per supported bibtype.  Each
    # assembles the sentences for its entry and collapses them with
    # newlines; toRd.bibentry() dispatches to them by bibtype.
    formatArticle <- function(paper) {
        collapse(c(fmtPrefix(paper),
                   sentence(authorList(paper), fmtYear(paper$year), sep = " "),
                   fmtTitle(paper$title),
                   sentence(fmtBook(paper$journal), volNum(paper),
                            fmtPages(paper$pages)),
                   sentence(fmtISSN(paper$issn), extraInfo(paper))))
    }

    formatBook <- function(book) {
        ## books with no authors list their editors instead
        authors <- authorList(book)
        if(!length(authors))
            authors <- editorList(book)

        collapse(c(fmtPrefix(book),
                   sentence(authors, fmtYear(book$year), sep = " "),
                   sentence(fmtBtitle(book$title), bookVolume(book),
                            fmtEdition(book$edition)),
                   sentence(bookPublisher(book)),
                   sentence(fmtISBN(book$isbn), extraInfo(book))))
    }

    formatInbook <- function(paper) {
        authors <- authorList(paper)
        editors <- editorList(paper)
        ## when there are no authors, promote the editors to authors
        if(!length(authors)) {
            authors <- editors
            editors <- NULL
        }
        collapse(c(fmtPrefix(paper),
                   sentence(authors, fmtYear(paper$year), sep =" "),
                   fmtTitle(paper$title),
                   paste("In", sentence(editors, fmtBtitle(paper$booktitle),
                                        bookVolume(paper),
                                        fmtChapter(paper$chapter),
                                        fmtEdition(paper$edition),
                                        fmtPages(paper$pages))),
                   sentence(bookPublisher(paper)),
                   sentence(fmtISBN(paper$isbn), extraInfo(paper))))
    }

    formatIncollection <- function(paper) {
        collapse(c(fmtPrefix(paper),
                   sentence(authorList(paper), fmtYear(paper$year), sep = " "),
                   fmtTitle(paper$title),
                   paste("In", sentence(editorList(paper),
                                        fmtBtitle(paper$booktitle),
                                        bookVolume(paper),
                                        fmtEdition(paper$edition),
                                        fmtPages(paper$pages))),
                   sentence(bookPublisher(paper)),
                   sentence(fmtISBN(paper$isbn), extraInfo(paper))))
    }

    formatInProceedings <- function(paper)
        collapse(c(fmtPrefix(paper),
                   sentence(authorList(paper), fmtYear(paper$year), sep = " "),
                   fmtTitle(paper$title),
                   paste("In", sentence(editorList(paper),
                                        fmtBtitle(paper$booktitle),
                                        bookVolume(paper),
                                        fmtEdition(paper$edition),
                                        fmtPages(paper$pages))),
                   sentence(procOrganization(paper)),
                   sentence(fmtISBN(paper$isbn), extraInfo(paper))))

    formatManual <- function(paper) {
        collapse(c(fmtPrefix(paper),
                   sentence(authorList(paper), fmtYear(paper$year), sep = " "),
                   sentence(fmtBtitle(paper$title), bookVolume(paper),
                            fmtEdition(paper$edition)),
                   sentence(procOrganization(paper)),
                   sentence(fmtISBN(paper$isbn), extraInfo(paper))))
    }

    formatMastersthesis <- function(paper) {
        collapse(c(fmtPrefix(paper),
                   sentence(authorList(paper), fmtYear(paper$year), sep = " "),
                   sentence(fmtBtitle(paper$title)),
                   sentence(fmtType(paper$type, "Master's thesis"),
                            fmtSchool(paper$school),
                            fmtAddress(paper$address)),
                   sentence(extraInfo(paper))))
    }

    formatPhdthesis <- function(paper) {
        collapse(c(fmtPrefix(paper),
                   sentence(authorList(paper), fmtYear(paper$year), sep = " "),
                   sentence(fmtBtitle(paper$title)),
                   sentence(fmtType(paper$type, "Ph.D. thesis"),
                            fmtSchool(paper$school),
                            fmtAddress(paper$address)),
                   sentence(extraInfo(paper))))
    }

    formatMisc <- function(paper) {
        collapse(c(fmtPrefix(paper),
                   sentence(authorList(paper), fmtYear(paper$year), sep = " "),
                   fmtTitle(paper$title),
                   sentence(fmtHowpublished(paper$howpublished)),
                   sentence(extraInfo(paper))))
    }

    formatProceedings <- function(book) {
        if (is.null(book$editor)) editor <- "Anonymous (ed.)"
        else editor <- editorList(book)
        collapse(c(fmtPrefix(book), # not paper
                   sentence(editor, fmtYear(book$year), sep = " "),
                   sentence(fmtBtitle(book$title), bookVolume(book)),
                   sentence(procOrganization(book)),
                   sentence(fmtISBN(book$isbn), fmtISSN(book$issn),
                            extraInfo(book))))
    }

    formatTechreport <- function(paper) {
        collapse(c(fmtPrefix(paper),
                   sentence(authorList(paper), fmtYear(paper$year), sep = " "),
                   fmtTitle(paper$title),
                   sentence(fmtTechreportnumber(paper),
                            fmtInstitution(paper$institution),
                            fmtAddress(paper$address)),
                   sentence(extraInfo(paper))))
    }

    formatUnpublished <- function(paper) {
        collapse(c(fmtPrefix(paper),
                   sentence(authorList(paper), fmtYear(paper$year), sep = " "),
                   fmtTitle(paper$title),
                   sentence(extraInfo(paper))))
    }

    ## One sort key per entry: the formatted author (or, failing that,
    ## editor) list, or "" when neither exists.  Used by sort().
    sortKeys <- function(bib) {
        result <- character(length(bib))
        for (i in seq_along(bib)) {
            authors <- authorList(bib[[i]])
            if (!length(authors))
                authors <- editorList(bib[[i]])
            if (!length(authors))
                authors <- ""
            result[i] <- authors
        }
        result
    }

    # Replace this if you want a bibliography style
    # that puts a prefix on each entry, e.g. [n]
    # The formatting routine will have added a field .index
    # as a 1-based index within the complete list.

    fmtPrefix <- function(paper) NULL

    cite <- function(key, bib, ...)
        utils::citeNatbib(key, bib, ...) # the defaults are JSS style

    ## The style object is simply this environment.
    environment()
}

## Retrieve, register, or modify a bibliography style and optionally
## make it the default.  The registry ('styles', a named list of style
## environments) and the default style's name ('default') live in this
## local() closure and persist across calls.
bibstyle <- local({
    styles <- list(JSS = makeJSS())
    default <- "JSS"
    function(style, envir, ..., .init = FALSE, .default=TRUE) {
        newfns <- list(...)
        if (missing(style) || is.null(style)) {
            ## no style named: only a plain query of the default is legal
            if (!missing(envir) || length(newfns) || .init)
            	stop("Changes require specified 'style'")
            style <- default
        } else {
	    ## register 'envir' as the style (incompatible with .init)
	    if (!missing(envir)) {
		stopifnot(!.init)
		styles[[style]] <<- envir
	    }
	    ## .init: (re)start the style from a fresh copy of JSS
	    if (.init) styles[[style]] <<- makeJSS()
	    if (length(newfns) && style == "JSS")
		stop("The default JSS style may not be modified.")
	    ## install any replacement functions passed via ...
	    for (n in names(newfns))
		assign(n, newfns[[n]], envir=styles[[style]])
            if (.default)
            	default <<- style
        }
        styles[[style]]
    }
})

getBibstyle <- function(all = FALSE) {
    ## Return the name of the current default bibliography style, or
    ## the names of all registered styles when 'all' is TRUE.  Both are
    ## read from the closure environment of bibstyle().
    reg <- environment(bibstyle)
    if (all)
        names(reg$styles)
    else
        reg$default
}

## Render a bibentry object as Rd markup, one string per entry.
## The entry's "bibtype" attribute selects the format*() function,
## which is looked up in the chosen style environment.
toRd.bibentry <- function(obj, style=NULL, ...) {
    ## sort first so output follows the style's sort keys
    obj <- sort(obj, .bibstyle=style)
    style <- bibstyle(style, .default = FALSE)
    ## evaluate formatters in a child of the style environment with
    ## 'paper' bound to the current entry
    env <- new.env(hash = FALSE, parent = style)
    bib <- unclass(obj)
    result <- character(length(bib))
    for (i in seq_along(bib)) {
    	env$paper <- bib[[i]]
    	result[i] <- with(env,
    	    switch(attr(paper, "bibtype"),
    	    Article = formatArticle(paper),
    	    Book = formatBook(paper),
    	    InBook = formatInbook(paper),
    	    InCollection = formatIncollection(paper),
    	    InProceedings = formatInProceedings(paper),
    	    Manual = formatManual(paper),
    	    MastersThesis = formatMastersthesis(paper),
    	    Misc = formatMisc(paper),
    	    PhdThesis = formatPhdthesis(paper),
    	    Proceedings = formatProceedings(paper),
    	    TechReport = formatTechreport(paper),
    	    Unpublished = formatUnpublished(paper),
    	    paste("bibtype", attr(paper, "bibtype"),"not implemented") ))
    }
    ## escape any remaining unescaped % signs for Rd
    gsub("(^|[^\\])((\\\\\\\\)*)%", "\\1\\2\\\\%", result)
}
#  File src/library/tools/R/build.R
#  Part of the R package, https://www.R-project.org
#
#  Copyright (C) 1995-2025 The R Core Team
#
#  This program is free software; you can redistribute it and/or modify
#  it under the terms of the GNU General Public License as published by
#  the Free Software Foundation; either version 2 of the License, or
#  (at your option) any later version.
#
#  This program is distributed in the hope that it will be useful,
#  but WITHOUT ANY WARRANTY; without even the implied warranty of
#  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
#  GNU General Public License for more details.
#
#  A copy of the GNU General Public License is available at
#  https://www.R-project.org/Licenses/

#### R based engine for R CMD build

## R developers can use this to debug the function by running it
## directly as tools:::.build_packages(args), where the args should
## be what commandArgs(TRUE) would return, that is a character vector
## of (space-delimited) terms that would be passed to R CMD build.

writeDefaultNamespace <-
function(filename, desc = file.path(dirname(filename), "DESCRIPTION"))
{
    ## Write a fallback NAMESPACE to 'filename': export every non-dot
    ## name and import each package listed in Imports or Depends of the
    ## DESCRIPTION file 'desc' (except base).
    info <- .split_description(.read_description(desc))
    deps <- unique(c(names(info$Imports), names(info$Depends)))
    deps <- deps[deps != "base"]

    ## import() block only when there is something to import
    importLines <-
        if (length(deps))
            c("",
              "# Import all packages listed as Imports or Depends",
              "import(",
              paste0("  ", deps, collapse = ",\n"),
              ")")

    writeLines(c("# Default NAMESPACE created by R",
                 "# Remove the previous line if you edit this file",
                 "",
                 "# Export all names",
                 "exportPattern(\"^[^.]\")",
                 importLines),
               filename)
}


### formerly Perl R::Utils::get_exclude_patterns

## Return list of file patterns excluded by R CMD build.
## Not exported.
## Has Unix-style '/' path separators hard-coded, but that is what dir() uses.
## Return the Perl-style regexps (matched against '/'-separated paths
## relative to the package root) for files R CMD build always excludes.
## Not exported.
get_exclude_patterns <- function() {
    c(## R housekeeping / macOS metadata
      "^\\.Rbuildignore$",
      "(^|/)\\.DS_Store$",
      "^\\.(RData|Rhistory)$",
      ## editor backup, swap and lock files
      "~$", "\\.bak$", "\\.sw.$",
      "(^|/)\\.#[^/]*$", "(^|/)#[^/]*#$",
      ## Outdated ...
      "^TITLE$", "^data/00Index$",
      "^inst/doc/00Index\\.dcf$",
      ## Autoconf
      "^config\\.(cache|log|status)$",
      "(^|/)autom4te\\.cache$", # ncdf4 had this in subdirectory 'tools'
      ## Windows dependency files
      "^src/.*\\.d$", "^src/Makedeps$",
      ## IRIX, of some vintage
      "^src/so_locations$",
      ## Sweave detritus
      "^inst/doc/Rplots\\.(ps|pdf)$",
      ## GNU Global
      "^(GPATH|GRTAGS|GTAGS)$")
}


## Report, for each element of 'files' (paths relative to 'pkgdir'),
## whether it matches a build-exclusion pattern: either a built-in one
## from get_exclude_patterns() or a line of the package's .Rbuildignore.
## 'These patterns should be Perl regexps, one per line,
##  to be matched against the file names relative to
##  the top-level source directory.'
inRbuildignore <- function(files, pkgdir) {
    patterns <- get_exclude_patterns()
    ignore_file <- file.path(pkgdir, ".Rbuildignore")
    if (file.exists(ignore_file))
        patterns <- c(patterns, readLines(ignore_file, warn = FALSE))
    ## OR together the per-pattern match vectors (empty patterns are
    ## skipped: they would match everything)
    hits <- lapply(patterns[nzchar(patterns)],
                   function(p) grepl(p, files, perl = TRUE,
                                     ignore.case = TRUE))
    Reduce(`|`, hits, rep.int(FALSE, length(files)))
}

### based on Perl build script

.build_packages <- function(args = NULL, no.q = interactive())
{
    ## on Windows, this requires   sh make

    WINDOWS <- .Platform$OS.type == "windows"

    Sys.umask("022") # Perl version did not have this.

    writeLinesNL <- function(text, file)
    {
        ## a version that uses NL line endings everywhere
        con <- file(file, "wb")
        on.exit(close(con))
        writeLines(text, con)
    }

    ## This version of system_with_capture merges stdout and stderr
    ## Used to run R to install package and build vignettes.
    system_with_capture <- function (command, args) {
        outfile <- tempfile("xshell")
        on.exit(unlink(outfile))
        status <- system2(command, args, stdout=outfile, stderr=outfile)
        list(status = status, stdout = readLines(outfile, warn = FALSE))
    }
    ## Run silently
    Ssystem <- function(command, args = character(), ...)
        system2(command, args, stdout = NULL, stderr = NULL, ...)

    do_exit <-
	if(no.q)
	    function(status) (if(status) stop else message)(
		".build_packages() exit status ", status)
	else
	    function(status) q("no", status = status, runLast = FALSE)

    ## Used for BuildVignettes, BuildManual, BuildKeepEmpty,
    ## and (character not logical) BuildResaveData
    parse_description_field <-
        function(desc, field, default = TRUE, logical = TRUE)
            str_parse(desc[field], default=default, logical=logical)

    Usage <- function() {
        cat("Usage: R CMD build [options] pkgdirs",
            "",
            "Build R packages from package sources in the directories specified by",
            sQuote("pkgdirs"),
            "",
            "Options:",
            "  -h, --help		print short help message and exit",
            "  -v, --version		print version info and exit",
            "",
            "  --force               force removal of INDEX file",
            "  --keep-empty-dirs     do not remove empty dirs",
            "  --no-build-vignettes  do not (re)build package vignettes",
            "  --no-manual           do not build the PDF manual even if \\Sexprs are present",
            "  --resave-data=        re-save data files as compactly as possible:",
            '                        "no", "best", "gzip" (default)',
            "  --resave-data         same as --resave-data=best",
            "  --no-resave-data      same as --resave-data=no",
            "  --compact-vignettes=  try to compact PDF files under inst/doc:",
            '                        "no" (default), "qpdf", "gs", "gs+qpdf", "both"',
            "  --compact-vignettes   same as --compact-vignettes=qpdf",
            "  --compression=        type of compression to be used on tarball:",
            '                        "gzip" (default), "none", "bzip2", "xz", "zstd"',
            "  --md5                 add MD5 sums",
            "  --log                 log to file 'pkg-00build.log' when processing ",
            "                        the pkgdir with basename 'pkg'",
            "  --user=               explicitly set the tarball creator name (for 'Packaged:')",
            "                        instead of 'Sys.info()[\"user\"]' or the \"LOGNAME\" env variable",
            "",
            "Report bugs at <https://bugs.R-project.org>.", sep = "\n")
    }

    add_build_stamp_to_description_file <- function(ldpath, pkgdir, user)
    {
        db <- .read_description(ldpath)
        if(dir.exists(file.path(pkgdir, "src")))
            db["NeedsCompilation"] <- "yes"
        else if(is.na(db["NeedsCompilation"]))
            db["NeedsCompilation"] <- "no"
        db["Packaged"] <-
            sprintf("%s; %s",
                    format(Sys.time(), "%Y-%m-%d %H:%M:%S",
                           tz = 'UTC', usetz = TRUE),
                    user)
        ## also add_expanded_R_fields -- when not empty:
        fields <- .expand_package_description_db_R_fields(db)
        .write_description(if(length(fields)) c(db, fields) else db, ldpath)
    }

    temp_install_pkg <- function(pkgdir, libdir) {
	dir.create(libdir, mode = "0755", showWarnings = FALSE)
        if(nzchar(install_dependencies) &&
           all((repos <- getOption("repos")) != "@CRAN@")) {
            ## try installing missing dependencies too
            available <- utils::available.packages(repos = repos)
            db <- .read_description(file.path(pkgdir, "DESCRIPTION"))
            package <- db["Package"]
            available <-
                rbind(available[available[, "Package"] != package, ,
                                drop = FALSE],
                      db[colnames(available)])
            depends <- package_dependencies(package, available,
                                            which = install_dependencies)
            depends <- setdiff(unlist(depends),
                               rownames(utils::installed.packages()))
            if(length(depends)) {
                message(paste(strwrap(sprintf("installing dependencies %s",
                                              paste(sQuote(sort(depends)),
                                                    collapse = ", ")),
                                      exdent = 2L),
                              collapse = "\n"), domain = NA)
                utils::install.packages(depends,
                                        libdir,
                                        available =
                                            available[-nrow(available), ,
                                                      drop = FALSE],
                                        dependencies = NA)
            }
        }
        ## assume vignettes only need one arch
        if (WINDOWS) {
            cmd <- file.path(R.home("bin"), "Rcmd.exe")
            args <- c("INSTALL -l", shQuote(libdir),
                      "--no-multiarch", shQuote(pkgdir))
        } else {
            cmd <- file.path(R.home("bin"), "R")
            args <- c("CMD", "INSTALL -l", shQuote(libdir),
                      "--no-multiarch", shQuote(pkgdir))
        }
	res <- system_with_capture(cmd, args)
	if (res$status) {
	    printLog(Log, "      -----------------------------------\n")
	    printLog0(Log, paste(c(res$stdout, ""),  collapse = "\n"))
	    printLog(Log, "      -----------------------------------\n")
	    unlink(libdir, recursive = TRUE)
	    printLog(Log, "ERROR: package installation failed\n")
	    do_exit(1L)
	}
	Sys.setenv("R_BUILD_TEMPLIB" = libdir)
	TRUE
    } ## {temp_install_pkg}

    prepare_pkg <- function(pkgdir, desc, Log)
    {
        owd <- setwd(pkgdir); on.exit(setwd(owd))
##        pkgname <- basename(pkgdir)
        checkingLog(Log, "DESCRIPTION meta-information")
        res <- try(.check_package_description("DESCRIPTION"))
        if (inherits(res, "try-error")) {
            resultLog(Log, "ERROR")
            messageLog(Log, "running '.check_package_description' failed")
        } else {
            if (any(lengths(res))) {
                resultLog(Log, "ERROR")
                print(res) # FIXME print to Log?
                do_exit(1L)
            } else resultLog(Log, "OK")
        }
        cleanup_pkg(pkgdir, Log)

        libdir <- tempfile("Rinst")

        ensure_installed <- function()
	    if (!pkgInstalled) {
		messageLog(Log,
			   "installing the package to build vignettes")
		pkgInstalled <<- temp_install_pkg(pkgdir, libdir)
	    }

        pkgInstalled <- build_Rd_db(pkgdir, libdir, desc)

        if (file.exists("INDEX")) update_Rd_index("INDEX", "man", Log)
        doc_dir <- file.path("inst", "doc")
        if ("makefile" %in% dir(doc_dir)) { # avoid case-insensitive match
            messageLog(Log, "renaming 'inst/doc/makefile' to 'inst/doc/Makefile'")
            file.rename(file.path(doc_dir, "makefile"),
                        file.path(doc_dir, "Makefile"))
        }
        if (vignettes &&
            parse_description_field(desc, "BuildVignettes", TRUE)) {

            vignette_index_path <- file.path("build", "vignette.rds")
            if(file.exists(vignette_index_path))
                unlink(vignette_index_path)

## this is not a logical field
##	    if (nchar(parse_description_field(desc, "VignetteBuilder", "")))
##		ensure_installed()

            ## PR#15775: check VignetteBuilder packages are installed
            ## This is a bit wasteful: we do not need them in this process
            loadVignetteBuilder(pkgdir, TRUE)

            ## Look for vignette sources
            vigns <- pkgVignettes(dir = '.', check = TRUE)
            if (!is.null(vigns) && length(vigns$docs)) {
                ensure_installed()
                ## Good to do this in a separate process: it might die
                creatingLog(Log, "vignettes")
                R_LIBS <- Sys.getenv("R_LIBS", NA_character_)
                if (!is.na(R_LIBS)) {
                    on.exit(Sys.setenv(R_LIBS = R_LIBS), add = TRUE)
                    Sys.setenv(R_LIBS = path_and_libPath(libdir, R_LIBS))
                } else { # no .libPaths() here (speed; ok ?)
                    on.exit(Sys.unsetenv("R_LIBS"), add = TRUE)
                    Sys.setenv(R_LIBS = libdir)
                }

                ## Tangle (and weave) all vignettes now.

                cmd <- file.path(R.home("bin"), "Rscript")
                args <- c("--vanilla",
                          "--default-packages=", # some vignettes assume methods
                          "-e", shQuote("tools::buildVignettes(dir = '.', tangle = TRUE)"))
                ## since so many people use 'R CMD' in Makefiles,
                oPATH <- Sys.getenv("PATH")
                Sys.setenv(PATH = paste(R.home("bin"), oPATH,
                           sep = .Platform$path.sep))
                res <- system_with_capture(cmd, args)
                Sys.setenv(PATH = oPATH)
                if (res$status) {
                    resultLog(Log, "ERROR")
                    printLog0(Log, paste(c(res$stdout, ""),  collapse = "\n"))
                    do_exit(1L)
                } else {
                    # Rescan for weave and tangle output files
                    vigns <- pkgVignettes(dir = '.', output = TRUE, source = TRUE)
                    stopifnot(!is.null(vigns))

                    resultLog(Log, "OK")
                }

                ## We may need to install them.
                if (basename(vigns$dir) == "vignettes") {
                    ## inst may not yet exist
                    dir.create(doc_dir, recursive = TRUE, showWarnings = FALSE)
                    tocopy <- unique(c(vigns$docs, vigns$outputs,
                                       unlist(vigns$sources)))
                    copied <- file.copy(tocopy, doc_dir, copy.date = TRUE)
                    if (!all(copied)) {
                    	warning(sprintf(ngettext(sum(!copied),
                                                 "%s file\n", "%s files\n"),
                                        sQuote("inst/doc")),
                    	        strwrap(paste(sQuote(basename(tocopy[!copied])), collapse=", "),
                    	                indent = 4, exdent = 2),
			        "\n  ignored as vignettes have been rebuilt.",
			        "\n  Run R CMD build with --no-build-vignettes to prevent rebuilding.",
			     call. = FALSE)
			file.copy(tocopy[!copied], doc_dir, overwrite = TRUE, copy.date = TRUE)
		    }
                    unlink(c(vigns$outputs, unlist(vigns$sources)))
                    extras_file <- file.path("vignettes", ".install_extras")
                    if (file.exists(extras_file)) {
                        extras <- readLines(extras_file, warn = FALSE)
                        if(length(extras)) {
                            allfiles <- dir("vignettes", all.files = TRUE,
                                            full.names = TRUE, recursive = TRUE,
                                            include.dirs = TRUE)
                            inst <- rep.int(FALSE, length(allfiles))
                            for (e in extras)
                                inst <- inst | grepl(e, allfiles, perl = TRUE,
                                                     ignore.case = TRUE)
                            file.copy(allfiles[inst], doc_dir, recursive = TRUE, copy.date = TRUE)
                        }
                    }
                }

		vignetteIndex <- .build_vignette_index(vigns)

		if(NROW(vignetteIndex) > 0L) {
		    ## remove any files with no R code (they will have header comments).
		    ## if not correctly declared they might not be in the current encoding
		    sources <- vignetteIndex$R
		    for(i in seq_along(sources)) {
			file <- file.path(doc_dir, sources[i])
			if (!file_test("-f", file)) next
			bfr <- readLines(file, warn = FALSE)
			if(all(grepl("(^###|^[[:space:]]*$)", bfr, useBytes = TRUE))) {
			    unlink(file)
			    vignetteIndex$R[i] <- ""
			}
		    }
		}

		## Save the list
		dir.create("build", showWarnings = FALSE)
		## version = 2L for maximal back-compatibility
		saveRDS(vignetteIndex,
			file = vignette_index_path,
			version = 2L)
            }
        } else {
            fv <- file.path("build", "vignette.rds")
            if(file.exists(fv)) {
                checkingLog(Log, "vignette meta-information")
                db <- readRDS(fv)
                pdfs <- file.path("inst", "doc", db[nzchar(db$PDF), ]$PDF)
                missing <- !file.exists(pdfs)
                if(any(missing)) {
                    msg <- c("Output(s) listed in 'build/vignette.rds' but not in package:",
                             strwrap(sQuote(pdfs[missing]), indent = 2L, exdent = 2L),
                             "Run R CMD build without --no-build-vignettes to re-create")
                    errorLog(Log, paste(msg, collapse = "\n"))
                    do_exit(1L)
                } else resultLog(Log, "OK")
            }
        }
        if (compact_vignettes != "no" &&
            length(pdfs <- dir(doc_dir, pattern = "[.]pdf", recursive = TRUE,
                               full.names = TRUE))) {
            messageLog(Log, "compacting vignettes and other PDF files")
            if(compact_vignettes %in% c("gs", "gs+qpdf", "both")) {
                gs_cmd <- find_gs_cmd()
                gs_quality <- "ebook"
            } else {
                gs_cmd <- ""
                gs_quality <- "none"
            }
            qpdf <-
                if(compact_vignettes %in% c("qpdf", "gs+qpdf", "both"))
                    Sys.which(Sys.getenv("R_QPDF", "qpdf")) else ""
            res <- compactPDF(pdfs, qpdf = qpdf,
                              gs_cmd = gs_cmd, gs_quality = gs_quality)
            res <- format(res, diff = 1e5)
            if(length(res))
                printLog0(Log, paste0("  ", format(res), collapse = "\n"), "\n")
        }
        if (pkgInstalled) {
            unlink(libdir, recursive = TRUE)

	    ## And finally, clean up again.
            cleanup_pkg(pkgdir, Log)
        }
    } ## {prepare_pkg}

    cleanup_pkg <- function(pkgdir, Log)
    {
        ## Clean up a package source tree after a (temporary)
        ## installation: run 'make clean' (or an equivalent) in src/,
        ## remove compilation leftovers, run any package-provided
        ## cleanup script, and revert install-time patches.
        owd <- setwd(pkgdir); on.exit(setwd(owd))
        pkgname <- basename(pkgdir)
        if (dir.exists("src")) {
            setwd("src")
            messageLog(Log, "cleaning src")
            if (WINDOWS) {
                have_make <- nzchar(Sys.which(Sys.getenv("MAKE", "make")))
                if (file.exists(fn <- "Makefile.ucrt") || file.exists(fn <- "Makefile.win")) {
                    if (have_make)
                        Ssystem(Sys.getenv("MAKE", "make"), paste0("-f ", fn, " clean"))
                    else warning("unable to run 'make clean' in 'src'",
                                 domain = NA)
                } else {
                    if (file.exists(fn <- "Makevars.ucrt") || file.exists(fn <- "Makevars.win")) {
                        if (have_make) {
                            ## clean.mk provides a default 'clean' target
                            ## in case the Makevars* file lacks one.
                            makefiles <- paste("-f",
                                               shQuote(file.path(R.home("share"), "make", "clean.mk")),
                                           "-f", fn)
                            Ssystem(Sys.getenv("MAKE", "make"),
                                    c(makefiles, "clean"))
                        } else warning("unable to run 'make clean' in 'src'",
                                       domain = NA)
                    }
                    ## Also cleanup possible leftovers ...
                    unlink(c(Sys.glob(c("*.o", "*.so", "*.dylib", "*.mod")),
                             paste0(pkgname, c(".a", ".dll", ".def")),
                             "symbols.rds"))
                    if (dir.exists(".libs")) unlink(".libs", recursive = TRUE)
                    if (dir.exists("_libs")) unlink("_libs", recursive = TRUE)
                }
            } else {
                ## Always include the (arch-specific) system Makeconf.
                makefiles <- paste("-f",
                                   shQuote(file.path(R.home("etc"),
                                                     Sys.getenv("R_ARCH"),
                                                     "Makeconf")))
                if (file.exists("Makefile")) {
                    makefiles <- paste(makefiles, "-f", "Makefile")
                    Ssystem(Sys.getenv("MAKE", "make"), c(makefiles, "clean"))
                } else {
                    if (file.exists("Makevars")) {
                        ## ensure we do have a 'clean' target.
                        makefiles <- paste(makefiles, "-f",
                                       shQuote(file.path(R.home("share"), "make", "clean.mk")),
                                           "-f Makevars")
                        Ssystem(Sys.getenv("MAKE", "make"),
                                c(makefiles, "clean"))
                    }
                    ## Also cleanup possible leftovers ...
                    unlink(c(Sys.glob(c("*.o", "*.so", "*.dylib", "*.mod")),
                             paste0(pkgname, c(".a", ".dll", ".def")),
                             "symbols.rds"))
                    if (dir.exists(".libs")) unlink(".libs", recursive = TRUE)
                    if (dir.exists("_libs")) unlink("_libs", recursive = TRUE)
                }
            }
        }
        ## Restore the caller's working directory now (we may be in
        ## src/): the cleanup scripts below are looked up relative to it,
        ## and the on.exit() handler only fires on function exit.
        setwd(owd)
        ## It is not clear that we want to do this: INSTALL should do so.
        ## Also, certain environment variables should be set according
        ## to 'Writing R Extensions', but were not in Perl version (nor
        ## was cleanup.win used).
        if (WINDOWS) {
            has_cleanup_ucrt <- file.exists("cleanup.ucrt")
            if (has_cleanup_ucrt || file.exists("cleanup.win")) {
                ## check we have sh.exe first
                if (nzchar(Sys.which("sh.exe"))) {
                    Sys.setenv(R_PACKAGE_NAME = pkgname)
                    Sys.setenv(R_PACKAGE_DIR = pkgdir)
                    Sys.setenv(R_LIBRARY_DIR = dirname(pkgdir))
                    if (has_cleanup_ucrt) {
                        messageLog(Log, "running 'cleanup.ucrt'")
                        Ssystem("sh", "./cleanup.ucrt")
                    } else {
                        messageLog(Log, "running 'cleanup.win'")
                        Ssystem("sh", "./cleanup.win")
                    }
                }
            }
        } else if (file_test("-x", "cleanup")) {
            Sys.setenv(R_PACKAGE_NAME = pkgname)
            Sys.setenv(R_PACKAGE_DIR = pkgdir)
            Sys.setenv(R_LIBRARY_DIR = dirname(pkgdir))
            messageLog(Log, "running 'cleanup'")
            Ssystem("./cleanup")
        }
        revert_install_time_patches()
    }

    update_Rd_index <- function(oldindex, Rd_files, Log)
    {
        ## Recompute the Rd index into a temporary file and compare it
        ## with the existing 'oldindex'.  On mismatch either remove the
        ## stale index (with '--force') or keep it and tell the user;
        ## if there is no index yet, install the freshly computed one.
        tmp_index <- tempfile()
        tryCatch(Rdindex(Rd_files, tmp_index),
                 error = function(e) {
                     errorLog(Log, "computing Rd index failed:",
                              conditionMessage(e))
                     do_exit(1L)
                 })
        checkingLog(Log, "whether ", sQuote(oldindex), " is up-to-date")
        if (!file.exists(oldindex)) {
            resultLog(Log, "NO")
            messageLog(Log, "creating new ", sQuote(oldindex))
            file.rename(tmp_index, oldindex)
        } else {
            ## warn = FALSE: e.g. BaM had a missing final NL
            old_lines <- readLines(oldindex, warn = FALSE)
            new_lines <- readLines(tmp_index)
            if (identical(old_lines, new_lines)) {
                resultLog(Log, "OK")
                unlink(tmp_index)
            } else {
                resultLog(Log, "NO")
                if (force) {
                    messageLog(Log, "removing ", sQuote(oldindex),
                               " as '--force' was given")
                    unlink(oldindex)
                } else {
                    messageLog(Log, "use '--force' to remove ",
                               "the existing ", sQuote(oldindex))
                    unlink(tmp_index)
                }
            }
        }
    }

    build_Rd_db <- function(pkgdir, libdir, desc) {
        ## Evaluate build-time \Sexpr material in the package's Rd files.
        ## Saves build/partial.rdb (Rd objects containing build-stage
        ## \Sexprs) and build/stage23.rdb (Rd objects with
        ## install/render-stage \Sexprs), and may build the PDF
        ## reference manual.  Returns TRUE if the package was
        ## temporarily installed to process help pages, FALSE otherwise.

        build_partial_Rd_db_path <-
            file.path("build", "partial.rdb")
        if(file.exists(build_partial_Rd_db_path))
            unlink(build_partial_Rd_db_path)

        ## Use a full path as this could be passed to ..Rd2pdf().
        build_refman_path <-
            file.path(pkgdir, "build", paste0(basename(pkgdir), ".pdf"))
        if(file.exists(build_refman_path))
            unlink(build_refman_path)

        db <- .build_Rd_db(pkgdir, stages = NULL,
                           os = c("unix", "windows"), step = 1)
        if (!length(db)) return(FALSE)

        ## Strip the pkgdir off the names
        names(db) <- substring(names(db),
                               nchar(file.path(pkgdir, "man")) + 2L)

        ## One row per Rd object: logicals for "uses \Sexpr at all",
        ## "at build stage" and "at a later (install/render) stage".
        btinfo <- do.call(rbind,
                          lapply(db, .Rd_get_Sexpr_build_time_info))
        if(!any(btinfo[, "\\Sexpr"])) {
            return(FALSE)
        } else {
            ## <FIXME>
            ## Remove eventually.
            ## If we only have Sexprs we never process when building,
            ## for now create an empty partial db to make older versions
            ## of the CRAN incoming check code happy.
            if(!any(btinfo[, c("build", "later")])) {
                dir.create("build", showWarnings = FALSE)
                saveRDS(structure(list(), names = character()),
                        build_partial_Rd_db_path, version = 2L)
                return(FALSE)
            }
            ## </FIXME>
        }

        messageLog(Log, "installing the package to process help pages")

        ## Install into a temporary library so \Sexpr code can load the
        ## package; restore the library path on exit.
        dir.create(libdir, mode = "0755", showWarnings = FALSE)
        savelib <- .libPaths()
        .libPaths(c(libdir, savelib))
        on.exit(.libPaths(savelib), add = TRUE)

        temp_install_pkg(pkgdir, libdir)

        containsBuildSexprs <- which(btinfo[, "build"])
        if(length(containsBuildSexprs)) {
            for (i in containsBuildSexprs) {
                db[[i]] <- prepare_Rd(db[[i]], stages = "build",
                                      stage2 = FALSE, stage3 = FALSE)
                ## There could be build Sexprs giving install/render
                ## Sexprs ...
                btinfo[i, ] <- .Rd_get_Sexpr_build_time_info(db[[i]])
            }
            messageLog(Log, "saving partial Rd database")
            partial <- db[containsBuildSexprs]
            dir.create("build", showWarnings = FALSE)
            ## version = 2L for maximal back-compatibility
            saveRDS(partial, build_partial_Rd_db_path, version = 2L)
        }

        containsLaterSexprs <- which(btinfo[, "later"])
        if(length(containsLaterSexprs)) {
            for (i in containsLaterSexprs) {
                db[[i]] <- prepare_Rd(db[[i]], stages = c("install", "render"),
                                      stage2 = FALSE, stage3 = FALSE)
            }
            stage23 <- db[containsLaterSexprs]
            dir.create("build", showWarnings = FALSE)
            build_stage23_Rd_db_path <-
                file.path("build", "stage23.rdb")
            if(file.exists(build_stage23_Rd_db_path))
                unlink(build_stage23_Rd_db_path)
            saveRDS(stage23, build_stage23_Rd_db_path, version = 2L)
        }

        ## Only build the manual when asked for ('--no-manual' not
        ## given, 'BuildManual' not disabling it) and there are
        ## install/render-stage \Sexprs.
        needRefman <- manual &&
            parse_description_field(desc, "BuildManual", FALSE) &&
            any(btinfo[, "later"])
        if (needRefman) {
            messageLog(Log, "building the PDF package manual")
            dir.create("build", showWarnings = FALSE)
            ..Rd2pdf(c("--force", "--no-preview", "--quiet",
                       paste0("--output=", build_refman_path),
                       pkgdir), quit = FALSE)
        }
        return(TRUE)
    } ## {build_Rd_db}

    ## also fixes up missing final NL
    fix_nonLF_in_files <- function(pkgname, dirPattern, Log)
    {
        ## Rewrite files matching 'dirPattern' under src/ and
        ## inst/include/ with LF line endings (also adding a missing
        ## final newline), preserving each file's modification time.
        search_dirs <- file.path(pkgname, c("src", "inst/include"))
        targets <- dir(search_dirs, pattern = dirPattern,
                       full.names = TRUE, recursive = TRUE)
        for (path in targets) {
            mtime <- file.mtime(path)
            writeLinesNL(readLines(path, warn = FALSE), path)
            Sys.setFileTime(path, mtime)
        }
    }

    fix_nonLF_in_source_files <- function(pkgname, Log) {
        ## Normalize line endings in C/C++/Fortran sources and headers.
        src_pattern <- "\\.([cfh]|cc|cpp|hpp)$"
        fix_nonLF_in_files(pkgname, dirPattern = src_pattern, Log)
    }

    fix_nonLF_in_make_files <- function(pkgname, Log) {
        ## Normalize line endings in make files under src/ and
        ## inst/include/ (timestamps preserved), and in any other
        ## 'Makefile' in the package (timestamps not preserved).
        ## The dots in the file names are escaped so that the pattern
        ## matches only the literal names (previously e.g. a file named
        ## "MakefileXin" would also have matched "Makefile.in").
        make_names <- c("Makefile", "Makefile.in", "Makefile.win", "Makefile.ucrt",
                        "Makevars", "Makevars.in", "Makevars.win", "Makevars.ucrt")
        fix_nonLF_in_files(pkgname,
                           paste0("^(",
                                  paste(gsub(".", "\\.", make_names, fixed = TRUE),
                                        collapse = "|"), ")$"), Log)
        ## Other Makefiles
        makes <- dir(pkgname, pattern = "^Makefile$",
                     full.names = TRUE, recursive = TRUE)
        for (ff in makes) {
            lines <- readLines(ff, warn = FALSE)
            writeLinesNL(lines, ff)
        }
    }

    fix_nonLF_in_config_files <- function(pkgname, Log) {
        ## Normalize line endings in 'configure' and 'cleanup' scripts
        ## anywhere in the package.
        ## FIXME: This "destroys" all timestamps
        scripts <- dir(pkgname, pattern = "^(configure|cleanup)$",
                       full.names = TRUE, recursive = TRUE)
        for (path in scripts)
            writeLinesNL(readLines(path, warn = FALSE), path)
    }

    find_empty_dirs <- function(d)
    {
        ## Recursively remove (or warn about) empty directories under
        ## 'd'.  Uses 'desc', 'keep_empty' and 'Log' from the enclosing
        ## scope.
        ## dir(recursive = TRUE) did not include directories, so
        ## we needed to do this recursively
        files <- dir(d, all.files = TRUE, full.names = TRUE)
        for (dd in files[dir.exists(files)]) {
            ## skip the "." and ".." entries returned by dir()
            if (grepl("/\\.+$", dd)) next
            find_empty_dirs(dd)
        }
        ## allow per-package override
        keep_empty1 <- parse_description_field(desc, "BuildKeepEmpty",
                                               keep_empty)
        if (!keep_empty1) # might have removed a dir
            files <- dir(d, all.files = TRUE, full.names = TRUE)
        if (length(files) <= 2L) { # always has ., ..
            if (keep_empty1) {
                printLog(Log, "WARNING: directory ", sQuote(d), " is empty\n")
            } else {
                unlink(d, recursive = TRUE)
                printLog(Log, "Removed empty directory ", sQuote(d), "\n")
            }
        }
    }

    fixup_R_dep <- function(pkgname, ver = "2.10")
    {
        ## Ensure the package declares a dependency on R (>= ver),
        ## rewriting the 'Depends' field of its DESCRIPTION if needed.
        desc <- .read_description(file.path(pkgname, "DESCRIPTION"))
        target <- package_version(ver)
        for (dep in .split_description(desc)$Rdepends2) {
            ## An existing 'R (>= x)' dependency with x >= ver suffices.
            if (dep$op == '>=' && dep$version >= target) return()
        }

        ## Turn a parsed dependency back into its DESCRIPTION form.
        flatten <- function(x)
            if (length(x) == 3L)
                paste0(x$name, " (", x$op, " ", x$version, ")")
            else x[[1L]]

        old_depends <- desc["Depends"]
        desc["Depends"] <- if (is.na(old_depends))
            sprintf("R (>= %s)", ver)
        else {
            parsed <- .split_dependencies(old_depends)
            parsed <- parsed[names(parsed) != "R"] # could be more than one
            paste(c(sprintf("R (>= %s)", ver), sapply(parsed, flatten)),
                  collapse = ", ")
        }

        .write_description(desc, file.path(pkgname, "DESCRIPTION"))

        printLog(Log,
                 "  NB: this package now depends on R (>= ", ver, ")\n")
    }

    resave_data_rda <- function(pkgname, resave_data)
    {
        ## Re-save data/*.rda, data/*.RData and R/sysdata.rda.
        ## 'resave_data' is one of "no" (do nothing), "best" (let
        ## resaveRdaFiles() pick the best compression, possibly
        ## requiring a newer R to read) or anything else (treated as
        ## gzip, only re-saving files that clearly need it).
        if (resave_data == "no") return()
        ddir <- file.path(pkgname, "data")
        if(resave_data == "best") {
            files <- Sys.glob(c(file.path(ddir, "*.rda"),
                                file.path(ddir, "*.RData"),
                                file.path(pkgname, "R", "sysdata.rda")))
            messageLog(Log, "re-saving image files")
            resaveRdaFiles(files)
            rdas <- checkRdaFiles(files)
            ## bzip2/xz-compressed rda files need R >= 2.10 to read
            if(any(rdas$compress %in% c("bzip2", "xz")))
                fixup_R_dep(pkgname, "2.10")
        } else {
            ## ddir need not exist if just R/sysdata.rda
            rdas <- checkRdaFiles(Sys.glob(c(file.path(ddir, "*.rda"),
                                             file.path(ddir, "*.RData"))))
            if(nrow(rdas)) {
                ## only touch files saved as ASCII, uncompressed, or in
                ## an old serialization version
                update <- with(rdas, ASCII | compress == "none" | version < 2)
                if(any(update)) {
                    messageLog(Log, "re-saving image files")
                    resaveRdaFiles(row.names(rdas)[update], "gzip")
                }
            }
            if(file.exists(f <- file.path(pkgname, "R", "sysdata.rda"))) {
                rdas <- checkRdaFiles(f)
                update <- with(rdas, ASCII | compress == "none" | version < 2)
                if(any(update)) {
                    messageLog(Log, "re-saving sysdata.rda")
                    resaveRdaFiles(f, "gzip")
                }
            }
        }
    }


    resave_data_others <- function(pkgname, resave_data)
    {
        ## Convert non-rda files in the data/ directory: data/*.R
        ## scripts are run and their results saved as .rda, and tabular
        ## files (.csv/.txt/.tab ...) are compressed.  Replaced
        ## originals (collected in 'resaved') are removed on exit.
        if (resave_data == "no") return()
        if(!dir.exists(ddir <- file.path(pkgname, "data")))
            return()
        ddir <- normalizePath(ddir)
        dataFiles <- filtergrep("\\.(rda|RData)$",
                                list_files_with_type(ddir, "data"))
        if (!length(dataFiles)) return()
        resaved <- character()
        on.exit(unlink(resaved))
        Rs <- grep("\\.[Rr]$", dataFiles, value = TRUE)
        if (length(Rs)) { # these might use .txt etc
            messageLog(Log, "re-saving .R files as .rda")
            ## ensure utils is visible
            ##   library("utils")
            lapply(Rs, function(x){
                ## run the script in a throwaway environment, then save
                ## everything it created
                envir <- new.env(hash = TRUE)
                sys.source(x, chdir = TRUE, envir = envir)
                ## version = 2L for maximal back-compatibility
                save(list = ls(envir, all.names = TRUE),
                     file = sub("\\.[Rr]$", ".rda", x),
                     compress = TRUE, compression_level = 9,
                     envir = envir,
                     version = 2L)
                resaved <<- c(resaved, x)
            })
            printLog(Log,
                     "  NB: *.R converted to .rda: other files may need to be removed\n")
        }
        tabs <- grep("\\.(CSV|csv|TXT|tab|txt)$", dataFiles, value = TRUE)
        if (length(tabs)) {
            messageLog(Log, "re-saving tabular files")
            if (resave_data == "gzip") {
                lapply(tabs, function(nm) {
                    ## DiceDesign/data/greenwood.table.txt is missing NL
                    x <- readLines(nm, warn = FALSE)
                    con <- gzfile(paste0(nm, ".gz"), "wb")
                    writeLines(x, con)
                    close(con)
                    resaved <<- c(resaved, nm)
                })
            } else {
                ## try gzip, bzip2 and xz and keep the smallest; the
                ## gzip size is scaled by 0.9 so bzip2/xz only wins when
                ## clearly smaller
                OK <- TRUE
                lapply(tabs, function(nm) {
                    x <- readLines(nm, warn = FALSE)
                    nm3 <- paste(nm, c("gz", "bz2", "xz"), sep = ".")
                    con <- gzfile(nm3[1L], "wb", compression = 9L); writeLines(x, con); close(con)
                    con <- bzfile(nm3[2L], "wb", compression = 9L); writeLines(x, con); close(con)
                    con <- xzfile(nm3[3L], "wb", compression = 9L); writeLines(x, con); close(con)
                    sizes <- file.size(nm3) * c(0.9, 1, 1)
                    ind <- which.min(sizes)
                    if(ind > 1) OK <<- FALSE
                    resaved <<- c(resaved, nm, nm3[-ind])
                })
                ## bzip2/xz-compressed data need R >= 2.10 to read
                if (!OK) fixup_R_dep(pkgname, "2.10")
            }
        }
    } ## {resave_data_others}

    ## Defaults for the command-line options parsed below.
    force <- FALSE
    vignettes <- TRUE
    manual <- TRUE  # Install the manual if Rds contain \Sexprs
    with_md5 <- FALSE
    with_log <- FALSE
##    INSTALL_opts <- character()
    pkgs <- character()
    options(showErrorCalls = FALSE, warn = 1)

    ## Read in build environment file.
    Renv <- Sys.getenv("R_BUILD_ENVIRON", unset = NA_character_)
    if(!is.na(Renv)) {
        ## Do not read any build environment file if R_BUILD_ENVIRON is
        ## set to empty or something non-existent.
        if(nzchar(Renv) && file.exists(Renv)) readRenviron(Renv)
    } else {
        ## Read in ~/.R/build.Renviron[.rarch] (if existent).
        rarch <- .Platform$r_arch
        if (nzchar(rarch) &&
            file.exists(Renv <- paste0("~/.R/build.Renviron.", rarch)))
            readRenviron(Renv)
        else if (file.exists(Renv <- "~/.R/build.Renviron"))
            readRenviron(Renv)
    }

    ## Configurable variables.
    compact_vignettes <- Sys.getenv("_R_BUILD_COMPACT_VIGNETTES_", "no")
    resave_data <- Sys.getenv("_R_BUILD_RESAVE_DATA_", "gzip")

    keep_empty <-
        config_val_to_logical(Sys.getenv("_R_BUILD_KEEP_EMPTY_DIRS_", "FALSE"))

    ## Normalize _R_BUILD_INSTALL_DEPENDENCIES_: anything other than
    ## "strong"/"most"/"all" is treated as a logical meaning "most"/"".
    install_dependencies <- Sys.getenv("_R_BUILD_INSTALL_DEPENDENCIES_")
    if(nzchar(install_dependencies) &&
       (install_dependencies %notin% c("strong", "most", "all")))
        install_dependencies <-
            if(config_val_to_logical(install_dependencies)) "most" else ""

    if (is.null(args)) {
        args <- commandArgs(TRUE)
        ## it seems that commandArgs() splits on spaces, so try harder:
        ## the front-end joins arguments with the literal 'nextArg'.
        args <- paste(args, collapse = " ")
        args <- strsplit(args,'nextArg', fixed = TRUE)[[1L]][-1L]
    }

    user <- NULL
    compression <- "gzip"
    ## Process command-line arguments one at a time: options may be
    ## interleaved with package paths (collected in 'pkgs').
    while(length(args)) {
        a <- args[1L]
        if (a %in% c("-h", "--help")) {
            Usage()
            do_exit(0L)
        }
        else if (a %in% c("-v", "--version")) {
            cat("R add-on package builder: ",
                R.version[["major"]], ".",  R.version[["minor"]],
                " (r", R.version[["svn rev"]], ")\n", sep = "")
            cat("",
                .R_copyright_msg(1997),
                "This is free software; see the GNU General Public License version 2",
                "or later for copying conditions.  There is NO warranty.",
                sep = "\n")
            do_exit(0L)
        } else if (a == "--force") {
            force <- TRUE
        } else if (a == "--keep-empty-dirs") {
            keep_empty <- TRUE
        } else if (a == "--no-build-vignettes") {
            vignettes <- FALSE
        } else if (a == "--no-vignettes") { # pre-3.0.0 version
            stop("'--no-vignettes' is defunct:\n  use '--no-build-vignettes' instead",
                 call. = FALSE, domain = NA)
        } else if (a == "--resave-data") {
            resave_data <- "best"
        } else if (a == "--no-resave-data") {
            resave_data <- "no"
        } else if (substr(a, 1, 14) == "--resave-data=") {
            resave_data <- substr(a, 15, 1000)
        } else if (a == "--no-manual") {
            manual <- FALSE
        } else if (substr(a, 1, 20) == "--compact-vignettes=") {
            compact_vignettes <- substr(a, 21, 1000)
        } else if (a == "--compact-vignettes") {
            compact_vignettes <- "qpdf"
        } else if (a == "--md5") {
            with_md5 <- TRUE
        } else if (a == "--log") {
            with_log <- TRUE
        } else if (substr(a, 1, 23) == "--install-dependencies=") {
            install_dependencies <- substr(a, 24, 1000)
        } else if (a == "--install-dependencies") {
            install_dependencies <- "most"
        } else if (substr(a, 1, 14) == "--compression=") {
            compression <- match.arg(substr(a, 15, 1000),
                                     c("none", "gzip", "bzip2", "xz", "zstd"))
        } else if (substr(a, 1, 7) == "--user=") {
            ## NOTE(review): only characters 8..64 are kept, so very
            ## long user names are silently truncated.
            user <- substr(a, 8, 64)
        } else if (startsWith(a, "-")) {
            message("Warning: unknown option ", sQuote(a))
        } else pkgs <- c(pkgs, a)
        args <- args[-1L]
    }

    ## Validate the option values collected above.
    if(compact_vignettes %notin% c("no", "qpdf", "gs", "gs+qpdf", "both")) {
        warning(gettextf("invalid value for '--compact-vignettes', assuming %s",
                         "\"qpdf\""),
                domain = NA)
        compact_vignettes <-"qpdf"
    }
    if(is.null(user)) { # not set by --user=*
        user <- Sys.info()[["user"]]
        if(user == "unknown") user <- Sys.getenv("LOGNAME")
    }

    ## Child R processes should load the standard default packages.
    Sys.unsetenv("R_DEFAULT_PACKAGES")

    ## Remember where we started: tarballs and logs are written here.
    startdir <- getwd()
    if (is.null(startdir))
        stop("current working directory cannot be ascertained")
##    R_platform <- Sys.getenv("R_PLATFORM", "unknown-binary")
##    libdir <- tempfile("Rinst")

    if (WINDOWS) {
        ## Some people have *assumed* that R_HOME uses / in Makefiles
        ## Spaces in paths might still cause trouble.
        rhome <- chartr("\\", "/", R.home())
        Sys.setenv(R_HOME = rhome)
    }

    for(pkg in pkgs) {
        ## remove any trailing /, for Windows' sake
        pkg <- sub("/$", "", pkg)
        ## Argh.  For logging we should really know the actual name of
        ## the package being built, but this needs first establishing
        ## the actual pkgdir (see below) and then getting the package
        ## name from the DESCRIPTION file ... and problems in these
        ## steps (currently) already get logged.  So for now try using
        ## the basename of pkg (one could try renaming at the end, but
        ## that will only work in case of success ...)
        Log <- if(with_log)
            newLog(paste0(file.path(startdir, basename(pkg)),
                          "-00build.log"))
        else
            newLog()
        ## 'Older versions used $pkg as absolute or relative to $startdir.
        ## This does not easily work if $pkg is a symbolic link.
        ## Hence, we now convert to absolute paths.'
        setwd(startdir)
	res <- tryCatch(setwd(pkg), error = function(e) {
            errorLog(Log, "cannot change to directory ", sQuote(pkg))
            do_exit(1L)
        })
        pkgdir <- getwd()
        pkgname <- basename(pkgdir)
        checkingLog(Log, "for file ", sQuote(file.path(pkg, "DESCRIPTION")))
        f <- file.path(pkgdir, "DESCRIPTION")
        if (file.exists(f)) {
            desc <- try(.read_description(f))
            if (inherits(desc, "try-error") || !length(desc)) {
                resultLog(Log, "EXISTS but not correct format")
                do_exit(1L)
            }
            resultLog(Log, "OK")
        } else {
            resultLog(Log, "NO")
            do_exit(1L)
        }
	if(is.na(intname <- desc["Package"]) || !length(intname) ||
	   !nzchar(intname)) {
	    errorLog(Log, "invalid 'Package' field"); do_exit(1L)
	}
        ## make a copy, cd to parent of copy
        setwd(dirname(pkgdir))
        filename <- paste0(intname, "_", desc["Version"], ".tar")
        filepath <- file.path(startdir, filename)
        Tdir <- tempfile("Rbuild")
        dir.create(Tdir, mode = "0755")
        if (WINDOWS) {
            ## This preserves read-only for files, and dates
            if (!file.copy(pkgname, Tdir, recursive = TRUE, copy.date = TRUE)) {
                errorLog(Log, "copying to build directory failed")
                do_exit(1L)
            }
        } else {
            ## This should preserve dates and permissions (subject to
            ## umask, if that is consulted which it seems it usually is not).
            ## Permissions are increased later.
            ## -L is to follow (de-reference) symlinks
            ## --preserve is GNU only: at least macOS, FreeBSD and Solaris
            ##   have non-GNU cp's as it seems do some Linuxen.
            ver <- suppressWarnings(system2("cp", "--version", stdout = TRUE,
                                            stderr = FALSE))
            GNU_cp <- any(grepl("GNU coreutils", ver))
	    cp_sw <- if(GNU_cp) "-LR --preserve=timestamps" else "-pLR"
            if (system2("cp", c(cp_sw, shQuote(pkgname), shQuote(Tdir)))) {
                errorLog(Log, "copying to build directory failed")
                do_exit(1L)
            }
        }
        setwd(Tdir)

        ## Now correct the package name (PR#9266)
        if (pkgname != intname) {
            if (!file.rename(pkgname, intname)) {
                message(gettextf("Error: cannot rename directory to %s",
                                 sQuote(intname)), domain = NA)
                do_exit(1L)
            }
            pkgname <- intname
        }

        ## prepare the copy
        messageLog(Log, "preparing ", sQuote(pkgname), ":")
        prepare_pkg(normalizePath(pkgname, "/"), desc, Log);
        owd <- setwd(pkgname)
        ## remove exclude files
        allfiles <- dir(".", all.files = TRUE, recursive = TRUE,
                        full.names = TRUE, include.dirs = TRUE)
        allfiles <- substring(allfiles, 3L)  # drop './'
        bases <- basename(allfiles)

        exclude <- inRbuildignore(allfiles, pkgdir)

        isdir <- dir.exists(allfiles)
        ## old (pre-2.10.0) dirnames
        exclude <- exclude | (isdir & (bases %in%
                                       c("check", "chm", .vc_dir_names)))
        exclude <- exclude | (isdir & grepl("([Oo]ld|\\.Rcheck)$", bases))
        ## FIXME: GNU make uses GNUmakefile (note capitalization)
        exclude <- exclude | bases %in% c("Read-and-delete-me", "GNUMakefile")
        ## Mac resource forks
        exclude <- exclude | startsWith(bases, "._")
        exclude <- exclude | (isdir & grepl("^src.*/[.]deps$", allfiles))
	## Windows DLL resource file
        exclude <- exclude | (allfiles == paste0("src/", pkgname, "_res.rc"))
        ## inst/doc/.Rinstignore is a mistake
        exclude <- exclude | endsWith(allfiles, "inst/doc/.Rinstignore") |
            endsWith(allfiles, "inst/doc/.build.timestamp") |
            endsWith(allfiles, "vignettes/.Rinstignore")
        ## leftovers
        exclude <- exclude | grepl("^.Rbuildindex[.]", allfiles)
        ## or simply?  exclude <- exclude | startsWith(allfiles, ".Rbuildindex.")
        exclude <- exclude | (bases %in% .hidden_file_exclusions)
        ## exclude (old) source tarballs and binary packages (PR#17828)
        exts <- "\\.(tar\\.gz|tar|tar\\.bz2|tar\\.xz|tgz|zip)"
        exclude <- exclude | grepl(paste0("^", pkgname, "_[0-9.-]+", exts, "$"),
                                   allfiles)
        unlink(allfiles[exclude], recursive = TRUE, force = TRUE,
               expand = FALSE)
        setwd(owd)

        ## Fix up man, R, demo inst/doc directories
        res <- .check_package_subdirs(pkgname, TRUE)
        if (any(lengths(res))) {
            messageLog(Log, "excluding invalid files")
            print(res) # FIXME print to Log?
        }
        setwd(Tdir)
        ## Fix permissions for all files to be at least 644, and dirs 755
        ## Not restricted by umask.
	if (!WINDOWS) .Call(C_dirchmod, pkgname, group.writable=FALSE)
        ## Add build stamp *and* expaned R fields to the DESCRIPTION file:
        add_build_stamp_to_description_file(file.path(pkgname, "DESCRIPTION"),
                                            pkgdir, user)
        messageLog(Log,
                   "checking for LF line-endings in source and make files and shell scripts")
        fix_nonLF_in_source_files(pkgname, Log)
        fix_nonLF_in_make_files(pkgname, Log)
        fix_nonLF_in_config_files(pkgname, Log)
        messageLog(Log, "checking for empty or unneeded directories");
        find_empty_dirs(pkgname)
        for(dir in c("Meta", "R-ex", "chtml", "help", "html", "latex")) {
            d <- file.path(pkgname, dir)
            if (dir.exists(d)) {
                msg <- paste("WARNING: Removing directory",
                             sQuote(d),
                             "which should only occur",
                             "in an installed package")
                printLog(Log, paste(strwrap(msg, indent = 0L, exdent = 2L),
                                    collapse = "\n"), "\n")
                unlink(d, recursive = TRUE)
            }
        }
        ## remove subarch build directories
        unlink(file.path(pkgname,
                         c("src-i386", "src-x64", "src-x86_64", "src-ppc")),
               recursive = TRUE)

        ## work on 'data' directory if present
        if(dir.exists(file.path(pkgname, "data")) ||
           file_test("-f", file.path(pkgname, "R", "sysdata.rda"))) {
            if(!str_parse_logic(desc["LazyData"], FALSE)) {
                messageLog(Log,
                           "looking to see if a 'data/datalist' file should be added")
                ## in some cases data() needs the package installed as
                ## there are links to the package's namespace
                tryCatch(add_datalist(pkgname),
                         error = function(e)
                             printLog(Log, "  unable to create a 'datalist' file: may need the package to be installed\n"))
            }
            ## allow per-package override
            resave_data1 <- parse_description_field(desc, "BuildResaveData",
                                                    resave_data, logical=FALSE)
            resave_data_others(pkgname, resave_data1)
            resave_data_rda(pkgname, resave_data1)
        }

        ## clean up DESCRIPTION file if there is (now) no data directory.
        if (!dir.exists(file.path(pkgname, "data"))) {
            desc <- file.path(pkgname, "DESCRIPTION")
            db <- .read_description(desc)
            ndb <- names(db)
            omit <- character()
            for (x in c("LazyData", "LazyDataCompression"))
                if (x %in% ndb) omit <- c(omit, x)
            if (length(omit)) {
                printLog(Log,
                         sprintf("Omitted %s from DESCRIPTION\n",
                                 paste(sQuote(omit), collapse = " and ")))
                db <- db[!(names(db) %in% omit)]
                .write_description(db, desc)
            }
        }

        ## add dependency on R >= 3.5.0 to DESCRIPTION if there are files in
        ## serialization version 3
        desc <- .read_description(file.path(pkgname, "DESCRIPTION"))
        Rdeps <- .split_description(desc)$Rdepends2
        hasDep350 <- FALSE
        hasDep410 <- FALSE
        hasDep420 <- FALSE
        hasDep430 <- FALSE
        for(dep in Rdeps) {
            if(dep$op != '>=') next
            if(dep$version >= "3.5.0") hasDep350 <- TRUE
            if(dep$version >= "4.1.0") hasDep410 <- TRUE
            if(dep$version >= "4.2.0") hasDep420 <- TRUE
            if(dep$version >= "4.3.0") hasDep430 <- TRUE
        }
        if(!hasDep350) {
            ## re-read files after exclusions have been applied
            allfiles <- dir(".", all.files = TRUE, recursive = TRUE,
                            full.names = TRUE)
            allfiles <- substring(allfiles, 3L)  # drop './'
            vers  <- get_serialization_version(allfiles)
            toonew <- names(vers[vers >= 3L])
            if (length(toonew)) {
                fixup_R_dep(pkgname, "3.5.0")
                msg <- paste("WARNING: Added dependency on R >= 3.5.0 because",
                             "serialized objects in serialize/load version 3",
                             "cannot be read in older versions of R.")
                printLog(Log,
                         paste(c(strwrap(msg, indent = 2L, exdent = 2L),
                                 "  File(s) containing such objects:",
                                 paste0("  ", .pretty_format(sort(toonew)))),
                               collapse = "\n"),
                         "\n")
            }
        }
        if(!hasDep430 &&
           !is.null(tab <- .package_code_using_R_4.x_syntax(pkgname))) {
            msg <- files <- NULL
            if(length(i <- which(tab$needs == "4.3.0"))) {
                fixup_R_dep(pkgname, "4.3.0")
                msg <- paste("WARNING: Added dependency on R >= 4.3.0 because",
                             "package code uses the pipe placeholder at the head of a chain of extractions syntax added in R 4.3.0.")
                files <- unique(tab$file[i])
            } else if(!hasDep420 &&
                      length(i <- which(tab$needs == "4.2.0"))) {
                fixup_R_dep(pkgname, "4.2.0")
                msg <- paste("WARNING: Added dependency on R >= 4.2.0 because",
                             "package code uses the pipe placeholder syntax added in R 4.2.0")
                files <- unique(tab$file[i])
            } else if(!hasDep410 &&
                      length(i <- which(tab$needs == "4.1.0"))) {
                fixup_R_dep(pkgname, "4.1.0")
                msg <- paste("WARNING: Added dependency on R >= 4.1.0 because",
                             "package code uses the pipe |> or function shorthand \\(...) syntax added in R 4.1.0.")
                files <- unique(tab$file[i])
            }
            if(length(msg)) {
                printLog(Log,
                         paste(c(strwrap(msg, indent = 2L, exdent = 2L),
                                 "  File(s) using such syntax:",
                                 paste0("  ", .pretty_format(sort(files)))),
                               collapse = "\n"),
                         "\n")
            }
        }

	## add NAMESPACE if the author didn't write one
	if(!file.exists(namespace <- file.path(pkgname, "NAMESPACE")) ) {
	    messageLog(Log, "creating default NAMESPACE file")
	    writeDefaultNamespace(namespace)
	}

        if(with_md5) {
	    messageLog(Log, "adding MD5 file")
            .installMD5sums(pkgname)
        } else {
            ## remove any stale file
            unlink(file.path(pkgname, "MD5"))
        }

        ## Finalize
        ext <- switch(compression,
                      "none"="", "gzip"= ".gz", "bzip2" = ".bz2",
                      "xz" = ".xz", "zstd" = ".zst")
        filename <- paste0(pkgname, "_", desc["Version"], ".tar", ext)
        filepath <- file.path(startdir, filename)
        ## NB: ../../../../tests/reg-packages.R relies on this exact format!
        messageLog(Log, "building ", sQuote(filename))
        res <- utils::tar(filepath, pkgname, compression = compression,
                          compression_level = 9L,
                          tar = Sys.getenv("R_BUILD_TAR"),
                          extra_flags = NULL) # use trapdoor
        if (res) {
            errorLog(Log, "packaging into tarball failed")
            do_exit(1L)
        }
        message("") # blank line

        setwd(startdir)
        unlink(Tdir, recursive = TRUE)
        closeLog(Log)
    }
    do_exit(0L)
}
#  File src/library/tools/R/check.R
#  Part of the R package, https://www.R-project.org
#
#  Copyright (C) 1995-2025 The R Core Team
#
#  This program is free software; you can redistribute it and/or modify
#  it under the terms of the GNU General Public License as published by
#  the Free Software Foundation; either version 2 of the License, or
#  (at your option) any later version.
#
#  This program is distributed in the hope that it will be useful,
#  but WITHOUT ANY WARRANTY; without even the implied warranty of
#  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
#  GNU General Public License for more details.
#
#  A copy of the GNU General Public License is available at
#  https://www.R-project.org/Licenses/

###- R based engine for R CMD check

## R developers can use this to debug the function by running it
## directly as tools:::.check_packages(args), where the args should
## be what commandArgs(TRUE) would return, that is a character vector
## of (space-delimited) terms that would be passed to R CMD check.


get_timeout <- function(tlim)
{
    ## Normalize an elapsed-time limit to a number of seconds.
    ## Character limits may carry a GNU-timeout-style suffix:
    ## "m" (minutes), "h" (hours) or "s" (seconds).  Anything
    ## unparsable or negative is mapped to 0 (i.e. no limit).
    if (is.character(tlim)) {
        last <- substr(tlim, nchar(tlim), nchar(tlim))
        mult <- switch(last, m = 60, h = 3600, s = 1)
        if (!is.null(mult))
            tlim <- mult * as.numeric(substr(tlim, 1L, nchar(tlim) - 1L))
    }
    tlim <- as.numeric(tlim)
    if (is.na(tlim) || tlim < 0) 0 else tlim
}

report_timeout <- function(tlim)
{
    ## Emit a warning that the elapsed-time limit 'tlim' (in seconds)
    ## was reached for a sub-process, expressing the limit in the
    ## largest sensible unit (hours / minutes / seconds).
    tlim <- trunc(tlim)
    if (tlim >= 3600) {
        amount <- round(tlim/3600, 1L)
        unit <- "hours"
    } else if (tlim >= 60) {
        amount <- round(tlim/60, 1L)
        unit <- "minutes"
    } else {
        amount <- tlim
        unit <- "seconds"
    }
    warning(gettextf("elapsed-time limit of %g %s reached for sub-process",
                     amount, unit),
            domain = NA, call. = FALSE)
}

## Find serialized objects (for load() and for readRDS()) in "allfiles" and
## report serialization versions (0 means not a serialized object,
## 1 means either version-1 or not a serialized object, 2 and more means
## serialized object of that version).
##
## These are most commonly data/*.{Rdata,rda}, R/sysdata.rda files,
## and build/vignette.rds
## But packages have other .rds files in many places.
## Despite its name, build/partial.rdb is created by saveRDS.
##
get_serialization_version <- function(allfiles)
{
    ## Probe a file written by save(): returns its serialization version,
    ## with 1L standing for "version 1 or not a workspace image at all".
    ver_load <- function(f)
    {
        ## This could look at the magic number, but for a short
        ## while version 3 files were produced with a version-2
        ## magic number. loadInfoFromConn2 checks if the magic number
        ## is sensible.
        con <- gzfile(f, "rb"); on.exit(close(con))
        ## The .Internal gives an error on version-1 files
        ## (and on non-serialized files)
        tryCatch(.Internal(loadInfoFromConn2(con))$version,
                 error = function(e) 1L)
    }
    ## Probe a file written by saveRDS(): returns its serialization
    ## version, with 0L meaning "not a serialized object".
    ver_serialize <- function(f)
    {
        con <- gzfile(f, "rb"); on.exit(close(con))
        ## In case this is not a serialized object
        tryCatch(.Internal(serializeInfoFromConn(con))$version,
                 error = function(e) 0L)
    }
    ## Candidate load()-style images by extension ...
    loadfiles <- grep("[.](rda|RData|Rdata|rdata|Rda|bam|Rbin)$",
                      allfiles, value = TRUE)
    ## ... and readRDS()-style files (build/partial.rdb is saveRDS output
    ## despite the name).
    serfiles <- c(grep("[.](rds|RDS|Rds|rdx)$", allfiles, value = TRUE),
                  grep("build/partial[.]rdb$", allfiles, value = TRUE))
    ## Result is a vector named by file path.
    c(sapply(loadfiles, ver_load), sapply(serfiles, ver_serialize))
}

## Used for INSTALL and Rd2pdf
run_Rcmd <- function(args, out = "", env = "", timeout = 0)
{
    ## Run 'R CMD <args>' (Rcmd.exe on Windows) as a sub-process with
    ## stdout/stderr sent to 'out', returning the process exit status.
    ## 'timeout' is an elapsed-time limit accepted in the forms handled
    ## by get_timeout() (numeric seconds, or "30s"/"5m"/"1h" strings).
    ##
    ## Normalize the limit once up front (as R_runR already does):
    ## previously the raw value was forwarded to report_timeout(), so a
    ## character limit such as "5m" that actually expired would make
    ## trunc() fail inside report_timeout() instead of warning cleanly.
    timeout <- get_timeout(timeout)
    status <- if(.Platform$OS.type == "windows")
        system2(file.path(R.home("bin"), "Rcmd.exe"), args, out, out,
                timeout = timeout)
    else
        system2(file.path(R.home("bin"), "R"), c("CMD", args), out, out,
                env = env, timeout = timeout)
    ## system2() returns 124 when the timeout was reached.
    if(identical(status, 124L)) report_timeout(timeout)
    status
}

## Run R code 'cmd' in a fresh R sub-process (Rterm.exe on Windows),
## returning the captured output; 'Ropts' are extra command-line options,
## 'arch' selects a sub-architecture build, and 'timeout' is an
## elapsed-time limit in any form accepted by get_timeout().
R_runR <- function(cmd = NULL, Ropts = "", env = "",
                   stdout = TRUE, stderr = TRUE, stdin = NULL,
                   arch = "", timeout = 0)
{
    timeout <- get_timeout(timeout)
    if (.Platform$OS.type == "windows") {
        ## workaround Windows problem with input = cmd:
        ## feed the commands to Rterm via a temporary '-f' file instead.
        if (is.null(cmd)) {
            Rin <- stdin
        } else {
            ## In principle this should escape \
            Rin <- tempfile("Rin")
            on.exit(unlink(Rin))
            writeLines(cmd, Rin)
        }
        rterm <- if (nzchar(arch))
                     file.path(R.home(), "bin", arch, "Rterm.exe")
                 else file.path(R.home("bin"), "Rterm.exe")
        out <- suppressWarnings(system2(rterm,
                                        c(Ropts, paste("-f", shQuote(Rin))),
                                        stdout, stderr,
                                        env = env, timeout = timeout))
    } else {
        out <- suppressWarnings(system2(file.path(R.home("bin"), "R"),
                                        c(if (nzchar(arch))
                                              paste0("--arch=", arch),
                                          Ropts),
                                        stdout, stderr, stdin, input = cmd,
                                        env = env, timeout = timeout))
    }
    ## 124 (as a status or return value) signals that the limit was hit.
    if (identical(out, 124L) || identical(attr(out, "status"), 124L))
        report_timeout(timeout)
    out
}

## Compute environment-variable settings ("R_LIBS=...", "R_LIBS_USER=...",
## etc.) that give a checking sub-process a minimal, controlled library
## search path for the package in 'pkgdir'.  A throw-away library tree is
## populated (via symlinks, junctions or copies) with the package's
## declared dependencies; user and site libraries are disabled in the
## returned settings.
##
## Arguments, as used below:
##   lib0      -- extra library path(s) to prepend to R_LIBS when non-empty.
##   pkgdir    -- package source directory containing DESCRIPTION.
##   suggests  -- include all Suggests dependencies (otherwise only the
##                VignetteBuilder packages, plus test runners if 'tests').
##   libdir    -- library where the package has been installed; if given
##                (and 'self2' is TRUE) the installed copy is linked in.
##   self      -- also link the source directory itself into the tree.
##   self2     -- see 'libdir' above.
##   quote     -- shQuote() the resulting R_LIBS value.
##   LinkingTo -- include LinkingTo dependencies as well.
##   tests     -- include suggested test-suite packages (RUnit etc.).
setRlibs <-
    function(lib0 = "", pkgdir = ".", suggests = FALSE, libdir = NULL,
             self = FALSE, self2 = TRUE, quote = FALSE, LinkingTo = FALSE,
             tests = FALSE)
{
    WINDOWS <- .Platform$OS.type == "windows"
    ## Junctions can be disabled via the R_WIN_NO_JUNCTIONS env var.
    useJunctions <- WINDOWS && !nzchar(Sys.getenv("R_WIN_NO_JUNCTIONS"))

    # flink assumes it is only being used for package directories
    # containing DESCRIPTION!
    ## 'Link' directory 'from' into library 'to': junction (falling back
    ## to a full copy if the junction did not yield a DESCRIPTION) or
    ## copy on Windows, symlink elsewhere.  Errors out if linking fails.
    flink <- function(from, to) {
        if(WINDOWS) {
            if(useJunctions) {
                Sys.junction(from, to)
                if(file.exists(file.path(to, basename(from), "DESCRIPTION")))
                    return()
                unlink(file.path(to, basename(from)), recursive = TRUE)
            }
            res <- file.copy(from, to, recursive = TRUE)
        } else
            res <- file.symlink(from, to)
        if (!res) stop(gettextf("cannot link from %s", from), domain = NA)
    }

    ## NB: 'pi' here is the parsed package metadata and shadows the base
    ## constant for the rest of this function; it is also reassigned to
    ## other packages' metadata inside the traversal loop below.
    pi <- .split_description(.read_description(file.path(pkgdir, "DESCRIPTION")))
    thispkg <- unname(pi$DESCRIPTION["Package"])

    ## We need to make some assumptions about layout: this version
    ## assumes .Library contains standard and recommended packages
    ## and nothing else.
    tmplib <- tempfile("RLIBS_")
    dir.create(tmplib)
    ## Since this is under the session directory and only contains
    ## symlinks and dummies (hence will be small) we never clean it up.

    test_recommended <-
        config_val_to_logical(Sys.getenv("_R_CHECK_NO_RECOMMENDED_", "FALSE"))

    if(test_recommended) {
        ## Now add dummies for recommended packages (removed later if declared)
        recommended <- .get_standard_package_names()$recommended
        ## grDevices has :: to KernSmooth
        ## stats has ::: to Matrix, Matrix depends on lattice
        ## which gives false positives in MASS and Rcpp
        ## codetools is really part of tools
        exceptions <- "codetools"
        if (thispkg %in% c("MASS", "Rcpp"))
            exceptions <- c(exceptions, "Matrix", "lattice")
        if (thispkg %in%
            c("Modalclust", "aroma.core", "iWebPlots",
              "openair", "oce", "pcalg", "tileHMM"))
            exceptions <- c(exceptions, "KernSmooth")
        ## %w/o% is a project operator; presumably setdiff-like
        ## ("without") -- defined elsewhere in tools.
        recommended <- recommended %w/o% exceptions
        for(pkg in recommended) {
            if(pkg == thispkg) next
            ## A dummy is a directory with the real DESCRIPTION (if any)
            ## plus a marker file, so the package looks present but
            ## cannot actually be loaded.
            dir.create(pd <- file.path(tmplib, pkg))
            ## some people remove recommended packages ....
            f <- file.path(.Library, pkg, "DESCRIPTION")
            if(file.exists(f)) file.copy(f, pd)
            ## to make sure find.package throws an error:
            close(file(file.path(pd, "dummy_for_check"), "w"))
        }
    }

    ## Remove dummies for 'pkgs' that turned out to be legitimately
    ## needed (identified by the marker file written above).
    unlink_dummies <- function(pkgs) {
        pd <- file.path(tmplib, pkgs)
        unlink(pd[file.exists(file.path(pd, "dummy_for_check"))], TRUE)
    }

    ## Determine the Suggests-derived packages to make available.
    sug <- if (suggests)  names(pi$Suggests)
    else {
        ## we always need to be able to recognise 'vignettes'
        VB <- unname(pi$DESCRIPTION["VignetteBuilder"])
        sug <- if(is.na(VB)) character()
        else {
            VB <- unlist(strsplit(VB, ","))
            sug <- unique(gsub('[[:space:]]', '', VB))
            ## too many people forgot this, but it will never get fixed if made an exception.
            ## if("knitr" %in% VB) sug <- c(sug, "rmarkdown")
            sug
        }
        if(tests) ## we need the test-suite package available
            c(sug, intersect(names(pi$Suggests), c("RUnit", "testthat", "tinytest")))
        else sug
    }
    ## Direct dependencies of the package under check.
    deps <- unique(c(names(pi$Depends), names(pi$Imports),
                     if(LinkingTo) names(pi$LinkingTo),
                     sug))
    if(length(libdir) && self2) flink(file.path(libdir, thispkg), tmplib)
    ## .Library is not necessarily canonical, but the .libPaths version is.
    lp <- .libPaths()
    poss <- c(lp[length(lp)], .Library)
    ## Breadth-first traversal of the dependency graph: 'already' is the
    ## visited set, 'more' the current frontier of packages still to link.
    already <- thispkg
    more <- unique(deps %w/o% already) # should not depend on itself ...
    while(length(more)) {
        m0 <- more; more <- character()
        for (pkg in m0) {
            if (test_recommended) {
                ## A declared dependency on a recommended package means
                ## its dummy (and those of its known deps) must go.
                if (pkg %in% recommended) unlink(file.path(tmplib, pkg), TRUE)
                ## hard-code dependencies for now.
                if (pkg == "mgcv")
                    unlink_dummies(c("Matrix", "lattice", "nlme") %w/o% thispkg)
                if (pkg == "Matrix")
                    unlink_dummies("lattice" %w/o% thispkg)
                if (pkg == "class")
                    unlink_dummies("MASS" %w/o% thispkg)
                if (pkg == "nlme")
                    unlink_dummies("lattice" %w/o% thispkg)
            }
            where <- find.package(pkg, quiet = TRUE)
            if(length(where)) {
                ## %notin% is a project operator (negated %in%).
                if (dirname(where) %notin% poss)
                    flink(where, tmplib)
                else if (!test_recommended)
                    # If the package is in the standard library we can
                    # assume dependencies have been met, but we can
                    # only skip the traversal if we aren't testing recommended
                    # packages, because loading will fail if there is
                    # an indirect dependency to one that has been hidden
                    # by a dummy in tmplib.
                    next
                ## Recurse into this package's own dependencies via its
                ## installed metadata (reuses the name 'pi', see above).
                pi <- readRDS(file.path(where, "Meta", "package.rds"))
                more <- c(more, names(pi$Depends), names(pi$Imports),
                          names(pi$LinkingTo))
            }
        }
        already <- c(already, m0)
        more <- unique(more %w/o% already)
    }
    if (self) flink(normalizePath(pkgdir), tmplib)
    # print(dir(tmplib))
    ## Assemble the R_LIBS value and the companion settings that disable
    ## user/site libraries and the user environment file.
    rlibs <- tmplib
    if (nzchar(lib0)) rlibs <- c(lib0, rlibs)
    rlibs <- paste(rlibs, collapse = .Platform$path.sep)
    if(quote) rlibs <- shQuote(rlibs)
    c(paste0("R_LIBS=", rlibs),
      if(WINDOWS) "R_ENVIRON_USER='no_such_file'" else "R_ENVIRON_USER=''",
      "R_LIBS_USER='NULL'",
      "R_LIBS_SITE='NULL'")
}

add_dummies <- function(dir, Log)
{
    ## Create <dir>/R_check_bin containing dummy 'R'/'Rscript' wrappers
    ## that fail with an explanatory message, and prepend it to PATH, so
    ## that checked code invoking R without a full path is caught.
    bin <- file.path(dir, "R_check_bin")
    if (dir.exists(file.path(bin))) {
        messageLog(Log, "directory ", sQuote(bin), " already exists")
        return()
    }
    dir.create(bin)
    if (!dir.exists(bin)) {
        messageLog(Log, "creation of directory ", sQuote(bin), " failed")
        return()
    }
    Sys.setenv(PATH = env_path(bin, Sys.getenv("PATH")))
    ## Write one wrapper script and return its path.
    make_stub <- function(name, body) {
        path <- file.path(bin, name)
        writeLines(body, path)
        path
    }
    if (.Platform$OS.type != "windows") {
        stubs <- c(
            make_stub("R",
                      c('echo "\'R\' should not be used without a path -- see par. 1.6 of the manual"',
                        'exit 1')),
            make_stub("Rscript",
                      c('echo "\'Rscript\' should not be used without a path -- see par. 1.6 of the manual"',
                        'exit 1')))
        Sys.chmod(stubs, "0755")
    } else {
        ## currently untested
        make_stub("R.bat",
                  c('@ECHO OFF',
                    'echo "\'R\' should not be used without a path -- see par. 1.6 of the manual"',
                    'exit /b 1'))
        make_stub("Rscript.bat",
                  c('@ECHO OFF',
                    'echo "\'Rscript\' should not be used without a path -- see par. 1.6 of the manual"',
                    'exit /b 1'))
    }
}

###- The main function for "R CMD check"
.check_packages <- function(args = NULL, no.q = interactive(), warnOption = 1)
{
    WINDOWS <- .Platform$OS.type == "windows"
    ## this requires on Windows: file.exe (optional)

    wrapLog <- function(...) {
        text <- paste(..., collapse = " ")
        ## strwrap expects paras separated by blank lines.
        ## Perl's wrap split on \n
        text <- strsplit(text, "\n")[[1L]]
        printLog(Log, paste(strwrap(text), collapse = "\n"), "\n")
    }
    InstLog <-NA_character_ # set in check_src()

  ## used for R_runR2 and
  ## .check_package_description
  ## .check_package_description_encoding
  ## .check_package_license
  ## .check_demo_index
  ## .check_vignette_index
  ## .check_package_subdirs
  ## .check_citation
  ## .check_package_ASCII_code
  ## .check_package_code_syntax
  ## .check_packages_used
  ## .check_package_code_shlib
  ## .check_package_code_startup_functions
  ## .check_package_code_unload_functions
  ## .check_package_code_tampers
  ## .check_package_code_assign_to_globalenv
  ## .check_package_code_attach
  ## .check_package_code_data_into_globalenv
  ## .check_package_parseRd
  ## .check_Rd_metadata
  ## .check_Rd_line_widths
  ## .check_Rd_xrefs
  ## .check_Rd_contents
  ## .check_package_datasets
  ## .check_package_compact_datasets
  ## .check_package_compact_sysdata
  ## .check_make_vars
  ## check_compiled_code
  ## Checking loading
  ## Rdiff on reference output
  ## Creating -Ex.R
  ## Running examples (run_one_arch)
  ## .runPackageTestsR
  ## .run_one_vignette
  ## buildVignettes

    def_tlim <- get_timeout(Sys.getenv("_R_CHECK_ELAPSED_TIMEOUT_"))
    R_runR0 <- function(..., timeout = def_tlim)
            R_runR(..., timeout = timeout)

    ## Used for
    ## .check_packages_used_in_examples
    ## .check_packages_used_in_tests
    ## .check_packages_used_in_vignettes
    ## checkS3methods
    ## checkReplaceFuns
    ## checkFF
    ## .check_code_usage_in_package (with full set)
    ## .check_bogus_return (with full set)
    ## .check_dotInternal (with full set)
    ## undoc, codoc, codocData, codocClasses
    ## checkDocFiles, checkDocStyle
    ## The default set of packages here are as they are because
    ## .get_S3_generics_as_seen_from_package needs utils,graphics,stats
    ##  Used by checkDocStyle (which needs the generic visible) and checkS3methods.
    R_runR2 <-
        if(WINDOWS) {
            function(cmd,
                     env = "R_DEFAULT_PACKAGES=utils,grDevices,graphics,stats",
                     timeout = 0)
                {
                    out <- R_runR(cmd, R_opts2, env, timeout = timeout)
                    ## pesky gdata ....
                    filtergrep("^(ftype: not found|File type)", out)
                }
        } else
            function(cmd,
                     env = "R_DEFAULT_PACKAGES='utils,grDevices,graphics,stats'",
                     timeout = 0)
            {
                out <- R_runR(cmd, R_opts2, env, timeout = timeout)
                ## htmltools produced non-UTF-8 output in Dec 2015
                if (R_check_suppress_RandR_message)
                    out <- filtergrep('^Xlib: *extension "RANDR" missing on display',
                                      out, useBytes = TRUE)
                filtergrep("^OMP:", out)  ## LLVM's OpenMP with limits set
            }

    td0 <- Inf # updated below
    print_time <- function(t1, t2, Log)
    {
        td <- t2 - t1
        if(td[3L] < td0) return()
        td2 <- if (td[3L] > 600) {
            td <- td/60
            if(WINDOWS) sprintf(" [%dm]", round(td[3L]))
            else sprintf(" [%dm/%dm]", round(sum(td[-3L])), round(td[3L]))
        } else {
            if(WINDOWS) sprintf(" [%ds]", round(td[3L]))
            else sprintf(" [%ds/%ds]", round(sum(td[-3L])), round(td[3L]))
        }
        cat(td2)
        if (!is.null(Log) && Log$con > 0L) cat(td2, file = Log$con)
    }
    print_time0 <- function(t1, t2)
    {
        td <- t2 - t1
        if(td[3L] < td0) return(character())
        td2 <- if (td[3L] > 600) {
            td <- td/60
            if(WINDOWS) sprintf(" [%dm]", round(td[3L]))
            else sprintf(" [%dm/%dm]", round(sum(td[-3L])), round(td[3L]))
        } else {
            if(WINDOWS) sprintf(" [%ds]", round(td[3L]))
            else sprintf(" [%ds/%ds]", round(sum(td[-3L])), round(td[3L]))
        }
        td2
    }

    snapshot <- function()
    {
        snap1 <- function(dir, recursive = TRUE, user, udomain = NA,
                          notemp = FALSE)
        {
            foo <- list.files(dir, recursive = recursive, full.names = TRUE,
                              include.dirs = TRUE, no.. = TRUE)
            if (notemp) {
                isdir <- file.info(foo)$isdir
                poss <- grepl("^/tmp/Rtmp[A-Za-z0-9.]{6}$", foo,
                              useBytes = TRUE)
                foo <- foo[!(poss & isdir)]
            }
            owner <- file.info(foo)[, "uname"]
            sel <- (owner == user)
            if (!is.na(udomain)) {
                odomain <- file.info(foo)[, "udomain"]
                sel <- sel & (odomain == udomain)
            }
            foo[sel]
        }
        ## This should always give the uname for files created by the
        ## current user:
        user <- Sys.info()[["effective_user"]]
        udomain <- Sys.info()["udomain"]  ## NA (nonexistent) on Unix
        home <- normalizePath("~")
        xtra <- Sys.getenv("_R_CHECK_THINGS_IN_OTHER_DIRS_XTRA_", "")
        xtra <- if (nzchar(xtra)) strsplit(xtra, ";", fixed = TRUE)[[1L]]
                else character()
        dirs <- c(home, dirname(tempdir()), '/dev/shm',
                  ## taken from tools::R_user_dir, but package rappdirs
                  ## is similar with other possibilities on Windows.
                  if (.Platform$OS.type == "windows")
                      file.path(Sys.getenv("LOCALAPPDATA"), "R", "cache")
                  else if (Sys.info()["sysname"] == "Darwin")
                      file.path(home, "Library", "Caches", "org.R-project.R")
                  else file.path(home, ".cache"),
                  if (.Platform$OS.type == "windows")
                      file.path(Sys.getenv("APPDATA"), "R", "data")
                  else if (Sys.info()["sysname"] == "Darwin")
                      file.path(home, "Library", "Application Support", "org.R-project.R")
                  else file.path(home, ".local", "share"),
                  xtra)
        x <- vector("list", length(dirs)); names(x) <- dirs
        x[[1]] <- snap1(dirs[1], FALSE, user, udomain)
        x[[2]] <- snap1(dirs[2], FALSE, user, udomain, TRUE)
        x[[3]] <- snap1(dirs[3], TRUE, user, udomain)
        x[[4]] <- snap1(dirs[4], TRUE, user, udomain)
        for (i in seq_along(xtra))
            x[[4+i]] <- snap1(dirs[4+i], FALSE, user, udomain)
        x
    }

    parse_description_field <- function(desc, field, default)
        str_parse_logic(desc[field], default=default)

    check_pkg <- function(pkg, pkgname, pkgoutdir, startdir, libdir, desc,
                          is_base_pkg, is_rec_pkg, subdirs, extra_arch)
    {
        Sys.setenv("_R_CHECK_PACKAGE_NAME_" = pkgname)
        on.exit(Sys.unsetenv("_R_CHECK_PACKAGE_NAME_"))

        ## pkg is the argument we received from the main loop.
        ## pkgdir is the corresponding absolute path,

        checkingLog(Log, "package directory")
        setwd(startdir)
        pkg <- sub("/$", "", pkg)
        if (dir.exists(pkg)) {
            setwd(pkg) ## wrap in try()?
            pkgdir <- getwd()
            resultLog(Log, "OK")
        } else {
            errorLog(Log, "Package directory ", sQuote(pkg), " does not exist.")
            summaryLog(Log)
            do_exit(1L)
        }

        if (config_val_to_logical(Sys.getenv("_R_CHECK_FUTURE_FILE_TIMESTAMPS_",
                                             "FALSE"))) {
            now_local <- Sys.time()
            any <- FALSE
            checkingLog(Log, "for future file timestamps")
            ## allow skipping clock check on CRAN incoming systems
            if(config_val_to_logical(Sys.getenv("_R_CHECK_SYSTEM_CLOCK_", "TRUE"))) {
                ## First check time on system running 'check',
                ## by reading an external source in UTC
                notOK <- function(t) !length(t) || is.na(t[1]) # seen length > 1
                now <- tryCatch({
                    foo <- suppressWarnings(readLines("https://worldtimeapi.org/api/timezone/etc/UTC",
                                                      warn = FALSE))
                    ## gives time in sub-secs
                    as.POSIXct(gsub(".*\"datetime\":\"([^Z]*).*", "\\1", foo),
                               "UTC", "%Y-%m-%dT%H:%M:%S")
                }, error = function(e) NA)
                if(notOK(now)) { # try http (no 's')
                    now <- tryCatch({
                        foo <- suppressWarnings(readLines("http://worldtimeapi.org/api/timezone/etc/UTC",
                                                          warn = FALSE))
                        ## gives time in sub-secs
                        as.POSIXct(gsub(".*\"datetime\":\"([^Z]*).*", "\\1", foo),
                                   "UTC", "%Y-%m-%dT%H:%M:%S")
                    }, error = function(e) NA)
                }
                if(notOK(now)) { ## seemed permanently stopped, yet works 2025-02-08 and *-*-09
                    now <- tryCatch({
                        foo <- suppressWarnings(readLines("http://worldclockapi.com/api/json/utc/now",
                                                          warn = FALSE))
                        ## gives time in mins
                        as.POSIXct(gsub(".*\"currentDateTime\":\"([^Z]*).*", "\\1", foo),
                                   "UTC", "%Y-%m-%dT%H:%M")
                    }, error = function(e) NA)
                }
                if(notOK(now)) {
                    any <- TRUE
                    noteLog(Log, "unable to verify current time")
                } else {
                    ## 5 mins leeway seems a reasonable compromise;
                    if (abs(unclass(now_local) - unclass(now)[1]) > 300) { # "[1]": seen 'length > 1'
                        any <- TRUE
                        fmt <- "%Y-%m-%d %H:%M"
                        errorLog(Log, "This system is set to the wrong time: please correct")
                        now0 <- sprintf("  correct: %s (UTC)\n",
                                        format(now, fmt, tz = "UTC"))
                        local0 <- sprintf("   system: %s (UTC)\n",
                                          format(now_local, fmt, tz = "UTC"))
                        printLog0(Log, local0, now0)
                        summaryLog(Log)
                        do_exit(1L)
                    }
                }
            }

            ## Both files and directories get timestamps in the
            ## tarball, so future stamps give annoying messages.
            files <- list.files(all.files = TRUE, full.names = TRUE,
                                include.dirs = TRUE)
            files <- setdiff(files, c("./.", "./.."))
            ftimes <- file.mtime(files)
            ## Default 5 mins leeway is to allow for clock-skew from a file server.
            leeway <- Sys.getenv("_R_CHECK_FUTURE_FILE_TIMESTAMPS_LEEWAY_", "5m")
            leeway <- get_timeout(leeway)
            if (leeway <= 0) leeway <- 600
            fu <- unclass(ftimes) > unclass(now_local) + leeway
            if (any(fu)) {
                if (!any) warningLog(Log)
                any <- TRUE
                wrong <- sub("^[.]/", "", files[fu])
                printLog(Log, "Files with future time stamps:\n")
                printLog0(Log, .format_lines_with_indent(wrong), "\n")
            }
            if(!any) resultLog(Log, "OK")
        }

        haveR <- dir.exists("R") && !extra_arch

        if (!extra_arch) {
            if(dir.exists("build")) check_build()
            check_meta()  # Check DESCRIPTION meta-information.
            check_top_level()
            check_detritus()
            check_indices()
            check_subdirectories(haveR, subdirs)
            ## Check R code for non-ASCII chars which
            ## might be syntax errors in some locales.
            if (!is_base_pkg && haveR && R_check_ascii_code) check_non_ASCII()
        } # end of !extra_arch

        ## Check we can actually load the package: base is always loaded
        if (do_install && pkgname != "base") {
            if (this_multiarch) {
                Log$stars <<-  "**"
                for (arch in inst_archs) {
                    printLog(Log, "* loading checks for arch ", sQuote(arch), "\n")
                    check_loading(arch)
                }
                Log$stars <<-  "*"
            } else {
                check_loading()
            }
        }

        if (haveR) {
            check_R_code() # unstated dependencies, S3 methods, replacement, foreign
            check_R_files(is_rec_pkg) # codetools etc
        }

        check_Rd_files(haveR, chkInternal = R_check_Rd_internal_too)

        if (!extra_arch) check_data() # 'data' dir and sysdata.rda

        if (!is_base_pkg && !extra_arch) check_src_dir(desc)

        check_src()
        if(do_install &&
           dir.exists("src") &&
           length(so_symbol_names_table)) # suitable OS
            check_sos()

        if (dir.exists("src") &&
            length(files <- dir("src", pattern = "[.](f|f90|f95)$"))) {
            checkingLog(Log, "usage of KIND in Fortran files")
            bad <- character()
            details <- character()

            for (f in files) {
                lines <- readLines(file.path("src", f), warn = FALSE)
                ## skip comment lines
                lines <- grep("^([cC]| *!)", lines,
                              value = TRUE, invert = TRUE, useBytes = TRUE)
                if (any(z<-grepl("[(].*(kind|KIND) *= *[1-9].*[)]", lines,
                                  useBytes = TRUE))) {
                    bad <- c(bad, f)
                    details <- c(details, paste0(f, ":", lines[z]))
                }
            }
            if (!length(bad)) resultLog(Log, "OK")
            else {
                warningLog(Log)
                msg <-
                    ngettext(length(bad),
                             "Found the following file with non-portable usage of KIND:\n",
                             "Found the following files with non-portable usage of KIND:\n",
                             domain = NA)
                wrapLog(msg)
                verbose <-
                    config_val_to_logical(Sys.getenv("_R_CHECK_FORTRAN_KIND_DETAILS_", "FALSE"))
                if(verbose)
                    printLog0(Log, .format_lines_with_indent(details), "\n")
                else {
                    printLog0(Log, .format_lines_with_indent(bad), "\n")
                    msg <- "For details set environment variable _R_CHECK_FORTRAN_KIND_DETAILS_ to a true value."
                    wrapLog(msg)
                }
            }
         }

        check_rust()

        miss <- file.path("inst", "doc", c("Rplots.ps", "Rplots.pdf"))
        if (any(f <- file.exists(miss))) {
            checkingLog(Log, "for left-overs from vignette generation")
            warningLog(Log)
            printLog(Log,
                     paste("  file", paste(sQuote(miss[f]), collapse = ", "),
                           "will not be installed: please remove it\n"))
        }
        if (dir.exists("inst/doc")) {
            if (R_check_doc_sizes) check_doc_size()
            else if (as_cran)
                warningLog(Log, "'qpdf' is needed for checks on size reduction of PDFs")
        }
        if (dir.exists("inst/doc") && do_install) check_doc_contents()
        if (dir.exists("vignettes")) check_vign_contents(ignore_vignettes)
        ## R 4.5.0: remove this long-obsolete check
        ## if (!ignore_vignettes) {
        ##     if (dir.exists("inst/doc") && !dir.exists("vignettes")) {
        ##         pattern <- vignetteEngine("Sweave")$pattern
        ##         sources <- setdiff(list.files(file.path("inst", "doc"),
        ##                                       pattern = pattern),
        ##                            list.files("vignettes", pattern = pattern))
        ##         buildPkgs <- .get_package_metadata(".")["VignetteBuilder"]
        ##         if (!is.na(buildPkgs)) {
        ##             buildPkgs <- unlist(strsplit(buildPkgs, ","))
        ##             buildPkgs <- unique(gsub('[[:space:]]', '', buildPkgs))
        ##             ## next could be character()
        ##             engineList <- vignetteEngine(package = buildPkgs)
        ##             for(nm in names(engineList)) {
        ##                 pattern <- engineList[[nm]]$pattern
        ##                 sources <- c(sources,
        ##                              setdiff(list.files(file.path("inst", "doc"),
        ##                                                 pattern = pattern),
        ##                                      list.files("vignettes", pattern = pattern)))
        ##             }
        ##         }
        ##         sources <- unique(sources)
        ##         if(length(sources)) {
        ##             checkingLog(Log, "for old-style vignette sources")
        ##             msg <- c("Vignette sources only in 'inst/doc':",
        ##                      strwrap(paste(sQuote(sources), collapse = ", "),
        ##                              indent = 2L, exdent = 2L),
        ##                      "A 'vignettes' directory is required as from R 3.1.0",
        ##                      "and these will not be indexed nor checked")
        ##             ## warning or error eventually
        ##             noteLog(Log, paste(msg, collapse = "\n"))
        ##         }
        ##     }
        ## }

        setwd(pkgoutdir)

        ## Run the examples: this will be skipped if installation was
        if (dir.exists(file.path(libdir, pkgname, "help"))) {
            run_examples()
        } else if (dir.exists(file.path(pkgdir, "man"))) {
            checkingLog(Log, "examples")
            resultLog(Log, "SKIPPED")
        }

        ## Run the demos if requested (traditionally part of tests, as in base)
        if (dir.exists(file.path(pkgdir, "demo")))
            run_tests("demo", do_demo)

        ## Run the package-specific tests.
        tests_dir <- file.path(pkgdir, test_dir)
        if (test_dir != "tests" && !dir.exists(tests_dir)) {
            warningLog(Log)
            printLog(Log, "directory ", sQuote(test_dir), " not found\n")
        }
        if (dir.exists(tests_dir) && # trackObjs has only *.Rin
            length(dir(tests_dir, pattern = "\\.(R|r|Rin)$")))
            run_tests(test_dir, do_tests)

        ## Check package vignettes.
        setwd(pkgoutdir)
        if (!ignore_vignettes) run_vignettes(desc)

    } ## end{ check_pkg }

    check_file_names <- function()
    {
        ## Check for portable file names; returns the vector of all
        ## (relative) file and directory paths in the package, which
        ## callers reuse for later checks (e.g. permissions).
        checkingLog(Log, "for portable file names")

        ## Ensure that the names of the files in the package are valid
        ## for at least the supported OS types.  Under Unix, we
        ## definitely cannot have '/'.  Under Windows, the control
        ## characters as well as " * : < > ? \ | (i.e., ASCII
        ## characters 1 to 31 and 34, 36, 58, 60, 62, 63, 92, and 124)
        ## are or can be invalid.  (In addition, one cannot have
        ## one-character file names consisting of just ' ', '.', or
        ## '~', and '~' has a special meaning for 8.3 short file
        ## names.)

        ## Based on information by Uwe Ligges, Duncan Murdoch, and
        ## Brian Ripley: see also
        ## http://msdn.microsoft.com/en-us/library/aa365247%28VS.85%29.aspx

        ## In addition, Windows does not allow the following DOS type
        ## device names (by themselves or with possible extensions),
        ## see e.g.
        ## http://msdn.microsoft.com/library/default.asp?url=/library/en-us/fileio/fs/naming_a_file.asp
        ## http://msdn.microsoft.com/en-us/library/aa365247%28VS.85%29.aspx#naming_conventions
        ## and https://en.wikipedia.org/wiki/Filename (which as of
        ## 2007-04-22 is wrong about claiming that COM0 and LPT0 are
        ## disallowed):
        ##
        ## CON: Keyboard and display
        ## PRN: System list device, usually a parallel port
        ## AUX: Auxiliary device, usually a serial port
        ## NUL: Bit-bucket device
        ## CLOCK$: System real-time clock
        ## COM1, COM2, COM3, COM4, COM5, COM6, COM7, COM8, COM9:
        ##   Serial communications ports 1-9
        ## LPT1, LPT2, LPT3, LPT4, LPT5, LPT6, LPT7, LPT8, LPT9:
        ##   parallel printer ports 1-9

        ## In addition, the names of help files get converted to HTML
        ## file names and so should be valid in URLs.  We check that
        ## they are ASCII and do not contain %, which is what is known
        ## to cause troubles.

        allfiles <- dir(".", all.files = TRUE,
                        full.names = TRUE, recursive = TRUE)
        allfiles <- c(allfiles, unique(dirname(allfiles)))
        allfiles <- af <- sub("^./", "", allfiles)
        ## Characters invalid on Windows (see the long comment above).
        bad_files <- allfiles[grepl("[[:cntrl:]\"*/:<>?\\|]",
                                    basename(allfiles))]
        ## Help file names end up in URLs: require printable ASCII
        ## without '%'.
        is_man <- endsWith(dirname(allfiles), "man")
        bad <- vapply(strsplit(basename(allfiles[is_man]), ""),
                      function(x) any(grepl("[^ -~]|%", x)),
                      NA)
        if (length(bad))
            bad_files <- c(bad_files, (allfiles[is_man])[bad])
        ## DOS device names, case-insensitively and ignoring extensions.
        bad <- tolower(basename(allfiles))
        ## remove any extension(s) (see 'Writing R Extensions')
        bad <- sub("[.].*", "", bad)
        bad <- grepl("^(con|prn|aux|clock[$]|nul|lpt[1-9]|com[1-9])$", bad)
        bad_files <- c(bad_files, allfiles[bad])
        if (nb <- length(bad_files)) {
            errorLog(Log)
            msg <- ngettext(nb,
                            "Found the following file with a non-portable file name:\n",
                            "Found the following files with non-portable file names:\n",
                            domain = NA)
            wrapLog(msg)
            printLog0(Log, .format_lines_with_indent(bad_files), "\n")
            wrapLog("These are not valid file names",
                    "on all R platforms.\n",
                    "Please rename the files and try again.\n",
                    "See section 'Package structure'",
                    "in the 'Writing R Extensions' manual.\n")
            maybe_exit(1L)
        }

        ## Next check for name clashes on case-insensitive file systems
        ## (that is on Windows and (by default) on macOS).

        dups <- unique(allfiles[duplicated(tolower(allfiles))])
        if (nb <- length(dups)) {
            errorLog(Log)
            wrapLog("Found the following files with duplicate lower-cased file names:\n")
            printLog0(Log, .format_lines_with_indent(dups), "\n")
            wrapLog("File names must not differ just by case",
                    "to be usable on all R platforms.\n",
                    "Please rename the files and try again.\n",
                    "See section 'Package structure'",
                    "in the 'Writing R Extensions' manual.\n")
            maybe_exit(1L)
        }

        ## NB: the omission of ' ' is deliberate.
        non_ASCII_files <-
            allfiles[grepl("[^-A-Za-z0-9._!#$%&+,;=@^(){}\'[\\]]", #
                           basename(allfiles), perl = TRUE)]
        any <- FALSE
        if (nb <- length(non_ASCII_files)) {
            any <- TRUE
            warningLog(Log)
            msg <- ngettext(nb,
                            "Found the following file with a non-portable file name:\n",
                            "Found the following files with non-portable file names:\n",
                            domain = NA)
            wrapLog(msg)
            printLog0(Log, .format_lines_with_indent(non_ASCII_files), "\n")
            wrapLog("These are not fully portable file names.\n",
                    "See section 'Package structure'",
                    "in the 'Writing R Extensions' manual.\n")
        }

        ## now check lengths, as tarballs can only record up to 100 bytes
        ## plus perhaps 155 bytes as a prefix plus /
        af <- file.path(pkgname, af)
        lens <- nchar(af, "b")
        if (any(lens > 100L)) {
            bad_files <- af[lens > 100L]
            OK <- TRUE
            if (any(lens > 256L)) OK <- FALSE
            else { # check if the path can be split
                for (f in bad_files) {
                    ## The (ustar) format stores up to 155 bytes of
                    ## prefix plus '/' plus up to 100 bytes of name:
                    ## find the last '/' within the first 155 bytes.
                    name <- charToRaw(f)
                    s <- max(which(name[1:155] == charToRaw("/")))
                    if(is.infinite(s) || s+100 < length(name)) {
                        OK <- FALSE; break
                    }
                }
            }
            ## Fix: report the result for *all* over-long paths.  This
            ## used to be inside the else branch above, so paths over
            ## 256 bytes (which set OK <- FALSE directly) reached
            ## maybe_exit() without errorLog() ever recording the
            ## ERROR result for this check in the log.
            if (!OK) errorLog(Log)
            else if(!any) {
                noteLog(Log)
                any <- TRUE
            }
            msg <- ngettext(length(bad_files),
                            "Found the following non-portable file path:\n",
                            "Found the following non-portable file paths:\n",
                            domain = NA)
            wrapLog(msg)
            printLog0(Log, .format_lines_with_indent(bad_files), "\n\n")
            wrapLog("Tarballs are only required to store paths of up to 100",
                    "bytes and cannot store those of more than 256 bytes,",
                    "with restrictions including to 100 bytes for the",
                    "final component.\n",
                    "See section 'Package structure'",
                    "in the 'Writing R Extensions' manual.\n")
            if (!OK)
                maybe_exit(1L)
        }
        if (!any) resultLog(Log, "OK")

        allfiles
    }

    check_permissions <- function(allfiles)
    {
        ## Check that the files and directories in the package have
        ## sufficient permissions for installation to work.
        checkingLog(Log, "for sufficient/correct file permissions")

        ## This used to be much more 'aggressive', requiring that dirs
        ## and files have mode >= 00755 and 00644, respectively (with
        ## an error if not), and that files known to be 'text' have
        ## mode 00644 (with a warning if not).  We now only require
        ## that dirs and files have mode >= 00700 and 00400,
        ## respectively, and try to fix insufficient permission in the
        ## INSTALL code (Unix only).
        ##
        ## In addition, we check whether files 'configure' and
        ## 'cleanup' exist in the top-level directory but are not
        ## executable, which is most likely not what was intended.

        ## Helper: which of 'paths' lack (some of) the permission
        ## 'bits' (an octal mode string such as "400" or "700")?
        lacking <- function(paths, bits) {
            if (!length(paths)) return(character())
            paths[(file.mode(paths) & bits) < as.octmode(bits)]
        }

        ## Phase A.  Directories at least 700, files at least 400.
        insufficient <- c(lacking(allfiles, "400"),
                          lacking(unique(dirname(allfiles)), "700"))
        if (length(insufficient)) {
            errorLog(Log)
            wrapLog("Found the following files with insufficient permissions:\n")
            printLog0(Log, .format_lines_with_indent(insufficient), "\n")
            wrapLog("Permissions should be at least 700 for directories and 400 for files.\nPlease fix permissions and try again.\n")
            maybe_exit(1L)
        }

        ## Phase B.  Top-level scripts 'configure' and 'cleanup'
        ## should really have mode at least 500, or they will not
        ## necessarily be used (or should we rather change *that*?)
        not_exec <- character()
        for (script in c("configure", "cleanup")) {
            if (file.exists(script) &&
                (file.mode(script) & "500") < as.octmode("500"))
                not_exec <- c(not_exec, script)
        }
        if (length(not_exec)) {
            warningLog(Log)
            wrapLog("The following files should most likely be executable (for the owner):\n")
            printLog0(Log, .format_lines_with_indent(not_exec), "\n")
            printLog(Log, "Please fix their permissions\n")
        } else resultLog(Log, "OK")
    }

    ## Look for serialized objects, and check their version

    ## We need to do this before installation, which may create
    ## src/symbols.rds in the sources.
    check_serialization <- function(allfiles)
    {
        ## Check the serialization format version of serialized data
        ## files: version >= 3 requires R >= 3.5.0 to read, so warn
        ## about any such files found among 'allfiles'.
        checkingLog(Log, "serialization versions")
        versions <- get_serialization_version(allfiles)
        offenders <- names(versions[versions >= 3L])
        if (!length(offenders)) {
            resultLog(Log, "OK")
        } else {
            warningLog(Log, "Found file(s) with version 3 serialization:")
            printLog0(Log, paste0(.pretty_format(sort(offenders)), "\n"))
            wrapLog("Such files are only readable in R >= 3.5.0.\n",
                    "Recreate them with R < 3.5.0 or",
                    "save(version = 2) or saveRDS(version = 2)",
                    "as appropriate")
        }
    }

    check_meta <- function()
    {
        ## Check the DESCRIPTION metadata: encoding, required fields,
        ## license, Authors@R consistency, logical-valued fields,
        ## NeedsCompilation, BugReports, Language tags and the declared
        ## dependency on R itself.  The flag 'any' records whether
        ## anything was reported so that a final "OK" is only logged
        ## for a completely clean run; each stanza emits its log
        ## header (note/warning/error) only on the first finding.
        ## Returns the parsed DESCRIPTION db (currently unused by the
        ## caller).

        ## If we just installed the package (via R CMD INSTALL), we already
        ## validated most of the package DESCRIPTION metadata.  Otherwise,
        ## let us be defensive about this ...

        checkingLog(Log, "DESCRIPTION meta-information")
        ## Base packages carry a templated DESCRIPTION.in instead.
        dfile <- if (is_base_pkg) "DESCRIPTION.in" else "DESCRIPTION"
        any <- FALSE

        ## Check the encoding -- do first as it gives a WARNING
        Rcmd <- sprintf("tools:::.check_package_description_encoding(\"%s\")", dfile)
        out <- R_runR0(Rcmd, R_opts2, "R_DEFAULT_PACKAGES=NULL")
        if (length(out)) {
            warningLog(Log)
            any <- TRUE
            printLog0(Log, paste(out, collapse = "\n"), "\n")
        }

        ## FIXME: this does not need to be run in another process
        ## but that needs conversion to format().
        Rcmd <- sprintf("tools:::.check_package_description(\"%s\", TRUE)",
                        dfile)
        out <- R_runR0(Rcmd, R_opts2, "R_DEFAULT_PACKAGES=NULL")
        if (length(out)) {
            ## Malformed Title/Description are only a NOTE; anything
            ## else reported here is a hard error terminating the check.
            if(any(!grepl("^Malformed (Title|Description)", out))) {
                errorLog(Log)
                printLog0(Log, paste(out, collapse = "\n"), "\n")
                summaryLog(Log)
                do_exit(1L)
            } else {
                noteLog(Log)
                any <- TRUE
                printLog0(Log, paste(out, collapse = "\n"), "\n")
            }
        }

        ## Check the license.
        ## For base packages, the DESCRIPTION.in files have non-canonical
        ##   License: Part of R @VERSION@
        ## entries because these really are a part of R: hence, skip the
        ## check.
        check_license <- if (!is_base_pkg) {
            Check_license <- Sys.getenv("_R_CHECK_LICENSE_", NA_character_)
            if(is.na(Check_license)) {
                ## The check code conditionalizes *output* on _R_CHECK_LICENSE_.
                Sys.setenv('_R_CHECK_LICENSE_' = "TRUE")
                TRUE
            } else config_val_to_logical(Check_license)
        } else FALSE
        if (!isFALSE(check_license)) {
            Rcmd <- sprintf("tools:::.check_package_license(\"%s\", \"%s\")",
                            dfile, pkgdir)
            ## FIXME: this does not need to be run in another process
            out <- R_runR0(Rcmd, R_opts2, "R_DEFAULT_PACKAGES=NULL")
            if (length(out)) {
                ## NOTE(review): check_license may hold the string
                ## "maybe" from the env var (config_val_to_logical is
                ## only applied to other values) -- confirm against
                ## config_val_to_logical's handling of "maybe".
                if (check_license == "maybe") {
                    if (!any) warningLog(Log)
                } else if(any(startsWith(out, "Standardizable: FALSE"),
                              startsWith(out, "Invalid license file pointers:"))) {
                    if (!any) warningLog(Log)
                } else {
                    if (!any) noteLog(Log)
                }
                any <- TRUE
                printLog0(Log, paste(out, collapse = "\n"), "\n")
            }
        }

        ## .check_package_description() only checks Authors@R "if needed",
        ## and does not check for persons with no valid roles.
        db <- .read_description(dfile)
        if(!is.na(aar <- db["Authors@R"])) {
            ## stricter validation (level 2) for incoming CRAN checks
            lev <- if(check_incoming) 2L else 1L
            out <- .check_package_description_authors_at_R_field(aar,
                                                                 strict = lev)
            if(length(out)) {
                if(!any) noteLog(Log)
                any <- TRUE
                out <- .format_check_package_description_authors_at_R_field_results(out)
                printLog0(Log, paste(out, collapse = "\n"), "\n")
            }
            ## and there might be stale Authors and Maintainer fields
            yorig <- db[c("Author", "Maintainer")]
            if(check_incoming && any(!is.na(yorig))) {
                ## enc <- db["Encoding"]
                ## Regenerate Author/Maintainer from Authors@R and
                ## compare with what is actually in the DESCRIPTION.
                aar <- utils:::.read_authors_at_R_field(aar)
                y <- c(Author =
                       utils:::.format_authors_at_R_field_for_author(aar),
                       Maintainer =
                       utils:::.format_authors_at_R_field_for_maintainer(aar))
                ## ignore formatting as far as possible
                clean_up <- function(x) trimws(gsub("[[:space:]]+", " ", x))
                yorig <- sapply(yorig, clean_up)
                y <- sapply(y, clean_up)
                diff <- y != yorig
                ## <FIXME>
                ## Quick fix for consequences of c87095.
                ## Tolerate the older "ORCID: <...>" comment rendering.
                if(diff[1L]
                   && grepl("<https://orcid.org/", y[1L], fixed = TRUE)) {
                    y1 <- gsub("ORCID: <https://orcid.org/",
                               "<https://orcid.org/",
                               y[1L], fixed = TRUE)
                    diff[1L] <- clean_up(y1) != yorig[1L]
                }
                ## Likewise for ROR identifiers.
                if(diff[1L]
                   && grepl("<https://ror.org/", y[1L], fixed = TRUE)) {
                    y1 <- gsub("ROR: <https://ror.org/",
                               "<https://ror.org/",
                               y[1L], fixed = TRUE)
                    diff[1L] <- clean_up(y1) != yorig[1L]
                }
                ## </FIXME>
                ## <FIXME>
                ## Remove eventually.
                if(diff[1L]
                   && grepl("https://orcid.org/", y[1L], fixed = TRUE)) {
                    ## Argh.  Might be from using the new ORCID id
                    ## mechanism but having built with R < 3.5.0.
                    ## Let's ignore ...
                    aar$comment <- lapply(aar$comment, unname)
                    y1 <- utils:::.format_authors_at_R_field_for_author(aar)
                    diff[1L] <- clean_up(y1) != yorig[1L]
                }
                ## </FIXME>
                if(any(diff)) {
                    if(!any) noteLog(Log)
                    any <- TRUE
                    if(diff[1L]) {
                        printLog(Log, "Author field differs from that derived from Authors@R", "\n")
                        printLog(Log, "  Author:    ", sQuote(yorig[1L]), "\n")
                        printLog(Log, "  Authors@R: ", sQuote(y[1L]), "\n")
                        printLog(Log, "\n")
                    }
                    if(diff[2L]) {
                        printLog(Log, "Maintainer field differs from that derived from Authors@R", "\n")
                        printLog(Log, "  Maintainer: ", sQuote(yorig[2L]), "\n")
                        printLog(Log, "  Authors@R:  ", sQuote(y[2L]), "\n")
                        printLog(Log, "\n")
                    }
                }
            }
        }

        ## Also check logical fields for appropriate values.
        db <- .read_description(dfile)
        fields <- c("LazyData", "KeepSource", "ByteCompile", "UseLTO",
                    "StagedInstall", "Biarch", "BuildVignettes")
        bad <- fields[vapply(fields,
                             function(f) {
                                 ## present but not interpretable as a logical
                                 !is.na(x <- db[f]) &&
                                     suppressWarnings(is.na(utils:::str2logical(x)))
                             },
                             NA)]
        if(length(bad)) {
            if(!any) noteLog(Log)
            any <- TRUE
            printLog(Log,
                     paste(c("Malformed field(s):", bad), collapse = " "),
                     "\n")
        }

        ## 'R CMD build' adds a Packaged field; its absence suggests
        ## checking unpacked sources rather than a built tarball.
        if(!is_base_pkg && is.na(db["Packaged"])) {
            if(!any) (noteLog(Log))
            any <- TRUE
            printLog(Log,
                     "Checking should be performed on sources prepared by 'R CMD build'.",
                     "\n")
        }

        ## NeedsCompilation must be 'yes'/'no', and should be 'yes' if
        ## there is a 'src' directory.
        if(!is.na(ncomp <- db["NeedsCompilation"])) {
            if (ncomp %notin% c("yes", "no")) {
                if(!any) noteLog(Log)
                any <- TRUE
                printLog(Log, "NeedsCompilation field must take value 'yes' or 'no'", "\n")
            }
            if((ncomp == "no") && dir.exists("src")) {
                if(!any) noteLog(Log)
                any <- TRUE
                printLog(Log, "NeedsCompilation field should likely be 'yes'", "\n")
            }
        }

        ## check for BugReports field added at R 3.4.0
        ## This used to check for empty first line as that
        ## breaks bug.report() in R <= 3.3.2 -- but read.dcf in those
        ## versions adds back the newline.
        if(!is.na(BR <- db["BugReports"])) {
            if (nzchar(BR)) {
                msg <- ""
                ## prior to 3.4.0 this was said to be
                ## 'a URL to which bug reports about the package
                ## should be submitted'
                ## We will take that to mean a http[s]:// URL,
                isURL <- grepl("^https?://[^ ]*$", BR)
                ## As from 3.4.0 bug.report() is able to extract
                ## an email addr.
                if(!isURL) {
                    ## extract a mailto: or <...> email address, if any
                    findEmail <- function(x) {
                        x <- paste(x, collapse = " ")
                        if (grepl("mailto:", x))
                            sub(".*mailto:([^ ]+).*", "\\1", x)
                        else if (grepl("[^<]*<([^>]+)", x))
                            sub("[^<]*<([^>]+)>.*", "\\1", x)
                        else NA_character_
                    }
                    msg <- if (is.na(findEmail(BR))) {
                        if (grepl("(^|.* )[^ ]+@[[:alnum:]._]+", BR))
                            "BugReports field is not a suitable URL but appears to contain an email address\n  not specified by mailto: nor contained in < >\n   use the Contact field instead"
                        else
                            "BugReports field should be the URL of a single webpage"
                    } else
                        "BugReports field is not a suitable URL but contains an email address:\n   use the Contact field instead"
                }
            } else {
                msg <- "BugReports field should not be empty"
            }
            if (nzchar(msg)) {
                if(!any) noteLog(Log)
                any <- TRUE
                printLog(Log, msg, "\n")
           }
        }

        ## Validate Language as a comma-separated list of RFC 4646
        ## language tags.
        if(!is.na(lang <- db["Language"])) {
            s <- unlist(strsplit(lang, ", *"), use.names = FALSE)
            s <- s[!grepl(re_anchor(.make_RFC4646_langtag_regexp()), s)]
            if(length(s)) {
                if(!any) noteLog(Log)
                any <- TRUE
                printLog(Log,
                         paste(c("Language field contains the following invalid language tags:",
                                 paste0("  ", s)),
                               collapse = "\n"),
                         "\n")
            }
        }

        ## Further DESCRIPTION checks (see .check_package_description2).
        out <- format(.check_package_description2(dfile))
        if (length(out)) {
            if(!any) noteLog(Log)
            any <- TRUE
            printLog0(Log, paste(out, collapse = "\n"), "\n")
        }

        ## Dependence on say R >= 3.4.3 when 3.4 is current can
        ## cause problems with revdeps (and did for 3.2.x).
        ## We only check recent ones: maybe previous two
        ## (R-release and R-old-release) while this is R-devel
        Check_R_deps <- Sys.getenv("_R_CHECK_R_DEPENDS_", "FALSE")
        act <- if(Check_R_deps %in% c("note", "warn")) TRUE
               else config_val_to_logical(Check_R_deps)
        if(act) {
            Rver <-.split_description(db, verbose = TRUE)$Rdepends2
            if(length(Rver) && Rver[[1L]]$op == ">=") {
                ver <- unclass(Rver[[1L]]$version)[[1L]]
                thisver <- unclass(getRversion())[[1L]]
                ## needs updating if we ever go to 5.0
                notOK <- length(ver) == 3L && ver[3L] != 0
                if (notOK && (
                    ## report only for last two versions,
                    ## currently 3.5, 3.6 and 4.0
                    ((ver[1L] == 4L) && (ver[2L] >= max(0L, thisver[2L] - 2L)))
                    ||
                    ((ver[1L] == 3L) && (ver[2L] >= thisver[2L] + 5L))
                    )) {
                    ## This is not quite right: may have NOTE-d above
                    if(Check_R_deps == "warn") warningLog(Log)
                    else if(!any) noteLog(Log)
                    any <- TRUE
                    printLog0(Log,
                              sprintf("Dependence on R version %s not with patchlevel 0\n",
                                      sQuote(format(Rver[[1L]]$version))))
              }
            }
        }

        ## Packages using syntax introduced in R 4.1.0/4.2.0/4.3.0
        ## (pipe |>, \(...), placeholder _) must declare a matching
        ## dependency on R.  Most specific (newest) requirement wins.
        if(!is_base_pkg &&
           !.package_metadata_has_depends_on_R_at_least(db, "4.3.0") &&
           !is.null(tab <-
                      .package_code_using_R_4.x_syntax(dirname(dfile)))) {
            msg <- files <- NULL
            if(length(i <- which(tab$needs == "4.3.0"))) {
                if(!any) noteLog(Log)
                any <- TRUE
                msg <- "Missing dependency on R >= 4.3.0 because package code uses the pipe placeholder at the head of a chain of extractions syntax added in R 4.3.0."
                files <- unique(tab$file[i])
            } else if(length(i <- which(tab$needs == "4.2.0")) &&
                      !.package_metadata_has_depends_on_R_at_least(db,
                                                                   "4.2.0")) {
                if(!any) noteLog(Log)
                any <- TRUE
                msg <- "Missing dependency on R >= 4.2.0 because package code uses the pipe placeholder syntax added in R 4.2.0."
                files <- unique(tab$file[i])
            } else if(length(i <- which(tab$needs == "4.1.0")) &&
                      !.package_metadata_has_depends_on_R_at_least(db,
                                                                   "4.1.0")) {
                if(!any) noteLog(Log)
                any <- TRUE
                msg <- "Missing dependency on R >= 4.1.0 because package code uses the pipe |> or function shorthand \\(...) syntax added in R 4.1.0."
                files <- unique(tab$file[i])
            }
            if(length(msg)) {
                printLog(Log,
                         paste(c(strwrap(msg, indent = 2L, exdent = 2L),
                                 "  File(s) using such syntax:",
                                 paste0("  ", .pretty_format(sort(files)))),
                               collapse = "\n"),
                         "\n")
            }
        }

        if (!any) resultLog(Log, "OK")
        ## return (<never used in caller>):
        db
    } # check_meta()

    check_build <- function()
    {
        ## Validate the 'build/vignette.rds' index written by
        ## 'R CMD build' against the files actually shipped in the
        ## package.  Currently the only check on the 'build' directory.
        if (ignore_vignettes) return()
        fv <- file.path("build", "vignette.rds")
        if(!file.exists(fv)) return()
        checkingLog(Log, "'build' directory")
        any <- FALSE
        db <- readRDS(fv)

        ## Emit the WARNING header once, then 'label' followed by the
        ## offending (quoted, wrapped) entries.
        report <- function(label, entries) {
            if(!any) warningLog(Log)
            any <<- TRUE
            msg <- c(label,
                     strwrap(sQuote(entries), indent = 2L, exdent = 2L))
            printLog0(Log, paste(msg, collapse = "\n"), "\n")
        }

        ## do as CRAN-pack does: ignore entries with no output listed,
        ## but report them first.
        keep <- nzchar(db$PDF)
        if(any(!keep))
            report("Vignette(s) without any output listed in 'build/vignette.rds'",
                   db$file[!keep])

        ## Outputs must exist under inst/doc.
        outputs <- file.path("inst", "doc", db[keep, ]$PDF)
        absent <- !file.exists(outputs)
        if(any(absent))
            report("Output(s) listed in 'build/vignette.rds' but not in package:",
                   outputs[absent])

        ## Likewise the vignette sources ...
        srcs <- db[keep, ]$File
        absent <- nzchar(srcs) &
            !file.exists(file.path("inst", "doc", srcs))
        if(any(absent))
            report("Source(s) listed in 'build/vignette.rds' but not in package:",
                   srcs[absent])

        ## ... and the extracted R code files.
        rfiles <- db[keep, ]$R
        absent <- nzchar(rfiles) &
            !file.exists(file.path("inst", "doc", rfiles))
        if(any(absent))
            report("R code(s) listed in 'build/vignette.rds' but not in package:",
                   rfiles[absent])

        if(!any) resultLog(Log, "OK")
    }

    check_top_level <- function()
    {
        ## Check the package's top-level files and directories:
        ## defunct files ('install.R', 'R_PROFILE.R'), an
        ## autoconf-generated 'configure' shipped without its sources,
        ## bashisms in 'configure'/'cleanup', conversion of
        ## README.md/NEWS.md and inst/NEWS.Rd to HTML (incoming checks
        ## only), LICEN[CS]E files not mentioned in DESCRIPTION, and
        ## any non-standard top-level files/directories.
        ##
        ## 'any' records whether a problem has been reported yet for
        ## this check: the first problem emits warningLog()/noteLog(),
        ## subsequent ones only print their details; "OK" is logged at
        ## the end only if nothing was found.
        checkingLog(Log, "top-level files")
        topfiles <- Sys.glob(c("install.R", "R_PROFILE.R"))
        any <- FALSE
        if (length(topfiles)) {
            any <- TRUE
            warningLog(Log)
            printLog0(Log, .format_lines_with_indent(topfiles), "\n")
            wrapLog("These files are defunct.",
                    "See manual 'Writing R Extensions'.\n")
        }

        ## Look for sources for an autoconf-generated 'configure' file.
        ## (autoconf allows TEMPLATE-FILE to be specified, but no one does.)
        ## Should we try to re-generate by autoconf?
        if (file.exists("configure")
           && any(grepl("Generated by GNU Autoconf",
                        readLines("configure", warn = FALSE),
                        ignore.case = TRUE))) {
            if (!any(file.exists(c("configure.ac", "configure.in")))) {
                ## No autoconf sources shipped: a warning for FOSS
                ## packages (which must include them), otherwise a note.
                desc <- .read_description("DESCRIPTION")
                is_FOSS <- parse_description_field(desc, "License_is_FOSS", NA)
                if (is.na(is_FOSS)) {
                    val <- desc["License"]
                    is_FOSS <-
                        !is.na(val) && isTRUE(analyze_license(val)$is_FOSS)
                }
                if (is_FOSS) {
                    if (!any) warningLog(Log)
                    wrapLog("Found a", sQuote("configure"),
                            "file without source file",
                            sQuote("configure.ac"), "or",
                            sQuote("configure.in"), ".",
                            "An Open Source package must include its",
                            "autoconf sources.\n")
                } else {
                    if (!any) noteLog(Log)
                    wrapLog("Found a", sQuote("configure"),
                            "file without source file",
                            sQuote("configure.ac"), "or",
                            sQuote("configure.in"), ".",
                            "It is good practice to include autoconf sources.\n")
                }
                any <- TRUE
            } else {
                if (file.exists("configure.in")) {
                    any <- TRUE
                    noteLog(Log)
                    wrapLog("Found a", sQuote("configure.in"),
                            "file:", sQuote("configure.ac"),
                            "has long been preferred.\n")
                }
                ## Optionally try to regenerate 'configure' with
                ## autoreconf in a scratch copy of the package, and
                ## report any output it produces.
                check_autoconf <- check_incoming ||
                    config_val_to_logical(Sys.getenv("_R_CHECK_AUTOCONF_", "FALSE"))
                if (check_autoconf) {
                    ## AUTORECONF may carry extra options after the
                    ## program name, separated by spaces.
                    arcf <- Sys.getenv("AUTORECONF", "autoreconf")
                    arcf <- strsplit(arcf, " ")[[1L]]
                    autoreconf <- arcf[1L]
                    autoreconf_options <- arcf[-1L]
                    if(nzchar(Sys.which(autoreconf))) {
                        ver <- system2(autoreconf, "--version",
                                       stdout = TRUE, stderr = TRUE)[1]
                        ## autoconf 2.68/2.69: also warn about obsolete
                        ## constructs.
                        if (grepl("2[.]6[89]", ver))
                            autoreconf_options <-
                                unique(c(autoreconf_options,
                                         "--warnings=obsolete"))
                        ## Run in a temporary copy so -f -i cannot
                        ## modify the package sources.
                        td <- tempfile()
                        dir.create(td)
                        file.copy(".", td, recursive = TRUE)
                        od <- setwd(td)
                        autoreconf_options <- c(autoreconf_options, "-f", "-i")
                        out <- suppressWarnings(system2(autoreconf,
                                                        autoreconf_options,
                                                        stdout = TRUE,
                                                        stderr = TRUE,
                                                        timeout = 60))
                        setwd(od); unlink(td, recursive = TRUE)
                        ## Any output (warnings/errors) is reported.
                        if (length(out)) {
                            if(!any) {
                                any <- TRUE
                                warningLog(Log)
                            }
                            printLog0(Log, "  Output from running autoreconf:\n")
                            printLog0(Log, .format_lines_with_indent(out), "\n")
                        }
                    }
                }
            }
        }

        ## checkbashisms skips non-shell scripts, and bash ones with a message.
        if (config_val_to_logical(Sys.getenv("_R_CHECK_BASHISMS_", "FALSE"))
            && any(file.exists("configure", "cleanup"))) {
            msgs <- character()
            for (f in c("configure", "cleanup")) {
                ## /bin/bash is not portable
                if (file.exists(f) &&
                    any(grepl("^#! */bin/bash",
                              readLines(f, 1L, warn = FALSE)))) {
                    msg <- paste0("  ", sQuote(f), ": /bin/bash is not portable")
                    msgs <- c(msgs, msg)
                }
                ## and bash need not be installed at all.
                if (file.exists(f) &&
                    any(grepl("^#!.*env bash",
                              readLines(f, 1L, warn = FALSE)))) {
                    msg <- paste0("  ", sQuote(f), ": 'env bash' is not portable as bash need not be installed")
                    msgs <- c(msgs, msg)
                }
            }
            ## NOTE(review): the shebang messages are only emitted when
            ## no earlier problem was flagged for this check -- confirm
            ## whether dropping them after a prior warning is intended.
            if(!any && length(msgs)) {
                any <- TRUE
                noteLog(Log, paste(msgs, collapse = "\n"))
            }

            if (!nzchar(Sys.which("checkbashisms"))) {
                if(!any) {
                    any <- TRUE
                    warningLog(Log)
                }
                printLog0(Log,
                          "A complete check needs the 'checkbashisms' script.\n")
                wrapLog("See section 'Configure and cleanup'",
                        "in the 'Writing R Extensions' manual.\n")
            } else {
                ff <- c("configure", "cleanup", "configure.ac", "configure.in")
                for (f in ff) {
                    ## skip autoconf scripts as checkbashisms warns on
                    ## system parts,  but we can check components
                    if (file.exists(f) &&
                        !any(grepl("Generated by GNU Autoconf",
                                   readLines(f, warn = FALSE)))) {
                        out <- suppressWarnings(system2("checkbashisms", c("-p -n", f),
                                       stdout = TRUE, stderr = TRUE,
                                       timeout = 60))
                        if (length(out) &&
                            !any(grepl("could not find any possible bashisms",
                                       out))) {
                            ## Filter out known-spurious or tolerated
                            ## reports before deciding to log anything.
                            out <- grep("configure\\.ac: *Unterminated (quoted string|heredoc) found, EOF reached", out, value = TRUE, invert = TRUE)
                            ## Skip some reports on two lines
                            o <- grep("(does not appear to have a #! interpreter line|alternative test command|'function' is useless)", out)

                            ## these are all true but not things we want to report
                            if (length(o)) out <- out[-sort(c(o,o+1L))]
                            o <- grep("($RANDOM)", out, fixed = TRUE)
                            if (length(o)) out <- out[-sort(c(o,o+1L))]
                            o <- grep("(enable)", out, fixed = TRUE)
                            if (length(o)) out <- out[-sort(c(o,o+1L))]
                            o <- grep("(time)", out, fixed = TRUE)
                            if (length(o)) out <- out[-sort(c(o,o+1L))]
                            o <- grep("([^] should be [!])", out, fixed = TRUE)
                            if (length(o)) out <- out[-sort(c(o,o+1L))]
                            o <- grep("('((' should be '$((')", out, fixed = TRUE)
                            if (length(o)) out <- out[-sort(c(o,o+1L))]
                            o <- grep("(brace expansion)", out, fixed = TRUE)
                            if (length(o)) out <- out[-sort(c(o,o+1L))]
                            o <- grep("(<<< here string)", out, fixed = TRUE)
                            if (length(o)) out <- out[-sort(c(o,o+1L))]

##                if(config_val_to_logical(Sys.getenv("_R_CHECK_BASHISMS_EXTRA_", "FALSE")))
                            ## gets confused by '1>&AS_MESSAGE_LOG_FD 2>&AS_MESSAGE_LOG_FD'
                            ## which admittedly is sub-optimal.
                            o <- grep("(should be >word 2>&1)", out, fixed = TRUE)
                            if(length(o)) {
                                ## may have continuation lines
                                for (j in rev(o)) {
                                    o2 <- grep("\\\\$", out[j + 1L:min(10L, length(out) -j)])
                                    mm <- if(length(o2)) max(o2)+1L else 1L
                                    out <- out[-(j + (0L:mm))]
                                }
                            }

                            ## Anything left after filtering is a note.
                            if (length(out)) {
                                if(!any) {
                                    any <- TRUE
                                    noteLog(Log)
                                }
                                printLog0(Log,
                                          .format_lines_with_indent(out),
                                          "\n")
                            }
                        }
                    }
                }
            }
        }

        if(check_incoming) {
            ## CRAN must be able to convert
            ##   inst/README.md or README.md
            ##   inst/NEWS.md or NEWS.md
            ## to HTML using pandoc: check that this works fine.
            ## The inst/ variant takes precedence over the top-level one.
            md_files <-
                c(Filter(file.exists,
                         c(file.path("inst", "README.md"),
                           "README.md"))[1L],
                  Filter(file.exists,
                         c(file.path("inst", "NEWS.md"),
                           "NEWS.md"))[1L])
            md_files <- md_files[!is.na(md_files)]
            if(length(md_files)) {
                if(nzchar(Sys.which("pandoc"))) {
                    for(ifile in md_files) {
                        ofile <- tempfile("pandoc", fileext = ".html")
                        out <- .pandoc_md_for_CRAN(ifile, ofile)
                        ## Non-zero status => conversion failed.
                        if(out$status) {
                            if(!any) warningLog(Log)
                            any <- TRUE
                            printLog(Log,
                                     sprintf("Conversion of '%s' failed:\n",
                                             ifile),
                                     paste(out$stderr, collapse = "\n"),
                                     "\n")
                        }
                        unlink(ofile)
                    }
                } else {
                    if(!any) noteLog(Log)
                    any <- TRUE
                    printLog(Log,
                             "Files 'README.md' or 'NEWS.md' cannot be checked without 'pandoc' being installed.\n")
                }
            }
            ## CRAN must be able to convert inst/NEWS.Rd to *valid* HTML
            ## using Rd2HTML_NEWS_in_Rd(): check that this works fine.
            if(file.exists(nfile <- file.path("inst", "NEWS.Rd")) &&
               nzchar(Tidy <- .find_tidy_cmd())) {
                out <- tempfile()
                ## Conversion errors are caught; a successful conversion
                ## is then validated with HTML Tidy.
                bad <- tryCatch({
                    Rd2HTML_NEWS_in_Rd(nfile, out, concordance = TRUE)
                    tidy_validate(out, tidy = Tidy)
                },
                error = identity)
                if(inherits(bad, "error")) {
                    if(!any) noteLog(Log)
                    any <- TRUE
                    printLog0(Log,
                              c("Encountered the following errors for NEWS.Rd HTML conversion/validation:\n",
                                paste(conditionMessage(bad), collapse = "\n"),
                                "\n"))
                }
                else if(NROW(bad)) {
                    ## 'bad' rows describe individual validation
                    ## problems with source locations.
                    if(!any) noteLog(Log)
                    any <- TRUE
                    printLog0(Log,
                              c("Found the following validation problem for the NEWS.Rd HTML conversion:\n",
                                sprintf("NEWS.html:%s:%s (%s:%s): %s\n",
                                        bad[, "line"],
                                        bad[, "col"],
                                        bad[, "srcFile"],
                                        bad[, "srcLine"],
                                        bad[, "msg"])))
                }
            }
        }
        ## Top-level LICENCE/LICENSE files must be pointed to from the
        ## License field of DESCRIPTION.
        topfiles <- Sys.glob(c("LICENCE", "LICENSE"))
        if (length(topfiles)) {
            ## Are these mentioned in DESCRIPTION?
            lic <- desc["License"]
            if(!is.na(lic)) {
                found <- vapply(topfiles,
                                function(x) grepl(x, lic, fixed = TRUE),
                                NA)
                topfiles <- topfiles[!found]
                if (length(topfiles)) {
                    if(!any) noteLog(Log)
                    any <- TRUE
                    one <- (length(topfiles) == 1L)
                    msg <- c(if(one) "File" else "Files",
                             "\n",
                             .format_lines_with_indent(topfiles),
                             "\n",
                             if(one) {
                                 "is not mentioned in the DESCRIPTION file.\n"
                             } else {
                                 "are not mentioned in the DESCRIPTION file.\n"
                             })
                    printLog(Log, msg)
                }
            }
        }
        ## Ditto for inst/LICEN[CS]E, which will be installed at top
        ## level.
        topfiles <- Sys.glob(file.path("inst", c("LICENCE", "LICENSE")))
        if (length(topfiles)) {
            ## Are these mentioned in DESCRIPTION?
            lic <- desc["License"]
            if(!is.na(lic)) {
                found <- vapply(basename(topfiles),
                                function(x) grepl(x, lic, fixed = TRUE),
                                NA)
                topfiles <- topfiles[!found]
                if (length(topfiles)) {
                    if(!any) noteLog(Log)
                    any <- TRUE
                    one <- (length(topfiles) == 1L)
                    msg <- c(if(one) "File" else "Files",
                             "\n",
                             .format_lines_with_indent(topfiles),
                             "\n",
                             if(one) {
                                 "will install at top-level and is not mentioned in the DESCRIPTION file.\n"
                             } else {
                                 "will install at top-level and are not mentioned in the DESCRIPTION file.\n"
                             })
                    printLog(Log, msg)
                }
            }
        }
        if (!is_base_pkg && R_check_toplevel_files) {
            ## any others?
            ## topfiles0, if non-NULL, was precomputed by the caller;
            ## otherwise only files already present when the package was
            ## unpacked (ctime <= .unpack.time) are considered.
            if(is.null(topfiles0)) {
                topfiles <- dir()
                ## Now check if any of these were created since we started
                topfiles <-
                    topfiles[file.info(topfiles, extra_cols = FALSE)$ctime
                             <= .unpack.time]
            } else topfiles <- topfiles0
            ## Standard top-level files and directories which are
            ## allowed without comment.
            known <- c("DESCRIPTION", "INDEX", "LICENCE", "LICENSE",
                       "LICENCE.note", "LICENSE.note",
                       "MD5", "NAMESPACE", "NEWS", "PORTING",
                       "COPYING", "COPYING.LIB", "GPL-2", "GPL-3",
                       "BUGS", "Bugs",
                       "ChangeLog", "Changelog", "CHANGELOG", "CHANGES", "Changes",
                       "INSTALL", "README", "THANKS", "TODO", "ToDo",
                       "INSTALL.windows",
                       "README.md", "NEWS.md",
                       "configure", "configure.win", "cleanup", "cleanup.win",
                       "configure.ucrt", "cleanup.ucrt",
                       "configure.ac", "configure.in",
                       "datafiles",
                       "R", "data", "demo", "exec", "inst", "man",
                       "po", "src", "tests", "vignettes",
                       "build",       # used by R CMD build
                       ".aspell",     # used for spell checking packages
                       "java", "tools", "noweb") # common dirs in packages.
            topfiles <- setdiff(topfiles, known)
            ## Top-level AUTHORS/COPYRIGHTS are tolerated if duplicated
            ## under inst/.
            if (file.exists(file.path("inst", "AUTHORS")))
                topfiles <- setdiff(topfiles, "AUTHORS")
            if (file.exists(file.path("inst", "COPYRIGHTS")))
                topfiles <- setdiff(topfiles, "COPYRIGHTS")
            if (lt <- length(topfiles)) {
                if(!any) noteLog(Log)
                any <- TRUE
                printLog(Log, ## dirs are files, but maybe not on Windows
                         if(lt > 1L) "Non-standard files/directories found at top level:\n"
                         else "Non-standard file/directory found at top level:\n" )
                msg <- strwrap(paste(sQuote(topfiles), collapse = " "),
                               indent = 2L, exdent = 2L)
                printLog0(Log, paste(c(msg, ""), collapse="\n"))
                ## Suggest the standard locations for copyright and
                ## authors information.
                cp <- grep("^copyright", topfiles,
                           ignore.case = TRUE, value = TRUE)
                if (length(cp))
                    printLog(Log, "Copyright information should be in file inst/COPYRIGHTS\n")
                if("AUTHORS" %in% topfiles)
                    printLog(Log, "Authors information should be in file inst/AUTHORS\n")
            }
        }
        if (!any) resultLog(Log, "OK")
    }

    check_detritus <- function()
    {
        ## Look for files which are typically editor/VCS leftovers:
        ## saved svn commit messages anywhere in the tree, and stray
        ## .rd/.Rd files at the top level.
        checkingLog(Log, "for left-over files")
        all_files <- dir(".", full.names = TRUE, recursive = TRUE)
        leftovers <- c(grep("svn-commit[.].*tmp$", all_files, value = TRUE),
                       grep("^[.]/[^/]*[.][rR]d$", all_files, value = TRUE))
        if (!length(leftovers)) {
            resultLog(Log, "OK")
        } else {
            ## Report paths relative to the package name, not ".".
            leftovers <- sub("^[.]/", paste0(pkgname, "/"), leftovers)
            noteLog(Log)
            wrapped <- paste(strwrap(paste(sQuote(leftovers), collapse = ", "),
                                     indent = 2, exdent = 2),
                             collapse = "\n")
            printLog0(Log,
                      "The following files look like leftovers:\n",
                      wrapped,
                      "\nPlease remove them from your package.\n")
        }
    }


    check_indices <- function()
    {
        ## Validate the package's index information: a non-empty
        ## 'INDEX' file (if present), the demo index 'demo/00Index',
        ## and the vignette index for 'inst/doc'.  'bad' tracks whether
        ## a problem has been reported yet, so that warningLog() fires
        ## only once.
        checkingLog(Log, "index information")
        bad <- FALSE
        if (file.exists("INDEX") &&
            !length(readLines("INDEX", warn = FALSE))) {
            bad <- TRUE
            warningLog(Log, "Empty file 'INDEX'.")
        }
        if (dir.exists("demo")) {
            index <- file.path("demo", "00Index")
            if (!file.exists(index) ||
                !length(readLines(index, warn = FALSE))) {
                if (!bad) warningLog(Log)
                bad <- TRUE
                printLog0(Log,
                          sprintf("Empty or missing file %s.\n",
                                  sQuote(index)))
            } else {
                ## Check the demo index against the actual demos.
                Rcmd <- paste0(opWarn_string,
                               "\ntools:::.check_demo_index(\"demo\")\n")
                ## FIXME: this does not need to be run in another process
                out <- R_runR0(Rcmd, R_opts2, "R_DEFAULT_PACKAGES=NULL")
                if (length(out)) {
                    if (!bad) warningLog(Log)
                    bad <- TRUE
                    printLog0(Log, paste(c(out, ""), collapse = "\n"))
                }
            }
        }
        if (dir.exists(file.path("inst", "doc"))) {
            ## Check the vignette index for the installed doc dir.
            Rcmd <- paste0(opWarn_string,
                           "\ntools:::.check_vignette_index(\"inst/doc\")\n")
            ## FIXME: this does not need to be run in another process
            out <- R_runR0(Rcmd, R_opts2, "R_DEFAULT_PACKAGES=NULL")
            if (length(out)) {
                if (!bad) warningLog(Log)
                bad <- TRUE
                printLog0(Log, paste(c(out, ""), collapse = "\n"))
            }
        }
        if (bad)
            wrapLog("See sections 'The INDEX file' and 'Package subdirectories' in the 'Writing R Extensions' manual.\n")
        else resultLog(Log, "OK")
    }

    check_subdirectories <- function(haveR, subdirs)
    {
        checkingLog(Log, "package subdirectories")
        any <- FALSE
        if (haveR && !length(list_files_with_type("R", "code")) &&
            !file.exists(file.path("R", "sysdata.rda"))) {
            haveR <- FALSE
            warningLog(Log, "Found directory 'R' with no source files.")
            any <- TRUE
        }
        if (R_check_subdirs_nocase) {
            ## Argh.  We often get submissions where 'R' comes out as 'r',
            ## or 'man' comes out as 'MAN', and we've just ran into 'DATA'
            ## instead of 'data' (2007-03-31).  Maybe we should warn about
            ## this unconditionally ...
            ## <FIXME>
            ## Actually, what we should really do is check whether there is
            ## any directory with lower-cased name matching a lower-cased
            ## name of a standard directory, while differing in name.
            ## </FIXME>

            ## Watch out for case-insensitive file systems
            if ("./r" %in% list.dirs(recursive = FALSE)) {
                if (!any) warningLog(Log)
                any <- TRUE
                printLog(Log, "Found subdirectory 'r'.\n",
                         "Most likely, this should be 'R'.\n")
            }
            if ("./MAN" %in% list.dirs(recursive = FALSE)) {
                if (!any) warningLog(Log)
                any <- TRUE
                printLog(Log, "Found subdirectory 'MAN'.\n",
                         "Most likely, this should be 'man'.\n")
            }
            if ("./DATA" %in% list.dirs(recursive = FALSE)) {
                if (!any) warningLog(Log)
                any <- TRUE
                printLog(Log, "Found subdirectory 'DATA'.\n",
                         "Most likely, this should be 'data'.\n")
            }
        }

        all_dirs <- list.dirs(".")

        ## several packages have had check dirs in the sources, e.g.
        ## ./languageR/languageR.Rcheck
        ## ./locfdr/man/locfdr.Rcheck
        ## ./clustvarsel/inst/doc/clustvarsel.Rcheck
        ## ./bicreduc/OldFiles/bicreduc.Rcheck
        ## ./waved/man/waved.Rcheck
        ## ./waved/..Rcheck
        ind <- endsWith(all_dirs, ".Rcheck")
        if(any(ind)) {
            if(!any) warningLog(Log)
            any <- TRUE
            msg <- ngettext(sum(ind),
                            "Found the following directory with the name of a check directory:\n",
                            "Found the following directories with names of check directories:\n", domain = NA)
            printLog0(Log, msg,
                      .format_lines_with_indent(all_dirs[ind]),
                      "\n",
                      "Most likely, these were included erroneously.\n")
        }

        ## Several packages had leftover Rd2dvi build directories in
        ## their sources
        ind <- grepl("^\\.Rd2(dvi|pdf)", basename(all_dirs))
        if(any(ind)) {
            if(!any) warningLog(Log)
            any <- TRUE
            msg <- ngettext(sum(ind),
                            "Found the following directory with the name of a Rd2pdf directory:\n",
                            "Found the following directories with names of Rd2pdf directories:\n", domain = NA)
           printLog0(Log, msg,
                     .format_lines_with_indent(all_dirs[ind]),
                     "\n",
                     "Most likely, these were included erroneously.\n")
        }


        if(!is_base_pkg && (is_tar || R_check_vc_dirs)) {
            ## Packages also should not contain version control subdirs
            ## provided that we check a .tar.gz or know we unpacked one.
            ind <- basename(all_dirs) %in% .vc_dir_names
            if(any(ind)) {
                if(!any) warningLog(Log)
                any <- TRUE
            msg <- ngettext(sum(ind),
                            "Found the following directory with the name of a version control directory:\n",
                            "Found the following directories with names of version control directories:\n", domain = NA)
                printLog0(Log, msg,
                          .format_lines_with_indent(all_dirs[ind]),
                          "\n",
                          "These should not be in a package tarball.\n")
            }
        }

        if (subdirs != "no") {
            Rcmd <- "tools:::.check_package_subdirs(\".\")\n"
            ## We don't run this in the C locale, as we only require
            ## certain filenames to start with ASCII letters/digits, and not
            ## to be entirely ASCII.
            out <- R_runR0(Rcmd, R_opts2, "R_DEFAULT_PACKAGES=NULL")
            if(length(out)) {
                if(!any) warningLog(Log)
                any <- TRUE
                printLog0(Log, paste(c(out, ""), collapse = "\n"))
                wrapLog("Please remove or rename the files.\n",
                        "See section 'Package subdirectories'",
                        "in the 'Writing R Extensions' manual.\n")
            }
        }

        ## Subdirectory 'data' without data sets?
        if (dir.exists("data") &&
            !length(list_files_with_type("data", "data"))) {
            if (!any) warningLog(Log)
            any <- TRUE
            printLog(Log, "Subdirectory 'data' contains no data sets.\n")
       }
        ## Subdirectory 'demo' without demos?

        if (dir.exists("demo")) {
            demos <- list_files_with_type("demo", "demo")
            if(!length(demos)) {
                if (!any) warningLog(Log)
                any <- TRUE
                printLog(Log, "Subdirectory 'demo' contains no demos.\n")
            } else {
                ## check for non-ASCII code in each demo
                bad <- character()
                for(d in demos) {
                    x <- readLines(d, warn = FALSE)
                    asc <- iconv(x, "latin1", "ASCII")
                    ind <- is.na(asc) | asc != x
                    if (any(ind)) bad <- c(bad, basename(d))
                }
                if (length(bad)) {
                    if (!any) warningLog(Log)
                    any <- TRUE
                    printLog(Log, "Demos with non-ASCII characters:")
                    if(length(bad) > 1L)
                        printLog0(Log, "\n",
                                  .format_lines_with_indent(bad), "\n")
                    else printLog0(Log, "  ", bad, "\n")
                    wrapLog("Portable packages must use only ASCII",
                            "characters in their demos.\n",
                            "Use \\uxxxx escapes for other characters.\n")
                    demos <- demos[basename(demos) %notin% bad]
                }
                ## check we can parse each demo.
                bad <- character()
                for(d in demos)
                    tryCatch(parse(file = d),
                             error = function(e) bad <<- c(bad, basename(d)))
                if (length(bad)) {
                    if (!any) warningLog(Log)
                    any <- TRUE
                    printLog(Log, "Demos which do not contain valid R code:")
                    if(length(bad) > 1L)
                        printLog0(Log, "\n",
                                  .format_lines_with_indent(bad), "\n")
                    else printLog0(Log, "  ", bad, "\n")
               }
            }
        }

        ## Subdirectory 'exec' without files?
        if (dir.exists("exec") && !length(dir("exec"))) {
            if (!any) warningLog(Log)
            any <- TRUE
            printLog(Log, "Subdirectory 'exec' contains no files.\n")
        }

        ## Subdirectory 'inst' without files?
        if (dir.exists("inst") && !length(dir("inst", recursive = TRUE))) {
            if (!any) warningLog(Log)
            any <- TRUE
            printLog(Log, "Subdirectory 'inst' contains no files.\n")
        }

        ## Subdirectory 'src' without sources?
        if (dir.exists("src")) {
            ## <NOTE>
            ## If there is a Makefile (or a Makefile.win), we cannot assume
            ## that source files have the predefined extensions.
            ## </NOTE>
            if (!any(file.exists(file.path("src",
                                           c("Makefile", "Makefile.win",
                                             "Makefile.ucrt",
                                             "install.libs.R"))))) {
                if (!length(dir("src", pattern = "\\.([cfmM]|cc|cpp|f90|f95|mm)"))) {
                    if (!any) warningLog(Log)
                    printLog(Log, "Subdirectory 'src' contains no source files.\n")
                    any <- TRUE
                }
            }
        }

        ## Do subdirectories of 'inst' interfere with R package system
        ## subdirectories?
        if (dir.exists("inst")) {
            ## These include pre-2.10.0 ones
            R_system_subdirs <-
                c("Meta", "R", "data", "demo", "exec", "libs",
                  "man", "help", "html", "latex", "R-ex", "build")
            allfiles <- dir("inst", full.names = TRUE)
            alldirs <- allfiles[dir.exists(allfiles)]
            suspect <- basename(alldirs) %in% R_system_subdirs
            if (any(suspect)) {
                ## check they are non-empty
                suspect <- alldirs[suspect]
                suspect <- suspect[vapply(suspect, function(x) {
                    length(dir(x, all.files = TRUE)) > 2L
                    },
                    NA)]
                if (length(suspect)) {
                    if (!any) warningLog(Log)
                    any <- TRUE
                    wrapLog("Found the following non-empty",
                            "subdirectories of 'inst' also",
                            "used by R:\n")
                    printLog0(Log, .format_lines_with_indent(suspect), "\n")
                    wrapLog("It is recommended not to interfere",
                            "with package subdirectories used by R.\n")
                }
            }
        }


        ## Valid CITATION metadata?
        if (file.exists(file.path("inst", "CITATION"))) {
            Rcmd <- if(do_install)
                sprintf("tools:::.check_citation(\"inst/CITATION\", \"%s\")\n",
                        file.path(if(is_base_pkg) .Library else libdir,
                                  pkgname))
            else
                "tools:::.check_citation(\"inst/CITATION\")\n"
            out <- R_runR0(Rcmd, R_opts2, "R_DEFAULT_PACKAGES=utils")
            if(length(out)) {
                if(!any) warningLog(Log)
                any <- TRUE
                printLog(Log,
                         "Invalid citation information in 'inst/CITATION':\n")
                printLog0(Log, .format_lines_with_indent(out), "\n")
            }
        }

        ## CITATION files in non-standard places?
        ## Common problems: rather than inst/CITATION, have
        ##   CITATION
        ##   CITATION.txt
        ##   inst/doc/CITATION
        ## Of course, everything in inst is justifiable, so only give a
        ## note for now.
        files <- dir(".", pattern = "^CITATION.*", recursive = TRUE)
        files <- files[file_path_sans_ext(basename(files)) == "CITATION" &
                       files != file.path("inst", "CITATION")]
        if(length(files)) {
            if(!any) noteLog(Log)
            any <- TRUE
            msg <- ngettext(length(files),
                            "Found the following CITATION file in a non-standard place:\n",
                            "Found the following CITATION files in a non-standard place:\n", domain = NA)
            wrapLog(msg)
            printLog0(Log, .format_lines_with_indent(files), "\n")
            wrapLog("Most likely 'inst/CITATION' should be used instead.\n")
        }

        ## Valid package news?
        ## This used to only look at inst/NEWS.Rd and warn about
        ## problems found in these.  For simplicity, when adding support
        ## for checking news in md or plain text, consistently only NOTE
        ## problems.
        ## Gather errors and warnings when reading the news.  We
        ## currently report all these together.
        .messages <- NULL
        .ehandler <- function(e) {
            .messages <<- conditionMessage(e)
        }
        .whandler <- function(e) {
            .messages <<- c(.messages, conditionMessage(e))
            tryInvokeRestart("muffleWarning")
        }
        ## (Could also gather the conditions, and get the messages from
        ## these.)

        nread <- NULL
        if(file.exists(nfile <- file.path("inst", "NEWS.Rd")))
            nread <- .build_news_db_from_package_NEWS_Rd
        else if(file.exists(nfile <- "NEWS.md") &&
                ## The news in md reader needs commonmark and xml2.
                requireNamespace("commonmark", quietly = TRUE) &&
                requireNamespace("xml2", quietly = TRUE))
            nread <- .build_news_db_from_package_NEWS_md
        else if(file.exists(nfile <- "NEWS") &&
                config_val_to_logical(Sys.getenv("_R_CHECK_NEWS_IN_PLAIN_TEXT_",
                                                 "FALSE")))
            nread <- .news_reader_default

        if(!is.null(nread)) {
            bad <- FALSE
            news <- withCallingHandlers(tryCatch(nread(nfile),
                                                 error = .ehandler),
                                        warning = .whandler)
            if(length(.messages)) {
                if(!any) noteLog(Log)
                any <- TRUE
                printLog(Log,
                         sprintf("Problems with news in '%s':\n", nfile))
                bad <- TRUE
                printLog0(Log,
                          paste0("  ",
                                 unlist(strsplit(.messages, "\n", fixed = TRUE)),
                                 collapse = "\n"),
                          "\n")
            } else {
                ## No complaints from the reader, but did it actually
                ## read anything?
                if(!inherits(news, "news_db") || !nrow(news)) {
                    if(!any) noteLog(Log)
                    any <- TRUE
                    if(!bad)
                        printLog(Log,
                                 sprintf("Problems with news in '%s':\n",
                                         nfile))
                    bad <- TRUE
                    printLog(Log, "No news entries found.\n")
                }
                ## Could also check whether the current package version
                ## has a corresponding news entry.
            }
        }

        if(!any) resultLog(Log, "OK")
    }

    ## Two quick source-level scans of the package's R code, run in a
    ## child R process and reported via the closure's Log:
    ##   1. non-ASCII characters in code files (warning if any found);
    ##   2. syntax errors in the R/ directory (error + possible exit on
    ##      "Error" output, warning on any other output).
    check_non_ASCII <- function()
    {
        checkingLog(Log, "code files for non-ASCII characters")
        bad <- R_runR0("tools:::.check_package_ASCII_code('.')",
                       R_opts2,
                       c("R_DEFAULT_PACKAGES=NULL",
                         "_R_NO_REPORT_MISSING_NAMESPACES_=true"))
        if (!length(bad)) {
            resultLog(Log, "OK")
        } else {
            warningLog(Log)
            ## Singular/plural message chosen by the number of offenders.
            wrapLog(ngettext(length(bad),
                             "Found the following file with non-ASCII characters:\n",
                             "Found the following files with non-ASCII characters:\n",
                             domain = NA))
            printLog0(Log, .format_lines_with_indent(bad), "\n")
            wrapLog("Portable packages must use only ASCII",
                    "characters in their R code and NAMESPACE directives,",
                    "except perhaps in comments.\n",
                    "Use \\uxxxx escapes for other characters.\n",
                    "Function", sQuote("tools::showNonASCIIfile"),
                    "can help in finding non-ASCII characters in files.\n")
        }

        checkingLog(Log, "R files for syntax errors")
        cmd <- paste0(opWarn_string, ";tools:::.check_package_code_syntax(\"R\")")
        res <- R_runR0(cmd, R_opts2, "R_DEFAULT_PACKAGES=NULL")
        if (any(startsWith(res, "Error"))) {
            ## A hard parse failure: log it and (maybe) stop the check run.
            errorLog(Log)
            printLog0(Log, paste(c(res, ""), collapse = "\n"))
            maybe_exit(1L)
        } else if (!length(res)) {
            resultLog(Log, "OK")
        } else {
            warningLog(Log)
            printLog0(Log, paste(c(res, ""), collapse = "\n"))
        }
    }

    ## Run a battery of static checks on the package's R code, reporting
    ## each as its own "checking ..." section in Log:
    ##   * dependencies used but not declared,
    ##   * S3 generic/method argument consistency,
    ##   * replacement functions' final argument being 'value',
    ##   * foreign function (.C/.Call/.External/.Fortran) calls.
    ## Severity (warning vs note) is decided by grepping the child
    ## process's output for known message fragments, so it is sensitive
    ## to the exact (English) wording of those messages.
    check_R_code <- function()
    {
        checkingLog(Log, "dependencies in R code")
        if (do_install) {
            ## Installed case: check against the installed package by name.
            Rcmd <- paste(opW_shE_F_str,
                          sprintf("tools:::.check_packages_used(package = \"%s\")\n", pkgname))

            out <- R_runR2(Rcmd, "R_DEFAULT_PACKAGES=NULL")
            if (length(out)) {
                ## Undeclared/missing dependencies warrant a warning;
                ## anything else in the output is only a note.
                if(any(grepl("(not declared from|Missing or unexported object|Including base/recommended)", out))) warningLog(Log)
                else noteLog(Log)
                printLog0(Log, paste(c(out, ""), collapse = "\n"))
                ## wrapLog(msg_DESCRIPTION)
            } else resultLog(Log, "OK")
        } else {
            ## this needs to read the package code, and will fail on
            ## syntax errors such as non-ASCII code.
            Rcmd <- paste(opW_shE_F_str,
                          sprintf("tools:::.check_packages_used(dir = \"%s\")\n", pkgdir))

            out <- R_runR0(Rcmd, R_opts2, "R_DEFAULT_PACKAGES=NULL")
            if (length(out)) {
                if(any(grepl("(not declared from|Missing or unexported object)", out))) warningLog(Log)
                else noteLog(Log)
                printLog0(Log, paste(c(out, ""), collapse = "\n"))
                ## wrapLog(msg_DESCRIPTION)
            } else resultLog(Log, "OK")
        }

        ## Check whether methods have all arguments of the corresponding
        ## generic.
        checkingLog(Log, "S3 generic/method consistency")
        Rcmd <- paste(opWarn_string, "\n",
                      "options(expressions=1000)\n",
                      if (do_install)
                      sprintf("tools::checkS3methods(package = \"%s\")\n", pkgname)
                      else
                      sprintf("tools::checkS3methods(dir = \"%s\")\n", pkgdir))
        out <- R_runR2(Rcmd)
        if (length(out)) {
            ## Split the output at the first "Mismatches for"/"Apparent
            ## methods" header: lines before it (out1) are argument
            ## mismatches (warning); from the header on (out2) are
            ## apparent-but-unregistered methods (note).
            pos <- which(startsWith(out, "Mismatches for") |
                         startsWith(out, "Apparent methods"))
            if(!length(pos)) {
                out1 <- out
                out2 <- character()
            } else {
                pos <- pos[1L]
                out1 <- out[seq_len(pos - 1L)]
                out2 <- out[seq.int(pos, length(out))]
            }
            if(length(out1)) {
                warningLog(Log)
                printLog0(Log, paste(c(out1, ""), collapse = "\n"))
                wrapLog("See section 'Generic functions and methods'",
                        "in the 'Writing R Extensions' manual.\n")
            } else
                noteLog(Log)
            if(length(out2)) {
                printLog0(Log,
                          paste(c(if(length(out1)) "", out2, ""),
                                collapse = "\n"))
                wrapLog("See section 'Registering S3 methods'",
                        "in the 'Writing R Extensions' manual.\n")
            }
        } else resultLog(Log, "OK")

        ## Check whether replacement functions have their final argument
        ## named 'value'.
        checkingLog(Log, "replacement functions")
        Rcmd <- paste(opWarn_string, "\n",
                      if (do_install)
                      sprintf("tools::checkReplaceFuns(package = \"%s\")\n", pkgname)
                      else
                      sprintf("tools::checkReplaceFuns(dir = \"%s\")\n", pkgdir))
        out <- R_runR2(Rcmd)
        if (length(out)) {
            ## <NOTE>
            ## We really want to stop if we find offending replacement
            ## functions.  But we cannot use error() because output may
            ## contain warnings ...
            warningLog(Log)
            ## </NOTE>
            printLog0(Log, paste(c(out, ""), collapse = "\n"))
            wrapLog("The argument of a replacement function",
                    "which corresponds to the right hand side",
                    "must be named 'value'.\n")
        } else resultLog(Log, "OK")

        ## Check foreign function calls.
        ## The neverending story ...
        ## For the time being, allow to turn this off by setting the environment
        ## variable _R_CHECK_FF_CALLS_ to an empty value.
        if (nzchar(R_check_FF)) {
            ## Registration information can only be checked against a real
            ## (non-fake) install.
            registration <-
                identical(R_check_FF, "registration") && install != "fake"
            checkingLog(Log, "foreign function calls")
            DUP <- R_check_FF_DUP
            if(as_cran) {
                ## CRAN incoming checks force the stricter DUP check in
                ## the child process via this environment variable.
                Sys.setenv("_R_CHECK_FF_AS_CRAN_" = "TRUE")
                DUP <- TRUE
            }
            Rcmd <- paste(opWarn_string, "\n",
                          if (do_install)
                          sprintf("tools::checkFF(package = \"%s\", registration = %s, check_DUP = %s)\n",
                                  pkgname, registration, DUP)
                          else
                          sprintf("tools::checkFF(dir = \"%s\", registration = %s, check_DUP = %s)\n",
                                  pkgdir, "FALSE", DUP))
            out <- R_runR2(Rcmd)
            ## Unset unconditionally; harmless when it was never set.
            Sys.unsetenv("_R_CHECK_FF_AS_CRAN_")
            if (length(out)) {
                ## Warn for missing/empty package arguments, base-package
                ## calls (unless we ARE a base package), undeclared
                ## packages, or wrong parameter counts; otherwise note.
                if(any(grepl("^Foreign function calls? with(out| empty)", out)) ||
                   (!is_base_pkg && any(grepl("to a base package:", out))) ||
                   any(grepl("^Undeclared packages? in", out)) ||
                   any(grepl("parameter[s]*, expected ", out))
                   ) warningLog(Log)
                else noteLog(Log)
                printLog0(Log, paste(c(out, ""), collapse = "\n"))
                if(!is_base_pkg && any(grepl("to a base package:", out)))
                    wrapLog("Packages should not make",
                            ".C/.Call/.External/.Fortran",
                            "calls to a base package.",
                            "They are not part of the API,",
                            "for use only by R itself",
                            "and subject to change without notice.")
                else if(any(grepl("with DUP:", out)))
                    wrapLog("DUP is no longer supported and will be ignored.")
                else
                    wrapLog("See chapter 'System and foreign language interfaces' in the 'Writing R Extensions' manual.\n")
            } else resultLog(Log, "OK")
        }
    }

    ## "Checking R code for possible problems": runs up to ten separate
    ## sub-checks in child R processes, collecting their outputs in
    ## out1 .. out10, and then reports them all under a single timed
    ## "checking" section.  The overall result is a warning when .Internal
    ## calls (out4) or defunct/removed functions (in out8) were found,
    ## and a note otherwise.
    ## NOTE(review): the 'is_rec_pkg' argument is not referenced anywhere
    ## in this body — presumably kept for call-site compatibility; confirm
    ## against callers.
    check_R_files <- function(is_rec_pkg)
    {
        checkingLog(Log, "R code for possible problems")
        t1 <- proc.time()
        if (!is_base_pkg) {
            ## Wrong (un)loading of the package's shared object is a hard
            ## ERROR and may abort the whole check run.
            Rcmd <- paste(opWarn_string, "\n",
                          sprintf("tools:::.check_package_code_shlib(dir = \"%s\")\n",
                                  pkgdir))
            out <- R_runR0(Rcmd, R_opts2, "R_DEFAULT_PACKAGES=NULL")
            if (length(out)) {
                errorLog(Log)
                wrapLog("Incorrect (un)loading of package",
                        "shared object.\n")
                printLog0(Log, paste(c(out, ""), collapse = "\n"))
                wrapLog("The system-specific extension for",
                        "shared objects must not be added.\n",
                        "See ?library.dynam.\n")
                maybe_exit(1L)
            }
        }

        ## out1: problems in .onLoad/.onAttach-style startup functions and
        ## in unload functions, joined with a blank separator line when
        ## both produced output.
        Rcmd <- paste(opWarn_string, "\n",
                      sprintf("tools:::.check_package_code_startup_functions(dir = \"%s\")\n",
                              pkgdir))
        out1 <- R_runR0(Rcmd, R_opts2, "R_DEFAULT_PACKAGES=")
        Rcmd <- paste(opWarn_string, "\n",
                      sprintf("tools:::.check_package_code_unload_functions(dir = \"%s\")\n",
                              pkgdir))
        out1a <- R_runR0(Rcmd, R_opts2, "R_DEFAULT_PACKAGES=")
        out1 <- if (length(out1) && length(out1a)) c(out1, "", out1a)
                else c(out1, out1a)

        out2 <- out3 <- out4 <- out5 <- out6 <- out7 <- out8 <- out9 <- out10 <- NULL

        ## out2: possibly unsafe calls that tamper with other packages'
        ## or R's own state.
        if (!is_base_pkg && R_check_unsafe_calls) {
            Rcmd <- paste(opWarn_string, "\n",
                          sprintf("tools:::.check_package_code_tampers(dir = \"%s\")\n",
                                  pkgdir))
            out2 <- R_runR0(Rcmd, R_opts2, "R_DEFAULT_PACKAGES=NULL")
        }

        ## out3: codetools-based usage check (undefined globals, wrong
        ## argument counts, ...); only possible against an installed
        ## package.
        if (R_check_use_codetools && do_install) {
            Rcmd <-
                paste(opWarn_string, "\n",
                      sprintf("tools:::.check_code_usage_in_package(package = \"%s\")\n", pkgname))
            if(config_val_to_logical(Sys.getenv("_R_CHECK_CODE_USAGE_WITH_ONLY_BASE_ATTACHED_",
                                                "true"))) {
                out3 <-  R_runR2(Rcmd, "R_DEFAULT_PACKAGES=NULL")
                ## If undefined globals were reported, run a second child
                ## process to suggest importFrom() directives and append
                ## the suggestion to out3.
                if(length(pos <- which(startsWith(out3,
                                                  "Undefined global functions or variables:")))) {
                    Rcmd <-
                        sprintf("writeLines(strwrap(tools:::imports_for_undefined_globals(\"%s\"), exdent = 11))\n",
                                paste(utils::tail(out3, -pos),
                                      collapse = " "))
                    miss <- R_runR2(Rcmd, "R_DEFAULT_PACKAGES=")
                    ## base has no NAMESPACE
                    if(length(miss) && pkgname != "base") {
                        msg3 <- if(any(startsWith(miss,
                                                  "importFrom(\"methods\""))) {
                            strwrap("to your NAMESPACE file (and ensure that your DESCRIPTION Imports field contains 'methods').")
                        } else "to your NAMESPACE file."
                        out3 <- c(out3,
                                  c("Consider adding",
                                    paste0("  ", miss),
                                    msg3))
                    }
                }
            } else
                out3 <-  R_runR2(Rcmd, "R_DEFAULT_PACKAGES=")
        }

        ## out4: .Internal() calls in package code (a warning below).
        if(!is_base_pkg && R_check_use_codetools && R_check_dot_internal) {
            details <- pkgname != "relax" # has .Internal in a 10,000 line fun
            Rcmd <- paste(opWarn_string, "\n",
                          if (do_install)
                              sprintf("tools:::.check_dotInternal(package = \"%s\",details=%s)\n", pkgname, details)
                          else
                              sprintf("tools:::.check_dotInternal(dir = \"%s\",details=%s)\n", pkgdir, details))
            out4 <- R_runR2(Rcmd, "R_DEFAULT_PACKAGES=")
            ## Hmisc, gooJSON, quantmod give spurious output
            if (!any(grepl("^Found.* .Internal call", out4))) out4 <- NULL
        }

        ## out5: assignments to the global environment.
        if(!is_base_pkg && R_check_code_assign_to_globalenv) {
            Rcmd <- paste(opWarn_string, "\n",
                          sprintf("tools:::.check_package_code_assign_to_globalenv(dir = \"%s\")\n",
                                  pkgdir))
            out5 <- R_runR0(Rcmd, R_opts2, "R_DEFAULT_PACKAGES=")
        }

        ## out6: calls to attach().
        if(!is_base_pkg && R_check_code_attach) {
            Rcmd <- paste(opWarn_string, "\n",
                          sprintf("tools:::.check_package_code_attach(dir = \"%s\")\n",
                                  pkgdir))
            out6 <- R_runR0(Rcmd, R_opts2, "R_DEFAULT_PACKAGES=")
        }
        ## out7: data() calls loading into the global environment.
        if(!is_base_pkg && R_check_code_data_into_globalenv) {
            Rcmd <- paste(opWarn_string, "\n",
                          sprintf("tools:::.check_package_code_data_into_globalenv(dir = \"%s\")\n",
                                  pkgdir))
            out7 <- R_runR0(Rcmd, R_opts2, "R_DEFAULT_PACKAGES=")
        }

        ## Use of deprecated, defunct and platform-specific devices?
        if(!is_base_pkg && R_check_use_codetools && R_check_depr_def) {
            win <- !is.na(OS_type) && OS_type == "windows"
            Rcmd <- paste(opWarn_string, "\n",
                          if (do_install)
                              sprintf("tools:::.check_depdef(package = \"%s\", WINDOWS = %s)\n", pkgname, win)
                          else
                              sprintf("tools:::.check_depdef(dir = \"%s\", WINDOWS = %s)\n", pkgdir, win))
            out8 <- R_runR2(Rcmd, "R_DEFAULT_PACKAGES=")
        }

        ## Potentially erroneous use of 'return' without '()'
        if (!is_base_pkg && R_check_use_codetools && R_check_bogus_return) {
            Rcmd <- paste(opWarn_string, "\n",
                          if (do_install)
                              sprintf("tools:::.check_bogus_return(package = \"%s\")\n", pkgname)
                          else
                              sprintf("tools:::.check_bogus_return(dir = \"%s\")\n", pkgdir))
            out9 <- R_runR2(Rcmd, "R_DEFAULT_PACKAGES=")
        }

        ## out10: class(x) == "string" comparisons (use inherits() etc.).
        if(R_check_code_class_is_string) {
            Rcmd <- paste(opWarn_string, "\n",
                          sprintf("tools:::.check_package_code_class_is_string(dir = \"%s\")\n",
                                  pkgdir))
            out10 <- R_runR0(Rcmd, R_opts2, "R_DEFAULT_PACKAGES=NULL")
        }

        t2 <- proc.time()
        print_time(t1, t2, Log)

        if (length(out1) || length(out2) || length(out3) ||
            length(out4) || length(out5) || length(out6) ||
            length(out7) || length(out8) || length(out9) ||
            length(out10)) {
            ## 'ini' is the separator state: while non-empty (initially
            ## character(), later possibly a transition message) it is
            ## prepended to the next printed section, then cleared.
            ini <- character()
            ## Warning-level findings: .Internal calls, or use of
            ## defunct/removed functions; everything else is a note.
            if(length(out4) ||
               (length(out8) &&
                any(startsWith(out8,
                               "Found the defunct/removed function"))))
                warningLog(Log) else noteLog(Log)
            if (length(out4)) {
                ## Drop any leading noise before the first "Found ...
                ## .Internal call" line.
                first <- grep("^Found.* .Internal call", out4)[1L]
                if(first > 1L) out4 <- out4[-seq_len(first-1)]
                printLog0(Log, paste(c(ini, out4, "", ""), collapse = "\n"))
                wrapLog(c("Packages should not call .Internal():",
                          "it is not part of the API,",
                          "for use only by R itself",
                          "and subject to change without notice."))
                ini <- ""
            }
            if (length(out8)) {
                printLog0(Log, paste(c(ini, out8, ""), collapse = "\n"))
                if(any(startsWith(out8, "Found the defunct/removed function")))
                    ini <- ""
            }
            ## All remaining checks give notes and not warnings.
            if(length(ini))
                ini <- c("",
                         "In addition to the above warning(s), found the following notes:",
                         "")

            if (length(out1)) {
                printLog0(Log, paste(c(ini, out1, ""), collapse = "\n"))
                ini <- ""
            }
            if (length(out2)) {
                printLog0(Log,
                          paste(c(ini,
                                  "Found the following possibly unsafe calls:",
                                  out2, ""),
                                collapse = "\n"))
                ini <- ""
            }
            if (length(out3)) {
                printLog0(Log, paste(c(ini, out3, ""), collapse = "\n"))
                ini <- ""
            }
            if (length(out5)) {
                printLog0(Log, paste(c(ini, out5, ""), collapse = "\n"))
                ini <- ""
            }
            if (length(out6)) {
                printLog0(Log, paste(c(ini, out6, ""), collapse = "\n"))
                ini <- ""
                wrapLog(gettextf("See section %s in '%s'.",
                                 sQuote("Good practice"), "?attach"))
           }
            if (length(out7)) {
                printLog0(Log, paste(c(ini, out7, ""), collapse = "\n"))
                ini <- ""
                wrapLog(gettextf("See section %s in '%s'.",
                                 sQuote("Good practice"), "?data"))
            }
            if (length(out9)) {
                printLog0(Log, paste(c(ini, out9, ""), collapse = "\n"))
                ini <- ""
            }
            if(length(out10)) {
                printLog0(Log, paste(c(ini, out10, ""), collapse = "\n"))
                ini <- ""
            }
        } else resultLog(Log, "OK")
    }

    check_Rd_files <- function(haveR, chkInternal = NA)
    {
        msg_writing_Rd <-
            c("See chapter 'Writing R documentation files' in the 'Writing R Extensions' manual.\n")

        if (dir.exists("man") && !extra_arch) {
            checkingLog(Log, "Rd files")
            t1 <- proc.time()
            minlevel <- if (is_base_pkg) -Inf else
                as.numeric(Sys.getenv("_R_CHECK_RD_CHECKRD_MINLEVEL_", "-1"))
            Rcmd <- paste(opWarn_string, "\n",
                          sprintf("tools:::.check_package_parseRd('.', minlevel=%s)\n", minlevel))
            ## This now evaluates \Sexpr, so run with usual packages.
            out <- R_runR0(Rcmd, R_opts2,
                           c(if(R_cdo) elibs_cdo else elibs,
                             "_R_NO_REPORT_MISSING_NAMESPACES_=true"))
            t2 <- proc.time()
            print_time(t1, t2, Log)
            if (length(out)) {
                if(length(grep(paste("^prepare.*Dropping empty section",
                                     "^checkRd: \\(-",
                                     "^  ", # continuation lines
                                     "NEWS.Rd",
                                     sep = "|"),
                               out, invert = TRUE)))
                    warningLog(Log)
                else noteLog(Log)
                printLog0(Log, paste(c(out, ""), collapse = "\n"))
            } else resultLog(Log, "OK")

            checkingLog(Log, "Rd metadata")
            Rcmd <- paste(opWarn_string, "\n",
                          if (do_install)
                          sprintf("tools:::.check_Rd_metadata(package = \"%s\")\n", pkgname)
                          else
                          sprintf("tools:::.check_Rd_metadata(dir = \"%s\")\n", pkgdir))
            out <- R_runR0(Rcmd, R_opts2, "R_DEFAULT_PACKAGES=NULL")
            if (length(out)) {
                ## <FIXME>
                ## We should really use R() instead of R_runR0() to get
                ## the computed check results object itself.
                ## Change eventually ...
                ## </FIXME>
                tag <- out[!startsWith(out, "  ")]
                if(any(grepl("duplicated", tag, fixed = TRUE)))
                    warningLog(Log)
                else
                    noteLog(Log)
                printLog0(Log, paste(c(out, ""), collapse = "\n"))
            } else resultLog(Log, "OK")
        }

        ## Check Rd line widths.
        if(dir.exists("man") && R_check_Rd_line_widths) {
            checkingLog(Log, "Rd line widths")
            Rcmd <- paste(opWarn_string, "\n",
                          if(do_install)
                          sprintf("suppressPackageStartupMessages(tools:::.check_Rd_line_widths(\"%s\", installed = TRUE))\n",
                                  file.path(if(is_base_pkg) .Library else libdir,
                                            pkgname))
                          else
                          sprintf("tools:::.check_Rd_line_widths(\"%s\")\n",
                                  pkgdir))
            out <- R_runR0(Rcmd, R_opts2, "R_DEFAULT_PACKAGES=NULL")
            if(length(out)) {
                noteLog(Log)
                printLog0(Log, paste(c(out, ""), collapse = "\n"))
                wrapLog("These lines will be truncated in the PDF manual.\n")

            } else resultLog(Log, "OK")
        }

        ## Check cross-references in R documentation files.

        if (dir.exists("man") && R_check_Rd_xrefs) {
            checkingLog(Log, "Rd cross-references")
            Rcmd <- paste(opWarn_string, "\n",
                          if (do_install)
                          sprintf("tools:::.check_Rd_xrefs(package = \"%s\")\n", pkgname)
                          else
                          sprintf("tools:::.check_Rd_xrefs(dir = \"%s\")\n", pkgdir))
            any <- FALSE
            out <- R_runR0(Rcmd, R_opts2, "R_DEFAULT_PACKAGES=NULL")
            ## <FIXME>
            ## tools:::.check_Rd_xrefs() has localized messages, so
            ## grepping on its output is not a good idea.
            ## We should really use R() instead of R_runR0() to get
            ## the computed check results object itself.
            ## </FIXME>
            if(length(out) &&
               any((indb <- startsWith(out,
                                       "Missing link(s) in Rd file")) |
                   (inds <- startsWith(out,
                                       "Non-topic package-anchored link(s) in Rd file")))) {
                if(any(indb)) warningLog(Log) else noteLog(Log)
                any <- TRUE
                printLog0(Log, paste(c(out, ""), collapse = "\n"))
                out <- NULL
            }

            ## The above checks whether Rd xrefs can be resolved within
            ## the package itself, the base and recommended packages,
            ## and its Imports and Depends.  Nowadays, we prefer that Rd
            ## xrefs to aliases not in the package itself and the base
            ## packages have package anchors so there is no ambiguity in
            ## resolving the xrefs,  Hence, at least optionally note the
            ## xrefs missing such package anchors.
            ##
            ## However, .Rd_xrefs_with_missing_package_anchors() uses
            ## the package source directory whereas the above uses
            ## .Rd_check_xrefs() typically for installed packages, so we
            ## do the optional check separately for now.

            if(config_val_to_logical(Sys.getenv("_R_CHECK_XREFS_NOTE_MISSING_PACKAGE_ANCHORS_",
                                                "FALSE"))) {
                bad <- tryCatch(.Rd_xrefs_with_missing_package_anchors(pkgdir),
                                error = identity)
                if(!inherits(bad, "error") && length(bad)) {
                    bad <- split(bad[, "Target"], bad[, "Source"])
                    msg <- c(if(any) "",
                             strwrap("Found the following Rd file(s) with Rd \\link{} targets missing package anchors:"),
                             strwrap(sprintf("  %s: %s",
                                             names(bad),
                                             vapply(bad, paste, "",
                                                    collapse = ", ")),
                                     exdent = 4L, indent = 2L),
                             strwrap("Please provide package anchors for all Rd \\link{} targets not in the package itself and the base packages."))
                    if(!any) {
                        noteLog(Log)
                        any <- TRUE
                    }
                    printLog0(Log, paste(c(msg, ""), collapse = "\n"))
                }
            }

            if(length(out)) {
                if(!any) {
                    if(R_check_use_log_info &&
                       !length(grep("Unknown package.*in Rd xrefs",
                                    out)))
                        infoLog(Log)
                    else
                        noteLog(Log)
                    any <- TRUE
                }
                printLog0(Log, paste(c(out, ""), collapse = "\n"))
            }

            if(!any)
                resultLog(Log, "OK")
        }

        ## Check for missing documentation entries.
        if (!extra_arch && (haveR || dir.exists("data"))) {
            checkingLog(Log, "for missing documentation entries")
            Rcmd <- paste(opWarn_string, "\n",
                          if (do_install)
                          sprintf("tools::undoc(package = \"%s\")\n", pkgname)
                          else
                          sprintf("tools::undoc(dir = \"%s\")\n", pkgdir))
            ## This is needed to pick up undocumented S4 classes.
            ## even for packages which only import methods.
            ## But as that check needs to run get() on all the lazy-loaded
            ## promises, avoid if possible.
            ## desc exists in the body of this function.
            use_methods <- if(pkgname == "methods") TRUE else {
                pi <- .split_description(desc)
                "methods" %in% c(names(pi$Depends), names(pi$Imports))
            }
            out <- if (use_methods) {
                env <- if(WINDOWS) "R_DEFAULT_PACKAGES=utils,grDevices,graphics,stats,methods" else "R_DEFAULT_PACKAGES='utils,grDevices,graphics,stats,methods'"
                R_runR2(Rcmd, env = env)
            } else R_runR2(Rcmd)
            ## Grr, get() in undoc can change the search path
            ## Current example is TeachingDemos
            out <- out[!startsWith(out, "Loading required package:")]
            ## We do not need to report errors here as check ERRORs.
            ## err <- startsWith(out, "Error")
            ## if (any(err)) {
            ##     errorLog(Log)
            ##     printLog0(Log, paste(c(out, ""), collapse = "\n"))
            ##     maybe_exit(1L)
            ## } else
            if (length(out)) {
                warningLog(Log)
                printLog0(Log, paste(c(out, ""), collapse = "\n"))
                wrapLog("All user-level objects",
                        "in a package",
                        if (any(startsWith(out, "Undocumented S4")))
                        "(including S4 classes and methods)",
                        "should have documentation entries.\n")
                wrapLog(msg_writing_Rd)
            } else resultLog(Log, "OK")
        }

        ## Check for code/documentation mismatches.
        if (dir.exists("man") && !extra_arch) {
            checkingLog(Log, "for code/documentation mismatches")
            if (!do_codoc) resultLog(Log, "SKIPPED")
            else {
                any <- FALSE
                ## Check for code/documentation mismatches in functions.
                if (haveR) {
                    Rcmd <- paste(opWarn_string, "\n",
                                  if (do_install)
                                  sprintf("tools::codoc(package = \"%s\")\n", pkgname)
                                  else
                                  sprintf("tools::codoc(dir = \"%s\")\n", pkgdir))
                    out <- R_runR2(Rcmd)
                    if (length(out)) {
                        any <- TRUE
                        warningLog(Log)
                        printLog0(Log, paste(c(out, ""), collapse = "\n"))
                    }
                }

                ## Check for code/documentation mismatches in data sets.
                if (do_install) {
                    Rcmd <- paste(opWarn_string, "\n",
                                  sprintf("tools::codocData(package = \"%s\")\n", pkgname))
                    out <- R_runR2(Rcmd)
                    if (length(out)) {
                        if (!any) warningLog(Log)
                        any <- TRUE
                        printLog0(Log, paste(c(out, ""), collapse = "\n"))
                    }
                }

                ## Check for code/documentation mismatches in S4 classes.
                if (do_install && haveR) {
                    Rcmd <- paste(opWarn_string, "\n",
                                  sprintf("tools::codocClasses(package = \"%s\")\n", pkgname))
                    out <- R_runR2(Rcmd)
                    if (length(out)) {
                        if (!any) warningLog(Log)
                        any <- TRUE
                        printLog0(Log, paste(c(out, ""), collapse = "\n"))
                    }
                }

                if (!any) resultLog(Log, "OK")
            }
        }

        ## Check Rd files, for consistency of \usage with \arguments (are
        ## all arguments shown in \usage documented in \arguments?) and
        ## aliases (do all functions shown in \usage have an alias?)
        if (dir.exists("man") && !extra_arch) {
            checkingLog(Log, "Rd \\usage sections")

            msg_doc_files <-
                c("Functions with \\usage entries",
                  "need to have the appropriate \\alias entries,",
                  "and all their arguments documented.\n",
                  "The \\usage entries must correspond to syntactically",
                  "valid R code.\n")
            any <- FALSE
            ## <NOTE>
            ## Hack to see whether all issues are from internal Rd files
            ## checked specially and only give a NOTE in this case.
            ## Ideally, we would use R() to get the check object and be
            ## able to compute on it, but that is not so simple ...
            Sys.setenv("__R_CHECK_DOC_FILES_NOTE_IF_ALL_SPECIAL__" =
                           "TRUE")
            Rcmd <- paste(opWarn_string, "\n",
                          sprintf("tools::checkDocFiles(%s, chkInternal=%s)\n",
                                  if(do_install)
                                       sprintf("package = \"%s\"", pkgname)
                                  else sprintf("dir = \"%s\"",     pkgdir), chkInternal))
            out <- R_runR2(Rcmd)
            if (length(out)) {
                any <- TRUE
                pos <- which(out ==
                             "All issues in internal Rd files checked specially.")
                if(length(pos)) {
                    noteLog(Log)
                    out <- out[-pos]
                } else
                    warningLog(Log)
                printLog0(Log, paste(c(out, ""), collapse = "\n"))
                wrapLog(msg_doc_files)
                wrapLog(msg_writing_Rd)
            }
            ## </NOTE>

            if (R_check_Rd_style && haveR) {
                msg_doc_style <-
                    c("The \\usage entries for S3 methods should use",
                      "the \\method markup and not their full name.\n")

                Rcmd <- paste(opWarn_string, "\n",
                              if (do_install)
                                  sprintf("tools::checkDocStyle(package = \"%s\")\n", pkgname)
                              else
                                  sprintf("tools::checkDocStyle(dir = \"%s\")\n", pkgdir))
                out <- R_runR2(Rcmd)
                if (length(out)) {
                    if (!any) noteLog(Log)
                    any <- TRUE
                    printLog0(Log, paste(c(out, ""), collapse = "\n"))
                    wrapLog(msg_doc_style)
                    wrapLog(msg_writing_Rd)
                }
            }

            if (!any) resultLog(Log, "OK")
        }

        ## Check Rd contents
        if (dir.exists("man") && R_check_Rd_contents && !extra_arch) {
            checkingLog(Log, "Rd contents")
            Rcmd <- paste(opWarn_string, "\n",
                          sprintf("tools::checkRdContents(%s, chkInternal=%s)\n",
                                  if(do_install)
                                       sprintf("package = \"%s\"", pkgname)
                                  else sprintf("dir = \"%s\"",     pkgdir),
                                  R_check_Rd_internal_too))
            out <- R_runR0(Rcmd, R_opts2, "R_DEFAULT_PACKAGES=NULL")
            if (length(out)) {
                noteLog(Log)
                printLog0(Log, paste(c(out, ""), collapse = "\n"))
            } else resultLog(Log, "OK")
        }

        ## Check undeclared dependencies in examples (if any)
        if (dir.exists("man") && do_install && !extra_arch && !is_base_pkg) {
            checkingLog(Log, "for unstated dependencies in examples")
            Rcmd <- paste(opW_shE_F_str,
                          sprintf("suppressPackageStartupMessages(tools:::.check_packages_used_in_examples(package = \"%s\"))\n", pkgname))

            out <- R_runR2(Rcmd, "R_DEFAULT_PACKAGES=NULL")
            exfile <- paste0(pkgname, "-Ex.R")
            if (length(out)) {
                failed <- any(grepl("parse error in file", out, fixed = TRUE))
                if (failed) errorLog(Log) else warningLog(Log)
                printLog0(Log, paste(c(out, ""), collapse = "\n"))
                if (failed) {
                    printLog0(Log, "** will not attempt to run examples\n")
                    do_examples <<- FALSE
                    file.copy(exfile, pkgoutdir)  # keep that file (PR#17501)
                }
                # wrapLog(msg_DESCRIPTION)
            } else resultLog(Log, "OK")
            if (file.exists(exfile)) unlink(exfile)

        } ## FIXME, what if no install?
    }

    ## Check the package's datasets.  Four sub-checks, each logged
    ## separately via checkingLog()/resultLog():
    ##   * contents of 'data': only allowed file types, the optional
    ##     'data/datalist' index file is well-formed, no dataset contains
    ##     a saved '.Random.seed', and (when installed without lazy-data)
    ##     each dataset loads cleanly via data();
    ##   * non-ASCII characters in datasets (if R_check_ascii_data);
    ##   * LazyData / LazyDataCompression DESCRIPTION fields;
    ##   * ASCII or uncompressed saves in 'data' and in 'R/sysdata.rda'
    ##     (if R_check_compact_data).
    ## Relies on variables from the enclosing checker scope: Log, pkgdir,
    ## libdir, pkgname, do_install, is_base_pkg, the R_check_* flags and
    ## helpers such as filtergrep(), %w/o% and .pretty_format().
    ## NOTE(review): paths like "data" are relative, so this presumably
    ## runs with the package source directory as the working directory --
    ## set by the caller.
    check_data <- function()
    {
        ## Check contents of 'data'
        if (!is_base_pkg && dir.exists("data")) {
            checkingLog(Log, "contents of 'data' directory")
            ## 'warn' escalates the final result from NOTE to WARNING;
            ## 'msgs' accumulates all findings for a single printLog0().
            warn <- FALSE
            msgs <- character()
            fi <- list.files("data")
            dataFiles <- basename(list_files_with_type("data", "data"))
            if (!any(grepl("\\.[Rr]$", fi))) { # code files can do anything
                ## %w/o% is the file-local "without" (set difference)
                ## operator: files neither recognized data files nor the
                ## special 'datalist' index are not allowed here.
                odd <- fi %w/o% c(dataFiles, "datalist")
                if (length(odd)) {
                    warn <-TRUE
                    msgs <-
                        c(sprintf("Files not of a type allowed in a %s directory:\n",
                                  sQuote("data")),
                          paste0(.pretty_format(odd), "\n"),
                          sprintf("Please use e.g. %s for non-R data files\n",
                                  sQuote("inst/extdata")),
                          "\n")
                }
            }
            if ("datalist" %in% fi) {
                ## 'datalist' must be a file, not a directory.
                if(file.info(sv <- file.path("data", "datalist"))$isdir) {
                    warn <- TRUE
                    msgs <- c(msgs, sprintf("%s is a directory\n",
                                            sQuote("data/datalist")),
                              "\n")
                }  else {
                    ## Now check it has the right format:
                    ## it is read in list_data_in_pkg()
                    ## Allowed lines are
                    ## foo
                    ## foo: bar ...
                    ## where bar ... and standalone foo are object names
                    dl <- readLines(sv, warn = FALSE)
                    if (any(bad <- !grepl("^[^ :]+($|: +[[:alpha:].])", dl))) {
                        warn <- TRUE
                        msgs <- c(msgs,
                                  sprintf("File %s contains malformed line(s):\n",
                                          sQuote("data/datalist")),
                                 paste0(.pretty_format(dl[bad]), "\n"))
                    }
                }
            }
            ## Scan each dataset's object names for a saved '.Random.seed',
            ## which would clobber the user's RNG state when loaded.
            ans <- list_data_in_pkg(dir = pkgdir)
            if (length(ans)) {
                bad <-
                    names(ans)[vapply(ans,
                                      function(x) ".Random.seed" %in% x,
                                      NA)]
                if (length(bad)) {
                    warn <- TRUE
                    ## Singular vs plural message depending on how many
                    ## datasets are affected.
                    msg <- if (length(bad) > 1L)
                         c(sprintf("Object named %s found in datasets:\n",
                                  sQuote(".Random.seed")),
                          paste0(.pretty_format(bad), "\n"),
                          "Please remove it.\n")
                    else
                        c(sprintf("Object named %s found in dataset: ",
                                  sQuote(".Random.seed")),
                          sQuote(bad), "\nPlease remove it.\n")
                    msgs <- c(msgs, msg)
                }
            }
            if (do_install) {
                ## check that all the datasets can be loaded cleanly by data()
                ## except for LazyData.
                instdir <- file.path(libdir, pkgname)
                if (!file.exists(file.path(instdir, "data", "Rdata.rdb"))) {
                    files <- basename(list_files_with_type("data", "data"))
                    files <- unique(basename(file_path_sans_ext(files, TRUE)))
                    for (f in files) {
                        ## Load each dataset in a child R process so side
                        ## effects cannot contaminate the checker session.
                        cmd <- sprintf('tools:::.check_package_datasets2("%s", "%s")',
                                       f, pkgname)
                        out <- R_runR(cmd, R_opts2)
                        ## Drop noise emitted by some OpenMP runtimes.
                        out <- filtergrep("^OMP:", out)
                        if (length(out)) {
                            ## Only these markers escalate to WARNING;
                            ## other output is still reported as a NOTE.
                            if (any(grepl("^(Warning|Error|No dataset created|Search path was changed)", out)))
                                warn <- TRUE
                            msgs <- c(msgs,
                                     sprintf('Output for data("%s", package = "%s"):\n', f, pkgname),
                                     paste(c(paste0("  ", out), ""),
                                           collapse = "\n"))
                        }
                    }
                    ## Opt-in check (env var _R_CHECK_DATALIST_) that the
                    ## 'datalist' index is up-to-date.
                    check_datalist <-
                        Sys.getenv("_R_CHECK_DATALIST_", "FALSE")
                    check_datalist <-
                        config_val_to_logical(check_datalist)
                    if(check_datalist && !warn) {
                        ## If there was a problem loading the datasets,
                        ## we cannot reliably check whether 'datalist'
                        ## is up-to-date.
                        cmd <- sprintf("tools:::.check_package_datalist(\"%s\", \"%s\")",

                                       pkgname, libdir)
                        out <- R_runR(cmd, R_opts2)
                        if(length(out)) {
                            msgs <- c(msgs,
                                      c("File 'data/datalist' is out-of-date:\n",
                                        paste0("  ", out, "\n"),
                                        "Please re-create using tools::add_datalist(force = TRUE).\n"))
                        }
                    }
                }
            }
            if (length(msgs)) {
                if (warn) warningLog(Log) else noteLog(Log)
                printLog0(Log, msgs)
            } else resultLog(Log, "OK")
        }

        ## Check for non-ASCII characters in 'data'
        if (!is_base_pkg && R_check_ascii_data && dir.exists("data")) {
            checkingLog(Log, "data for non-ASCII characters")
            t1 <- proc.time()
            out <- R_runR0("tools:::.check_package_datasets('.')", R_opts2,
                           if(R_cdo_data) elibs_cdo else elibs)
            ## Filter out startup/loading chatter unrelated to the check.
            out <- filtergrep("Loading required package", out)
            out <- filtergrep("Warning: changing locked binding", out, fixed = TRUE)
            out <- filtergrep("^OMP:", out)
            t2 <- proc.time()
            print_time(t1, t2, Log)
            if (length(out)) {
                ## Warnings from the subprocess, or a missing package,
                ## make this a WARNING; otherwise an INFO/NOTE.
                bad <- startsWith(out, "Warning:")
                bad2 <-  any(grepl("(unable to find required package|there is no package called)", out))
                if(any(bad) || bad2)
                    warningLog(Log)
                else if(R_check_use_log_info)
                    infoLog(Log)
                else
                    noteLog(Log)
                printLog0(Log, .format_lines_with_indent(out), "\n")
                if(bad2)
                    if(R_cdo_data || R_check_suggests_only)
                        printLog0(Log,
                                  "  The dataset(s) may use package(s) not declared in Depends/Imports.\n")
                    else
                        printLog0(Log,
                                  "  The dataset(s) may use package(s) not declared in the DESCRIPTION file.\n")
            } else resultLog(Log, "OK")
        }

        ## Check consistency of the LazyData / LazyDataCompression
        ## DESCRIPTION fields with the actual 'data' directory.
        if(!is_base_pkg) {
            desc <- .read_description("DESCRIPTION")
            thislazy <- parse_description_field(desc, "LazyData", default = FALSE)
            lazyz <- desc["LazyDataCompression"]
            lazyz0 <- !is.na(lazyz)   # is LazyDataCompression specified?
            if(thislazy || lazyz0) {
                checkingLog(Log, "LazyData")
                if (thislazy && !dir.exists("data")) {
                    if(R_check_use_log_info)
                        infoLog(Log)
                    else
                        noteLog(Log)
                    printLog0(Log,
                              "  'LazyData' is specified without a 'data' directory\n")
                    if(lazyz0)
                        printLog0(Log,
                                  "  'LazyDataCompression' is specified without a 'data' directory\n")
                } else if (!thislazy && lazyz0) {
                    noteLog(Log)
                    printLog0(Log,
                              "  'LazyDataCompression' is specified without 'LazyData'\n")
                } else if (thislazy && lazyz0 &&
                           !(lazyz %in% c("gzip", "bzip2", "xz", "none"))) {
                    warningLog(Log)
                    printLog0(Log,
                              sprintf("  undocumented value %s of field 'LazyDataCompression'\n", sQuote(lazyz)))
                ## Allow "gzip" to indicate that the issue has been considered.
                ## } else if (lazyz %in% c("gzip", "yes")) {
                ##     noteLog(Log)
                ##     printLog0(Log,
                ##               "  'LazyDataCompression' has its default value so would better be omitted\n")
                } else if (thislazy && !lazyz0 && do_install) {
                    ## Large lazy-load DB (> 5 MB) without an explicit
                    ## compression choice: suggest setting one.
                    f <- file.path(libdir, pkgname, "data", "Rdata.rdb")
                    if (file.exists(f) &&
                        (fs <- file.size(f)) > 5*1024^2) {
                        warningLog(Log)
                        printLog0(Log,
                                  sprintf("  LazyData DB of %.1f MB without LazyDataCompression set\n", fs/1024^2),
                                  "  See \u{00a7}1.1.6 of 'Writing R Extensions'\n")
                    } else resultLog(Log, "OK")
                } else resultLog(Log, "OK")
            }
        }

        ## Check for ASCII and uncompressed/unoptimized saves in 'data'
        if (!is_base_pkg && R_check_compact_data && dir.exists("data")) {
            checkingLog(Log, "data for ASCII and uncompressed saves")
            out <- R_runR0("tools:::.check_package_compact_datasets('.', TRUE)",
                           R_opts2)
            out <- filtergrep("Warning: changing locked binding", out, fixed = TRUE)
            if (length(out)) {
                warningLog(Log)
                printLog0(Log, .format_lines_with_indent(out), "\n")
            } else resultLog(Log, "OK")
        }

        ## Check for ASCII and uncompressed/unoptimized saves in 'sysdata':
        ## no base package has this
        if (R_check_compact_data && file.exists(file.path("R", "sysdata.rda"))) {
            checkingLog(Log, "R/sysdata.rda")
            out <- R_runR0("tools:::.check_package_compact_sysdata('.', TRUE)",
                           R_opts2)
            if (length(out)) {
                ## WARNING only if the subprocess itself warned.
                bad <- startsWith(out, "Warning:")
                if (any(bad)) warningLog(Log) else noteLog(Log)
                printLog0(Log, .format_lines_with_indent(out), "\n")
            } else resultLog(Log, "OK")
        }
    }

    ## Check the *installed* 'doc' directory (from 'inst/doc') for common
    ## problems: files already shipped with R, generated LaTeX style files
    ## whose license requires distributing their sources, TeX build
    ## leftovers, and files/directories that look like vignette sources or
    ## intermediates which should not be installed.  Emits a single NOTE
    ## covering all findings, or OK.  Uses Log, libdir, pkgname and pkgdir
    ## from the enclosing checker scope.  Only called when the package was
    ## installed (see the vignette-figure comment below).
    check_doc_contents <- function()
    {
        ## Have already checked that inst/doc exists
        doc_dir <- file.path(libdir, pkgname, "doc")
        if (!dir.exists(doc_dir)) return()
        checkingLog(Log, "installed files from 'inst/doc'")
        ## special case common problems.
        ## 'any' tracks whether noteLog() has been emitted yet, so the
        ## NOTE header is printed only once.  (It shadows base::any as a
        ## logical, but R's function-call lookup skips non-function
        ## bindings, so any(bad) below still finds base::any.)
        any <- FALSE
        files <- dir(file.path(pkgdir, "inst", "doc"))
        ## Style/bib files R itself already provides.
        already <- c("jss.cls", "jss.bst", "Rd.sty", "Sweave.sty")
        bad <- files[files %in% already]
        if (length(bad)) {
            noteLog(Log)
            any <- TRUE
            printLog0(Log,
                      "The following files are already in R: ",
                      paste(sQuote(bad), collapse = ", "), "\n",
                      "Please remove them from your package.\n")
        }
        files2 <- dir(file.path(pkgdir, "inst", "doc"), recursive = TRUE,
                     pattern = "[.](cls|sty|drv)$", full.names = TRUE)
        ## Skip Rnews.sty and RJournal.sty for now
        files2 <- files2[basename(files2) %notin%
                         c("jss.cls", "jss.drv", "Rnews.sty", "RJournal.sty")]
        bad <- character()
        for(f in files2) {
            ## Docstrip-generated files carry this three-line license
            ## notice requiring that the original sources be distributed.
            pat <- "%% (This generated file may be distributed as long as the|original source files, as listed above, are part of the|same distribution.)"
            if(length(grep(pat, readLines(f, warn = FALSE), useBytes = TRUE))
               == 3L) bad <- c(bad, basename(f))
        }
        if (length(bad)) {
            if(!any) noteLog(Log)
            any <- TRUE
            printLog0(Log,
                      "The following files contain a license that requires\n",
                      "distribution of original sources:\n",
                      "  ", paste(sQuote(bad), collapse = ", "), "\n",
                      "Please ensure that you have complied with it.\n")
        }

        ## Now look for TeX leftovers (and soiltexture, Amelia ...).
        bad <- grepl("[.](log|aux|bbl|blg|dvi|toc|out|Rd|Rout|dbj|drv|ins)$",
                     files, ignore.case = TRUE)
        if (any(bad)) {
            if(!any) noteLog(Log)
            any <- TRUE
            printLog0(Log,
                      "The following files look like leftovers/mistakes:\n",
                      paste(strwrap(paste(sQuote(files[bad]), collapse = ", "),
                                    indent = 2, exdent = 2), collapse = "\n"),
                      "\nPlease remove them from your package.\n")
        }

        ## Switch to the *installed* doc dir for the remaining checks.
        files <- dir(doc_dir)
        files <- files %w/o% already
        ## Likely vignette sources/intermediates that should live in
        ## 'vignettes', not be installed under 'doc'.
        bad <- grepl("[.](tex|lyx|png|jpg|jpeg|gif|ico|bst|cls|sty|ps|eps|img)$",
                     files, ignore.case = TRUE)
        bad <- bad | grepl("(Makefile|~$)", files)
        ## How about any pdf files which look like figures files from vignettes?
        ## Note that we are only run if do_install is true ...
        vigns <-
            .package_vignettes_via_call_to_R(dir = pkgdir,
                                             libpaths = c(libdir,
                                                          .libPaths()))
        if (!is.null(vigns) && length(vigns$docs)) {
            ## Flag '<vignette>-<n>.pdf' figure files, but never the
            ## vignette output documents themselves.
            vf <- vigns$names
            pat <- paste(vf, collapse="|")
            pat <- paste0("^(", pat, ")-[0-9]+[.]pdf")
            bad <- bad | grepl(pat, files)
            bad <- bad & is.na(match(files, basename(vigns$docs)))
        }
        bad <- bad | grepl("^fig.*[.]pdf$", files)
        badf <- files[bad]

        ## Directories commonly holding vignette build inputs/outputs.
        dirs <- basename(list.dirs(doc_dir, recursive = FALSE))
        badd <- dirs[dirs %in% c("auto", "Bilder", "fig", "figs", "figures",
                                 "Figures", "img", "images", "JSSstyle",
                                 "jssStyle", "screenshots2", "src", "tex", "tmp")]
        if (length(c(badf, badd))) {
            if(!any) noteLog(Log)
            any <- TRUE
            if(length(badf))
                printLog0(Log,
                          "The following files should probably not be installed:\n",
                          paste(strwrap(paste(sQuote(badf), collapse = ", "),
                                        indent = 2, exdent = 2), collapse = "\n"),
                          "\n")
            if(length(badd))
                printLog0(Log,
                          "The following directories should probably not be installed:\n",
                          paste(strwrap(paste(sQuote(badd), collapse = ", "),
                                        indent = 2, exdent = 2), collapse = "\n"),
                          "\n")
            printLog0(Log, "\nConsider the use of a .Rinstignore file: see ",
                      sQuote("Writing R Extensions"), ",\n",
                      "or move the vignette sources from ",
                      sQuote("inst/doc"), " to ", sQuote("vignettes"), ".\n")
        }
        if (!any) resultLog(Log, "OK")
    }

    ## Check the 'vignettes' source directory: that vignette sources are
    ## not only in 'inst/doc', that pre-built 'inst/doc' copies are not
    ## older than their sources, that a VignetteBuilder is declared when
    ## there are no Sweave sources, and for leftover style files, TeX
    ## intermediates and knitr cache/figure directories.  Emits a single
    ## WARNING or NOTE covering all findings, or OK.
    ##
    ## ignore_vignettes: if TRUE, log SKIPPED and do nothing (e.g. when
    ##     checking with --no-vignettes).
    ## Uses Log, pkgdir, libdir, do_install, is_base_pkg and 'desc' from
    ## the enclosing checker scope.
    check_vign_contents <- function(ignore_vignettes = FALSE)
    {
        checkingLog(Log, "files in 'vignettes'")
        if (ignore_vignettes) {
            resultLog(Log, "SKIPPED")
            return()
        }
        ## special case common problems.
        ## 'any' records whether a WARNING/NOTE header was already
        ## printed, so it appears only once.
        any <- FALSE
        pattern <- vignetteEngine("Sweave")$pattern
        vign_dir <- file.path(pkgdir, "vignettes")
        ## Sweave sources present only in 'inst/doc': they belong in
        ## 'vignettes'.
        sources <- setdiff(list.files(file.path(pkgdir, "inst", "doc"),
                                      pattern = pattern),
                           list.files(vign_dir, pattern = pattern))
        if(length(sources)) {
            warningLog(Log)
            any <- TRUE
            msg <- c("Vignette sources in 'inst/doc' missing from the 'vignettes' directory:",
                     .pretty_format(sources))
            printLog0(Log, paste(msg, collapse = "\n"), "\n")
        }

        ## Did the vignettes get updated in inst/doc?
        inst_doc_files <- list.files(file.path(pkgdir, "inst", "doc"),
                                     recursive = TRUE)
        vignette_files <- list.files(vign_dir, recursive = TRUE)
        if (!is_base_pkg && length(vignette_files)) {
            if (!length(inst_doc_files)) {
                if (!any) warningLog(Log)
                any <- TRUE
                msg <- c("Files in the 'vignettes' directory but no files in 'inst/doc':",
                         .pretty_format(vignette_files))
                printLog0(Log, paste(msg, collapse = "\n"), "\n")
            } else {
                ## allow for some imprecision in file times (in secs)
                time_tol <- as.double(Sys.getenv("_R_CHECK_FILE_TIMES_TOL_", 10))
                vignette_times <- file.mtime(file.path(vign_dir, vignette_files))
                inst_doc_times <- file.mtime(file.path(pkgdir, "inst", "doc", inst_doc_files))
                ## First compare the overall newest mtimes, then (below)
                ## compare files with the same name pairwise.
                if (sum(!is.na(vignette_times)) && sum(!is.na(inst_doc_times)) &&
                    max(vignette_times, na.rm = TRUE) > max(inst_doc_times, na.rm = TRUE) + time_tol) {
                    if (!any) warningLog(Log)
                    any <- TRUE
                    msg <- c("Files in the 'vignettes' directory newer than all files in 'inst/doc':",
                             .pretty_format(vignette_files[!is.na(vignette_times) & vignette_times > max(inst_doc_times, na.rm = TRUE)]))
                    ## Drop the files already reported so the pairwise
                    ## check below does not flag them a second time.
                    keep <- is.na(vignette_times) |
                        vignette_times <= max(inst_doc_times, na.rm = TRUE) + time_tol
                    vignette_files <- vignette_files[keep]
                    vignette_times <- vignette_times[keep]
                    printLog0(Log, paste(msg, collapse = "\n"), "\n")
                }
                matches <- match(vignette_files, inst_doc_files)
                newer <- vignette_times > inst_doc_times[matches] + time_tol
                newer <- !is.na(matches) & !is.na(newer) & newer
                if (any(newer)) {
                    if (!any) warningLog(Log)
                    any <- TRUE
                    msg <- c("Files in the 'vignettes' directory newer than same file in 'inst/doc':",
                             .pretty_format(vignette_files[newer]))
                    printLog0(Log, paste(msg, collapse = "\n"), "\n")
                }
            }
        }

        ## Non-Sweave vignette sources need a VignetteBuilder field in
        ## DESCRIPTION ('desc' comes from the enclosing scope).
        files <- dir(file.path(pkgdir, "vignettes"))
        if(length(files) &&
           !length(dir(file.path(pkgdir, "vignettes"),
                       pattern = pattern)) &&
           is.na(desc["VignetteBuilder"])) {
            if(!any) noteLog(Log)
            any <- TRUE
            printLog0(Log,
                      "Package has no Sweave vignette sources and no VignetteBuilder field.\n")
        }

        ## Re-scan vignettes with check = TRUE and report any problems
        ## the scan itself produced.
        vigns <-
            .package_vignettes_via_call_to_R(dir = pkgdir,
                                             check = TRUE,
                                             libpaths = c(if(do_install)
                                                              libdir,
                                                          .libPaths()))
        if(length(msg <- vigns[["msg"]])) {
            if(!any) noteLog(Log)
            any <- TRUE
            printLog0(Log, paste(msg, collapse = "\n"), "\n")
        }

        ## Style/bib files that R itself already provides.
        already <- c("jss.cls", "jss.bst", "Rd.sty", "Sweave.sty")
        bad <- files[files %in% already]
        if (length(bad)) {
            if(!any) noteLog(Log)
            any <- TRUE
            printLog0(Log,
                      "The following files are already in R: ",
                      paste(sQuote(bad), collapse = ", "), "\n",
                      "Please remove them from your package.\n")
        }
        files2 <- dir(file.path(pkgdir, "vignettes"), recursive = TRUE,
                     pattern = "[.](cls|sty|drv)$", full.names = TRUE)
        files2 <- files2[basename(files2) %notin%
                         c("jss.cls", "jss.drv", "Rnews.sty", "RJournal.sty")]
        bad <- character()
        for(f in files2) {
            ## Docstrip-generated files carry this three-line license
            ## notice requiring distribution of the original sources.
            pat <- "%% (This generated file may be distributed as long as the|original source files, as listed above, are part of the|same distribution.)"
            if(length(grep(pat, readLines(f, warn = FALSE), useBytes = TRUE))
               == 3L) bad <- c(bad, basename(f))
        }
        if (length(bad)) {
            if(!any) noteLog(Log)
            any <- TRUE
            printLog0(Log,
                      "The following files contain a license that requires\n",
                      "distribution of original sources:\n",
                      "  ", paste(sQuote(bad), collapse = ", "), "\n",
                      "Please ensure that you have complied with it.\n")
        }

        ## Now look for TeX leftovers (and soiltexture, Amelia ...).
        bad <- grepl("[.](log|aux|bbl|blg|dvi|toc|out|Rd|Rout|dbj|drv|ins)$",
                     files, ignore.case = TRUE)
        bad <- bad | (files %in% c("Rplots.ps", "Rplots.pdf"))
        if (any(bad)) {
            if(!any) noteLog(Log)
            any <- TRUE
            printLog0(Log,
                      "The following files look like leftovers/mistakes:\n",
                      paste(strwrap(paste(sQuote(files[bad]), collapse = ", "),
                                    indent = 2, exdent = 2), collapse = "\n"),
                      "\nPlease remove them from your package.\n")
        }

        ## Probable leftovers from knitr
        dirs <- file.path(pkgdir, "vignettes", c("cache", "figure"))
        dirs <- basename(dirs[dir.exists(dirs)])
        if(length(dirs)) {
            if(!any) noteLog(Log)
            any <- TRUE
            printLog0(Log,
                      if(length(dirs)> 1L) "The following directories look like leftovers from 'knitr':\n"
                      else "The following directory looks like a leftover from 'knitr':\n",
                      paste(strwrap(paste(sQuote(dirs), collapse = ", "),
                                    indent = 2, exdent = 2), collapse = "\n"),
                      "\nPlease remove from your package.\n")
        }

        if (!any) resultLog(Log, "OK")
    }

    ## Check sizes of PDF files under 'inst/doc'.  First runs qpdf on
    ## copies of the PDFs and reports a NOTE if it achieves significant
    ## reductions; if R_check_doc_sizes2 is set, additionally runs
    ## ghostscript ("ebook" quality) + qpdf and reports a WARNING for
    ## large reductions.  Otherwise logs OK.
    ## Caller has already verified that 'inst/doc' exists and that qpdf
    ## is available.  Uses Log, R_check_doc_sizes2 and the logging
    ## helpers from the enclosing checker scope; runs with the package
    ## source directory as the working directory.
    check_doc_size <- function()
    {
        ## Have already checked that inst/doc exists and qpdf can be found.
        ## Anchor the pattern with '$' so that only genuine *.pdf files
        ## are considered (previously names merely containing ".pdf",
        ## e.g. "foo.pdfsync" or "a.pdf.bak", were swept in as well).
        pdfs <- dir('inst/doc', pattern="\\.pdf$",
                    recursive = TRUE, full.names = TRUE)
        pdfs <- setdiff(pdfs, "inst/doc/Rplots.pdf")
        if (length(pdfs)) {
            checkingLog(Log, "sizes of PDF files under 'inst/doc'")
            ## 'any' records whether a NOTE/WARNING header was already
            ## printed, so the final OK is suppressed appropriately.
            any <- FALSE
            ## Compact copies in a temporary directory so the package
            ## sources themselves are left untouched.
            td <- tempfile('pdf')
            dir.create(td)
            file.copy(pdfs, td)
            res <- compactPDF(td, gs_quality = "none") # use qpdf
            res <- format(res, diff = 1e5)  # only reductions >= 100 KB
            if(length(res)) {
                noteLog(Log)
                any <- TRUE
                printLog(Log,
                         "  'qpdf' made some significant size reductions:\n",
                         paste("  ", res, collapse = "\n"),
                         "\n",
                         "  consider running tools::compactPDF() on these files,\n",
                         "  or build the source package with --compact-vignettes\n")
            }
            if (R_check_doc_sizes2) {
                gs_cmd <- find_gs_cmd()
                if (nzchar(gs_cmd)) {
                    res <- compactPDF(td, gs_cmd = gs_cmd, gs_quality = "ebook")
                    res <- format(res, diff = 2.56e5) # 250 KB for now
                    if(length(res)) {
                        if (!any) warningLog(Log)
                        any <- TRUE
                        printLog(Log,
                                 "  'gs+qpdf' made some significant size reductions:\n",
                                 paste("  ", res, collapse = "\n"),
                                 "\n",
                                 '  consider running tools::compactPDF(gs_quality = "ebook") on these files,\n',
                                 '  or build the source package with --compact-vignettes=both\n')
                    }
                } else {
                    ## Cannot run the stronger check without ghostscript.
                    if (!any) noteLog(Log)
                    any <- TRUE
                    printLog(Log, "Unable to find GhostScript executable to run checks on size reduction\n")
                }

            }
            if (!any) resultLog(Log, "OK")
        }
    }

    ## Portability checks on the package sources in the current directory
    ## (the unpacked package): line endings in shell scripts, in
    ## C/C++/Fortran sources/headers and in Makefiles; GNU make
    ## extensions; BLAS/LAPACK/FLIBS and OpenMP macro usage in
    ## src/Makevars*/Makefile*; and Makefile include directives using
    ## R_HOME.  All results are written to the check 'Log'.
    ## 'desc' is the package DESCRIPTION metadata (a named character
    ## vector); only its "SystemRequirements" entry is consulted here.
    check_src_dir <- function(desc)
    {
        ## Added in R 3.4.2: check line endings for shell scripts:
        ## for Unix CRLF line endings are fatal but these are not used
        ## on Windows and hence this is not detected.
        ## Packages could have arbitrary scripts, so we could
        ## extend this to look for scripts at top level or elsewhere.
        scripts <- dir(".", pattern = "^(configure|configure.in|configure.ac|cleanup)$")
        if(length(scripts)) {
            checkingLog(Log, "line endings in shell scripts")
            bad_files <- character()
            for(f in scripts) {
                ## read byte-wise so the check is independent of encoding
                contents <- readChar(f, file.size(f), useBytes = TRUE)
                if (grepl("\r", contents, fixed = TRUE, useBytes = TRUE))
                    bad_files <- c(bad_files, f)
            }
            if (length(bad_files)) {
                warningLog(Log, "Found the following shell script(s) with CR or CRLF line endings:")
                printLog0(Log, .format_lines_with_indent(bad_files), "\n")
                printLog(Log, "Non-Windows OSes require LF line endings.\n")
            } else resultLog(Log, "OK")
        }


        ## Check C/C++/Fortran sources/headers for CRLF line endings.
        ## <FIXME>
        ## Does ISO C really require LF line endings?
        ## (ISO C does not comment on OSes ....)
        ## Solaris compilers still do, with a warning but no longer an error.
        ## </FIXME>
        if(dir.exists("src") || dir.exists("inst/include")) {
            checkingLog(Log, "line endings in C/C++/Fortran sources/headers")
            ## pattern is "([cfh]|cc|cpp|hpp)"
            files <- dir("src", pattern = "\\.([cfh]|cc|cpp|hpp)$",
                         full.names = TRUE, recursive = TRUE)
            ## exclude dirs starting src/win, e.g for tiff
            files <- filtergrep("^src/[Ww]in", files)
            files2 <- dir("inst/include", pattern = "\\.([cfh]|cc|cpp|hpp)$",
                          full.names = TRUE, recursive = TRUE)
            bad_files <- character()
            no_eol <- character()
            for(f in c(files, files2)) {
                contents <- readChar(f, file.size(f), useBytes = TRUE)
                if (grepl("\r", contents, fixed = TRUE, useBytes = TRUE))
                    bad_files <- c(bad_files, f)
                else if (nzchar(contents) &&  ## allow empty dummy files
                         !grepl("\n$", contents, useBytes = TRUE))
                    no_eol <- c(no_eol, f)
            }
            if (length(bad_files) || length(no_eol)) noteLog(Log, "")
            else resultLog(Log, "OK")
            if (length(bad_files)) {
                printLog(Log, "Found the following sources/headers with CR or CRLF line endings:\n")
                printLog0(Log, .format_lines_with_indent(bad_files), "\n")
                printLog(Log, "Some Unix compilers require LF line endings.\n")
            } else if (length(no_eol)) {
                printLog(Log, "Found the following sources/headers not terminated with a newline:\n")
                printLog0(Log, .format_lines_with_indent(no_eol), "\n")
                printLog(Log, "Some compilers warn on such files.\n")
            }
        }

        ## Check src/Make* for LF line endings, as Sun make does not accept CRLF
        ## .win files are not checked, as CR/CRLF work there
        all_files <-
            dir("src",
                pattern = "^(Makevars|Makevars.in|Makefile|Makefile.in)$",
                full.names = TRUE, recursive = TRUE)
        all_files <- c(all_files,
                       dir(".", pattern = "^Makefile$",
                           full.names = TRUE, recursive = TRUE))
        all_files <- sub("^[.]/", "", all_files)
        all_files <- unique(sort(all_files))
        if(length(all_files)) {
            checkingLog(Log, "line endings in Makefiles")
            bad_files <- noEOL <- character()
            for(f in all_files) {
                if (!file.exists(f)) next
                contents <- readChar(f, file.size(f), useBytes = TRUE)
                if (grepl("\r", contents, fixed = TRUE, useBytes = TRUE))
                    bad_files <- c(bad_files, f)
                if (!grepl("\n$", contents, useBytes = TRUE))
                    noEOL <- c(noEOL, f)
            }
            if (length(bad_files)) {
                warningLog(Log, "Found the following Makefile(s) with CR or CRLF line endings:")
                printLog0(Log, .format_lines_with_indent(bad_files), "\n")
                printLog(Log, "Some Unix 'make' programs require LF line endings.\n")
            } else if (length(noEOL)) {
                noteLog(Log, "Found the following Makefile(s) without a final LF:")
                printLog0(Log, .format_lines_with_indent(noEOL), "\n")
                printLog(Log, "Some 'make' programs ignore lines not ending in LF.\n")
            } else resultLog(Log, "OK")
        }
        ## Check src/Makevars[.in] compilation flags.
        if (length(makevars)) {
            checkingLog(Log, "compilation flags in Makevars")

            Rcmd <- sprintf("tools:::.check_make_vars(\"src\", %s)\n",
                            deparse(makevars))
            out <- R_runR0(Rcmd, R_opts2, "R_DEFAULT_PACKAGES=NULL")
            if (length(out)) {
                if(any(grepl("^(Non-portable flags|Variables overriding)", out)))
                   warningLog(Log) else noteLog(Log)
                printLog0(Log, paste(c(out, ""), collapse = "\n"))
            } else resultLog(Log, "OK")
        }
        ## Check GNUisms
        if (length(all_files)) {
            checkingLog(Log, "for GNU extensions in Makefiles")
            bad_files <- character()
            for(f in all_files) {
                contents <- readLines(f, warn = FALSE)
                contents <- filtergrep("^ *#", contents)
                ## Things like $(SUBDIRS:=.a)
                contents <- filtergrep("[$][(].+:=.+[)]", contents)
                ## GNU make conditionals are ifeq/ifneq/ifdef/ifndef
                ## terminated by 'endif'.
                if (any(grepl("([+]=|:=|[$][(]wildcard|[$][(]shell|[$][(]eval|[$][(]call|[$][(]patsubst|^ifeq|^ifneq|^ifdef|^ifndef|^endif|[.]NOTPARALLEL)",
                              contents)))
                    bad_files <- c(bad_files, f)
            }
            SysReq <- desc["SystemRequirements"]
            if (length(bad_files)) {
                ## GNUisms are allowed if GNU make is declared in
                ## SystemRequirements.
                if(!is.na(SysReq) &&
                   grepl("GNU [Mm]ake",
                         gsub("[[:space:]]+", " ", SysReq))) {
                    if(!config_val_to_logical(Sys.getenv("_R_CHECK_CRAN_INCOMING_NOTE_GNU_MAKE_",
                                                         "FALSE"))) {
                        if(R_check_use_log_info)
                            infoLog(Log,
                                    "GNU make is a SystemRequirements.")
                        else
                            noteLog(Log,
                                    "GNU make is a SystemRequirements.")
                    } else resultLog(Log, "OK")
                } else {
                    warningLog(Log, "Found the following file(s) containing GNU extensions:")
                    printLog0(Log, .format_lines_with_indent(bad_files), "\n")
                    wrapLog("Portable Makefiles do not use GNU extensions",
                            "such as +=, :=, $(shell), $(wildcard),",
                            "ifeq ... endif, .NOTPARALLEL",
                            "See section 'Writing portable packages'",
                            "in the 'Writing R Extensions' manual.\n")
                }
            } else resultLog(Log, "OK")
        }

        ## check src/Makevar*, src/Makefile* for correct use of BLAS_LIBS
        ## FLIBS is not needed on Windows, at least currently (as it is
        ## statically linked).
        makefiles <- Sys.glob(file.path("src",
                                        c("Makevars", "Makevars.in",
                                          "Makefile", "Makefile.win",
                                          "Makefile.ucrt")))
        if(length(makefiles)) {
            checkingLog(Log, "for portable use of $(BLAS_LIBS) and $(LAPACK_LIBS)")
            any <- FALSE
            for (f in makefiles) {
                lines <- readLines(f, warn = FALSE)
                ## Combine lines ending in escaped newlines.
                if(any(ind <- grepl("[\\]$", lines, useBytes = TRUE))) {
                    ## Eliminate escape.
                    lines[ind] <-
                        sub("[\\]$", "", lines[ind], useBytes = TRUE)
                    ## Determine ids of blocks that need to be joined.
                    ind <- seq_along(ind) - c(0, cumsum(ind)[-length(ind)])
                    ## And join.
                    lines <- unlist(lapply(split(lines, ind), paste,
                                           collapse = " "))
                }
                ## Truncate at first comment char
                lines <- sub("#.*", "", lines)
                ## c1: PKG_LIBS settings; c2*: which macros they reference,
                ## allowing both $(FOO) and ${FOO} spellings.
                c1 <- grepl("^[[:space:]]*PKG_LIBS", lines, useBytes = TRUE)
                c2l <- grepl("\\$[{(]{0,1}LAPACK_LIBS", lines, useBytes = TRUE)
                c2b <- grepl("\\$[{(]{0,1}BLAS_LIBS", lines, useBytes = TRUE)
                c2lb <- grepl("\\$[{(]{0,1}LAPACK_LIBS.*\\$[{(]{0,1}BLAS_LIBS",
                              lines, useBytes = TRUE)
                c2bf <- grepl("\\$[{(]{0,1}BLAS_LIBS.*\\$[{(]{0,1}FLIBS",
                              lines, useBytes = TRUE)
                ## FLIBS is unneeded if Fortran sources are included.
                ## So we look for top-level .f .f90 .f95 source files.
                have_F <- any(grepl("[.](f|f90|f95)$", dir("src")))
                if (any(c1 & c2l & !c2lb) && !have_F) {
                    if (!any) warningLog(Log)
                    any <- TRUE
                    printLog(Log,
                             "  apparently using $(LAPACK_LIBS) without following $(BLAS_LIBS) in ",
                             sQuote(f), "\n")
                }
                if (any(c1 & c2b & !c2bf) && !have_F) {
                    if (!any) warningLog(Log)
                    any <- TRUE
                    printLog(Log,
                             "  apparently using $(BLAS_LIBS) without following $(FLIBS) in ",
                             sQuote(f), "\n")
                }
            }
            if (!any) resultLog(Log, "OK")
        }

        makefiles <- Sys.glob(file.path("src",
                                        c("Makevars", "Makevars.in",
                                          "Makevars.win", "Makevars.ucrt",
                                          "Makefile", "Makefile.win",
                                          "Makefile.ucrt")))

        if(length(makefiles)) {
            checkingLog(Log, "use of PKG_*FLAGS in Makefiles")
            any <- msg <- character()
            for (m in makefiles) {
                lines <- readLines(m, warn = FALSE)
                ## What kinds of source files does the package actually have?
                have_c <- length(dir('src', pattern = "[.]c$", recursive = TRUE)) > 0L
                have_cxx <- length(dir('src', pattern = "[.](cc|cpp)$", recursive = TRUE)) > 0L
                have_f <- length(dir('src', pattern = "[.]f$", recursive = TRUE)) > 0L
                have_f9x <- length(dir('src', pattern = "[.]f9[05]$", recursive = TRUE)) > 0L
                for (f in c("C", "CXX", "F", "FC", "CPP"))  {
                    this <- paste0(f, "FLAGS")
                    this2 <- paste0("PKG_", this)
                    pat <- paste0("^[[:space:]]*", this2)
                    if(any(grepl(pat, lines, useBytes = TRUE))) {
                        ## Setting PKG_<LANG>FLAGS is pointless without
                        ## files of that language.
                        if(!switch(f, C = have_c, CXX = have_cxx,
                                   F = have_f | have_f9x, FC =  have_f9x,
                                   CPP = have_c | have_cxx)) {
                            msg <- c(msg,
                                     paste("  ", this2, "set in", sQuote(m),
                                           "without any corresponding files\n"))
                        }
                    }
                }
            }
            if (length(msg)) {
                noteLog(Log)
                printLog0(Log, msg)
            } else resultLog(Log, "OK")
        }

        test_omp <-
            config_val_to_logical(Sys.getenv("_R_CHECK_SHLIB_OPENMP_FLAGS_", "FALSE"))
        if(length(makefiles) && test_omp) {
            checkingLog(Log, "use of SHLIB_OPENMP_*FLAGS in Makefiles")
            ## If any of these flags are included in PKG_*FLAGS, it
            ## should also be included in PKG_LIBS.  And it is
            ## not portable to use more than one of these in one package.
            any <- msg2 <- msg3 <- FALSE
            for (m in makefiles) {
                lines <- readLines(m, warn = FALSE)
                ## Combine lines ending in escaped newlines.
                if(any(ind <- grepl("[\\]$", lines, useBytes = TRUE))) {
                    ## Eliminate escape.
                    lines[ind] <-
                        sub("[\\]$", "", lines[ind], useBytes = TRUE)
                    ## Determine ids of blocks that need to be joined.
                    ind <- seq_along(ind) - c(0, cumsum(ind)[-length(ind)])
                    ## And join.
                    lines <- unlist(lapply(split(lines, ind), paste,
                                           collapse = " "))
                }
                ## Truncate at first comment char, skip empty lines
                lines <- sub("#.*", "", lines)
                lines <- lines[nzchar(lines)]

                c1 <- grepl("^[[:space:]]*PKG_LIBS", lines, useBytes = TRUE)
                anyInLIBS <- any(grepl("SHLIB_OPENMP_", lines[c1], useBytes = TRUE))
                use_fc <- any(grepl("^USE_FC_TO_LINK", lines, useBytes = TRUE))

                ## Now see what sort of files we have
                have_c <- length(dir('src', pattern = "[.]c$", recursive = TRUE)) > 0L
                have_cxx <- length(dir('src', pattern = "[.](cc|cpp)$", recursive = TRUE)) > 0L
                have_f <- length(dir('src', pattern = "[.]f$", recursive = TRUE)) > 0L
                have_f9x <- length(dir('src', pattern = "[.]f9[05]$", recursive = TRUE)) > 0L
                used <- character()
                for (f in c("C", "CXX", "F", "FC"))  {
                    this <- this2 <- paste0(f, "FLAGS")
                    if (f == "FC") this2 <- "(F|FC)FLAGS"
                    pat <- paste0("^[[:space:]]*PKG_", this, ".*SHLIB_OPENMP_", this2)
                    if(any(grepl(pat, lines, useBytes = TRUE))) {
                        used <- c(used, this)
                        f_or_fc <- "F"
                        if(f == "FC") {
                            if(any(grepl("SHLIB_OPENMP_FCFLAGS",
                                         lines, useBytes = TRUE))) {
                                f_or_fc <- "FC"
                                if (!any) warningLog(Log)
                                any <- TRUE
                                msg <- "SHLIB_OPENMP_FCFLAGS is defunct (used in PKG_FCFLAGS)\n"
                                printLog(Log, "  ", m, ": ", msg)
                            }
                        }
                        if(f == "C" && !have_c) {
                            if (!any) noteLog(Log)
                            any <- TRUE
                            msg <- "SHLIB_OPENMP_CFLAGS is included in PKG_CFLAGS without any C files\n"
                            printLog(Log, "  ", m, ": ", msg)
                            next
                        }
                        ## as from R 3.6.0, PKG_FFLAGS is by default
                        ## used for both fixed- and free-form files.
                        if(f == "F" && !(have_f || have_f9x)) {
                            if (!any) noteLog(Log)
                            any <- TRUE
                            msg <- "SHLIB_OPENMP_FFLAGS is included in PKG_FFLAGS without any Fortran files\n"
                            printLog(Log, "  ", m, ": ", msg)
                            next
                        }
                        if(f == "CXX" && !have_cxx) {
                            if (!any) noteLog(Log)
                            any <- TRUE
                            msg <- "SHLIB_OPENMP_CXXFLAGS is included in PKG_CXXFLAGS without any C++ files\n"
                            printLog(Log, "  ", m, ": ", msg)
                            next
                        }
                        ## The recommendation is to use _F[C]FLAGS to
                        ## compile and _CFLAGS or _CXXFLAGS to link with Fortran
                        ## code (which is linked by the C or C++ compiler)
                        c_or_cxx <- if(have_cxx) "CXXFLAGS" else "CFLAGS"
                        this2 <- if (f %in% c("F", "FC")) c_or_cxx else this
                        pat2 <- paste0("SHLIB_OPENMP_", this2)
                        if(!any(grepl(pat2, lines[c1], useBytes = TRUE))
                           && !use_fc) {
                            if (!any) noteLog(Log)
                            any <- TRUE
                            msg <- if(anyInLIBS) {
                                if (f == "F")
                                    sprintf("SHLIB_OPENMP_FFLAGS is included in PKG_FFLAGS but not SHLIB_OPENMP_%s in PKG_LIBS\n", c_or_cxx)
                                else if (f == "FC")
                                     sprintf("SHLIB_OPENMP_%sFLAGS is included in PKG_FCFLAGS but not SHLIB_OPENMP_%s in PKG_LIBS\n", f_or_fc, c_or_cxx)
                               else
                                    sprintf("SHLIB_OPENMP_%s is included in PKG_%s but not in PKG_LIBS\n",
                                            this, this)
                            } else {
                                msg3 <- TRUE
                                sprintf("SHLIB_OPENMP_%s is included in PKG_%s but no OPENMP macro in PKG_LIBS\n",
                                           this, this)
                            }
                            printLog(Log, "  ", m, ": ", msg)
                        }
                    } else {
                        ## several packages have the wrong flag
                        pat <- paste0("^[[:space:]]*PKG_", this, ".*SHLIB_OPENMP_")
                        if(any(c2 <- grepl(pat, lines, useBytes = TRUE))) {
                            if (!any) noteLog(Log)
                            any <- TRUE
                            if (!anyInLIBS) msg3 <- TRUE
                            ## assume just one
                            l <- lines[c2][1L]
                            found <- sub(".*SHLIB_OPENMP_([A-Z]*).*", "\\1", l, useBytes = TRUE)
                            printLog(Log,"  ", m, ": ",
                                     sprintf("incorrect macro SHLIB_OPENMP_%s included in PKG_%s\n",
                                             found, this))
                        }
                    }
                }
                ## Now check if PKG_LIBS includes a macro that is not used
                ## in PKG_*FLAGS, or more than one.
                cnt <- 0L
                for (f in c("C", "CXX", "F", "FC"))  {
                    this <- paste0(f, "FLAGS")
                    pat2 <- paste0("SHLIB_OPENMP_", this)
                    res <- any(grepl(pat2 , lines[c1], useBytes = TRUE))
                    cnt <- cnt + res
                    if (res && f %in% c( "F", "FC") && !use_fc)  {
                        if (!any) noteLog(Log)
                        any <- TRUE
                        printLog(Log,"  ", m, ": ",
                                 sprintf("SHLIB_OPENMP_%s is included in PKG_LIBS but linking is by %s\n",
                                         this,
                                         if(have_cxx) "C++" else "C"))
                        next
                    }
                    if (res &&
                        ((!have_cxx && f == "CXX") || (have_cxx && f == "C"))) {
                        if (!any) noteLog(Log)
                        any <- TRUE
                        printLog(Log,"  ", m, ": ",
                                 sprintf("SHLIB_OPENMP_%s is included in PKG_LIBS but linking is by %s\n",
                                         this,
                                         if(have_cxx) "C++" else "C"))
                        next
                    }
                    if (this %in% used) next
                    ## Fortran exceptions
                    if (((!have_cxx && f == "C") || (have_cxx && f == "CXX"))
                        && any(c("FFLAGS", "FCFLAGS") %in% used)) next
                    ## A package still used PKG_FCFLAGS
                    ## ('%in%' as 'used' may have length != 1, which '&&'
                    ## would reject in R >= 4.3.0)
                    if (use_fc && f == "F" && "FCFLAGS" %in% used) next
                    if (res) {
                        if (!any) noteLog(Log)
                        any <- TRUE
                        printLog(Log,"  ", m, ": ",
                                 sprintf("SHLIB_OPENMP_%s is included in PKG_LIBS but not in PKG_%s\n",
                                         this, this))
                    }
                }
                if (cnt > 1L) {
                    if (!any) noteLog(Log)
                    any <- TRUE
                    if (!anyInLIBS) msg3 <- TRUE
                    printLog(Log, "  ", m, ": ",
                             "it is not portable to include multiple",
                             " SHLIB_OPENMP_*' macros in PKG_LIBS",
                             "\n")
                }

                ## several packages include one of these in PKG_CPPFLAGS
                ## which is used for both C and C++ code.
                pat <- "^[[:space:]]*PKG_CPPFLAGS.*SHLIB_OPENMP_"
                if(any(grepl(pat, lines, useBytes = TRUE))) {
                    if (!any) noteLog(Log)
                    any <- TRUE
                    msg2 <- TRUE
                    printLog(Log,"  ", m, ": ",
                             "it is not portable to include",
                             " SHLIB_OPENMP_*' macros in PKG_CPPFLAGS",
                             "\n")
                }
            }
            if (!any) resultLog(Log, "OK")
            else {
                wrapLog("Use of these macros is discussed in sect 1.2.1.1 of",
                        paste0(sQuote("Writing R Extensions"), "."),
                        "The macros for different languages may differ",
                        "so the matching macro must be used in",
                        "PKG_CXXFLAGS (etc) and match that used in",
                        "PKG_LIBS (except for Fortran: see the manual).\n")
                if (msg2)
                    wrapLog("PKG_CPPFLAGS is used for both C and C++ code",
                            "so it is not portable to use it",
                            "for these macros.\n")
                if (msg3)
                    wrapLog("Using a SHLIB_OPENMP_ macro for compilation",
                            "but none in PKG_LIBS",
                            "is not portable and may result in",
                            "installation errors.\n")
            }
        }

        ## Check include directives for use of R_HOME which may contain
        ## spaces for which there is no portable way to quote/escape.
        all_files <-
            dir(".",
                pattern = "^(Makefile|Makefile.in|Makefile.win|Makefile.ucrt|makefile|GNUmakefile)$",
                recursive = TRUE)
        all_files <- unique(sort(all_files))
        if(length(all_files)) {
            checkingLog(Log, "include directives in Makefiles")
            bad_lines <-
                lapply(all_files,
                       function(f) {
                           s <- readLines(f, warn = FALSE)
                           grep("^include .*R_HOME", s, value = TRUE)
                       })
            bad_files <- all_files[lengths(bad_lines) > 0L]
            if(length(bad_files)) {
                noteLog(Log,
                        "Found the following Makefile(s) with an include directive with a pathname using R_HOME:")
                printLog0(Log, .format_lines_with_indent(bad_files),
                          "\n")
                msg <-
                    c("Even though not recommended, variable R_HOME may contain spaces.",
                      "Makefile directives use space as a separator and there is no portable",
                      "way to quote/escape the space in Make rules and directives.  However,",
                      "one can and should quote pathnames when passed from Makefile to the",
                      "shell, and this can be done specifically when invoking Make recursively.",
                      "It is therefore recommended to use the Make '-f' option to include files",
                      "in directories specified using R_HOME.  This option can be specified",
                      "multiple times to include multiple Makefiles.  Note that 'Makeconf' is",
                      "included automatically into top-level makefile of a package.",
                      "More information can be found in 'Writing R Extensions'.")
                printLog0(Log, paste(msg, collapse = "\n"), "\n")
            } else resultLog(Log, "OK")
        }

    }

    ## Optional checks on the compiled-code sources, gated by environment
    ## variables: pragmas suppressing diagnostics in C/C++ headers/code
    ## (_R_CHECK_PRAGMAS_) and non-portable compilation flags found in the
    ## installation log (_R_CHECK_COMPILATION_FLAGS_).  Results go to the
    ## check 'Log'; sets 'InstLog' in the enclosing environment when the
    ## installation log is consulted.
    check_src <- function() {
        Check_pragmas <- Sys.getenv("_R_CHECK_PRAGMAS_", "FALSE")
        if(config_val_to_logical(Check_pragmas) &&
           any(dir.exists(c("src", "inst/include")))) {
            checkingLog(Log, "pragmas in C/C++ headers and code")
            ans <- .check_pragmas('.')
            if(length(ans)) {
                ## 'warn': pragmas suppressing important diagnostics;
                ## 'port': non-portable pragmas.  Either escalates to a
                ## warning, otherwise this is just a note.
                warn <- attr(ans, "warn")
                port <- attr(ans, "port")
                if(length(warn) || length(port))
                {
                    warningLog(Log)
                    msg <- character()
                    rest <- ans
                    if(length(warn)) {
                        msg <- c(msg, if(length(warn) == 1L)
                                          "File which contains pragma(s) suppressing important diagnostics"
                                      else
                                          "Files which contain pragma(s) suppressing important diagnostics",
                                 .pretty_format(warn))
                        rest <- setdiff(ans, warn)
                    }
                    if(length(port)) {
                        msg <- c(msg, if(length(port) == 1L)
                                          "File which contains non-portable pragma(s)"
                                      else
                                          "Files which contain non-portable pragma(s)",
                                 .pretty_format(port))
                    }
                    if(length(rest)) {
                        msg <- c(msg, if(length(rest) == 1L)
                                          "File which contains pragma(s) suppressing diagnostics:"
                                      else
                                          "Files which contain pragma(s) suppressing diagnostics:",
                                 .pretty_format(rest))
                    }
                } else {
                    noteLog(Log)
                    msg <- if(length(ans) == 1L)
                               "File which contains pragma(s) suppressing diagnostics:"
                           else
                               "Files which contain pragma(s) suppressing diagnostics:"
                    msg <- c(msg, .pretty_format(ans))
                }
                printLog0(Log, paste(c(msg,""), collapse = "\n"))
            } else resultLog(Log, "OK")
        }

        Check_flags <- Sys.getenv("_R_CHECK_COMPILATION_FLAGS_", "FALSE")
        if(config_val_to_logical(Check_flags)) {
            instlog <- if (startsWith(install, "check"))
                           install_log_path
                       else
                           file.path(pkgoutdir, "00install.out")
            if (file.exists(instlog) && dir.exists('src')) {
                InstLog <<- instlog
                checkingLog(Log, "compilation flags used")
                lines <- readLines(instlog, warn = FALSE)
                ## skip stuff before building libs
                ll <- grep("^[*][*] libs", lines, useBytes = TRUE)
                if (length(ll)) lines <- lines[-seq_len(ll[1L])]
                ## drop GCC 14 diagnostics lines with code literals
                patt <- "^[[:digit:]]+ \\|"
                lines <- lines[!grepl(patt, lines, useBytes = TRUE)]
                poss <- grep(" -[Wmf]", lines,  useBytes = TRUE, value = TRUE)
                ## compilation lines start at the left margin,
                ## and are not configure/echo lines
                poss <- grep("^(\\s|checking|echo)", poss, perl = TRUE,
                             invert = TRUE, value = TRUE, useBytes = TRUE)
                ## split command lines into individual flag tokens
                tokens <- unique(unlist(strsplit(poss, " ", perl = TRUE,
                                                 useBytes = TRUE)))
                tokens <- gsub('["\']$', "", tokens,
                               perl = TRUE, useBytes = TRUE)
                ## datasailr gets trailing )
                tokens <- gsub('[)]$', "", tokens,
                               perl = TRUE, useBytes = TRUE)
                warns <- grep("^[-]W", tokens,
                              value = TRUE, perl = TRUE, useBytes = TRUE)
                ## Not sure -Wextra and -Weverything are portable, though
                ## -Werror is not compiler independent
                ##   (as what is a warning is not)
                ## -Wno-dev is from qt, not a compiler flag.
                ## -Wstrict-prototypea is long supported by gcc and LLVM/Apple clang.
                except <- Sys.getenv("_R_CHECK_COMPILATION_FLAGS_KNOWN_", "")
                except <- unlist(strsplit(except, "\\s", perl = TRUE))
                warns <- setdiff(warns,
                                 c(except, "-Wall", "-Wextra", "-Weverything",
                                   "-Wno-dev", "-Wstrict-prototypes",
                                   "-Wno-strict-prototypes"))
                warns <- warns[!startsWith(warns, "-Wl,")] # linker flags
                ## tokens were split on spaces, so match the bare flag
                ## (a leading space in the pattern could never match)
                diags <- grep("^-fno-diagnostics-show-option", tokens,
                              useBytes = TRUE, value = TRUE)
                ## next set are about unsafe optimizations
                opts <- grep("-f(fast-math|unsafe-math-optimizations|associative-math|reciprocal-math)",
                             tokens, useBytes = TRUE, value = TRUE)
                machs <- grep("^[-]m", tokens,
                              value = TRUE, perl = TRUE, useBytes = TRUE)
                ## The only -m flag which is reasonably portable is
                ## -mtune and even that is debatable as it currently
                ## does nothing and may be removed on clang.
                machs <- setdiff(machs,
                                 c(except, c("-m", # not a flag
                                             "-msse2", "-mfpmath=sse", # SAFE_FFLAGS
                                             "-mstackrealign", # SAFE_* / Windows EOPTS
                                             "-m32", # BRugs
                                             "-m64", # RcppParallel
                                             "-multiply_defined" # macOS
                                             )))
                machs <- machs[!startsWith(machs, "-mtune=")]
                ## This should only appear on macOS!
                ## -mmacosx- has been replaced by -mmacos-
                if(grepl('darwin', R.version$platform))
                    machs <- machs[!startsWith(machs, "-mmacos")]  # macOS target flags
                warns <- c(warns, diags, opts, machs)
                if(any(startsWith(warns, "-Wno-")) || length(diags)) {
                    warningLog(Log)
                    msg <- c("Compilation used the following non-portable flag(s):",
                             .pretty_format(sort(warns)),
                             "including flag(s) suppressing warnings")
                    printLog0(Log, paste(c(msg,""), collapse = "\n"))
                } else if(length(warns)) {
                    noteLog(Log) # or warningLog?
                    msg <- c("Compilation used the following non-portable flag(s):",
                             .pretty_format(sort(warns)))
                    printLog0(Log, paste(c(msg,""), collapse = "\n"))
                } else
                    resultLog(Log, "OK")
            }
        }
    }

    check_sos <- function() {
        ## Inspect the package's compiled code (shared objects) for
        ## undesirable symbols: entry points that can terminate R or
        ## bypass the console (abort/exit/printf-family, Fortran I/O,
        ## system RNGs, [v]sprintf) and non-API R entry points.  The
        ## actual scan is done by tools:::check_compiled_code() in a
        ## subprocess; this function classifies its output into
        ## OK/NOTE/WARNING and logs explanatory text.
        checkingLog(Log, "compiled code")
        ## from sotools.R
        Rcmd <- paste(opWarn_string, "\n",
                      sprintf("tools:::check_compiled_code(\"%s\")",
                              file.path(libdir, pkgname)))
        out <- R_runR0(Rcmd, R_opts2, "R_DEFAULT_PACKAGES=NULL")
        if(length(out) == 1L && startsWith(out, "Note:")) {
            ## This will be a note about symbols.rds not being available
            if(!is_base_pkg) {
                noteLog(Log)
                printLog0(Log, c(out, "\n"))
            } else resultLog(Log, "OK")
        } else if(length(out)) {
            ## If we have named objects then we have symbols.rds and
            ## will not be picking up symbols just in system libraries.
            haveObjs <- any(grepl("^ *Object", out))
            ## symbols that may terminate R
            pat <- paste("possibly from",
                         sQuote("(abort|assert|exit|_exit|_Exit|stop)"))
            ## non-API entry points slated for removal
            rempat <- "REAL0|COMPLEX0|ddfind|DDVAL|ENSURE_NAMEDMAX|INTERNAL|PRSEEN|SET_PRSEEN|SYMVALUE"
            if(haveObjs && any(grepl(pat, out)) && pkgname %notin% "parallel")
                ## need _exit in forked child
                warningLog(Log)
            ## Very crude hack to escalate NOTE about some non-API
            ## calls to a WARNING. Hopefully this can be dropped again
            ## soon.
            else if (length(grep("Found non-API", out)) &&
                     any(grepl(rempat, out))) {
                warningLog(Log)
                if (any(grepl("calls", out))) {
                    ## list the specific soon-to-be-removed entry
                    ## points that were actually seen in the output
                    ep <- Filter(function(x) any(grepl(x, out)),
                                 strsplit(rempat, "\\|")[[1]])
                    epq <- paste(sQuote(ep), collapse = ", ")
                    out <- paste(c(out,
                                   "These entry points may be removed soon:",
                                   epq),
                                 collapse = "\n")
                }
                else
                    out <- paste(c(out,
                                   "This entry point may be removed soon."),
                                 collapse = "\n")
            }
            else {
                ## look for Fortran detritus
                pat1 <- paste("possibly from", sQuote("(open|close|rewind)"))
                pat2 <- paste("possibly from", sQuote("(read|write)"))
                pat3 <- paste("possibly from", sQuote("close"))
                pat4 <- paste("possibly from", sQuote("open"))
                pat5 <- paste("possibly from", sQuote("sprintf"))
                pat6 <- paste("possibly from", sQuote("vsprintf"))
                ## NOTE(review): '&&' binds tighter than '||', so
                ## haveObjs only guards the pat1/pat2 clause here, not
                ## the later alternatives -- confirm this is intended.
                if(haveObjs &&
                   (any(grepl(pat1, out)) && !any(grepl(pat2, out))) ||
                   (any(grepl(pat3, out)) && !any(grepl(pat4, out))) ||
                   (any(grepl(pat4, out)) && !any(grepl(pat3, out))) ||
                   any(grepl(pat5, out)) || any(grepl(pat6, out))
                   )
                    warningLog(Log)
                else noteLog(Log)
            }
            printLog0(Log, paste(c(out, ""), collapse = "\n"))
            ## counts drive which explanatory paragraphs are emitted
            nAPIs <- length(grep("Found non-API", out))
            nRS <- length(grep("Found no call", out))
            nBad <- length(grep(", possibly from ", out))
            msg <- if (nBad) {
                if(haveObjs)
                    c("Compiled code should not call entry points which",
                      "might terminate R nor write to stdout/stderr instead of",
                      "to the console, nor use Fortran I/O nor system RNGs nor [v]sprintf.\n")
                else
                    ## without symbols.rds the symbols may merely be
                    ## linked in from libraries, so soften the message
                    c("Compiled code should not call entry points which",
                      "might terminate R nor write to stdout/stderr instead of",
                      "to the console, nor use Fortran I/O nor system RNGs nor [v]sprintf.",
                      "The detected symbols are linked",
                      "into the code but might come from libraries",
                      "and not actually be called.\n")
            } else character()
            if(nAPIs)
                msg <- c(msg,
                         "Compiled code should not call non-API entry points in R.\n")
            if(nRS)
                msg <- c(msg,
                         "It is good practice to register native routines and to disable symbol search.\n")
            msg2 <- "See 'Writing portable packages' in the 'Writing R Extensions' manual"
            msg3 <- "and section 'Moving into C API compliance' for issues with the use of non-API entry points.\n"
            wrapLog("\n", paste(msg, collapse = " "), "\n",
                    if(nAPIs) paste0(msg2, ",\n", msg3)
                    else paste0(msg2, ".")
                    )
        } else resultLog(Log, "OK")
    }

    check_rust <- function()
    {
        ## Check Rust compilation hygiene.
        ##
        ## It is impossible to tell definitively if a package compiles
        ## Rust code.  SystemRequirements in DESCRIPTION is
        ## free-format, and only advisory.  So we look at the
        ## installation log, which we found in check_src().
        ##
        ## Returns NA (without logging a check) when there is no
        ## evidence of Rust compilation; otherwise logs OK or a
        ## WARNING listing the problems found.
        if (is.na(InstLog)) return(NA)
        ##message("InstLog = ", InstLog)
        lines <- readLines(InstLog, warn = FALSE)
        ## evidence that cargo/rustc ran during installation
        l1 <- grep("(cargo build|   Compiling )", lines)
        if(!length(l1)) return(NA)
        l2 <- grep("   Compiling ", lines)
        checkingLog(Log, "Rust compilation")
        msg <- character(); OK <- TRUE
        ## crates should be vendored, not downloaded at install time
        if(any(grepl("Downloading crates ...", lines, fixed = TRUE))) {
            OK <- FALSE
            msg <- c(msg, "Downloads Rust crates")
        }
        ## only the part of the log before the first compilation line
        ## is expected to report the rustc version
        lines <- if(length(l2)) lines[1:l2[1L]] else lines[1:l1[1L]]
        patt <- "rustc *[[:digit:]]+[].][[:digit:]]"
        ans <- any(grepl(patt, lines, ignore.case = TRUE))
        if(!ans) {
            OK <- FALSE
            msg <- c(msg, "No rustc version reported prior to compilation")
##            print(lines)
        }
        if(OK)
            resultLog(Log, "OK")
        else {
            msg <- paste(paste0("  ", msg), collapse = "\n")
            warningLog(Log, msg)
        }
    }

    check_loading <- function(arch = "")
    {
        ## Run a sequence of load/unload checks on the installed
        ## package, each in a fresh R subprocess, logging a result for
        ## every step:
        ##   - plain library()                          (fatal on error)
        ##   - library() with R_DEFAULT_PACKAGES=NULL   (stated deps only)
        ##   - detach() after loading
        ##   - namespace load/unload (when a NAMESPACE file exists)
        ##   - loading when not on the library search path
        ##   - suppressibility of startup messages
        ##   - overwriting of registered S3 methods
        ## 'arch' names a sub-architecture ("" for the default one);
        ## it selects the R options and extra environment used.
        checkingLog(Log, "whether the package can be loaded")
        Rcmd <- sprintf("library(%s)", pkgname)
        opts <- if(nzchar(arch)) R_opts4 else R_opts2
        env <- "R_DEFAULT_PACKAGES=NULL"
        env1 <- if(nzchar(arch)) env0 else character()
        t1 <- proc.time()
        out <- R_runR0(Rcmd, opts, env1, arch = arch)
        t2 <- proc.time()
        print_time(t1, t2, Log)
        ## a non-zero exit status means the subprocess died: report
        ## and stop checking altogether
        if(length(st <- attr(out, "status"))) {
            errorLog(Log)
            wrapLog("Loading this package had a fatal error",
                    "status code ", st,  "\n")
            if(length(out))
                printLog0(Log,
                          paste(c("Loading log:", out, ""),
                                collapse = "\n"))
            summaryLog(Log)
            do_exit(1L)
        }
        if (any(startsWith(out, "Error"))) {
            errorLog(Log)
            printLog0(Log, paste(c(out, ""), collapse = "\n"))
            wrapLog("\nIt looks like this package",
                    "has a loading problem: see the messages",
                    "for details.\n")
            maybe_exit(1L)
        } else resultLog(Log, "OK")

        ## re-run with R_DEFAULT_PACKAGES=NULL so that only declared
        ## dependencies are attached
        checkingLog(Log, "whether the package can be loaded with stated dependencies")
        t1 <- proc.time()
        out <- R_runR0(Rcmd, opts, c(env, env1), arch = arch)
        t2 <- proc.time()
        print_time(t1, t2, Log)
        if (any(startsWith(out, "Error")) || length(attr(out, "status"))) {
            warningLog(Log)
            printLog0(Log, paste(c(out, ""), collapse = "\n"))
            wrapLog("\nIt looks like this package",
                    "(or one of its dependent packages)",
                    "has an unstated dependence on a standard",
                    "package.  All dependencies must be",
                    "declared in DESCRIPTION.\n")
            wrapLog(msg_DESCRIPTION)
        } else resultLog(Log, "OK")

        ## the "---- unloading" marker printed by the script lets us
        ## show only the output produced by the detach step below
        checkingLog(Log, "whether the package can be unloaded cleanly")
        Rcmd <- sprintf("suppressMessages(library(%s)); cat('\n---- unloading\n'); detach(\"package:%s\")",
                        pkgname, pkgname)
        t1 <- proc.time()
        out <- R_runR0(Rcmd, opts, c(env, env1), arch = arch)
        t2 <- proc.time()
        print_time(t1, t2, Log)
        if (any(grepl("^(Error|\\.Last\\.lib failed)", out)) ||
            length(attr(out, "status"))) {
            warningLog(Log)
            ## trim output to the unloading step onwards
            ll <- grep("---- unloading", out)
            if(length(ll)) {
                ll <- ll[length(ll)]
                out <- out[ll:length(out)]
            }
            printLog0(Log, paste(c(out, ""), collapse = "\n"))
        } else resultLog(Log, "OK")

        ## and if it has a namespace, that we can load/unload just
        ## the namespace
        if (file.exists(file.path(pkgdir, "NAMESPACE"))) {
            checkingLog(Log, "whether the namespace can be loaded with stated dependencies")
            Rcmd <-
                sprintf("%s\ntools:::.load_namespace_rather_quietly(\"%s\")",
                        opWarn_string, pkgname)
            env2 <- Sys.getenv("_R_LOAD_CHECK_S4_EXPORTS_", "NA")
            env2 <- paste0("_R_LOAD_CHECK_S4_EXPORTS_=",
                           if(env2 == "all") env2 else pkgname)
            t1 <- proc.time()
            out <- R_runR0(Rcmd, opts, c(env, env1, env2), arch = arch)
            t2 <- proc.time()
            print_time(t1, t2, Log)
            ## 'any' records what was logged: TRUE -> print output plus
            ## an explanation, NA -> print output only.  (The local
            ## binding shadows base::any only as a value; function
            ## calls any(...) below still find base::any.)
            any <- FALSE
            if (any(startsWith(out, "Error")) || length(attr(out, "status"))) {
                warningLog(Log)
                any <- TRUE
            } else {
                ## Drop tcltk warning if no DISPLAY variable
                if(pkgname == "tcltk")
                    out <- filtergrep("Warning: no DISPLAY variable so Tk is not available",
                                      out, fixed = TRUE)
                ## Drop warnings about replacing previous imports unless
                ## these were disabled for the installation check.
                check_imports_flag <-
                    Sys.getenv("_R_CHECK_REPLACING_IMPORTS_", "TRUE")
                if(config_val_to_logical(check_imports_flag))
                    out <- filtergrep("Warning: replacing previous import", out,
                                      fixed = TRUE)
                if(any(startsWith(out, "Warning: S4 exports"))) {
                    warningLog(Log)
                    any <- if(length(out) == 1L) NA else TRUE
                } else if(any(startsWith(out, "Warning"))) {
                    noteLog(Log)
                    any <- TRUE
                }
            }
            if (is.na(any)) {
                printLog0(Log, paste(c(out, ""), collapse = "\n"))
            } else if(any) {
                printLog0(Log, paste(c(out, ""), collapse = "\n"))
                wrapLog("\nA namespace must be able to be loaded",
                        "with just the base namespace loaded:",
                        "otherwise if the namespace gets loaded by a",
                        "saved object, the session will be unable",
                        "to start.\n\n",
                        "Probably some imports need to be declared",
                        "in the NAMESPACE file.\n")
            } else resultLog(Log, "OK")

            checkingLog(Log,
                        "whether the namespace can be unloaded cleanly")
            Rcmd <- sprintf("invisible(suppressMessages(loadNamespace(\"%s\"))); cat('\n---- unloading\n'); unloadNamespace(\"%s\")",
                            pkgname, pkgname)
            t1 <- proc.time()
            out <- if (is_base_pkg && pkgname != "stats4")
                R_runR0(Rcmd, opts, "R_DEFAULT_PACKAGES=NULL", arch = arch)
            else R_runR0(Rcmd, opts, env1)
            t2 <- proc.time()
            print_time(t1, t2, Log)
            if (any(grepl("^(Error|\\.onUnload failed)", out)) ||
                length(attr(out, "status"))) {
                warningLog(Log)
                ## trim output to the unloading step onwards
                ll <- grep("---- unloading", out)
                if(length(ll)) {
                    ll <- ll[length(ll)]
                    out <- out[ll:length(out)]
                }
                printLog0(Log, paste(c(out, ""), collapse = "\n"))
            } else resultLog(Log, "OK")
        }

        ## No point in this test if already installed in .Library
        if (pkgname %notin% dir(.Library)) {
            checkingLog(Log, "loading without being on the library search path")
            Rcmd <- sprintf("library(%s, lib.loc = '%s')", pkgname, libdir)
            opts <- if(nzchar(arch)) R_opts4 else R_opts2
            env <- setRlibs(pkgdir = pkgdir, libdir = libdir,
                            self2 = FALSE, quote = TRUE)
            if(nzchar(arch)) env <- c(env, "R_DEFAULT_PACKAGES=NULL")
            t1 <- proc.time()
            out <- R_runR0(Rcmd, opts, env, arch = arch)
            t2 <- proc.time()
            print_time(t1, t2, Log)
            if (any(startsWith(out, "Error"))) {
                warningLog(Log)
                printLog0(Log, paste(c(out, ""), collapse = "\n"))
                wrapLog("\nIt looks like this package",
                        "has a loading problem when not on .libPaths:",
                        "see the messages for details.\n")
            } else resultLog(Log, "OK")
        }

        if(!extra_arch && !is_base_pkg) {
            ## Look to see if there is a startup function.
            Rcmd <- sprintf("ls(asNamespace('%s'), all = TRUE)", pkgname)
            out <- R_runR0(Rcmd, opts, arch = arch)
            if (any(grepl("[.]on(Load|Attach)", out))) {
                checkingLog(Log, "whether startup messages can be suppressed")
                Rcmd <- sprintf("suppressWarnings(suppressPackageStartupMessages(library(%s, lib.loc = '%s',  warn.conflicts=FALSE)))", pkgname, libdir)
                opts <- if(nzchar(arch)) R_opts4 else R_opts2
                env <- character()
                if(nzchar(arch)) env <- c(env, "R_DEFAULT_PACKAGES=NULL")
                t1 <- proc.time()
                out <- R_runR0(Rcmd, opts, env, arch = arch)
                t2 <- proc.time()
                print_time(t1, t2, Log)
                ## any output at all means a message escaped suppression
                if (length(out)) {
                    noteLog(Log)
                    printLog0(Log, paste(c(out, ""), collapse = "\n"))
                    wrapLog("\nIt looks like this package",
                            "(or a package it requires)",
                            "has a startup message which cannot be suppressed:",
                            "see ?packageStartupMessage.\n")
                } else resultLog(Log, "OK")
            }
        }

        if(!extra_arch && !is_base_pkg) {
            check_S3reg <-
                Sys.getenv("_R_CHECK_OVERWRITE_REGISTERED_S3_METHODS_", "NA")
            check_S3reg <- if(check_S3reg == "NA") check_incoming else {
                config_val_to_logical(check_S3reg)
            }
            if(check_S3reg) {
                checkingLog(Log, "use of S3 registration")
                Rcmd <- sprintf("suppressWarnings(suppressPackageStartupMessages(loadNamespace('%s', lib.loc = '%s')))",
                                pkgname, libdir)
                opts <- if(nzchar(arch)) R_opts4 else R_opts2
                env <- Sys.getenv("_R_LOAD_CHECK_OVERWRITE_S3_METHODS_",
                                  "NA")
                env <- paste0("_R_LOAD_CHECK_OVERWRITE_S3_METHODS_=",
                              if(env == "all") env else pkgname)
                ## <FIXME>
                ## Oh dear.  R-ints says that if env var
                ## '_R_CHECK_OVERWRITE_REGISTERED_S3_METHODS_' is set to
                ## something true,
                ##   report already registered S3 methods in
                ##   base/recommended packages which are overwritten
                ##   when this package's namespace is loaded.
                ## As of 2017-12, to make this work as documented we
                ## really need to load all base and recommended
                ## ("standard") packages which register S3 methods
                ## first, which takes *quite some time*.
                ## There really should be a better way ...
                ## Running with
                ##   R_DEFAULT_PACKAGES=MASS,Matrix,boot,class,cluster,grDevices,graphics,grid,lattice,mgcv,nlme,nnet,parallel,rpart,spatial,splines,stats,survival,tcltk,tools,utils
                ## does not suppress package startup messages: so try to
                ## load the relevant standard package namespaces quietly.
                ## When checking a standard package p we should preload
                ## only the standard packages not depending on p.
                preloads <-
                    c("MASS", "Matrix", "boot", "class", "cluster",
                      "grDevices",  "graphics", "grid", "lattice",
                      "mgcv", "nlme", "nnet", "parallel", "rpart",
                      "spatial", "splines", "stats", "survival",
                      "tcltk", "tools", "utils")
                if(!is.na(match(pkgname, preloads))) {
                    rdepends <-
                        .get_standard_package_dependencies(reverse = TRUE,
                                                           recursive = TRUE)
                    preloads <- setdiff(preloads,
                                        c(pkgname, rdepends[[pkgname]]))
                }
                Rcmd <-
                    c(sprintf("suppressPackageStartupMessages(loadNamespace('%s', lib.loc = '%s'))",
                              preloads,
                              .Library),
                      Rcmd)
                env <- c(env, "R_DEFAULT_PACKAGES=NULL")
                out <- R_runR0(Rcmd, opts, env, arch = arch)
                ## </FIXME>
                if (any(grepl("^Registered S3 method.*standard package.*overwritten", out, useBytes = TRUE))) {
                    out <- out[!startsWith(out, "<environment: namespace:")]
                    warningLog(Log)
                    printLog0(Log, paste(out, collapse = "\n"), "\n")
                } else resultLog(Log, "OK")
            }
        }
    }

    run_examples <- function()
    {
        run_one_arch <- function(exfile, exout, arch = "", do_diff = TRUE)
        {
            ## Run the collected examples file 'exfile' for one
            ## sub-architecture, writing output to 'exout'.  Returns
            ## FALSE if running failed (so the caller can exit),
            ## TRUE otherwise.  On success it scans the output for
            ## significant warnings, optionally reports timings, and
            ## (if do_diff) compares against a saved -Ex.Rout.save.
            ## 'any' tracks whether anything other than OK was logged.
            any <- FALSE
            ## moved here to avoid WARNING + OK
            ##   if (nzchar(enc) && is_ascii) {
            ##       warningLog(Log,
            ##                  "checking a package with non-ASCII example code in an ASCII locale\n")
            ##       enc <- ""
            ##       any <- TRUE
            ##   }
            ## an optional profile file may be injected via
            ## R_PROFILE_USER, in which case --no-init-file is dropped
            cprof <- Sys.getenv("_R_CHECK_EXAMPLES_PROFILE_", "")
            cprof <- if(!file.exists(cprof)) "" else normalizePath(cprof)
            Ropts <- if(nzchar(cprof)) {
                         if(nzchar(arch)) {
                             ## R_opts3 without --no-init-file
                             "--no-site-file --no-save --no-restore"
                         } else
                             ## R_opts without --no-init-file
                             "--no-site-file --no-save --no-restore --no-environ"
                     } else if(nzchar(arch)) R_opts3 else R_opts
            if (use_valgrind) Ropts <- paste(Ropts, "-d valgrind")
            t1 <- proc.time()
            tlim <- get_timeout(Sys.getenv("_R_CHECK_EXAMPLES_ELAPSED_TIMEOUT_",
                                Sys.getenv("_R_CHECK_ELAPSED_TIMEOUT_")))
            ## might be diff-ing results against tests/Examples later
            ## so force LANGUAGE=en
            status <- R_runR0(NULL, c(Ropts, enc),
                              c("LANGUAGE=en", "_R_CHECK_INTERNALS2_=1",
                                if(nzchar(arch)) env0, jitstr,
                                if(R_cdo_examples) elibs_cdo else elibs,
                                if(nzchar(cprof))
                                    paste0("R_PROFILE_USER=", cprof)),
                              stdout = exout, stderr = exout,
                              stdin = exfile, arch = arch, timeout = tlim)
            t2 <- proc.time()
            print_time(t1, t2, Log)
            if (status) {
                errorLog(Log, "Running examples in ",
                         sQuote(basename(exfile)),
                         " failed")
                ## Try to spot the offending example right away.
                ## Sometimes processes need extra time to shut down,
                ## particularly parallel cluster on Windows, hence a hack to retry after 2 sec:
                txt <- tryCatch(suppressWarnings(readLines(exout, warn = FALSE)),
                                error = function(e){Sys.sleep(2); readLines(exout, warn = FALSE)})
                txt <- paste(txt, collapse = "\n")
                ## Look for the header section anchored by a
                ## subsequent call to flush(): needs to be kept in
                ## sync with the code in massageExamples (in
                ## testing.R).  Should perhaps also be more
                ## defensive about the prompt ...
                chunks <- strsplit(txt,
                                   "> ### \\* [^\n]+\n> \n> flush[^\n]+\n> \n", useBytes = TRUE)[[1L]]
                ## convert "bytes" to string, with <xx> for invalid bytes
                chunks <- iconv(chunks, sub="byte")
                if((ll <- length(chunks)) >= 2) {
                    printLog(Log, "The error most likely occurred in:\n\n")
                    printLog0(Log, chunks[ll], "\n")
                } else {
                    ## most likely error before the first example
                    ## so show all the output.
                    printLog(Log, "The error occurred in:\n\n")
                    printLog0(Log, txt, "\n")
                }
                ## even on failure, report slow examples when timing
                if(do_timings) {
                    theta <-
                        as.numeric(Sys.getenv("_R_CHECK_EXAMPLE_TIMING_THRESHOLD_",
                                              "5"))
                    tfile <- paste0(pkgname, "-Ex.timings")
                    times <-
                        utils::read.table(tfile, header = TRUE, row.names = 1L,
                                      colClasses = c("character", rep.int("numeric", 3)))
                    o <- order(times[[1L]] + times[[2L]], decreasing = TRUE)
                    times <- times[o, ]
                    keep <- ((times[[1L]] + times[[2L]] > theta) |
                             (times[[3L]] > theta))
                    if(any(keep)) {
                        printLog(Log,
                                 sprintf("Examples with CPU (user + system) or elapsed time > %gs\n",
                                         theta))
                        out <- utils::capture.output(format(times[keep, ]))
                        printLog0(Log, paste(out, collapse = "\n"), "\n")
                    }
                }

                return(FALSE)
            }

            ## Look at the output from running the examples.  For
            ## the time being, report warnings about use of
            ## deprecated , as the next release will make
            ## them defunct and hence using them an error.
            bad <- FALSE
            ## Sometimes processes need extra time to shut down,
            ## particularly parallel cluster on Windows, hence a hack to retry after 2 sec:
            lines <- tryCatch(suppressWarnings(readLines(exout, warn = FALSE)),
                              error = function(e){Sys.sleep(2); readLines(exout, warn = FALSE)})
            ## r85870 changed .Deprecated to report the call, hence changed msg
            bad_lines <- grep("^Warning.*: .*is deprecated[.]$",
                              lines, useBytes = TRUE, value = TRUE)
            if(length(bad_lines)) {
                bad <- TRUE
                warningLog(Log, "Found the following significant warnings:\n")
                printLog0(Log, .format_lines_with_indent(bad_lines), "\n")
                wrapLog("Deprecated functions may be defunct as",
                        "soon as of the next release of R.\n",
                        "See ?Deprecated.\n")
            }
            ## screen devices (X11, quartz, windows) in examples
            bad_lines <- grep("^Warning.*screen devices should not be used in examples",
                              lines, useBytes = TRUE, value = TRUE)
            if(length(bad_lines)) {
                if(!bad) {
                    warningLog(Log,
                               "Found the following significant warnings:")
                    bad <- TRUE
                }
                printLog0(Log, .format_lines_with_indent(bad_lines), "\n")
                wrapLog("dev.new() is the preferred way to open a new device,",
                        "in the unlikely event one is needed.")
            }
            ## over-use of parallelism (CRAN allows at most 2 cores)
            bad_lines <- grep("^Warning: .*simultaneous processes spawned$",
                              lines, useBytes = TRUE, value = TRUE)
            if(length(bad_lines)) {
                if(!bad) {
                    warningLog(Log,
                               "Found the following significant warnings:")
                    bad <- TRUE
                }
                printLog0(Log, .format_lines_with_indent(bad_lines), "\n")
                wrapLog("Note that CRAN packages must never use more than two",
                        "cores simultaneously during their checks.")
            }
            ## examples must not change the working directory
            bad_lines <- grep("^Warning: working directory was changed to",
                              lines, useBytes = TRUE, value = TRUE)
            if(length(bad_lines)) {
                if(!bad) {
                    warningLog(Log,
                               "Found the following significant warnings:")
                    bad <- TRUE
                }
                printLog0(Log, .format_lines_with_indent(bad_lines), "\n")
            }
            ## examples must not modify the search path
            bad_lines <- grep("^Warning: items .* were removed from the search path",
                              lines, useBytes = TRUE, value = TRUE)
            if(length(bad_lines)) {
                if(!bad) {
                    warningLog(Log,
                               "Found the following significant warnings:")
                    bad <- TRUE
                }
                printLog0(Log, .format_lines_with_indent(bad_lines), "\n")
            }
            any <- any || bad

            if (!any && !(check_incoming && do_timings && do_diff))
                resultLog(Log, "OK")

            if (do_timings && do_diff) { ## do_diff = false is for re-running
                ## report examples whose CPU or elapsed time exceeds
                ## the threshold (default 5s)
                theta <-
                    as.numeric(Sys.getenv("_R_CHECK_EXAMPLE_TIMING_THRESHOLD_",
                                          "5"))
                tfile <- paste0(pkgname, "-Ex.timings")
                times <-
                    utils::read.table(tfile, header = TRUE, row.names = 1L,
                                      colClasses = c("character", rep.int("numeric", 3)))
                o <- order(times[[1L]] + times[[2L]], decreasing = TRUE)
                times <- times[o, ]

                keep <- ((times[[1L]] + times[[2L]] > theta) |
                         (times[[3L]] > theta))
                if(any(keep)) {
                    if(!any && check_incoming) {
                        noteLog(Log)
                        any <- TRUE
                    }
                    printLog(Log,
                             sprintf("Examples with CPU (user + system) or elapsed time > %gs\n",
                                     theta))
                    out <- utils::capture.output(format(times[keep, ]))
                    printLog0(Log, paste(out, collapse = "\n"), "\n")
                }

                ## optionally flag examples whose CPU/elapsed ratio
                ## suggests uncontrolled parallelism
                theta <-
                    as.numeric(Sys.getenv("_R_CHECK_EXAMPLE_TIMING_CPU_TO_ELAPSED_THRESHOLD_",
                                          NA_character_))
                if(!is.na(theta)) {
                    keep <- ((times[[1L]] + times[[2L]]) >=
                              pmax(theta * times[[3L]], 1))
                    if(any(keep)) {
                        if(!any && check_incoming) {
                            noteLog(Log)
                            any <- TRUE
                        }
                        printLog(Log,
                                 sprintf("Examples with CPU time > %g times elapsed time\n",
                                         theta))
                        bad <- times[keep, ]
                        ratio <- (bad[[1L]] + bad[[2L]]) / bad[[3L]]
                        bad <- cbind(bad, ratio = round(ratio, 3L))
                        bad <- bad[order(bad$ratio, decreasing = TRUE), ]
                        out <- utils::capture.output(format(bad))
                        printLog0(Log, paste(out, collapse = "\n"), "\n")
                    }
                }

                if(!any && check_incoming)
                    resultLog(Log, "OK")
            }

            ## Try to compare results from running the examples to
            ## a saved previous version.
            exsave <- file.path(pkgdir, test_dir, "Examples",
                                paste0(pkgname, "-Ex.Rout.save"))
            if (do_diff && file.exists(exsave)) {
                checkingLog(Log, "differences from ",
                            sQuote(basename(exout)),
                            " to ", sQuote(basename(exsave)))
                cmd <- paste0("invisible(tools::Rdiff('",
                              exout, "', '", exsave, "',TRUE,TRUE))")
                out <- R_runR0(cmd, R_opts2)
                if(length(out)) {
                    noteLog(Log)
                    printLog0(Log, paste(out, collapse = "\n"), "\n")
                }
                else
                    resultLog(Log, "OK")
            }

            TRUE
        }

        checkingLog(Log, "examples")
        if (!do_examples) resultLog(Log, "SKIPPED")
        else {
            pkgtopdir <- file.path(libdir, pkgname)
            cmd <- sprintf('tools:::.createExdotR("%s", "%s", silent = TRUE, use_gct = %s, addTiming = %s, commentDontrun = %s, commentDonttest = %s)',
                           pkgname, pkgtopdir, use_gct, do_timings,
                           !run_dontrun, !run_donttest)
            Rout <- tempfile("Rout")
            ## any arch will do here
            status <- R_runR0(cmd, R_opts2,
                              stdout = Rout, stderr = Rout)
            exfile <- paste0(pkgname, "-Ex.R")
            if (status) {
                errorLog(Log,
                         paste("Running massageExamples to create",
                               sQuote(exfile), "failed"))
                printLog0(Log,
                          paste(readLines(Rout, warn = FALSE),
                                collapse = "\n"),
                          "\n")
                maybe_exit(1L)
            }
            ## It ran, but did it create any examples?
            if (file.exists(exfile)) {
                ## <NOTE>
                ## This used to be
                ##   enc <- if (!is.na(e <- desc["Encoding"])) {
                ##       paste0("--encoding=", e)
                ##   } else ""
                ## but apparently these days .createExdotR() will always
                ## use Rd2ex() with its outputEncoding = "UTF-8" default
                ## so that the -Ex.R file will be in ASCII or UTF-8, and
                ## the latter can be the case when there is no package
                ## encoding.  However, always using
                ##   enc <- "--encoding=UTF-8"
                ## will warn in ASCII locales even in the all-ASCII
                ## case, so let us find out whether the -Ex.R file is
                ## all ASCII, and use --encoding=UTF-8 if not.
                enc <-
                    if(length(suppressMessages(showNonASCIIfile(exfile)))) {
                        "--encoding=UTF-8"
                    } else ""
                ## </NOTE>
                if (!this_multiarch) {
                    exout <- paste0(pkgname, "-Ex.Rout")
                    if(!run_one_arch(exfile, exout)) maybe_exit(1L)
                } else {
                    printLog(Log, "\n")
                    Log$stars <<-  "**"
                    res <- TRUE
                    for (arch in inst_archs) {
                        printLog(Log, "** running examples for arch ",
                                 sQuote(arch), " ...")
                        if (arch %in% R_check_skip_examples_arch) {
                            resultLog(Log, "SKIPPED")
                        } else {
                            tdir <- paste0("examples_", arch)
                            dir.create(tdir)
                            if (!dir.exists(tdir)) {
                                errorLog(Log,
                                         "unable to create examples directory")
                                summaryLog(Log)
                                do_exit(1L)
                            }
                            od <- setwd(tdir)
                            exout <- paste0(pkgname, "-Ex_", arch, ".Rout")
                            res <- res & run_one_arch(file.path("..", exfile),
                                                      file.path("..", exout),
                                                      arch)
                            setwd(od)
                        }
                    }
                    Log$stars <<-  "*"
                    if (!res) maybe_exit(1L)
                }
                cntFile <- paste0(exfile, "-cnt")
                if (file.exists(cntFile)) {
                    unlink(cntFile)
                    x <- Sys.getenv("_R_CHECK_DONTTEST_EXAMPLES_", "NA")
                    test_donttest <- !run_donttest &&
                        (if (x == "NA") as_cran else config_val_to_logical(x))
                    if (test_donttest) {
                        checkingLog(Log, "examples with --run-donttest")
                        cmd <- sprintf('tools:::.createExdotR("%s", "%s", silent = TRUE, use_gct = %s, addTiming = %s, commentDontrun = %s, commentDonttest = %s)',
                                       pkgname, pkgtopdir, use_gct, do_timings,
                                       !run_dontrun, FALSE)
                        Rout <- tempfile("Rout")
                        ## any arch will do here
                        status <- R_runR0(cmd, R_opts2,
                                          stdout = Rout, stderr = Rout)
                        exfile <- paste0(pkgname, "-Ex.R")
                        if (status) {
                            errorLog(Log,
                                     paste("Running massageExamples to create",
                                           sQuote(exfile), "failed"))
                            printLog0(Log,
                                      paste(readLines(Rout, warn = FALSE),
                                            collapse = "\n"),
                                      "\n")
                            maybe_exit(1L)
                        }
                        exout <- paste0(pkgname, "-Ex.Rout")
                        if(!run_one_arch(exfile, exout, do_diff = FALSE))
                            maybe_exit(1L)
                    }
                    else if (as_cran)
                        printLog(Log, "** found \\donttest examples:",
                                 " check also with --run-donttest\n")
                }
            } else {
                resultLog(Log, "NONE")
                no_examples <<- TRUE
            }
        }
    }

    ## this is also used for --run-demo
    ## this is also used for --run-demo
    ##
    ## Check (and optionally run) the scripts in 'test_dir' of the package
    ## sources.  Relies on many variables from the enclosing check driver
    ## (Log, pkgdir, pkgoutdir, do_install, this_multiarch, inst_archs,
    ## R_opts2/R_opts4, env0, jitstr, elibs_tests, use_gct, use_valgrind,
    ## stop_on_test_error, extra_arch, is_base_pkg, and the logging and
    ## exit helpers).
    ##
    ## test_dir: subdirectory of the package sources, default "tests";
    ##   "demo" switches on demo-specific behaviour (NOTE rather than
    ##   WARNING for unstated dependencies, and running gated on 'run').
    ## run: whether to actually execute the scripts; if FALSE, demos are
    ##   skipped silently and tests log "SKIPPED".
    run_tests <- function(test_dir = "tests", run = TRUE)
    {
        is_demo <- test_dir == "demo"
        ## Tests are always checked for unstated dependencies; demos only
        ## when _R_CHECK_PACKAGES_USED_IN_DEMO_ says so.  Note that the
        ## logical 'run' is passed as the 'unset' default of Sys.getenv()
        ## and hence coerced to "TRUE"/"FALSE" before being re-parsed by
        ## config_val_to_logical().
        check_packages_used <- !is_demo ||
            config_val_to_logical(Sys.getenv("_R_CHECK_PACKAGES_USED_IN_DEMO_", run))
        if (check_packages_used && !extra_arch && !is_base_pkg) {
            checkingLog(Log, "for unstated dependencies in ", sQuote(test_dir))
            Rcmd <- paste(opW_shE_F_str,
                          sprintf("tools:::.check_packages_used_in_tests(\"%s\", \"%s\")\n", pkgdir, test_dir))

            ## Any output from the child R process indicates a problem.
            out <- R_runR2(Rcmd, "R_DEFAULT_PACKAGES=NULL")
            if (length(out)) {
                ## Only a NOTE for demos, a WARNING for tests.
                if (is_demo) noteLog(Log) else warningLog(Log)
                printLog0(Log, paste(c(out, ""), collapse = "\n"))
                # wrapLog(msg_DESCRIPTION)
            } else resultLog(Log, "OK")
        }

        if (is_demo) {
            if (run) {
                checkingLog(Log, "demos")
            } else return()
        } else {
            if (test_dir == "tests")
                checkingLog(Log, "tests")
            else
                checkingLog(Log, "tests in ", sQuote(test_dir))
        }

        ## Run the test scripts for one sub-architecture (arch = "" means
        ## the only/default arch).  Copies the test sources into a work
        ## directory under pkgoutdir, runs tools:::.runPackageTestsR()
        ## in a child R process there, and on failure logs excerpts of
        ## the *.Rout.fail files.  Returns TRUE on success, FALSE on
        ## failure (callers then decide whether to exit).
        run_one_arch <- function(arch = "")
        {
            testsrcdir <- file.path(pkgdir, test_dir)
            testdir <- file.path(pkgoutdir, if (is_demo) "demo" else "tests")
            if(nzchar(arch)) testdir <- paste(testdir, arch, sep = "_")
            if(!dir.exists(testdir)) dir.create(testdir, mode = "0755")
            if(!dir.exists(testdir)) {
                ## dir.create() failed (e.g. permissions): fatal.
                errorLog(Log,
                         sprintf("unable to create %s", sQuote(testdir)))
                summaryLog(Log)
                do_exit(1L)
            }
            file.copy(Sys.glob(paste0(testsrcdir, "/*")),
                      testdir, recursive = TRUE)
            setwd(testdir)
            ## Forward slashes so the path can be embedded in the
            ## deparsed R command below (Windows tempfile() paths
            ## contain backslashes).
            logf <- gsub("\\", "/", tempfile(), fixed=TRUE)
            extra <- c(if(use_gct) "use_gct = TRUE",
                       if(use_valgrind) "use_valgrind = TRUE",
                       if(!stop_on_test_error) "stop_on_error = FALSE",
                       paste0('Log="', logf, '"'))
            ## might be diff-ing results against tests/*.R.out.save
            ## so force LANGUAGE=en
            cmd <- paste0("tools:::.runPackageTestsR(",
                          paste(extra, collapse = ", "), ")")
            t1 <- proc.time()
            ## Per-check timeout, falling back to the global check timeout.
            tlim <- get_timeout(Sys.getenv("_R_CHECK_TESTS_ELAPSED_TIMEOUT_",
                                Sys.getenv("_R_CHECK_ELAPSED_TIMEOUT_")))
            status <- R_runR0(cmd,
                              if(nzchar(arch)) R_opts4 else R_opts2,
                              env = c("LANGUAGE=en",
                                     "_R_CHECK_INTERNALS2_=1",
                              if(nzchar(arch)) env0, jitstr, elibs_tests),
                              stdout = "", stderr = "", arch = arch,
                              timeout = tlim)
            t2 <- proc.time()
            print_time(t1, t2, Log)
            if (status) {
                errorLog(Log)
                if (Log$con > 0L && file.exists(logf)) {
                    ## write individual results only to 00check.log
                    cat(readLines(logf, warn = FALSE),
                        sep = "\n", file = Log$con)
                }
                ## Don't just fail: try to log where the problem occurred.
                ## First, find the test(s) which failed.
                ## (Maybe there was an error without a failing test.)
                bad_files <- dir(".", pattern="\\.Rout\\.fail$")
                if (length(bad_files)) {
                    ## Read in output from the failed test(s)
                    ## (As from R 3.4.0 there can be more than one
                    ## with option --no-stop-on-test-error.)
                    for(f in bad_files) {
                        lines <- readLines(f, warn = FALSE)
                        ## Map the .Rout.fail name back to the source
                        ## script it came from: foo.R, foo.r, or a
                        ## processed foo.Rin.
                        f <- file.path(test_dir, sub("out\\.fail$", "", f))
                        src_files <- dir(".", pattern = "\\.[rR]$")
                        if (endsWith(basename(f), ".Rin.R")) {
                            f <- sub("\\.R$", "", f)
                        } else if (basename(f) %notin% src_files) {
                            f <- sub("R$", "r", f) # This assumes only one of foo.r and foo.R exists.
                            if (basename(f) %notin% src_files)
                                f <- sub("r$", "[rR]", f) # Just in case the test script got deleted somehow, show the pattern.
                        }
                        keep <- as.integer(Sys.getenv("_R_CHECK_TESTS_NLINES_",
                                                      "13"))
                        ## keep = 0 means keep all of it, but we will
                        ## always omit the R preamble and start at the first
                        ## line with an R prompt.
                        ll <- length(lines)
                        st <- grep("^>", lines, useBytes = TRUE)
                        if (length(st)) {
                            lines <- lines[st[1L]:ll]
                            ll <- length(lines)
                        }
                        ## Tail of the output (the slice actually keeps
                        ## keep + 2 lines when the file is long enough).
                        if (keep > 0L)
                            lines <- lines[max(1L, ll-keep-1L):ll]
                        if (R_check_suppress_RandR_message)
                            lines <- filtergrep('^Xlib: *extension "RANDR" missing on display',
                                                lines, useBytes = TRUE)
                        printLog(Log, sprintf(paste(if(endsWith(f, ".Rin")) "Processing"
                                                    else "Running the tests in",
                                                    "%s failed.\n"),
                                              sQuote(f)))
                        printLog(Log, if(keep > 0L && keep < ll)
                                 sprintf("Last %i lines of output:\n", keep)
                                 else "Complete output:\n")
                        printLog0(Log, .format_lines_with_indent(lines), "\n")
                    }
                }
                ## NOTE(review): this early return does not restore the
                ## working directory; presumably safe because testdir is
                ## built from pkgoutdir (an absolute path) -- confirm.
                return(FALSE)
            } else {
                ## Success: scan the runner's log for things worth a
                ## NOTE (timing report lines or non-OK comparisons
                ## against saved output).  The local 'any' shadows the
                ## name, but R's function-call lookup still finds
                ## base::any() for the calls below.
                any <- FALSE
                lines <- NULL
                if (Log$con > 0L && file.exists(logf)) {
                    ## write results only to 00check.log
                    ## check o/p might be in a different encoding.
                    lines <- readLines(logf, warn = FALSE)
                    if(any(grepl("Running R code.*times elapsed time",
                                 lines, useBytes = TRUE)) ||
                       any(startsWith(lines, "  Comparing") &
                           !endsWith(lines, "... OK")))
                        any <- TRUE
                }
                if(any) noteLog(Log) else resultLog(Log, "OK")
                if(!is.null(lines)) {
                    cat(lines, sep="\n", file = Log$con)
                    unlink(logf)
                }
            }
            setwd(pkgoutdir)
            TRUE
        }
        ## Execute for one arch or for each installed sub-architecture
        ## (skipping those listed in R_check_skip_tests_arch); any
        ## failure across arches triggers maybe_exit(1L).
        if (do_install && run) {
            if (!this_multiarch) {
                res <- run_one_arch()
            } else {
                printLog(Log, "\n")
                res <- TRUE
                for (arch in inst_archs)
                    if (arch %notin% R_check_skip_tests_arch) {
                        printLog(Log, "** running tests for arch ",
                                 sQuote(arch), " ...")
                        res <- res & run_one_arch(arch)
                    }
            }
            if (!res) maybe_exit(1L)
        } else resultLog(Log, "SKIPPED")
    }

    run_vignettes <- function(desc)
    {
        theta <-
            as.numeric(Sys.getenv("_R_CHECK_VIGNETTE_TIMING_CPU_TO_ELAPSED_THRESHOLD_",
                                  NA_character_))

        libpaths <- .libPaths()
        .libPaths(c(libdir, libpaths))
        vigns <- pkgVignettes(dir = pkgdir)
        .libPaths(libpaths)
        if(is.null(vigns)) return()

        ## Packages with a 'vignette' subdir not providing vignettes.
        if(!length(vigns$docs)) {
            checkingLog(Log, "package vignettes")
            noteLog(Log)
            msg <- c("Package has 'vignettes' subdirectory but apparently no vignettes.",
                     "Perhaps the 'VignetteBuilder' information is missing from the DESCRIPTION file?")
            wrapLog(msg)
            return()
        }

        if(do_install && !spec_install && !is_base_pkg && !extra_arch) {
            ## fake installs don't install inst/doc
            checkingLog(Log, "for unstated dependencies in vignettes")
            Rcmd <- paste(opW_shE_F_str,
                          sprintf("tools:::.check_packages_used_in_vignettes(package = \"%s\")\n",
                                  pkgname))
            out <- R_runR2(Rcmd, "R_DEFAULT_PACKAGES=NULL")
            if (length(out)) {
                noteLog(Log)
                printLog0(Log, paste(c(out, ""), collapse = "\n"))
            } else resultLog(Log, "OK")
        }

        checkingLog(Log, "package vignettes")
        any <- FALSE
        ## Do PDFs or HTML files exist for all package vignettes?
        ## A base source package need not have PDFs to avoid
        ## frequently-changing binary files in the SVN archive.
        if (!is_base_pkg) {
            dir <- file.path(pkgdir, "inst", "doc")
            if (dir.exists(dir)) {
                outputs <- character(length(vigns$docs))
                .msg <- character()
                for (i in seq_along(vigns$docs)) {
                    file <- vigns$docs[i]
                    name <- vigns$names[i]
                    engine <- vignetteEngine(vigns$engines[i])
                    outputs[i] <- tryCatch({
                        find_vignette_product(name, what="weave", final=TRUE, dir=dir, engine = engine)
                    }, error = function(e) {
                        .msg <<- c(.msg, conditionMessage(e))
                        NA}
                    )
                }
                bad_vignettes <- vigns$docs[is.na(outputs)]
            } else {
                .msg <- "Directory 'inst/doc' does not exist."
                bad_vignettes <- vigns$docs
            }
            if (nb <- length(bad_vignettes)) {
                any <- TRUE
                warningLog(Log)
                if (length(.msg)) printLog0(Log, .msg, "\n")
                msg <- ngettext(nb,
                                "Package vignette without corresponding single PDF/HTML:\n",
                                "Package vignettes without corresponding single PDF/HTML:\n", domain = NA)
                printLog0(Log, msg)
                printLog0(Log, .format_lines_with_indent(sQuote(basename(bad_vignettes))), "\n")
            }
            bad_vignettes <- vigns$docs[vigns$encodings == "non-ASCII"]
            if(nb <- length(bad_vignettes)) {
                if(!any) warningLog(Log)
                any <- TRUE
                msg <- ngettext(nb,
                         "Non-ASCII package vignette without specified encoding:\n",
                         "Non-ASCII package vignettes without specified encoding:\n", domain = NA)
                printLog0(Log, msg)
                printLog0(Log, .format_lines_with_indent(sQuote(basename(bad_vignettes))), "\n")
            }
        }

# FIXME:  we should do this check in build, not here.  Currently not doing it at all.
#        ## Do any of the .R files which will be generated
#        ## exist in inst/doc?  If so the latter will be ignored,
#        sources <-
#            basename(list_files_with_exts(file.path(pkgdir, "inst/doc"), "R"))
#        custom <- !is.na(desc["VignetteBuilder"])
#        if (length(sources) && !custom) {
#            new_sources <- paste0(vigns$names, ".R")
#            dups <- sources[sources %in% new_sources]
#            if(nb <- length(dups)) {
#                if(!any) warningLog(Log)
#                any <- TRUE
#                msg <- ngettext(nb,
#                                "Unused file in 'inst/doc' which is pointless or misleading",
#                                "Unused files in 'inst/doc' which are pointless or misleading", domain = NA)
#                printLog(Log, "  ",
#                         paste(msg,
#                               "  as they will be re-created from the vignettes:", "",
#                               sep = "\n"))
#                printLog(Log,
#                         paste(c(paste("  ", dups), "", ""),
#                               collapse = "\n"))
#            }
#        }
        ## avoid case-insensitive matching
        if ("makefile" %in% dir(vigns$dir)) {
            if(!any) warningLog(Log)
            any <- TRUE
            printLog(Log,
                     "Found 'inst/doc/makefile': should be 'Makefile' and will be ignored\n")
        }
        if ("Makefile" %in% dir(vigns$dir)) {
            f <- file.path(vigns$dir, "Makefile")
            lines <- readLines(f, warn = FALSE)
            ## remove comment lines
            lines <- filtergrep("^[[:space:]]*#", lines)
            if(any(grepl("[^/]R +CMD", lines))) {
                if(!any) warningLog(Log)
                any <- TRUE
                printLog(Log,
                         "Found 'R CMD' in Makefile: should be '\"$(R_HOME)/bin/R\" CMD'\n")
            }
            contents <- readChar(f, file.size(f), useBytes = TRUE)
            if(any(grepl("\r", contents, fixed = TRUE, useBytes = TRUE))) {
                if(!any) warningLog(Log)
                any <- TRUE
                printLog(Log, "Found Makefile with CR or CRLF line endings:\n")
                printLog(Log, "some Unix 'make' programs require LF line endings.\n")
           }
            if(any(grepl("[^/]Rscript", lines))) {
                if(!any) warningLog(Log)
                any <- TRUE
                printLog(Log,
                         "Found 'Rscript' in Makefile: should be '\"$(R_HOME)/bin/Rscript\"'\n")
            }
        }

        ## If the vignettes declare an encoding, are they actually in it?
        ## (We don't check the .tex, though)
        bad_vignettes <- character()
        for (i in seq_along(vigns$docs)) {
            v <- vigns$docs[i]
            enc <- vigns$encodings[i]
            if (enc %in% c("", "non-ASCII", "unknown")) next
            lines <- readLines(v, warn = FALSE) # some miss final NA
            lines2 <- iconv(lines, enc, "UTF-16LE", toRaw = TRUE)
            if(any(vapply(lines2, is.null, TRUE)))
                bad_vignettes <- c(bad_vignettes, v)
            if(nb <- length(bad_vignettes)) {
                if(!any) warningLog(Log)
                any <- TRUE
                msg <- ngettext(nb,
                                "Package vignette which is not in its specified encoding:\n",
                                "Package vignettes which are not in their specified encoding:\n", domain = NA)
                printLog0(Log, msg)
                printLog0(Log, .format_lines_with_indent(sQuote(basename(bad_vignettes))), "\n")
            }
        }

        ## Packages not specifying a package for their vignette engine.
        ## tools::pkgVignettes() actually uses tools:::engineMatches()
        ## trickery to work around these cases, other code does not.
        bad_vignettes <-
            vigns$docs[!grepl("::",
                              vapply(vigns$docs, getVignetteEngine, ""),
                              fixed = TRUE)]
        if(nb <- length(bad_vignettes)) {
            if(!any) noteLog(Log)
            any <- TRUE
            msg <- ngettext(nb,
                            "Package vignette with \\VignetteEngine{} not specifying an engine package:",
                            "Package vignettes with \\VignetteEngine{} not specifying an engine package:",
                            domain = NA)
                printLog0(Log, msg, "\n",
                          .format_lines_with_indent(sQuote(basename(bad_vignettes))),
                          "\n")
            wrapLog("Engines should be specified as \\VignetteEngine{PKG::ENG}.")
        }

        if(R_check_vignette_titles) {
            titles <- vapply(vigns$docs, function(v) vignetteInfo(v)$title, "",
                             USE.NAMES = TRUE)
            bad_vignettes <- names(titles)[titles == "Vignette Title"]
            if(nb <- length(bad_vignettes)) {
                if(!any) noteLog(Log)
                any <- TRUE
                msg <- ngettext(nb,
                                "Package vignette with placeholder title 'Vignette Title':\n",
                                "Package vignettes with placeholder title 'Vignette Title':\n",
                                domain = NA)
                wrapLog(msg)
                printLog0(Log,
                          .format_lines_with_indent(sQuote(basename(bad_vignettes))),
                          "\n")
            }

            ## Check for duplicated titles (which look silly on CRAN pages)
            if (any(dup <- duplicated(titles) & nzchar(titles))) {
                ## empty titles are reported in check_indices()
                if(!any) noteLog(Log)
                any <- TRUE
                dups <- unique(titles[dup])
                msg <- ngettext(length(dups),
                                "Duplicated vignette title:",
                                "Duplicated vignette titles:",
                                domain = NA)
                printLog0(Log, msg, "\n",
                          .format_lines_with_indent(sQuote(dups)), "\n")
                wrapLog("Ensure that the %\\VignetteIndexEntry lines in the",
                        "vignette sources correspond to the vignette titles.")
            }
        }

        ## Check for missing tangle outputs.
        ## Note that R CMD build drops outputs with no R code, via
        ##   bfr <- readLines(file, warn = FALSE)
        ##   if(all(grepl("(^###|^[[:space:]]*$)", bfr, useBytes = TRUE))) {
        ## so we do the same here.
        if(!is_base_pkg &&
           dir.exists(dir <- file.path(pkgdir, "inst", "doc"))) {
            ## This is similar to the weave output check above, but we
            ## cannot simply use find_vignette_product(by = "tangle") as
            ## we need to ignore outputs with no R code, see above.
            ## <FIXME>
            ## Unfortunately, knitr::vtangle() still creates empty
            ## outputs when xfun::is_R_CMD_check() which gives true when
            ##   !is.na(Sys.getenv("_R_CHECK_PACKAGE_NAME_", NA))
            ## or when
            ##   tolower(Sys.getenv("_R_CHECK_LICENSE_")) == "true"
            ## so we have to reset these env vars.
            rcp <- Sys.getenv("_R_CHECK_PACKAGE_NAME_", NA_character_)
            rcl <- Sys.getenv("_R_CHECK_LICENSE_")
            if(!is.na(rcp))
                Sys.unsetenv("_R_CHECK_PACKAGE_NAME_")
            if(tolower(rcl) == "true")
                Sys.setenv("_R_CHECK_LICENSE_" = "false")
            bad_vignettes <- character()
            for (i in seq_along(vigns$docs)) {
                tdir <- tempfile()
                file <- vigns$docs[i]
                engine <- vignetteEngine(vigns$engines[i])
                encoding <- vigns$encodings[i]
                dir.create(tdir)
                ## Argh.
                ## At least knitr::markdown() gives error msgs when
                ## tangling with quiet = TRUE, so we need to capture
                ## these.
                .eval_with_capture({
                    products <-
                        tryCatch(buildVignette(file, dir = tdir,
                                               weave = FALSE,
                                               quiet = TRUE,
                                               engine = engine,
                                               encoding = encoding),
                                 error = identity)
                })
                if(!inherits(products, "error") && length(products)) {
                    ## Hmm ... there should really only be one tangle
                    ## product.
                    lines <- readLines(file.path(tdir, products[1L]),
                                       warn = FALSE)
                    if(!all(grepl("(^###|^[[:space:]]*$)", lines,
                                  useBytes = TRUE)) &&
                       !file.exists(file.path(dir, basename(products[1L])))
                       )
                        bad_vignettes <- c(bad_vignettes, file)
                }
                unlink(tdir, recursive = TRUE)
            }
            if(tolower(rcl) == "true")
                Sys.setenv("_R_CHECK_LICENSE_" = rcl)
            if(!is.na(rcp))
                Sys.setenv("_R_CHECK_PACKAGE_NAME_" = rcp)
            ## Hopefully knitr::vtangle() will be fixed eventually ...
            ## </FIXME>
            if(nb <- length(bad_vignettes)) {
                if(!any) noteLog(Log)
                any <- TRUE
                msg <- ngettext(nb,
                            "Package vignette without corresponding tangle output:",
                            "Package vignettes without corresponding tangle output:",
                            domain = NA)
                printLog0(Log, msg, "\n",
                          .format_lines_with_indent(sQuote(basename(bad_vignettes))),
                          "\n")
            }
        }

        if (!any) resultLog(Log, "OK")

        if (do_install && do_vignettes) {
            ## Can we run the code in the vignettes?
            ## Should checking the vignettes assume the system default
            ## packages, or just base?
            ## FIXME: should we do this for multiple sub-archs?

            ## Re-building the vignette outputs also runs the code, so
            ## doing so as well creates no additional value unless the
            ## results are compared against saved results (which could
            ## perhaps also be integrated into buildVignettes().
            ## Hence, when re-building, skip running the code when there
            ## are no saved results.
            ## Could make this controllable via some env var ...

            build_vignettes <-
                parse_description_field(desc, "BuildVignettes", TRUE)
            if (!build_vignettes && as_cran) {
                ## FOSS packages must be able to rebuild their vignettes
                info <- analyze_license(desc["License"])
                build_vignettes <- info$is_verified
            }
            do_build_vignettes <- do_build_vignettes && build_vignettes
            skip_run_maybe <-
                R_check_vignettes_skip_run_maybe && do_build_vignettes

            ## vigns <- pkgVignettes(dir = pkgdir)
            savefiles <-
                file.path(dirname(vigns$docs),
                          paste0(vigns$names, ".Rout.save"))
            ran <- FALSE

            if(!skip_run_maybe || any(file.exists(savefiles))) {
                checkingLog(Log, "running R code from vignettes")
                res <- character()
                t1 <- proc.time()
                iseq <- seq_along(savefiles)
                if(skip_run_maybe)
                    iseq <- iseq[file.exists(savefiles)]
                out0 <- character()
                anyNOTE <- FALSE
                cat("\n")
                for (i in iseq) {
                    file <- basename(vigns$docs[i])
                    ## name <- vigns$names[i]
                    enc <- vigns$encodings[i]
                    out1 <- c("  ", sQuote(file),
                              if(nzchar(enc)) paste(" using", sQuote(enc)),
                              "...")
                    Rcmd <- paste0(opWarn_string, "\ntools:::.run_one_vignette('",
                                   file, "', '", vigns$dir, "'",
                                   if (nzchar(enc))
                                       paste0(", encoding = '", enc, "'"),
                                   ", pkgdir='", vigns$pkgdir, "')")
                    outfile <- paste0(file, ".log")
                    tlim <- get_timeout(Sys.getenv("_R_CHECK_ONE_VIGNETTE_ELAPSED_TIMEOUT_",
                                        Sys.getenv("_R_CHECK_ELAPSED_TIMEOUT_")))
                    t1b <- proc.time()
                    status <- R_runR0(Rcmd,
                                      if (use_valgrind) paste(R_opts2, "-d valgrind") else R_opts2,
                                      ## add timing as footer, as BATCH does
                                      env = c(jitstr, "R_BATCH=1234",
                                              if(R_cdo_vignettes) elibs_cdo else elibs,
                                              "_R_CHECK_INTERNALS2_=1"),
                                      stdout = outfile, stderr = outfile,
                                      timeout = tlim)
                    t2b <- proc.time()
                    out <- readLines(outfile, warn = FALSE)
                    pos <- which(out == " *** Run successfully completed ***")
                    if(!length(pos) || any(nzchar(out[seq_len(pos[1L] - 1L)])))
                        ran <- TRUE
                    savefile <- savefiles[i]
                    if(length(grep("^  When (running|tangling|sourcing)", out,
                                   useBytes = TRUE))) {
                        out1 <- c(out1, " failed\n")
                        keep <- as.numeric(Sys.getenv("_R_CHECK_VIGNETTES_NLINES_",
                                                      "10"))
                        res <- if (keep > 0)
                            c(res,
                              paste("when running code in", sQuote(file)),
                              "  ...",
                              utils::tail(out, keep))
                        else
                            c(res,
                              paste("when running code in", sQuote(file)),
                              out)

                    } else if(status || " *** Run successfully completed ***" %notin% out) {
                        ## (Need not be the final line if running under valgrind)
                        keep <- as.numeric(Sys.getenv("_R_CHECK_VIGNETTES_NLINES_",
                                                      "10"))
                        out1 <- c(out1, " failed to complete the test\n")
                        out <- c(out, "", "... incomplete output.  Crash?")
                        res <- if (keep > 0)
                            c(res,
                                 paste("when running code in", sQuote(file)),
                                 "  ...",
                                 utils::tail(out, keep))
                        else
                            c(res,
                                 paste("when running code in", sQuote(file)),
                                 out)
                    } else if (file.exists(savefile)) {
                        cmd <- paste0("invisible(tools::Rdiff('",
                                      outfile, "', '", savefile, "',TRUE,TRUE))")
                        out2 <- R_runR0(cmd, R_opts2)
                        if(length(out2)) {
                            out1 <- c(out1, print_time0(t1b, t2b))
                            anyNOTE <- TRUE
                            out1 <- c(out1, " NOTE\n")
                            out1 <- c(out1, paste("differences from",
                                                  sQuote(basename(savefile))))
                            out1 <- c(out1,
                                      paste(c("", out2, ""), collapse = "\n"))
                        } else {
                            out1 <- c(out1, print_time0(t1b, t2b))
                            out1 <- c(out1, " OK\n")
                            if (!config_val_to_logical(Sys.getenv("_R_CHECK_ALWAYS_LOG_VIGNETTE_OUTPUT_", use_valgrind)))
                                unlink(outfile)
                        }
                    } else {
                        out1 <- c(out1, print_time0(t1b, t2b))
                        out1 <- c(out1, " OK\n")
                        if (!config_val_to_logical(Sys.getenv("_R_CHECK_ALWAYS_LOG_VIGNETTE_OUTPUT_", use_valgrind)))
                            unlink(outfile)
                    }
                    out0 <- c(out0, out1)
                    cat(out1, sep = "")
                    if(!WINDOWS && !is.na(theta)) {
                        td <- t2b - t1b
                        cpu <- sum(td[-3L])
                        if(cpu >= pmax(theta * td[3L], 1)) {
                            ratio <- round(cpu/td[3L], 1L)
                            cat(sprintf("Running R code from vignette %s had CPU time %g times elapsed time\n",
                                        sQuote(file), ratio))
                        }
                    }
                }
                t2 <- proc.time()
                print_time(t1, t2, Log)
                if(!ran) {
                    resultLog(Log, "NONE")
                    ## printLog0(Log, out0)
                    if (!is.null(Log) && Log$con > 0L)
                        cat(out0, sep ="", file = Log$con)
                } else {
                    if(R_check_suppress_RandR_message)
                        res <- filtergrep('^Xlib: *extension "RANDR" missing on display',
                                          res, useBytes = TRUE)
                    if(length(res)) {
                        if(length(grep("there is no package called", res,
                                       useBytes = TRUE))) {
                            warningLog(Log, "Errors in running code in vignettes:")
                            printLog0(Log, paste(c(res, "", ""), collapse = "\n"))
                        } else {
                            errorLog(Log, "Errors in running code in vignettes:")
                            printLog0(Log, paste(c(res, "", ""), collapse = "\n"))
                            maybe_exit(1L)
                        }
                    } else if(anyNOTE) noteLog(Log)
                    else resultLog(Log, "OK")
##                    printLog0(Log, out0)
                    if (!is.null(Log) && Log$con > 0L)
                        cat(out0, sep = "", file = Log$con)
                    if(!WINDOWS && !is.na(theta)) {
                        td <- t2 - t1
                        cpu <- sum(td[-3L])
                        if(cpu >= pmax(theta * td[3L], 1)) {
                            ratio <- round(cpu/td[3L], 1L)
                            printLog(Log,
                                     sprintf("Running R code from vignettes had CPU time %g times elapsed time\n",
                                             ratio))
                        }
                    }
                }
            }

            if (do_build_vignettes) {
                checkingLog(Log, "re-building of vignette outputs")
                ## copy the whole pkg directory to check directory
                ## so we can work in place, and allow ../../foo references.
                dir.create(vd2 <- "vign_test")
                if (!dir.exists(vd2)) {
                    errorLog(Log, "unable to create 'vign_test'")
                    summaryLog(Log)
                    do_exit(1L)
                }
                file.copy(pkgdir, vd2, recursive = TRUE)

                ## since so many people use 'R CMD' in Makefiles,
                oPATH <- Sys.getenv("PATH")
                Sys.setenv(PATH = paste(R.home("bin"), oPATH,
                                        sep = .Platform$path.sep))
                on.exit(Sys.setenv(PATH = oPATH))
                ## And too many 'vignettes/Makefile's are not safe for
                ## parallel makes
                Sys.setenv(MAKEFLAGS="")
                ## we could use clean = FALSE, but that would not be
                ## testing what R CMD build uses.
                Rcmd <-
                    if (!config_val_to_logical(Sys.getenv("_R_CHECK_BUILD_VIGNETTES_SEPARATELY_", "TRUE")))
                        sprintf("%s\ntools::buildVignettes(dir = '%s', skip = TRUE)",
                                opWarn_string,
                                file.path(pkgoutdir, "vign_test", pkgname0))
                    else {
                            ## serialize elibs to avoid quotation hell
                            tf <- gsub("\\", "/", tempfile(fileext = ".rds"),
                                       fixed = TRUE)
                            saveRDS(c(jitstr, if(R_cdo_vignettes) elibs_cdo else elibs), tf)
                            sprintf("%s\ntools::buildVignettes(dir = '%s', skip = TRUE, ser_elibs = '%s')",
                                    opWarn_string,
                                    file.path(pkgoutdir, "vign_test", pkgname0),
                                    tf)
                    }
                tlim <- get_timeout(Sys.getenv("_R_CHECK_BUILD_VIGNETTES_ELAPSED_TIMEOUT_",
                                    Sys.getenv("_R_CHECK_ELAPSED_TIMEOUT_")))
                t1 <- proc.time()
                outfile <- file.path(pkgoutdir, "build_vignettes.log")
                status <- R_runR0(Rcmd,
                                  if (use_valgrind) paste(R_opts2, "-d valgrind")
                                  else R_opts2,
                                  c(jitstr,
                                    if(R_cdo_vignettes) elibs_cdo else elibs),
                                  stdout = outfile, stderr = outfile,
                                  timeout = tlim)
                t2 <- proc.time()
                print_time(t1, t2, Log)
                out <- readLines(outfile, warn = FALSE)
                if(R_check_suppress_RandR_message)
                    out <- filtergrep('^Xlib: *extension "RANDR" missing on display',
                                      out, useBytes = TRUE)
                warns <- grep("^Warning: file .* is not portable",
                              out, value = TRUE, useBytes = TRUE)
                ltx_err <- any(grepl("LaTeX error", out, ignore.case = TRUE,
                                     useBytes = TRUE))
                iskip <- grep("^Note: skipping .* dependencies:", out,
                              useBytes = TRUE)
                any <- FALSE
                if (status) {
                    any <- TRUE
                    keep <- as.numeric(Sys.getenv("_R_CHECK_VIGNETTES_NLINES_",
                                                  "25"))
                    if(skip_run_maybe || !ran) {
                        if(ltx_err) warningLog(Log) else errorLog(Log)
                    } else noteLog(Log)
                    if(keep > 0  && length(out) < keep) {
                        out <- utils::tail(out, keep)
                        printLog0(Log,
                                  paste(c("Error(s) in re-building vignettes:",
                                          "  ...", out, "", ""), collapse = "\n"))
                    } else
                        printLog0(Log,
                                  paste(c("Error(s) in re-building vignettes:",
                                          out, "", ""), collapse = "\n"))
                } else if(nw <- length(warns)) {
                    any <- TRUE
                    if(skip_run_maybe || !ran) warningLog(Log) else noteLog(Log)
                    msg <- ngettext(nw,
                                    "Warning in re-building vignettes:\n",
                                    "Warnings in re-building vignettes:\n",
                                    domain = NA)
                    wrapLog(msg)
                    printLog0(Log, .format_lines_with_indent(warns), "\n")
                } else {
                    ## clean up
                    if (config_val_to_logical(Sys.getenv("_R_CHECK_CLEAN_VIGN_TEST_", "true")))
                        unlink(vd2, recursive = TRUE)
                    if (!config_val_to_logical(Sys.getenv("_R_CHECK_ALWAYS_LOG_VIGNETTE_OUTPUT_", "false")))
                            unlink(outfile)
                    if (length(iskip)) {
                        any <- TRUE
                        iempty <- which(out == "")
                        ## skipping notes from buildVignettes each close with empty line
                        iskip <- unlist(lapply(iskip, function(i)
                            i:(iempty[iempty > i][1L] - 1L)))
                        noteLog(Log)
                        printLog0(Log, paste(out[iskip], collapse = "\n"), "\n")
                    }
                }
                if(!WINDOWS && !is.na(theta)) {
                    td <- t2 - t1
                    cpu <- sum(td[-3L])
                    if(cpu >= pmax(theta * td[3L], 1)) {
                        if(!any) {
                            noteLog(Log)
                            any <- TRUE
                        }
                        ratio <- round(cpu/td[3L], 1L)
                        printLog(Log,
                                 sprintf("Re-building vignettes had CPU time %g times elapsed time\n",
                                        ratio))
                    }
                }
                if(!any)
                    resultLog(Log, "OK")
            } else {
                checkingLog(Log, "re-building of vignette outputs")
                resultLog(Log, "SKIPPED")
            }
        } else {
            checkingLog(Log, "running R code from vignettes")
            resultLog(Log, "SKIPPED")
            checkingLog(Log, "re-building of vignette outputs")
            resultLog(Log, "SKIPPED")
        }
    }

    ## Build the PDF reference manual with 'R CMD Rd2pdf' and log the
    ## outcome.  Rd conversion failures (exit code 11) are an ERROR;
    ## LaTeX failures give a WARNING and trigger a retry without the
    ## index before giving up.  On failure, copies of the LaTeX log
    ## (and .tex source) are left as <pkgname>-manual.log/.tex to help
    ## diagnosis.  Uses 'Log', run_Rcmd(), print_time(), maybe_exit()
    ## etc. from the enclosing check function.
    check_pkg_manual <- function(pkgdir, pkgname)
    {
        ## Run Rd2pdf on the manual, if there are man pages
        ## If it is installed there is a 'help' dir
        ## and for a source package, there is a 'man' dir
        if (dir.exists(file.path(pkgdir, "help")) ||
            dir.exists(file.path(pkgdir, "man"))) {
            ## per-check timeout, falling back to the global check timeout
            tlim <- get_timeout(Sys.getenv("_R_CHECK_PKGMAN_ELAPSED_TIMEOUT_",
                                Sys.getenv("_R_CHECK_ELAPSED_TIMEOUT_")))
            topdir <- pkgdir
            Rd2pdf_opts <- "--no-preview --internals"
            checkingLog(Log, "PDF version of manual")
            ## use forward slashes so the path passes cleanly through shQuote
            build_dir <- gsub("\\", "/", tempfile("Rd2pdf"), fixed = TRUE)
            ## NOTE(review): "-manual.pdf " carries a trailing space, which
            ## is passed verbatim to file.exists()/unlink() below -- confirm
            ## intended (it is harmless inside 'args', which are joined with
            ## spaces when the command line is assembled).
            man_file <- paste0(pkgname, "-manual.pdf ")
            ## precautionary remove in case some other attempt left it behind
            if(file.exists(man_file)) unlink(man_file)
            args <- c( "Rd2pdf ", Rd2pdf_opts,
                      paste0("--build-dir=", shQuote(build_dir)),
                      "--no-clean", "-o ", man_file , shQuote(topdir))
            t1 <- proc.time()
            res <- run_Rcmd(args,  "Rdlatex.log", timeout = tlim)
            t2 <- proc.time()
            print_time(t1, t2, Log)
            ## keep a copy of the LaTeX log under a user-visible name
            latex_log <- file.path(build_dir, "Rd2.log")
            if (file.exists(latex_log))
                file.copy(latex_log, paste0(pkgname, "-manual.log"))
            if (res == 11) { ## return code from Rd2pdf
                ## Rd conversion (not LaTeX) failed: report and bail out
                errorLog(Log, "Rd conversion errors:")
                lines <- readLines("Rdlatex.log", warn = FALSE)
                lines <- filtergrep("^(Hmm|Execution)", lines)
                printLog0(Log, paste(c(lines, ""), collapse = "\n"))
                unlink(build_dir, recursive = TRUE)
                maybe_exit(1L)
            } else if (res > 0) {
                ## LaTeX failure: keep the generated .tex for inspection ...
                latex_file <- file.path(build_dir, "Rd2.tex")
                if (file.exists(latex_file))
                    file.copy(latex_file, paste0(pkgname, "-manual.tex"))
                warningLog(Log)
                printLog0(Log,
                          paste0("LaTeX errors when creating PDF version.\n",
                                 "This typically indicates Rd problems.\n"))
                ## If possible, indicate the problems found.
                if (file.exists(latex_log)) {
                    lines <- .get_LaTeX_errors_from_log_file(latex_log)
                    printLog(Log, "LaTeX errors found:\n")
                    printLog0(Log, paste(c(lines, ""), collapse = "\n"))
                }
                unlink(build_dir, recursive = TRUE)
                ## for Windows' sake: errors can make it unwritable
                build_dir <- gsub("\\", "/", tempfile("Rd2pdf"), fixed = TRUE)
                ## ... then retry without the index, a common source of
                ## LaTeX failures
                checkingLog(Log, "PDF version of manual without index")
                Sys.setenv(R_RD4PDF = "times")
                ## --force is used for the case when pdf file was created by
                ## the previous run (seen with MiKTeX on Windows)
                args <- c( "Rd2pdf ", Rd2pdf_opts,
                          paste0("--build-dir=", shQuote(build_dir)),
                          "--no-clean", "--no-index", "--force",
                          "-o ", man_file, topdir)
                if (run_Rcmd(args, "Rdlatex.log", timeout = tlim)) {
                    ## FIXME: the info is almost certainly in Rdlatex.log
                    errorLog(Log)
                    latex_log <- file.path(build_dir, "Rd2.log")
                    if (file.exists(latex_log))
                        file.copy(latex_log, paste0(pkgname, "-manual.log"))
                    else {
                        ## No log file and thus no chance to find out
                        ## what went wrong.  Hence, re-run without
                        ## redirecting stdout/stderr and hope that this
                        ## gives the same problem ...
                        # printLog(Log, "Error when running command:\n")
                        # cmd <- paste(c("R CMD", args), collapse = " ")
                        # printLog(Log, strwrap(cmd, indent = 2, exdent = 4), "\n")
                        printLog(Log, "Re-running with no redirection of stdout/stderr.\n")
                        unlink(build_dir, recursive = TRUE)
                        build_dir <- gsub("\\", "/", tempfile("Rd2pdf"), fixed = TRUE)
                        args <- c( "Rd2pdf ", Rd2pdf_opts,
                                  paste0("--build-dir=", shQuote(build_dir)),
                                  "--no-clean", "--no-index",
                                  "-o ", paste0(pkgname, "-manual.pdf "),
                                  topdir)
                        run_Rcmd(args, timeout = tlim)
                    }
                    unlink(build_dir, recursive = TRUE)
                    maybe_exit(1L)
                } else {
                    unlink(build_dir, recursive = TRUE)
                    resultLog(Log, "OK")
                }
            } else {
                unlink(build_dir, recursive = TRUE)
                resultLog(Log, "OK")
            }
        }
    }

    ## Check the HTML version of the manual: (i) validate the HTML
    ## generated from each Rd file using HTML Tidy, and (ii) check that
    ## the equations extracted from the Rd db can be rendered by KaTeX.
    ## Each part is enabled by R_check_Rd_validate_Rd2HTML /
    ## R_check_Rd_math_rendering (closure variables of the enclosing
    ## check function).  'installed' selects whether 'dir' is an
    ## installed package (library dir layout) or a source package.
    check_Rd2HTML <- function(dir, installed = FALSE) {

        ## an installed package keeps its Rd db under the library tree,
        ## so look it up via basename/dirname of 'dir'
        db <- if(installed)
                  Rd_db(basename(dir), lib.loc = dirname(dir))
              else
                  Rd_db(dir = dir)
        if(!length(db))
            return()

        eq <- .Rd_get_equations_from_Rd_db(db)

        ## which of the two sub-checks are both enabled and have material?
        i1 <- (length(db) && isTRUE(R_check_Rd_validate_Rd2HTML))
        i2 <- (length(eq) && isTRUE(R_check_Rd_math_rendering))
        if(!i1 && !i2)
            return()

        checkingLog(Log, "HTML version of manual")
        any <- FALSE   # set when anything gets reported; else final OK

        t1 <- proc.time()
        if(i1) { ## validate
            Tidy <- .find_tidy_cmd()
            OK1 <- nzchar(Tidy)   # empty string when no usable 'tidy' found
            if(OK1) {
                out <- tempfile()
                on.exit(unlink(out))
                if(installed) {
                    ## May need libdir for stage=render Sexprs.
                    libpaths <- .libPaths()
                    .libPaths(c(libdir, libpaths))
                    on.exit(.libPaths(libpaths), add = TRUE)
                }
                ## convert each Rd object to HTML and run 'tidy' on the
                ## result; conversion errors are captured as conditions
                ## and reported later via attr(results1, "errors")
                results1 <- lapply(db,
                                  function(x)
                                      tryCatch({
                                          Rd2HTML(x, out, concordance = TRUE)
                                          tidy_validate(out, tidy = Tidy)
                                      },
                                      error = identity))
                ## optionally filter out tidy's empty-<span> warnings
                ignore <-
                    Sys.getenv("_R_CHECK_RD_VALIDATE_RD2HTML_IGNORE_EMPTY_SPANS_",
                               "true")
                ignore <- if(config_val_to_logical(ignore))
                              "Warning: trimming empty <span>"
                          else
                              character()
                results1 <- tidy_validate_db(results1, names(db), ignore)
            }
        }

        if(i2) { ## math rendering
            ## the KaTeX checker is NULL when it cannot be set up
            ## (reported below as package 'V8' being unavailable)
            OK2 <- !is.null(katex <- .make_KaTeX_checker())
            if(OK2)
                results2 <-
                    check_math_rendering_in_Rd_db(eq = eq,
                                                  katex = katex)
        }

        t2 <- proc.time()
        print_time(t1, t2, Log)

        if(i1) { ## report on validation
            ## no usable 'tidy': NOTE with advice on obtaining one
            if(!OK1) {
                noteLog(Log)
                any <- TRUE
                txt <-
                    paste("Please obtain a recent version of HTML Tidy",
                          "by downloading a binary release",
                          "or compiling the source code from",
                          "<https://www.html-tidy.org/>.")
                txt <- paste(strwrap(txt), collapse = "\n")
                printLog0(Log,
                          c("Skipping checking HTML validation: ",
                            attr(Tidy, "msg"),
                            ".\n",
                            txt,
                            "\n"))
            }
            ## conversion errors captured by the tryCatch() above
            if(OK1 && length(errors <- attr(results1, "errors"))) {
                if(!any) noteLog(Log)
                any <- TRUE
                printLog0(Log,
                          c("Encountered the following conversion/validation errors:\n",
                            paste(unlist(lapply(errors, conditionMessage)),
                                  collapse = "\n"),
                            "\n"))
            }
            ## per-location validation problems: one line per finding,
            ## mapping the HTML position back to the Rd source location
            if(OK1 && NROW(results1)) {
                if(!any) noteLog(Log)
                any <- TRUE
                printLog0(Log,
                          c("Found the following HTML validation problems:\n",
                            sprintf("%s:%s:%s (%s:%s): %s\n",
                                    sub("[Rr]d$", "html", results1[, "path"]),
                                    results1[, "line"],
                                    results1[, "col"],
                                    results1[, "srcFile"],
                                    results1[, "srcLine"],
                                    results1[, "msg"])))
            }
        }

        if(i2) { ## report on math rendering
            if(!OK2) {
                if(!any) noteLog(Log)
                any <- TRUE
                printLog0(Log,
                          "Skipping checking math rendering: package 'V8' unavailable\n")
            }
            if(OK2 && NROW(results2)) {
                if(!any) noteLog(Log)
                any <- TRUE
                printLog0(Log,
                          c("Found the following math rendering problems:\n",
                            sprintf("%s%s: %s\n",
                                    results2[, 1L],
                                    results2[, 2L],
                                    gsub("\n", "\n  ", results2[, 3L]))))
            }
        }

        if(!any) resultLog(Log, "OK")
    }

    ## Look for executable files in the package sources.  Uses the free
    ## 'file' utility when one is available (identified via --version),
    ## otherwise falls back to a simple .exe/.dll extension check.
    ## Findings listed in a top-level 'BinaryFiles' file may be
    ## excluded.  Reports a WARNING with the offending paths, or OK.
    check_executables <- function()
    {
        owd <- setwd(pkgdir)
        allfiles <- dir(".", all.files = TRUE, full.names = TRUE,
                        recursive = TRUE)
        allfiles <- sub("^./","", allfiles)
        ## this is tailored to the FreeBSD/Linux 'file',
        ## see <http://www.darwinsys.com/file/>
        ## (Solaris has a different 'file' without --version)
        ask_version <- function(cmd)
            suppressWarnings(tryCatch(system2(cmd, "--version", TRUE, TRUE),
                                      error = function(e) "error"))
        FILE <- "file"
        ## a reasonable check -- it does not identify itself well
        have_free_file <-
            any(grepl("^(file-[45]|magic file from)", ask_version(FILE)))
        if (!have_free_file) {
            ## OpenCSW calls this 'gfile'
            FILE <- "gfile"
            have_free_file <- any(grepl("magic file from", ask_version(FILE)))
        }
        if (have_free_file) {
            checkingLog(Log, "for executable files")

            ## There is a bug mis-identifying DBF files from 2022
            ## <https://bugs.astron.com/view.php?id=316>
            ## DBF header: first byte is the version number, second is
            ## year-1900 of last change; drop files matching that magic.
            looks_like_dbf <- function(f)
                identical(readBin(f, raw(), 2L), as.raw(c(3, 122)))
            allfiles <- allfiles[!vapply(allfiles, looks_like_dbf, NA)]

            ## Watch out for spaces in file names here.
            ## Run 'file' on batches of at most 100 paths at a time,
            ## since there may be a line-length limit.
            remaining <- allfiles
            hits <- list()
            while (length(remaining) > 0L) {
                take <- seq_len(min(100L, length(remaining)))
                batch <- remaining[take]
                remaining <- remaining[-take]
                out <- suppressWarnings(system2(FILE, shQuote(batch),
                                                TRUE, TRUE))
                ## avoid match to is_executable.Rd
                is_exec <- grepl(" executable", out, useBytes = TRUE)
                is_text_script <- grepl("script", out, useBytes = TRUE) &
                    grepl("text", out, useBytes = TRUE)
                hits[[length(hits) + 1L]] <- out[is_exec & !is_text_script]
            }
            execs <- as.character(unlist(hits, use.names = FALSE))
            if(length(execs)) {
                ## strip the 'file' output down to the path part
                execs <- sub(":[[:space:]].*$", "", execs, useBytes = TRUE)
                pexecs <- file.path(pkgname, execs)
                ## known false positives
                known <- rep.int(FALSE, length(execs))
                for(fp in  c("foreign/tests/datefactor.dta"
                             ## "SunOS mc68020 pure executable not stripped"
                             ) )
                    known <- known | grepl(fp, pexecs)
                execs <- execs[!known]
            }
        } else {
            ## no 'file', so just check extensions
            checkingLog(Log, "for .dll and .exe files")
            execs <- grep("\\.(exe|dll)$", allfiles, value = TRUE)
        }
        ## packages may declare binaries in a top-level 'BinaryFiles'
        if (R_check_executables_exclusions && file.exists("BinaryFiles"))
            execs <- execs %w/o% readLines("BinaryFiles")
        nb <- length(execs)
        if (nb) {
            msg <- ngettext(nb,
                            "Found the following executable file:",
                            "Found the following executable files:",
                            domain = NA)
            warningLog(Log, msg)
            printLog0(Log, .format_lines_with_indent(execs), "\n")
            wrapLog("Source packages should not contain undeclared executable files.\n",
                    "See section 'Package structure'",
                    "in the 'Writing R Extensions' manual.\n")
        } else resultLog(Log, "OK")
        setwd(owd)
    }

    ## CRAN-pack knows about
    ## (hidden-file basenames which check_dot_files(cran = TRUE) treats
    ## as known false positives; the comparison there is
    ## basename(dots) %in% .hidden_file_exclusions, so entries such as
    ## ".tex" match only files literally named ".tex", not extensions)
    .hidden_file_exclusions <-
        c(".Renviron", ".Rprofile", ".Rproj.user",
          ".Rhistory", ".Rapp.history",
          ".tex", ".log", ".aux", ".pdf", ".png",
          ".backups", ".cvsignore", ".cproject", ".directory",
          ".dropbox", ".exrc", ".gdb.history",
          ".gitattributes", ".gitignore", ".gitmodules",
          ".hgignore", ".hgtags",
          ".project", ".seed", ".settings", ".tm_properties")

    ## Report hidden files and directories in the package sources:
    ## these are almost always included by accident (VCS or editor
    ## metadata).  A short list of standard files is allowed.  With
    ## cran = TRUE, additionally report which of the findings CRAN's
    ## packing procedure does not already know about (via
    ## .hidden_file_exclusions and a few path patterns).
    check_dot_files <- function(cran = FALSE)
    {
        checkingLog(Log, "for hidden files and directories")
        owd <- setwd(pkgdir)
        ## hidden *files* anywhere in the sources ...
        dots <- dir(".", all.files = TRUE, full.names = TRUE,
                        recursive = TRUE, pattern = "^[.]")
        dots <- sub("^./","", dots)
        allowed <-
            c(".Rbuildignore", ".Rinstignore", "vignettes/.install_extras")
        dots <- dots %w/o% allowed
        ## ... plus hidden *directories* (dir() above lists files only)
        alldirs <- list.dirs(".", full.names = TRUE, recursive = TRUE)
        alldirs <- sub("^./","", alldirs)
        alldirs <- alldirs[alldirs != "."]
        bases <- basename(alldirs)
        dots <- c(dots, setdiff(alldirs[startsWith(bases, ".")], ".aspell"))
        if (length(dots)) {
            noteLog(Log, "Found the following hidden files and directories:")
            printLog0(Log, .format_lines_with_indent(dots), "\n")
            wrapLog("These were most likely included in error.",
                    "See section 'Package structure'",
                    "in the 'Writing R Extensions' manual.\n")
            if(cran) {
                known <- basename(dots) %in% .hidden_file_exclusions
                ## anchor on a literal dot: the previous pattern
                ## "^.Rbuildindex[.]" left the leading '.' unescaped,
                ## so it matched any first character
                known <- known | grepl("^[.]Rbuildindex[.]", dots) |
                    endsWith(dots, "inst/doc/.Rinstignore") |
                    endsWith(dots, "inst/doc/.build.timestamp") |
                    endsWith(dots, "vignettes/.Rinstignore") |
                    grepl("^src.*/[.]deps$", dots)
                if (all(known))
                    printLog(Log, "\nCRAN-pack knows about all of these\n")
                else {
                    ## 'known' contains no NAs, so !all(known) implies
                    ## at least one unknown entry
                    printLog(Log, "\nCRAN-pack does not know about\n")
                    printLog0(Log, .format_lines_with_indent(dots[!known]), "\n")
                }
            }
        } else resultLog(Log, "OK")
        setwd(owd)
    }

    check_install <- function()
    {
        ## Option '--no-install' turns off installation and the tests
        ## which require the package to be installed.  When testing
        ## recommended packages bundled with R we can skip
        ## installation, and do so if '--install=skip' was given.  If
        ## command line option '--install' is of the form
        ## 'check:FILE', it is assumed that installation was already
        ## performed with stdout/stderr redirected to FILE, the
        ## contents of which need to be checked (without repeating the
        ## installation).  In this case, one also needs to specify
        ## *where* the package was installed to using command line
        ## option '--library' (or '-l').

        if (install == "skip")
            messageLog(Log, "skipping installation test")
        else {
            tlim <- get_timeout(Sys.getenv("_R_CHECK_INSTALL_ELAPSED_TIMEOUT_",
                                Sys.getenv("_R_CHECK_ELAPSED_TIMEOUT_")))
            use_install_log <-
                (startsWith(install, "check") || R_check_use_install_log
                 || !isatty(stdout()))
            INSTALL_opts <- install_args
            ## Don't use HTML, checkRd goes over the same ground.
            ## Not quite for static HTML, so just use the R default.
            ##   INSTALL_opts <- c(INSTALL_opts,  "--no-html")
            if (install == "fake")
                INSTALL_opts <- c(INSTALL_opts,  "--fake")
            else if (!multiarch)
                INSTALL_opts <- c(INSTALL_opts,  "--no-multiarch")
            INSTALL_opts <- paste(INSTALL_opts, collapse = " ")
            args <- c("INSTALL", "-l", shQuote(libdir), INSTALL_opts,
                      shQuote(if (WINDOWS) utils::shortPathName(pkgdir) else pkgdir))
            tOK_msg <- NULL
            if (!use_install_log) {
                ## Case A: No redirection of stdout/stderr from installation.
                ## This is very rare: needs _R_CHECK_USE_INSTALL_LOG_ set
                ## to false.
                message("")
                ## Rare use of R CMD INSTALL
                if (run_Rcmd(args, timeout = tlim)) {
                    errorLog(Log, "Installation failed.")
                    summaryLog(Log)
                    do_exit(1L)
                }
                message("")
            } else {
                ## Case B. All output from installation redirected,
                ## or already available in the log file.
                checkingLog(Log,
                            "whether package ", sQuote(desc["Package"]),
                            " can be installed")
                outfile <- file.path(pkgoutdir, "00install.out")
                if (startsWith(install, "check")) {
                    if (!nzchar(arg_libdir))
                        printLog(Log, "\nWarning: --install=check... specified without --library\n")
                    thislog <- install_log_path
                    if(!nzchar(thislog)) {
                        errorLog(Log,
                                 sprintf("install log %s does not exist", sQuote(thislog)))
                        summaryLog(Log)
                        do_exit(2L)
                    }
                    file.copy(thislog, outfile)
                    install <- "check"
                    lines <- readLines(outfile, warn = FALSE)
                    ## <NOTE>
                    ## We used to have
                    ## $install_error = ($lines[$#lines] !~ /^\* DONE/);
                    ## but what if there is output from do_cleanup
                    ## in (Unix) R CMD INSTALL?
                    ## </NOTE>
                    install_error <- !any(startsWith(lines, "* DONE"))
                } else {
                    ## record in the log what options were used
                    cat("* install options ", sQuote(INSTALL_opts),
                        "\n\n", sep = "", file = outfile)
##                    env <- ""
                    ## Normal use of R CMD INSTALL
                    t1 <- proc.time()
                    install_error <-
                        run_Rcmd(args, outfile, timeout = tlim)
                    t2 <- proc.time()
                    print_time(t1, t2, Log)
                    theta <- as.numeric(Sys.getenv("_R_CHECK_INSTALL_TIMING_CPU_TO_ELAPSED_THRESHOLD_",
                                                   NA_character_))
                    if(!WINDOWS && !is.na(theta)) {
                        td <- t2 -t1
                        if(td[3L] >= td0) {
                            cpu <- sum(td[-3L])
                            if(cpu >= pmax(theta * td[3L], 1)) {
                                ratio <- round(cpu/td[3L], 1L)
                                tOK_msg <-
                                    sprintf("Installation took CPU time %g times elapsed time\n",
                                            ratio)
                            }
                        }
                    }
                    lines <- readLines(outfile, warn = FALSE)
                }
                if (install_error) {
                    errorLog(Log, "Installation failed.")
                    printLog0(Log, "See ", sQuote(outfile),
                             " for details.\n")
                    summaryLog(Log)
                    do_exit(1L)
                }

                ## There could still be some important warnings that
                ## we'd like to report.  For the time being, start
                ## with compiler warnings about non ISO C code (or
                ## at least, what looks like it), and also include
                ## warnings resulting from the const char * CHAR()
                ## change in R 2.6.0.  (In theory, we should only do
                ## this when using GCC ...)

                if (install != "check")
                    lines <- readLines(outfile, warn = FALSE)

                ## A few packages call SHLIB twice.
                lines00 <- unique(grep("^using (C compiler|C[+][+] compiler|Fortran compiler|SDK)",
                                       lines, value = TRUE))

                lines0 <- lines
                warn_re <- c("^WARNING:",
                             ## This fires on ODS 12.5 warnings like
                             ##   Warning: original hides icu_55::PtnSkeleton::original.
                             ## so filter out later.
                             "^Warning:",
                             ## <FIXME>
                             ## New form of warning in 4.4.0
                             "^Warning.*: .*is deprecated[.]$",
                             ## New style Rd conversion
                             ## which may even show errors:
                             "^Rd (warning|error): ",
                             ## </FIXME>
                             ": warning: .*ISO C",
                             ": warning: .* discards qualifiers from pointer target type",
                             ": warning: .* is used uninitialized",
                             ": warning: .* set but not used",
                             ": warning: unused",
                             ": warning: .* makes pointer from integer", # gcc
                             ": warning: .* pointer.* conversion", # clang
                             ": warning: improper pointer", # Solaris
                             ": warning: unknown escape sequence", # gcc
                             ": warning: use of non-standard escape character", # clang
                             ## clang warning about invalid returns.
                             "warning: void function",
                             "warning: control reaches end of non-void function",
                             "warning: no return statement in function returning non-void",
                             ## gcc-only form
                             ## ": #warning",
                             ## gcc indents these, igraph has space after #
                             "^ *# *warning",
                             ## Solaris cc has
                             "Warning: # *warning",
                             ## these are from era of static HTML
                             "missing links?:",
                             ## From the byte compiler's 'warn' methods
                             "^Note: possible error in",
                             "^Note: (break|next) used in wrong context: no loop is visible",
                             ## Warnings about S4 classes
                             "^  The prototype for class.*undefined slot",

                             ## from configure
                             "'config' variable.*is deprecated"
                             )
                ## Warnings spotted by gcc with
                ##   '-Wimplicit-function-declaration'
                ## which is implied by '-Wall'.
                ## True as from R 4.2.0, as Apple clang on macOS made these
                ## errors in 2020.
                ## Previously only accessible via an internal environment
                ## variable.
                check_src_flag <-
                    Sys.getenv("_R_CHECK_SRC_MINUS_W_IMPLICIT_", "TRUE")
                ## (Not quite perfect, as the name should really
                ## include 'IMPLICIT_FUNCTION_DECLARATION'.)
                if (config_val_to_logical(check_src_flag)) {
                    warn_re <- c(warn_re,
                                 ": warning: implicit declaration of function",
                                 ": warning: incompatible implicit declaration of built-in function")
                }

                ## Warnings spotted by clang with
                ## '-Wreturn-type-c-linkage':
                warn_re <- c(warn_re,
                             ": warning: .* \\[-Wreturn-type-c-linkage\\]")

                ## gcc and clang warnings about sequencing

                ## gcc warnings
                warn_re <- c(warn_re,
                             ": warning: pointer of type .* used in arithmetic",
                             ": warning: .* \\[-Wformat-contains-nul\\]",
                             ": warning: .* \\[-Wformat-zero-length\\]",
                             ": warning: .* \\[-Wpointer-to-int-cast\\]",
                             ": warning: .* \\[-Wsequence-point\\]",
                             ": warning: .* \\[-Wformat-overflow=\\]",
                             ": warning: .* \\[-Wformat-truncation=\\]",
                             ": warning: .* \\[-Wnonull",
                             ## gcc warnings usually about [mc]alloc with signed argument
                             ": warning: .* \\[-Walloc-size-larger-than=\\]",
                             ": warning: .* \\[-Wterminate\\]",
                             ## Solaris warns on this next one. Also clang
                             ": warning: .* \\[-Wint-conversion\\]",
                             ## clang calls these 'a GNU extension'
                             ": warning: .* \\[-Wconversion-null\\]",
                             ": warning: .* GCC extension",
                             ": warning: .* \\[-Wsizeof-pointer-memaccess\\]",
                             ## usually | for ||, = for == (etc)
                             ": warning: suggest parentheses around (comparison|assignment)",
                             ": warning: .* \\[-Wstringop", # mainly gcc8
                             ": warning: .* \\[-Wclass-memaccess\\]", # gcc8
                             ## used for things deprecated in C++11, for example
                             ": warning: .* \\[-Wdeprecated\\]",
                             ": warning: .* \\[-Waligned-new",
                             ## new in gcc 8
                             ": warning: .* \\[-Wcatch-value=\\]",
                             ## removed 2020-05, nowadays clang only
                             ## ": warning: .* \\[-Wunused-value\\]",
                             ## warning in g++, fatal in clang++.
                             ": warning: .* \\[-Wnarrowing\\]",
                             ## includes -Waddress-of-packed-member
                             ": warning: .* \\[-Waddress",
                             ": warning: .* \\[-Woverflow",
                             ## -pedantic warning in gcc, fatal in clang and ODS
                             ": warning: initializer element is not a constant expression",
                             ": warning: range expressions in switch statements are non-standard",
                             ## clang version is
                             ": warning: use of GNU case range extension",
                             ": warning: ordered comparison of pointer with integer zero",
                             ## clang version is
                             ": warning: ordered comparison between pointer and zero",
                             ": warning: initialization of a flexible array member",
                             ## clang version is
                             ": warning: flexible array initialization is a GNU extension",
                             ": warning: C[+][+] designated initializers",
                             ": warning: designated initializers are a C99 feature",
                             ## Fatal, not warning, for clang and Solaris ODS
                             ": warning: .* with a value, in function returning void",
                             ": warning: .*\\[-Wlto",
                             ": warning: .*\\[-Wodr\\]",
                             ": warning: .*\\[-Wswitch\\]",
                             ": warning: line number out of range",
                             ## gcc 10 some -fanalyzer warnings
                             ": warning: .*\\[-Wanalyzer-null-dereference\\]",
                             ": warning: .*\\[-Wanalyzer-double-free\\]",
                             ": warning: .*\\[-Wanalyzer-malloc-leak\\]",
                             ": warning: .*\\[-Wanalyzer-file-leak\\]",
                             ": warning: .*\\[-Wanalyzer-use-after-free\\]",
                             ": warning: .*\\[-Wanalyzer-free-of-non-heap\\]",
                             ": warning: .*\\[-Wint-in-bool-context\\]",
                             ## gcc and clang
                             ": warning: .*\\[-Wpointer-sign\\]",
                             ## gcc's version of clang's -Wformat
                             ": warning: .* \\[-Wformat=\\]",
                             ## gcc and clang with -Wstrict-prototypes
                             ": warning: .* \\[-Wstrict-prototypes\\]",
                             ## clang-15 variant
                             ": warning: .* \\[-Wdeprecated-non-prototype\\]",
                             ## gcc and clang reports on use of #warning
                             ## but not suppressing the warning itself.
                             "\\[-Wcpp\\] ",
                             "\\[-W#warnings\\]",
                             "\\[-Wrange-loop-construct\\]",
                             "\\[-Warray-parameter=\\]",
                             ## GCC 14's C++ stdlib (as seen for TMB headers)
                             "\\[-Wtemplate-id-cdtor\\]",
                             ## clang version (not Apple clang)
                             "\\[-Warray-parameter\\]",
                             "\\[-Wuse-after-free\\]",
                             ## rustc
                             "^warning: use of deprecated"
                             )

                ## warning most seen with -D_FORTIFY_SOURCE
                warn_re <- c(warn_re,
                             ": warning: .* \\[-Wunused-result\\]", # also clang
                             ": warning: .* \\[-Warray-bounds\\]",
                             ": warning: .* \\[-Wrestrict\\]"
                             )

                ## clang warnings
                warn_re <- c(warn_re,
                             ": warning: .* GNU extension",
                             ": warning: .* \\[-Wdeprecated-register\\]",
                             ## skip some of these below
                             ": warning: .* \\[-Wdeprecated-declarations\\]",
                             ": warning: .* \\[-Wformat-extra-args\\]", # also gcc
                             ": warning: .* \\[-Wformat-security\\]",
                             ": warning: .* \\[-Wformat-insufficient-args\\]",
                             ": warning: .* \\[-Wheader-guard\\]",
                             ": warning: .* \\[-Wpointer-arith\\]",
                             ": warning: .* \\[-Wunsequenced\\]",
                             ": warning: .* \\[-Wvla-extension\\]",
                             ": warning: .* \\[-Wmismatched-new-delete\\]",
                             ": warning: .* \\[-Wabsolute-value\\]",
                             ": warning: .* \\[-Wreorder\\]", # also gcc
                             ": warning: .* \\[-Wself-assign",
                             ": warning: .* \\[-Wtautological",  # also gcc
                             ": warning: .* \\[-Wincompatible-pointer-types\\]",
                             ": warning: format string contains '[\\]0'",
                             ## Apple's clang warns about this even in C++11 mode
                             ## ": warning: .* \\[-Wc[+][+]11-long-long\\]",
                             ": warning: empty macro arguments are a C99 feature",
                             ": warning: .* \\[-Winvalid-source-encoding\\]",
                             ": warning: .* \\[-Wunused-command-line-argument\\]",
                             ": warning: .* \\[-Wxor-used-as-pow\\]", # clang 10
                             ": warning: .* \\[-Winconsistent-missing-override\\]",
                             ": warning: .* \\[-Wsizeof-array-div\\]",
                             ": warning: .* \\[-Wvarargs\\]",
                             ## also on gcc, but fewer warnings
                             ": warning: .* \\[-Wlogical-not-parentheses\\]",
                             ## For non-portable flags (seen in sub-Makefiles)
                             "warning: .* \\[-Wunknown-warning-option\\]",
                             "warning: .* \\[-Wnested-anon-types\\]",
                             "warning: .* is not needed and will not be emitted",
                             "warning: .* \\[-Wnon-literal-null-conversion\\]",
                             "warning: .* \\[-Wignored-optimization-argument\\]",
                             ## thinkos like <- for = or == for =
                             "warning: .* \\[-Wunused-comparison\\]",
                             "warning: .* \\[-Wliteral-conversion\\]",
                             "warning: .* \\[-Wempty-body\\]",
                             "warning: .* \\[-Wformat\\]",
                             "warning: .* \\[-Wreturn-stack-address\\]",
                             "warning: .* \\[-Wuninitialized\\]",
                             ## clang only
                             "warning: .* \\[-Wuninitialized-const-reference\\]",
                             ## also gcc
                             "warning: .* \\[-Wsizeof-pointer-div\\]",
                             "warning: .* \\[-Wnon-c-typedef-for-linkage\\]",
                             "warning: .* \\[-Wc\\+\\+14-extensions\\]",
                             "warning: .* \\[-Wc\\+\\+17-extensions\\]",
                             "warning: .* \\[-Wc\\+\\+20-extensions\\]",
                             "warning: .* \\[-Wc\\+\\+23-extensions\\]",
                             "warning: .* \\[-Wc\\+\\+2b-extensions\\]",
                             ## LLVM clang 15 versions
                             "warning: .* \\[-Wc\\+\\+14-attribute-extensions\\]",
                             "warning: .* \\[-Wc\\+\\+17-attribute-extensions\\]",
                             "warning: .* \\[-Wc\\+\\+20-attribute-extensions\\]",
                             "warning: .* \\[-Wc\\+\\+23-attribute-extensions\\]",
                             "warning: .* \\[-Wc\\+\\+2b-attribute-extensions\\]",
                             ## consider also -Wbitwise-op-parentheses
                             ## -Wlogical-op-parentheses
                             ## -Wshift-op-parentheses
                             ## LLVM clang 14, at least
                             "warning: .* \\[-Wbitwise-instead-of-logical\\]",
                             "warning: .* \\[-Wunneeded-internal-declaration\\]",
                             ## LLVM clang 15
                             "warning: .* \\[-Winvalid-utf8\\]",
                             "warning: .* \\[-Wunqualified-std-cast-call\\]",
                             "warning: .* \\[-Wincompatible-pointer-types-discards-qualifiers\\]",

                             ## LLVM clang 16
                             " warning: use of unary operator that may be intended as compound assignment",

                             ## Apple and LLVM clang
                             " warning: switch condition has boolean value \\[-Wswitch-bool\\]",
                             " warning: .* \\[-Wembedded-directive\\]",
                             " warning: using directive refers to implicitly-defined namespace",
                             ## same flag but different wording for clang++ 19
                             ## C99 and C++11 require at least one argument:
                             ## this is relaxed in C23 and C++20.
                             "\\[-Wgnu-zero-variadic-macro-arguments\\]",

                             ## LLVM flang warnings:
                             ## Includes Hollerith constants
                             ## does not complain about 'Shared DO termination'
                             "(portability: A DO loop should terminate with an END DO or CONTINUE|portability: deprecated usage|in the context: arithmetic IF statement)",
                             ## LLVM >= 18 clang++
                             ": warning: .* \\[-Wdeprecated-literal-operator\\]"
                             )

                warn_re <- paste0("(", paste(warn_re, collapse = "|"), ")")

                lines <- grep(warn_re, lines, value = TRUE, useBytes = TRUE)

                ## Filter out BH header warnings
                ex_re <- "BH/include/boost/.*\\[-Wdeprecated-literal-operator\\]"
                lines <- filtergrep(ex_re, lines, useBytes = TRUE)

                ## "gcc (even 9) seems not to know the size of pointers, so skip
                ## some from -Walloc-size-larger-than= and -Wstringop-overflow="
##                lines <- grep("exceeds maximum object size.*-W(alloc-size-larger-than|stringop-overflow)", lines,
                ## Skip those from -Wstringop-overflow=
                ## The alloc-size ones are genuine,
                ## seen from malloc, alloc and (C++) new called with 'int' size
                lines <- grep("exceeds maximum object size.*-Wstringop-overflow", lines,
                              value = TRUE, useBytes = TRUE, invert = TRUE)

                ## Filter out boost/armadillo header warnings
                ## 2023-01: still have class-memaccess from BH with gcc
                ex_re <- "(BH/include/boost|RcppArmadillo/include/armadillo_bits)/.*\\[-W(tautological-overlap-compare|class-memaccess)\\]"
                lines <- filtergrep(ex_re, lines, useBytes = TRUE)

                ## Filter out Eigen header warnings
                ex_re <- "(RcppEigen/include/Eigen)/.*\\[-Wtautological-compare\\]"
                lines <- filtergrep(ex_re, lines, useBytes = TRUE)

                ## Filter out StanHeader warnings
                ex_re <- "StanHeaders/.*\\[-Wunneeded-internal-declaration\\]"
                lines <- filtergrep(ex_re, lines, useBytes = TRUE)

                ## and GNU extensions in system headers
                ex_re <- "^ *(/usr/|/opt/).*GNU extension"
                lines <- filtergrep(ex_re, lines, useBytes = TRUE)

                ## and -Wstrict-prototypes in what should be thought of
                ## as system headers.
                ex_re <- "^ *(/usr/include|/opt/R/arm64/include).*\\[-Wstrict-prototypes\\]"
                lines <- filtergrep(ex_re, lines, useBytes = TRUE)

                ## filter out Complex.h warnings from C++ compilers
                ## (g++ -pedantic,
                ## clang++ -Wgnu-anonymous-struct, -Wc99-extensions)
                ## as users can do nothing about these.
                ex_re <-"/include/R_ext/Complex.h"
                lines <- filtergrep(ex_re, lines, useBytes = TRUE)

                ## and ODS 12.5 warnings
                ex_re <- "^Warning: [[:alnum:]]+ hides"
                lines <- filtergrep(ex_re, lines, useBytes = TRUE)

                ## and gfortran 9 warnings about F2018
                ## No longer filtered in R 4.5.0.
                ## Many are errors with -std=f2018
                ## ex_re <- "^Warning: Fortran 2018 deleted feature:"
                ## lines <- filtergrep(ex_re, lines, useBytes = TRUE)

                ## and gfortran 10 warnings
                ex_re <- "^Warning: Array.*is larger than limit set"
##                ex_re <- "^(Warning: Rank mismatch between actual argument|Warning: Array.*is larger than limit set)"
                lines <- filtergrep(ex_re, lines, useBytes = TRUE)

                ## Filter out gcc 12 warnings that are not certain
                ex_re <- "may be used after.*\\[-Wuse-after-free\\]"
                lines <- filtergrep(ex_re, lines, useBytes = TRUE)

                ## And deprecated declarations in Eigen and boost
                ## unary_function in boost < 1.81, auto_ptr in boost/smart_ptr.
                ex_re <- "(include/Eigen|include/boost|boost/smart_ptr).* warning: .* \\[-Wdeprecated-declarations\\]"
                lines <- filtergrep(ex_re, lines, useBytes = TRUE)

                ## and -Wstrict-prototypes in what should be thought of as
                ## system headers.
                ex_re <- "/usr/lib/mxe/usr/x86_64-w64-mingw32.static.posix/include.*\\[-Wstrict-prototypes\\]"
                lines <- filtergrep(ex_re, lines, useBytes = TRUE)

                ## and -Wpedantic in what should be thought of as system
                ## headers.
                ex_re <- "/usr/lib/mxe/usr/x86_64-w64-mingw32.static.posix/include.*\\[-Wpedantic\\]"
                lines <- filtergrep(ex_re, lines, useBytes = TRUE)

                ## Ignore install-time readLines() warnings about
                ## files with incomplete final lines.  Most of these
                ## come from .install_package_indices(), and should be
                ## safe to ignore ...
                lines <- filtergrep("Warning: incomplete final line found by readLines",
                                    lines, useBytes = TRUE)

                check_Stangle <- Sys.getenv("_R_CHECK_STANGLE_WARNINGS_", "TRUE")
                if (!config_val_to_logical(check_Stangle))
                lines <- filtergrep("Warning: value of .* option should be lowercase",
                                    lines, useBytes = TRUE)

                ## Package writers cannot really do anything about
                ## non ISO C code in *system* headers.  Also,
                ## GCC >= 3.4 warns about function pointers
                ## casts which are "needed" for dlsym(), but it
                ## seems that all systems which have dlsym() also
                ## support the cast.  Hence, try to ignore these by
                ## default, but make it possible to get all ISO C
                ## warnings via an environment variable.
                if (!R_check_all_non_ISO_C) {
                    lines <- filtergrep("^ */.*: warning: .*ISO C",
                                        lines, useBytes = TRUE)
                    lines <- filtergrep("warning: *ISO C forbids.*function pointer",
                                        lines, useBytes = TRUE)
                    if(WINDOWS) lines <- filtergrep(
                                    "warning: *ISO C does not support.*ms_printf length modifier",
                                    lines, useBytes = TRUE)
                }

                ## Warnings spotted by gcc with
                ##   '-Wunused'
                ## which is implied by '-Wall'.
                ## Currently only accessible via an internal environment
                ## variable.
                check_src_flag <-
                    Sys.getenv("_R_CHECK_SRC_MINUS_W_UNUSED_", "FALSE")
                if (!config_val_to_logical(check_src_flag)) {
                    lines <- filtergrep("warning: unused", lines,
                                        ignore.case = TRUE, useBytes = TRUE)
                    lines <- filtergrep("warning: .* set but not used", lines,
                                        ignore.case = TRUE, useBytes = TRUE)
                }
                ## (gfortran seems to use upper case.)

                ## Warnings spotted by clang with
                ##   '-Wsometimes-uninitialized'
                ## which is implied by '-Wall'.
                ## Currently only accessible via an internal environment
                ## variable.
                check_src_flag <-
                    Sys.getenv("_R_CHECK_SRC_MINUS_W_SOMETIMES_UNINITIALIZED_",
                               "FALSE")
                if (!config_val_to_logical(check_src_flag)) {
                    lines <- filtergrep("warning: .* is used uninitialized whenever",
                                        lines, useBytes = TRUE)
                }

                ## Warnings spotted by gfortran >= 4.0 with '-Wall'.
                ## Justified in principle, it seems.
                ## Let's filter them for the time being, and maybe
                ## revert this later on ... but make it possible to
                ## suppress filtering out by setting the internal
                ## environment variable _R_CHECK_WALL_FORTRAN_ to
                ## something "true".
                ## All gfortran -Wall warnings start Warning: so have been
                ## included.  We exclude some now.

                check_src_flag <- Sys.getenv("_R_CHECK_WALL_FORTRAN_", "FALSE")
                if (!config_val_to_logical(check_src_flag)) {
                    warn_re <-
                        c("Label .* at \\(1\\) defined but not used",
                          "Line truncated at \\(1\\)", # none currently
                          ## None of these left
                          ## "ASSIGN statement at \\(1\\)",
                          ## "Assigned GOTO statement at \\(1\\)",
                          ## "arithmetic IF statement at \\(1\\)",
                          ## Reported as from 2024-09
                          ## "Obsolescent feature:",
                          ## see e.g. https://fortranwiki.org/fortran/show/Modernizing+Old+Fortran
                          "Obsolescent feature: Statement function",
                          "Nonconforming tab character (in|at)")
                    warn_re <- c(warn_re,
                                 "Warning: .*\\[-Wconversion]",
                                 ## We retain [-Wuninitialized]
                                 "Warning: .*\\[-Wmaybe-uninitialized]",
                                 ## Reported as from 2024-09
                                 ## "Warning: .*\\[-Wintrinsic-shadow]",
                                 ## R itself uses these, the latter in LAPACK
                                 "Warning: GNU Extension: DOUBLE COMPLEX",
                                 "Warning: GNU Extension: .*COMPLEX[*]16"
                                )
                    check_src_flag <-
                        Sys.getenv("_R_CHECK_SRC_MINUS_W_UNUSED_", "FALSE")
                    if (!config_val_to_logical(check_src_flag))
                        warn_re <- c(warn_re,
                                     "Warning: .*\\[-Wunused-function]",
                                     "Warning: .*\\[-Wunused-dummy-argument]")
                    warn_re <- paste0("(", paste(warn_re, collapse = "|"), ")")
                    lines <- filtergrep(warn_re, lines)
                }

                if (WINDOWS) {
                    ## Warning on Windows with some packages that
                    ## cannot transparently be installed bi-arch.
                    lines <- filtergrep("Warning: this package has a non-empty 'configure.ucrt' file",
                                        lines)
                    lines <- filtergrep("Warning: this package has a non-empty 'configure.win' file",
                                        lines)
                    ## Warning on x64 Windows gcc 4.5.1 that
                    ## seems to be spurious
                    lines <- filtergrep("Warning: .drectve .* unrecognized", lines)
                }

                check_imports_flag <-
                    Sys.getenv("_R_CHECK_REPLACING_IMPORTS_", "TRUE")
                if (!config_val_to_logical(check_imports_flag))
                    lines <- filtergrep("Warning: replacing previous import", lines,
                                        fixed = TRUE)
                else {
                    this <- unique(grep("Warning: replacing previous import",
                                        lines, fixed = TRUE, value = TRUE))
                    this <- grep(paste0("when loading .*", pkgname, ".*$"), this,
                                 value = TRUE)
                    lines <- filtergrep("Warning: replacing previous import", lines,
                                        fixed = TRUE)
                    lines <- c(lines, this)
                }
                check_FirstLib_flag <-
                    Sys.getenv("_R_CHECK_DOT_FIRSTLIB_", "FALSE")
                if (!config_val_to_logical(check_FirstLib_flag))
                    lines <- filtergrep("Warning: ignoring .First.lib()", lines,
                                        fixed = TRUE)

                ## <FIXME>
                ## Building with --enable-prebuilt-html warns about
                ## missing links for Rd xrefs with missing package
                ## anchors (unless in recommended packages).  For now,
                ## filter these out when noting the missing anchors.
                ## Remove eventually ...
                ## Note also that further above we explicitly arrange to
                ## get these Rd warnings from the install log ...
                if(config_val_to_logical(Sys.getenv("_R_CHECK_XREFS_NOTE_MISSING_PACKAGE_ANCHORS_",
                                                    "FALSE"))) {
                    lines <- filtergrep("Rd warning:.*: missing link",
                                        lines, useBytes = TRUE)
                }
                ## </FIXME>
                
                lines <- unique(lines)

                ## Can get reports like
                ## Warning: No generic function `as.vector' found corresponding to requested imported methods from package `Matrix' when loading `MatrixModels' (malformed exports?)
                ## Exclude these unless they are about the current package.
                load_re <- "Warning: No generic function.*corresponding to requested imported methods"
                ex <- grepl(load_re, lines, useBytes = TRUE) &
                    !grepl(pkgname, lines, fixed = TRUE, useBytes = TRUE)
                lines <- lines[!ex]

                ## byte-compilation errors, often from bugs there
                this <- grep("Error: compilation failed -",
                             lines0, value = TRUE)
                lines <- c(lines, unique(this))

                note_re <-
                    "warning: control may reach end of non-void function"

                notes <- grep(note_re, lines0, value = TRUE, useBytes = TRUE)
                notes <- unique(notes)
                if (any(grepl("using non-staged installation", lines0,
                               useBytes = TRUE)))
                    notes <- c("Non-staged installation was used", notes)
                if (length(lines)) {
                    warningLog(Log, "Found the following significant warnings:")
                    printLog0(Log, .format_lines_with_indent(lines), "\n")
                    if(length(notes)) {
                        printLog(Log,
                                 "Found the following additional notes/warnings:\n")
                        printLog0(Log, .format_lines_with_indent(notes),
                                  "\n")
                    }
                    printLog0(Log, sprintf("See %s for details.\n",
                                           sQuote(outfile)))
                    if(any(grepl("^Note:", lines, useBytes = TRUE)))
                        wrapLog("Information on the location(s)",
                                "of code generating the",
                                paste0(sQuote("Note"), "s"),
                                "can be obtained by re-running with",
                                "environment variable R_KEEP_PKG_SOURCE",
                                "set to 'yes'.\n")

                } else if(length(notes)) {
                    noteLog(Log, "Found the following notes/warnings:")
                    printLog0(Log, .format_lines_with_indent(notes), "\n")
                    printLog0(Log, sprintf("See %s for details.\n",
                                           sQuote(outfile)))
                } else {
                    if(is.null(tOK_msg)) resultLog(Log, "OK")
                    else noteLog(Log)
                }
                if(!is.null(tOK_msg))
                    printLog0(Log, tOK_msg)
                if (length(lines00)) {
                    ll <- sub("using", "used", lines00)
                    for (l in ll)  messageLog(Log, l)
                }
                line <- unique(grep("^using C[+][+][12]", lines0, value = TRUE))
                if (length(line)) {
                    checkingLog(Log, "C++ specification")
                    std <- as.numeric(sub("using C[+][+]", "", line))
                    if (std < 17) {
                        noteLog(Log,
                                sprintf("  Specified C++%d: please drop specification unless essential", std))
                    ## since R 4.4.0 C++17 support is required, but
                    ## C++20/23 support is patchy
                    } else if (std >= 20) {
                         resultLog(Log, "OK")
                         printLog(Log,
                                  sprintf("  Not all R platforms support C++%s\n", std))
                    } else resultLog(Log, "OK")
                }
            }   ## end of case B
        }
    } ## {check_install()}

    ## This requires a GNU-like 'du' with 1k block sizes,
    ## so use -k (which POSIX requires).
    ## It also depends on the total being last.
    check_install_sizes <- function()
    {
        ## Report the installed size of the package, flagging (as an
        ## INFO or NOTE) packages whose total exceeds the threshold
        ## given (in Mb) by _R_CHECK_PKG_SIZES_THRESHOLD_ (default 5),
        ## and listing top-level sub-directories of 1Mb or more.
        ## Relies on 'du -k' output with the grand total printed last.
        pd <- file.path(libdir, pkgname)
        ## if we used a log, the installation would not need to remain.
        if (!dir.exists(pd)) return()
        checkingLog(Log, "installed package size")
        owd <- setwd(pd)
        ## Restore the working directory even if 'du' or a log call
        ## fails below: previously a bare setwd(owd) at the end could
        ## be skipped on error, leaving the process in pd.
        on.exit(setwd(owd), add = TRUE)
        res <- system2("du", "-k", TRUE, TRUE)
        sizes <- as.integer(sub("\\D.*", "", res))
        dirs <- sub("^\\d*\\s*", "", res)
        res2 <- data.frame(size = sizes, dir = I(dirs))
        ## 'du' prints the grand total as its last line.
        total <- res2[nrow(res2), 1L]
        if(!is.na(total) &&
           total > 1024 * as.numeric(Sys.getenv("_R_CHECK_PKG_SIZES_THRESHOLD_", unset = 5)) && # report at 5Mb
           pkgname != "Matrix") { # <- large recommended package
            if(R_check_use_log_info)
                infoLog(Log)
            else
                noteLog(Log)
            printLog(Log, sprintf("  installed size is %4.1fMb\n", total/1024))
            ## Drop the total line, then keep only top-level
            ## directories of more than 1Mb.
            rest <- res2[-nrow(res2), ]
            rest[, 2L] <- sub("./", "", rest[, 2L], fixed=TRUE)
            ## keep only top-level directories
            rest <- rest[!grepl("/", rest[, 2L]), ]
            rest <- rest[rest[, 1L] > 1024, ] # > 1Mb
            if(nrow(rest)) {
                o <- sort.list(rest[, 2L])
                printLog(Log, "  sub-directories of 1Mb or more:\n")
                size <- sprintf('%4.1fMb', rest[, 1L]/1024)
                printLog0(Log,
                          paste0("    ", format(rest[o, 2L], justify = "left"),
                                 "  ", format(size[o], justify = "right"), "\n"))
            }
        } else resultLog(Log, "OK")
    }

    ## Locate and parse <pkg>/DESCRIPTION, validate it (mandatory fields,
    ## portable package name, Type, defunct bundles), and return the parsed
    ## metadata as a named character vector.  Any fatal problem is logged
    ## and terminates the whole check run via do_exit().
    check_description <- function()
    {
        checkingLog(Log, "for file ",
                    sQuote(file.path(pkgname0, "DESCRIPTION")))
        if ("DESCRIPTION" %in% dir(pkgdir)) {
            f <- file.path(pkgdir, "DESCRIPTION")
            ## .read_description() signals an error on malformed files;
            ## capture it so we can log before exiting.
            desc <- tryCatch(.read_description(f), error = identity)
            if(inherits(desc, "error")) {
                errorLog(Log, conditionMessage(desc))
                summaryLog(Log)
                do_exit(1L)
            } else if(!length(desc)) {
                errorLog(Log, "File DESCRIPTION exists but is not in correct format")
                summaryLog(Log)
                do_exit(1L)
            }
            ## Fields that must be present and non-empty in any package.
            mandatory <- c("Package", "Version", "License", "Description",
                           "Title", "Author", "Maintainer")
            OK <- vapply(desc[mandatory],
                         function(x) !is.na(x) && nzchar(x),
                         NA)
            if(!all(OK)) {
                fail <- mandatory[!OK]
                msg <- ngettext(length(fail),
                                "Required field missing or empty:",
                                "Required fields missing or empty:")
                msg <- paste0(msg, "\n", .pretty_format(fail))
                errorLog(Log, msg)
                summaryLog(Log)
                do_exit(1L)
            }
            ## Portable name: starts with a letter, then letters/digits/dot,
            ## at least 2 characters, and must not end with a dot.
            if(!grepl("^[[:alpha:]][[:alnum:].]*[[:alnum:]]$", desc["Package"])
               || endsWith(desc["Package"], ".")) {
                warningLog(Log)
                printLog(Log,"  Package name is not portable:\n",
                         "  It must start with a letter, contain letters, digits or dot\n",
                         "  have at least 2 characters and not end with a dot.\n")
            } else resultLog(Log, "OK")
            encoding <- desc["Encoding"]
        } else if (file.exists(f <- file.path(pkgdir, "DESCRIPTION"))) {
            ## dir() did not list "DESCRIPTION" exactly, yet file.exists()
            ## succeeds: a case-insensitive filesystem match, e.g. 'description'.
            errorLog(Log,
                     "File DESCRIPTION does not exist but there is a case-insensitive match.")
            summaryLog(Log)
            do_exit(1L)
        } else {
            errorLog(Log,
                     "File DESCRIPTION does not exist")
            summaryLog(Log)
            do_exit(1L)
        }
        ## Only 'Type: Package' (or no Type field at all) can be checked.
        if(!is.na(desc["Type"])) { # standard packages do not have this
            checkingLog(Log, "extension type")
            if(desc["Type"] != "Package") {
                errorLog(Log,
                         sprintf("Extensions with Type %s cannot be checked.",
                                 sQuote(desc["Type"])))
                summaryLog(Log)
                do_exit(0L)
            } else resultLog(Log, desc["Type"])
        }
        if(!is.na(desc["Bundle"])) {
            checkingLog(Log, "package bundle")
            errorLog(Log,
                     sprintf("Looks like %s is a package bundle -- they are defunct",
                             sQuote(pkgname0)))
            summaryLog(Log)
            do_exit(1L)
        }

        messageLog(Log,
                   sprintf("this is package %s version %s",
                           sQuote(desc["Package"]),
                           sQuote(desc["Version"])))

        ## 'encoding' was assigned above; this point is only reached when
        ## the DESCRIPTION file existed and parsed.
        if (!is.na(encoding))
            messageLog(Log, "package encoding: ", encoding)

        desc
    }

    ## Run the CRAN submission feasibility checks (timed) and classify the
    ## findings as ERROR / WARNING / NOTE / INFO / OK.  'localOnly' skips
    ## checks that need network access; 'pkgSize' is the tarball size.
    check_CRAN_incoming <- function(localOnly, pkgSize)
    {
        checkingLog(Log, "CRAN incoming feasibility")
        t1 <- proc.time()
        res <- .check_package_CRAN_incoming(pkgdir, localOnly, pkgSize)
        t2 <- proc.time()
        print_time(t1, t2, Log)
        if(length(res)) {
            bad <- FALSE
            out <- format(res)
            if(length(out) == 1L && startsWith(out, "Maintainer: ")) {
                ## Special-case when there is only the maintainer
                ## address to note (if at all).
                ## We used to note via 'Note_to_CRAN_maintainers' unless
                ## it agreed with the _R_CHECK_MAINTAINER_ADDRESS_ env
                ## var which apparently never got used.
                ## As of 2024-09, nobody remembers why we did either: so
                ## simply say OK.
                resultLog(Log, "OK")
                out <- character()
            } else if(length(res$bad_package)) {
                errorLog(Log)
                bad <- TRUE
            } else if(length(res$bad_version) ||
                      length(res$strong_dependencies_not_in_mainstream_repositories) ||
                      isTRUE(res$foss_with_BuildVignettes) ||
                      isTRUE(res$Maintainer_invalid_or_multi_person) ||
                      isTRUE(res$empty_Maintainer_name) ||
                      isTRUE(res$Maintainer_needs_quotes))
                warningLog(Log)
            else if(length(res) > 1L) {
                ## Downgrade NOTE to INFO when the only findings are the
                ## benign ones listed here (and log-info output is enabled).
                if((all(names(res) %in%
                        c("Maintainer",
                          "spelling",
                          "suggests_or_enhances_not_in_mainstream_repositories",
                          "additional_repositories_analysis_results")))
                   ## Maybe using Filter(NROW, res) is safe enough?
                   && (NROW(res$spelling) == 0L)
                   && (NROW(y <- res$additional_repositories_analysis_results)
                       == length(res$suggests_or_enhances_not_in_mainstream_repositories))
                   && all(y[, 2L] == "yes")
                   && R_check_use_log_info)
                    infoLog(Log)
                else
                    noteLog(Log)
            }
            else resultLog(Log, "OK")
            if(length(out))
                printLog0(Log, c(paste(out, collapse = "\n\n"), "\n"))
            ## Honour --as-cran 'stop on first error' behaviour.
            if(bad) maybe_exit(1L)
        } else resultLog(Log, "OK")
    }

    ## Check the package's NAMESPACE information and its declared
    ## dependencies: namespace parses, no empty importFrom, S4 export
    ## directives have 'methods' as a strong dependency, an R version
    ## requirement when > 500 S3 methods are registered, and all
    ## Depends/Imports/Suggests are resolvable at check time.
    check_dependencies <- function()
    {
        ## Try figuring out whether the package dependencies can be
        ## resolved at run time.  Ideally, the installation
        ## mechanism would do this, and we also do not check
        ## versions ... also see whether vignette and namespace
        ## package dependencies are recorded in DESCRIPTION.

        ## <NOTE>
        ## We are not checking base packages here, so all packages do
        ## have a description file.
        ## </NOTE>

        ## <NOTE>
        ## If a package has a namespace, checking dependencies will
        ## try making use of it without the NAMESPACE file ever
        ## being validated.
        ## Uncaught errors can lead to messages like
        ##   * checking package dependencies ... ERROR
        ##   Error in e[[1]] : object is not subsettable
        ##   Execution halted
        ## which are not too helpful :-(
        ## Hence, we try to intercept this here.

        if (!extra_arch &&
            file.exists(file.path(pkgdir, "NAMESPACE"))) {
            checkingLog(Log, "package namespace information")
            ns <- tryCatch(parseNamespaceFile(basename(pkgdir),
                                              dirname(pkgdir)),
                     error = function(e) {
                         errorLog(Log)
                         printLog0(Log,
                                   "Invalid NAMESPACE file, parsing gives:",
                                   "\n", as.character(e), "\n")
                         msg_NAMESPACE <-
                             c("See section 'Package namespaces'",
                               " in the 'Writing R Extensions' manual.\n")
                         wrapLog(msg_NAMESPACE)
                         summaryLog(Log)
                         do_exit(1L)
                     })
            OK <- TRUE
            ## Look for empty importFrom
            imp <- ns$imports
            lens <- lengths(imp)
            imp <- imp[lens == 2L]
            nm <- sapply(imp, `[[`, 1)
            lens <- vapply(imp, function(x) length(x[[2L]]), 0L)
            bad <- nm[lens == 0L]
            if(length(bad)) {
                OK <- FALSE
                msg <- if(length(bad) == 1L)
                    sprintf("  Namespace with empty importFrom: %s", sQuote(bad))
                else
                    paste0("  Namespaces with empty importFrom:\n",
                           .pretty_format(sort(bad)))
                noteLog(Log, msg)
            }

            ## Look for S4 exports when 'methods' is not a strong dependency:
            ## - loadNamespace() silently ignores S4 export directives when
            ##   there is no S4 metadata; seen: exportClass(<S3 class>)
            ## - 'Suggests: methods' is not sufficient to ensure that S4 exports
            ##   are processed when loading under R_DEFAULT_PACKAGES=NULL
            pi <- .split_description(.read_description(file.path(pkgdir, "DESCRIPTION")))
            dependsMethods <- "methods" %in% c(names(pi$Depends), names(pi$Imports))
            if (!dependsMethods &&
                length(bad <- Filter(length, ns[c("exportClasses",
                                                  "exportMethods",
                                                  "exportClassPatterns")]))) {
                OK <- FALSE
                noteLog(Log, ngettext(length(bad),
                    "Found export directive that requires package 'methods':",
                    "Found export directives that require package 'methods':",
                    domain = NA))
                printLog0(Log, paste0(.pretty_format(names(bad)), collapse = "\n"), "\n")
                wrapLog("Remove all such namespace directives (if obsolete)",
                        "or ensure that the DESCRIPTION Depends or Imports",
                        "field contains 'methods'.")
            }

            ## Check for missing R version requirement
            nS3methods <- nrow(ns$S3methods)
            if (nS3methods > 500L) {
                ## check that this is installable in R 3.0.1
                status <- 0L
                current <- as.numeric_version("3.0.1")
                for(depends in pi$Rdepends2) {
                    ## .check_package_description will insist on these operators
                    if(depends$op %notin% c("<=", ">=", "<", ">", "==", "!="))
                        next
                    status <- if(inherits(depends$version, "numeric_version"))
                        !do.call(depends$op, list(current, depends$version))
                    else {
                        ver <- R.version
                        if (ver$status %in% c("", "Patched")) FALSE
                        else !do.call(depends$op,
                                      list(ver[["svn rev"]],
                                           as.numeric(sub("^r", "", depends$version))))
                    }
                    if(status != 0L)  break
                }
                if (status == 0L) {
                    OK <- FALSE
                    msg <- sprintf("R < 3.0.2 had a limit of 500 registered S3 methods: found %d",
                                   nS3methods)
                    noteLog(Log, msg)
                }
            }
            if(OK) resultLog(Log, "OK")
        }

        checkingLog(Log, "package dependencies")
        ## Everything listed in Depends or Suggests or Imports
        ## should be available for successfully running R CMD check.
        ## \VignetteDepends{} entries not "required" by the package code
        ## must be in Suggests.  Note also that some of us think that a
        ## package vignette must require its own package, which OTOH is
        ## not required in the package DESCRIPTION file.
        ## Namespace imports must really be in Depends.
        res <- .check_package_depends(pkgdir, R_check_force_suggests,
                                      check_incoming, ignore_vignettes)
        if(any(lengths(res) > 0L)) {
            out <- format(res)
            ## Result components that only merit a WARNING/NOTE/INFO;
            ## anything else is an ERROR.
            allowed <- c("suggests_but_not_installed",
                         "enhances_but_not_installed",
                         "many_depends", "many_imports",
                         "skipped",
                         "hdOnly",
                         "orphaned2", "orphaned", "orphaned1",
                         "required_for_checking_but_not_installed",
                         "no_vignettes",
                         if(!check_incoming) "bad_engine")
            if(!all(names(res) %in% allowed)) {
                errorLog(Log)
                printLog0(Log, paste(out, collapse = "\n"), "\n")
                ## BUG FIX: was res$suggested_but_not_installed, which is
                ## never a component name (cf. 'allowed' above), so the
                ## _R_CHECK_FORCE_SUGGESTS_ hint was never printed.
                if(length(res$suggests_but_not_installed))
                   wrapLog("The suggested packages are required for",
                           "a complete check.\n",
                           "Checking can be attempted without them",
                           "by setting the environment variable",
                           "_R_CHECK_FORCE_SUGGESTS_",
                           "to a false value.\n\n")
                wrapLog(msg_DESCRIPTION)
                summaryLog(Log)
                do_exit(1L)
            } else if (length(res$required_for_checking_but_not_installed)) {
                warningLog(Log, "Cannot process vignettes")
                do_vignettes  <<- FALSE
                printLog0(Log, paste(out, collapse = "\n"))
            } else {
                if( length(res[["orphaned"]]) || length(res[["orphaned1"]]) )
                    warningLog(Log)
                else if(R_check_use_log_info)
                    infoLog(Log)
                else
                    noteLog(Log)
                printLog0(Log, paste(out, collapse = "\n"))
                ## if(length(res$orphaned2))
                ##     wrapLog("\nSuggested packages need to be used conditionally:",
                ##             "this is particularly important for",
                ##             "orphaned ones.\n")
            }
        } else resultLog(Log, "OK")
    }

    ## Verify that we are checking a *source* package and look for files
    ## that should not be in one: compiled object files/libraries, leftover
    ## multi-arch build directories, implausible files under src/, and
    ## installed copies of the package.  'any' tracks whether a
    ## WARNING/NOTE header has already been written; 'ini' holds the
    ## "In addition to the above warning(s)" preamble once warnings exist.
    check_sources <- function()
    {
        checkingLog(Log, "if this is a source package")
        ## <NOTE>
        ## This check should be adequate, but would not catch a manually
        ## installed package, nor one installed prior to 1.4.0.
        ## </NOTE>
        if (!is.na(desc["Built"])) {
            errorLog(Log)
            printLog(Log, "Only *source* packages can be checked.\n")
            summaryLog(Log)
            do_exit(1L)
        } else if (!startsWith(install, "check")) {
            ini <- character()
            ## Check for package 'src' subdirectories with object
            ## files (but not if installation was already performed).
            pat <- "(a|o|[ls][ao]|sl|obj|dll)" # Object file/library extensions.
            any <- FALSE   # shadows base::any() deliberately within this function
            srcd <- file.path(pkgdir, "src")
            if (dir.exists(srcd) &&
                length(of <- list_files_with_exts(srcd, pat))) {
                if (!any) warningLog(Log)
                any <- TRUE
                ## Strip everything up to and including "<pkg>/src/".
                of <- sub(paste0(".*/", file.path(pkgname, "src"), "/"),
                          "", of)
                printLog0(Log,
                          sprintf("Subdirectory %s contains apparent object files/libraries\n",
                                  sQuote(file.path(pkgname, "src"))),
                          paste(strwrap(paste(of, collapse = " "),
                                        indent = 2L, exdent = 2L),
                                collapse = "\n"),
                          "\nObject files/libraries should not be included in a source package.\n")
                ini <- ""
            }
            ## A submission had src-i386 etc from multi-arch builds
            ad <- list.dirs(pkgdir, recursive = FALSE)
            if(thispkg_src_subdirs != "no" &&
               any(ind <- grepl("/src-(i386|x64|x86_64|ppc)$", ad))) {
                if(!any) warningLog(Log)
                any <- TRUE
                msg <- ngettext(sum(ind),
                                "Found the following directory with a name of a multi-arch build directory:\n",
                                "Found the following directories with names of multi-arch build directories:\n",
                                domain = NA)
                printLog0(Log,
                          ini,
                          msg,
                          .format_lines_with_indent(basename(ad[ind])),
                          "\n",
                          "Most likely, these were included erroneously.\n")
                ini <- ""
            }
            ## Without a Makefile, warn about files in src/ that are not
            ## recognizable sources, headers or build-control files.
            if (thispkg_src_subdirs != "no" && dir.exists(srcd)) {
                setwd(srcd)
                if (!file.exists("Makefile") &&
                    !file.exists("Makefile.win") &&
                    !file.exists("Makefile.ucrt") &&
                    !(file.exists("Makefile.in") && spec_install)) {
                    ## Recognized extensions for sources or headers.
                    srcfiles <- dir(".", all.files = TRUE)
                    srcfiles <- srcfiles[!dir.exists(srcfiles)]
                    srcfiles <- filtergrep(
                        "(\\.([cfmCM]|cc|cpp|f90|f95|mm|h|o|so)$|^Makevars|-win\\.def|^install\\.libs\\.R$)",
                        srcfiles)
                    if (length(srcfiles)) {
                        if (!any) warningLog(Log)
                        any <- TRUE
                        msg <- c(ini,
                                 paste("Subdirectory",
                                       sQuote("src"),
                                       "contains:"),
                                 strwrap(paste(srcfiles, collapse = " "),
                                         indent = 2, exdent = 2),
                                 strwrap("These are unlikely file names for src files."),
                                 "")
                        printLog0(Log, paste(msg, collapse = "\n"))
                        ini <- ""
                    }
                }
                setwd(startdir)
            }
            ## All remaining checks give notes and not warnings.
            if(length(ini))
                ini <- c("",
                         "In addition to the above warning(s), found the following notes:",
                         "")
            files <- list.files(pkgdir, recursive = TRUE)
            ## Check for object files not directly in src.
            ## (Note that the above does not look for object files in
            ## subdirs of src.)
            bad <- files[grepl(sprintf("\\.%s$", pat), basename(files))]
            bad <- bad[dirname(bad) != "src" |
                       dirname(dirname(bad)) != "."]
            if(length(bad)) {
                if(!any) noteLog(Log)
                any <- TRUE
                msg <- c(ini,
                         "Found the following apparent object files/libraries:",
                         strwrap(paste(bad, collapse = " "),
                                 indent = 2L, exdent = 2L),
                         "Object files/libraries should not be included in a source package.\n")
                printLog0(Log, paste(msg, collapse = "\n"))
                ini <- ""
            }
            ## Check for installed copies of the package in some subdir:
            ## a Meta/ directory holding both package.rds and hsearch.rds
            ## is the signature of an installed package.
            files <- files[basename(dirname(files)) == "Meta"]
            if(length(files) &&
               all(!is.na(match(c("package.rds", "hsearch.rds"),
                                basename(files))))) {
                if(!any) noteLog(Log)
                any <- TRUE
                msg <- c(ini,
                         sprintf("Subdirectory %s seems to contain an installed version of the package.\n",
                                 sQuote(dirname(dirname(files[1L])))))
                printLog0(Log, paste(msg, collapse = "\n"))
            }
            if (!any) resultLog(Log, "OK")
        } else resultLog(Log, "OK")
    }

    ## How to terminate the check run: when 'no.q' is set, signal within
    ## this session via stop()/message() instead of quitting R.
    do_exit <- if (no.q) {
        function(status) {
            signal <- if (status) stop else message
            signal(".check_packages() exit status ", status)
        }
    } else {
        function(status) q("no", status = status, runLast = FALSE)
    }

    ## Terminate the run immediately (after writing the summary) when the
    ## user asked for checking to stop at the first error; otherwise no-op.
    maybe_exit <- function(status = 1L) {
        if (!R_check_exit_on_first_error)
            return(invisible(NULL))
        printLog(Log, "NOTE:  Quitting check on first error.\n")
        summaryLog(Log)
        do_exit(status)
    }

    ## Print the 'R CMD check' command-line usage/help text to stdout.
    ## FIX: the extension list was missing the opening quote on '.tar.gz'.
    Usage <- function() {
        cat("Usage: R CMD check [options] pkgs",
            "",
            "Check R packages from package sources, which can be directories or",
            "package 'tar' archives with extension '.tar', '.tar.gz', '.tar.bz2',",
            "'.tar.xz', '.tar.zstd' or '.tgz'.",
            "",
            "A variety of diagnostic checks on directory structure, index and",
            "control files are performed.  The package is installed into the log",
            "directory and production of the package manual is tested.",
            "All examples and tests provided by the package are tested to see if",
            "they run successfully. Vignettes are re-made from their sources.",
            "",
            "Options:",
            "  -h, --help            print short help message and exit",
            "  -v, --version         print version info and exit",
            "  -l, --library=LIB     library directory used for test installation",
            "                        of packages (default is outdir)",
            "  -o, --output=DIR      directory for output, default is current directory.",
            "           Logfiles, R output, etc. will be placed in 'pkg.Rcheck'",
            "           in this directory, where 'pkg' is the name of the",
            "           checked package",
            "      --no-clean        do not clean 'outdir' before using it",
            "      --no-codoc        do not check for code/documentation mismatches",
            "      --no-examples     do not run the examples in the Rd files",
            "      --no-install      skip installation and associated tests",
            "      --no-tests        do not run code in 'tests' subdirectory",
            "      --no-manual       do not produce the PDF and HTML manuals",
            "      --no-vignettes    do not run R code in vignettes nor build outputs",
            "      --no-build-vignettes  do not build vignette outputs",
            "      --ignore-vignettes    skip all tests on vignettes",
            "      --run-demo        do run R scripts in 'demo' subdirectory",
            "      --run-dontrun     do run \\dontrun sections in the Rd files",
            "      --run-donttest    do run \\donttest sections in the Rd files",
            "      --use-gct         use 'gctorture(TRUE)' when running examples/tests",
            "      --use-valgrind    use 'valgrind' when running examples/tests/vignettes",
            "                        and when rebuilding vignettes",
            "      --timings         record timings for examples",
            "      --install-args=   command-line args to be passed to INSTALL",
            "      --test-dir=       look in this subdirectory for test scripts (default tests)",
            "      --no-stop-on-test-error   do not stop running tests after first error",
            "",
            "Special-purpose options:",
            "      --check-subdirs=default|yes|no",
            "                        run checks on the package subdirectories",
            "                        (default is yes for a tarball, no otherwise)",
            "      --as-cran         select customizations similar to those used",
            "                        for CRAN incoming checking",
            "      --install=skip    skip installation test",
            "      --install=check:  see 'Writing R Extensions'",
            "",
            "The following options apply where sub-architectures are in use:",
            "      --extra-arch      do only runtime tests needed for an additional",
            "                        sub-architecture.",
            "      --multiarch       do runtime tests on all installed sub-archs",
            "      --no-multiarch    do runtime tests only on the main sub-architecture",
            "      --force-multiarch run tests on all sub-archs even for packages",
            "                        with no compiled code",
            "",
            "By default, all test sections are turned on.",
            "",
            "Report bugs at <https://bugs.R-project.org>.", sep="\n")
    }

###--- begin{.check_packages()} "main" ---

    ## Raise the warning level (never lower it) and suppress the printing
    ## of error calls for the duration of the run; restore on exit.
    warnOption <- max(getOption("warn"), warnOption)# notably allow caller to set 2
    op <- options(showErrorCalls=FALSE, warn = warnOption)
    ## NOTE(review): these strings look like R code fragments to be spliced
    ## into scripts run by child R processes later in the file -- confirm.
    opWarn_string <- sprintf("options(warn = %d)", warnOption)
    opW_shE_F_str <- sprintf("options(warn = %d, showErrorCalls=FALSE)\n", warnOption)
    on.exit(options(op), add=TRUE)
    ## When not quitting R at the end, also restore the working directory.
    if(no.q) { ..check.wd.. <- getwd(); on.exit(setwd(..check.wd..), add=TRUE) }

    ## Read in check environment file.
    Renv <- Sys.getenv("R_CHECK_ENVIRON", unset = NA_character_)
    if(!is.na(Renv)) {
        ## Do not read any check environment file if R_CHECK_ENVIRON is
        ## set to empty or something non-existent.
        if(nzchar(Renv) && file.exists(Renv)) readRenviron(Renv)
    } else {
        ## Read in ~/.R/check.Renviron[.rarch] (if it exists).
        ## An arch-specific file takes precedence over the generic one.
        rarch <- .Platform$r_arch
        if (nzchar(rarch) &&
            file.exists(Renv <- paste0("~/.R/check.Renviron.", rarch)))
            readRenviron(Renv)
        else if (file.exists(Renv <- "~/.R/check.Renviron"))
            readRenviron(Renv)
    }

    ## A user might have turned on JIT compilation.  That does not
    ## work well, so mostly disable it.  The original setting is kept in
    ## 'jitstr' (as "R_ENABLE_JIT=<value>") for later use.
    jit <- Sys.getenv("R_ENABLE_JIT")
    jitstr <- if(nzchar(jit)) {
        Sys.setenv(R_ENABLE_JIT = "0")
        paste0("R_ENABLE_JIT=", jit)
    } else character()

    ## When called without explicit args, recover them from the command
    ## line: R CMD passes them joined by the literal token 'nextArg'.
    if (is.null(args)) {
        args <- commandArgs(TRUE)
        ## it seems that splits on spaces, so try harder.
        args <- paste(args, collapse=" ")
        args <- strsplit(args,'nextArg', fixed = TRUE)[[1L]][-1L]
    }

    ## Defaults for the check configuration; the command-line option
    ## parsing below may override any of these.
    clean <- TRUE
    do_codoc <- TRUE
    do_examples <- TRUE
    do_install_arg <- TRUE; install <- ""
    do_tests <- TRUE
    do_vignettes <- TRUE
    do_build_vignettes <- TRUE
    ignore_vignettes <- FALSE
    do_manual <- TRUE
    use_gct <- FALSE
    use_valgrind <- FALSE
    do_timings <- FALSE
    install_args <- NULL
    test_dir <- "tests"
    check_subdirs <- ""           # defaults to R_check_subdirs_strict
    extra_arch <- FALSE
    spec_install <- FALSE
    multiarch <- NA
    force_multiarch <- FALSE
    as_cran <- FALSE
    do_demo <- FALSE
    run_dontrun <- FALSE
    run_donttest <- FALSE
    stop_on_test_error <- TRUE

    ## Positional/valued arguments collected by the parser below.
    libdir <- ""
    outdir <- ""
    pkgs <- character()
    ## Parse the command line one token at a time.  Options taking a
    ## separate value ("-o DIR", "-l LIB") consume the following token by
    ## dropping it from 'args' here; the shared 'args <- args[-1L]' at the
    ## bottom of the loop then drops the option itself.  Anything not
    ## starting with '-' is collected as a package to check.
    while(length(args)) {
        a <- args[1L]
        if (a %in% c("-h", "--help")) {
            Usage()
            do_exit(0L)
        }
        else if (a %in% c("-v", "--version")) {
            cat("R add-on package check: ",
                R.version[["major"]], ".",  R.version[["minor"]],
                " (r", R.version[["svn rev"]], ")\n", sep = "")
            cat("",
                .R_copyright_msg(1997),
                "This is free software; see the GNU General Public License version 2",
                "or later for copying conditions.  There is NO warranty.",
                sep="\n")
            do_exit(0L)
        } else if (a == "-o") {
            if (length(args) >= 2L) {outdir <- args[2L]; args <- args[-1L]}
            else stop("-o option without value", call. = FALSE)
        } else if (substr(a, 1, 9) == "--output=") {
            outdir <- substr(a, 10, 1000)
        } else if (a == "-l") {
            if (length(args) >= 2L) {libdir <- args[2L]; args <- args[-1L]}
            else stop("-l option without value", call. = FALSE)
        } else if (substr(a, 1, 10) == "--library=") {
            libdir <- substr(a, 11, 1000)
        } else if (a == "--no-clean") {
            clean  <- FALSE
        } else if (a == "--no-codoc") {
            do_codoc  <- FALSE
        } else if (a == "--no-examples") {
            do_examples  <- FALSE
        } else if (a == "--no-install") {
            do_install_arg  <- FALSE
        } else if (substr(a, 1, 10) == "--install=") {
            install <- substr(a, 11, 1000)
        } else if (a == "--no-tests") {
            do_tests  <- FALSE
        } else if (a == "--no-build-vignettes") {
            do_build_vignettes  <- FALSE
        } else if (a == "--no-rebuild-vignettes") { # pre-3.0.0 version
            stop("'--no-rebuild-vignettes' is defunct: use '--no-build-vignettes' instead",
                 call. = FALSE, domain = NA)
        } else if (a == "--no-vignettes") {
            do_vignettes  <- FALSE
        } else if (a == "--ignore-vignettes") {
            ## Implies skipping both running and building vignettes.
            ignore_vignettes  <- TRUE
            do_vignettes  <- FALSE
            do_build_vignettes  <- FALSE
        } else if (a == "--no-manual") {
            do_manual  <- FALSE
        } else if (a == "--no-latex") {
            stop("'--no-latex' is defunct: use '--no-manual' instead",
                 call. = FALSE, domain = NA)
        } else if (a == "--run-demo") {
            do_demo  <- TRUE
        } else if (a == "--run-dontrun") {
            run_dontrun  <- TRUE
        } else if (a == "--run-donttest") {
            run_donttest  <- TRUE
        } else if (a == "--use-gct") {
            use_gct  <- TRUE
        } else if (a == "--use-valgrind") {
            use_valgrind  <- TRUE
        } else if (a == "--timings") {
            do_timings  <- TRUE
        } else if (substr(a, 1, 15) == "--install-args=") {
            install_args <- substr(a, 16, 1000)
        } else if (substr(a, 1, 11) == "--test-dir=") {
            test_dir <- substr(a, 12, 1000)
        } else if (substr(a, 1, 16) == "--check-subdirs=") {
            check_subdirs <- substr(a, 17, 1000)
        } else if (a == "--extra-arch") {
            extra_arch  <- TRUE
        } else if (a == "--multiarch") {
            multiarch  <- TRUE
        } else if (a == "--no-multiarch") {
            multiarch  <- FALSE
        } else if (a == "--force-multiarch") {
            force_multiarch  <- TRUE
        } else if (a == "--as-cran") {
            as_cran  <- TRUE
        } else if (a == "--no-stop-on-test-error") {
            stop_on_test_error <- FALSE
        } else if (substr(a, 1, 9) == "--rcfile=") {
            warning("configuration files are not supported as from R 2.12.0")
        } else if (startsWith(a, "-")) {
            ## Unknown options only warn; checking continues.
            message("Warning: unknown option ", sQuote(a))
        } else pkgs <- c(pkgs, a)
        ## Drop the token just processed.
        args <- args[-1L]
    }

    ## Record (for reporting) some of the options used, and derive the
    ## consequences of the --install= variants on the do_* flags.
    opts <- character()
    if (install == "fake") opts <- c(opts, "--install=fake")
    if (!do_install_arg) opts <- c(opts, "--no-install")
    if (install == "no") {
        opts <- c(opts, "--install=no")
        do_install_arg <- FALSE
        ## If we do not install, then we cannot *run* any code.
        ## (Use FALSE, not 0, to keep these flags logical as initialized.)
        do_examples <- do_tests <- do_vignettes <- do_build_vignettes <- FALSE
    }
    ## "check+fake:<file>" behaves as "check:<file>" plus "--install=fake".
    if(startsWith(install, "check+fake")) {
        install <- paste0("check", substring(install, 11L))
        opts <- c(opts, "--install=fake")
    }
    if (run_dontrun) opts <- c(opts, "--run-dontrun")
    if (run_donttest) opts <- c(opts, "--run-donttest")
    opts0 <- opts # other options are added later.

    if (install == "fake") {
        ## If we fake installation, then we cannot *run* any code.
        do_examples <- do_tests <- do_vignettes <- do_build_vignettes <- FALSE
        spec_install <- TRUE
        multiarch <- FALSE
    }

    ## For "--install=check:<file>", characters after "check:" name the
    ## file holding the previous installation log.
    install_log_path <- ""
    if(startsWith(install, "check")) {
        ## Expand relative to absolute if possible.
        install_log_path <-
            tryCatch(file_path_as_absolute(substr(install, 7L, 1000L)),
                     error = function(e) "")
    }

    if (!isFALSE(multiarch)) {
        ## see if there are multiple installed architectures, and if they work
        if (WINDOWS) {
            ## always has sub-archs as from R 2.12.0.
            ## usually if two are installed, it was done on a 64-bit OS,
            ## but the filesystem might be shared between OSes.
            f <- dir(file.path(R.home(), "bin"))
            archs <- f[f %in% c("i386", "x64")]
            ## if we have x64, can only run it on a 64-bit OS
            if (length(archs) > 1L && !grepl("x64", utils::win.version()))
                archs <- "i386"
        } else {
            ## Unix-alikes: sub-architectures appear as entries in bin/exec.
            wd2 <- setwd(file.path(R.home("bin"), "exec"))
            archs <- Sys.glob("*")
            setwd(wd2)
            ## Probe each non-default sub-architecture by running
            ## 'R --arch=<arch> --version'; drop any that fail to run
            ## (system() returns non-zero on failure).
            if (length(archs) > 1L)
                for (arch in archs) {
                    if (arch == rarch) next
                    cmd <- paste0(shQuote(file.path(R.home(), "bin", "R")),
                                  " --arch=", arch,
                                  " --version > /dev/null")
                    if (system(cmd)) archs <- archs[archs != arch]
                }
        }
        ## warn only if multi-arch was explicitly requested but unusable
        if (length(archs) <= 1L && isTRUE(multiarch))
            warning("'--multiarch' specified with only one usable sub-architecture",
                    call.=FALSE, immediate. = TRUE)
        multiarch <- length(archs) > 1L
    }


    ## Use system default unless explicitly specified otherwise.
    Sys.setenv(R_DEFAULT_PACKAGES="")

    ## Configurable variables
    R_check_use_install_log <-
        config_val_to_logical(Sys.getenv("_R_CHECK_USE_INSTALL_LOG_", "TRUE"))
    R_check_subdirs_nocase <-
        config_val_to_logical(Sys.getenv("_R_CHECK_SUBDIRS_NOCASE_", "TRUE"))
    R_check_all_non_ISO_C <-
        config_val_to_logical(Sys.getenv("_R_CHECK_ALL_NON_ISO_C_", "FALSE"))
    R_check_subdirs_strict <-
        Sys.getenv("_R_CHECK_SUBDIRS_STRICT_", "default")
    R_check_Rd_contents <-
        config_val_to_logical(Sys.getenv("_R_CHECK_RD_CONTENTS_", "TRUE"))
    R_check_Rd_line_widths <-
        config_val_to_logical(Sys.getenv("_R_CHECK_RD_LINE_WIDTHS_", "FALSE"))
    R_check_Rd_style <-
        config_val_to_logical(Sys.getenv("_R_CHECK_RD_STYLE_", "TRUE"))
    R_check_Rd_xrefs <-
        config_val_to_logical(Sys.getenv("_R_CHECK_RD_XREFS_", "TRUE"))
    R_check_Rd_internal_too <-
        config_val_to_logical(Sys.getenv("_R_CHECK_RD_INTERNAL_TOO_", "NA"))
    R_check_use_codetools <-
        config_val_to_logical(Sys.getenv("_R_CHECK_USE_CODETOOLS_", "TRUE"))
    ## However, we cannot use this if we did not install the recommended
    ## packages.
    if(R_check_use_codetools) {
        tmp <- tryCatch(find.package('codetools'), error = identity)
        if(inherits(tmp, "error")) R_check_use_codetools <- FALSE
    }
    R_check_executables <-
        config_val_to_logical(Sys.getenv("_R_CHECK_EXECUTABLES_", "TRUE"))
    R_check_executables_exclusions <-
        config_val_to_logical(Sys.getenv("_R_CHECK_EXECUTABLES_EXCLUSIONS_", "TRUE"))
    R_check_permissions <-
        config_val_to_logical(Sys.getenv("_R_CHECK_PERMISSIONS_",
                                         as.character(.Platform$OS.type == "unix")))
    R_check_dot_internal <-
        config_val_to_logical(Sys.getenv("_R_CHECK_DOT_INTERNAL_", "TRUE"))
    R_check_depr_def <-
        config_val_to_logical(Sys.getenv("_R_CHECK_DEPRECATED_DEFUNCT_", "FALSE"))
    R_check_bogus_return <-
        config_val_to_logical(Sys.getenv("_R_CHECK_BOGUS_RETURN_", "FALSE"))
    R_check_ascii_code <-
        config_val_to_logical(Sys.getenv("_R_CHECK_ASCII_CODE_", "TRUE"))
    R_check_ascii_data <-
        config_val_to_logical(Sys.getenv("_R_CHECK_ASCII_DATA_", "TRUE"))
     R_check_compact_data <-
        config_val_to_logical(Sys.getenv("_R_CHECK_COMPACT_DATA_", "TRUE"))
    R_check_vc_dirs <-
        config_val_to_logical(Sys.getenv("_R_CHECK_VC_DIRS_", "FALSE"))
    R_check_pkg_sizes <-
        config_val_to_logical(Sys.getenv("_R_CHECK_PKG_SIZES_", "TRUE")) &&
        nzchar(Sys.which("du"))
    R_check_doc_sizes <-
        config_val_to_logical(Sys.getenv("_R_CHECK_DOC_SIZES_", "TRUE")) &&
        nzchar(Sys.which(Sys.getenv("R_QPDF", "qpdf")))
    R_check_doc_sizes2 <-
        config_val_to_logical(Sys.getenv("_R_CHECK_DOC_SIZES2_", "FALSE"))
    R_check_code_assign_to_globalenv <-
        config_val_to_logical(Sys.getenv("_R_CHECK_CODE_ASSIGN_TO_GLOBALENV_",
                                         "FALSE"))
    R_check_code_attach <-
        config_val_to_logical(Sys.getenv("_R_CHECK_CODE_ATTACH_", "FALSE"))
    R_check_code_data_into_globalenv <-
        config_val_to_logical(Sys.getenv("_R_CHECK_CODE_DATA_INTO_GLOBALENV_",
                                         "FALSE"))

    ## Only relevant when the package is loaded, thus installed.
    R_check_suppress_RandR_message <-
        do_install_arg && config_val_to_logical(Sys.getenv("_R_CHECK_SUPPRESS_RANDR_MESSAGE_", "TRUE"))
    R_check_force_suggests <-
        config_val_to_logical(Sys.getenv("_R_CHECK_FORCE_SUGGESTS_", "TRUE"))
    R_check_skip_tests_arch <-
        unlist(strsplit(Sys.getenv("_R_CHECK_SKIP_TESTS_ARCH_"), ",")[[1]])
    R_check_skip_examples_arch <-
        unlist(strsplit(Sys.getenv("_R_CHECK_SKIP_EXAMPLES_ARCH_"), ",")[[1]])
    R_check_skip_arch <-
        unlist(strsplit(Sys.getenv("_R_CHECK_SKIP_ARCH_"), ",")[[1]])
    R_check_unsafe_calls <-
        config_val_to_logical(Sys.getenv("_R_CHECK_UNSAFE_CALLS_", "TRUE"))
    R_cdo <-
        config_val_to_logical(Sys.getenv("_R_CHECK_DEPENDS_ONLY_", "FALSE"))
    R_cdo_data <-
        config_val_to_logical(Sys.getenv("_R_CHECK_DEPENDS_ONLY_DATA_",
                                         R_cdo))
    R_cdo_examples <-
        config_val_to_logical(Sys.getenv("_R_CHECK_DEPENDS_ONLY_EXAMPLES_",
                                         R_cdo))
    R_cdo_tests <-
        config_val_to_logical(Sys.getenv("_R_CHECK_DEPENDS_ONLY_TESTS_",
                                         R_cdo))
    R_cdo_vignettes <-
        config_val_to_logical(Sys.getenv("_R_CHECK_DEPENDS_ONLY_VIGNETTES_",
                                         R_cdo))
    R_check_suggests_only <-
        config_val_to_logical(Sys.getenv("_R_CHECK_SUGGESTS_ONLY_", "FALSE"))
    R_check_FF <- Sys.getenv("_R_CHECK_FF_CALLS_", "true")
    R_check_FF_DUP <-
        config_val_to_logical(Sys.getenv("_R_CHECK_FF_DUP_", "TRUE"))
    R_check_toplevel_files <-
        config_val_to_logical(Sys.getenv("_R_CHECK_TOPLEVEL_FILES_", "FALSE"))
    R_check_exit_on_first_error <-
        config_val_to_logical(Sys.getenv("_R_CHECK_EXIT_ON_FIRST_ERROR_", "FALSE"))
    R_check_vignettes_skip_run_maybe <-
        config_val_to_logical(Sys.getenv("_R_CHECK_VIGNETTES_SKIP_RUN_MAYBE_",
                                         "TRUE"))
    R_check_serialization <-
        config_val_to_logical(Sys.getenv("_R_CHECK_SERIALIZATION_", "FALSE"))
    R_check_things_in_check_dir <-
        config_val_to_logical(Sys.getenv("_R_CHECK_THINGS_IN_CHECK_DIR_",
                                         "FALSE"))
    R_check_things_in_temp_dir <-
        config_val_to_logical(Sys.getenv("_R_CHECK_THINGS_IN_TEMP_DIR_",
                                         "FALSE"))
    R_check_things_in_others <-
        config_val_to_logical(Sys.getenv("_R_CHECK_THINGS_IN_OTHER_DIRS_",
                                         "FALSE"))
    R_check_vignette_titles <-
        config_val_to_logical(Sys.getenv("_R_CHECK_VIGNETTE_TITLES_",
                                         "FALSE"))
    R_check_code_class_is_string <-
        config_val_to_logical(Sys.getenv("_R_CHECK_CODE_CLASS_IS_STRING_",
                                         "FALSE"))

    tmp <- Sys.getenv("_R_CHECK_RD_VALIDATE_RD2HTML_", "unset")
    R_check_Rd_validate_Rd2HTML <-
        if(tmp == "unset") NA else config_val_to_logical(tmp)
    R_check_Rd_math_rendering <-
        config_val_to_logical(Sys.getenv("_R_CHECK_RD_MATH_RENDERING_",
                                         "FALSE"))

    if (!nzchar(check_subdirs)) check_subdirs <- R_check_subdirs_strict

    R_check_use_log_info <-
        config_val_to_logical(Sys.getenv("_R_CHECK_LOG_USE_INFO_",
                                         "TRUE"))

    if (as_cran) {
        ## '--as-cran' turns on many additional checks expected for
        ## CRAN submissions, mostly via _R_CHECK_* environment variables.
        if (extra_arch) {
            message("'--as-cran' turns off '--extra-arch'")
            extra_arch <- FALSE
        }
        ## settings guarded by a prior Sys.getenv() are only applied if
        ## the user has not already set them, so they remain overridable
        prev <- Sys.getenv("_R_CHECK_TIMINGS_", NA_character_)
        if(is.na(prev)) Sys.setenv("_R_CHECK_TIMINGS_" = "10")
        Sys.setenv("_R_CHECK_INSTALL_DEPENDS_" = "TRUE")
        Sys.setenv("_R_CHECK_NO_RECOMMENDED_" = "TRUE")
        Sys.setenv("_R_SHLIB_BUILD_OBJECTS_SYMBOL_TABLES_" = "TRUE")
        Sys.setenv("_R_CHECK_DOT_FIRSTLIB_" = "TRUE")
        Sys.setenv("_R_CHECK_PACKAGES_USED_CRAN_INCOMING_NOTES_" = "TRUE")
        prev <- Sys.getenv("_R_CHECK_LIMIT_CORES_", NA_character_)
        if(is.na(prev)) Sys.setenv("_R_CHECK_LIMIT_CORES_" = "TRUE")
        prev <- Sys.getenv("_R_CHECK_SCREEN_DEVICE_", NA_character_)
        if(is.na(prev)) Sys.setenv("_R_CHECK_SCREEN_DEVICE_" = "stop")
        Sys.setenv("_R_CHECK_CODE_USAGE_VIA_NAMESPACES_" = "TRUE")
        Sys.setenv("_R_CHECK_CODE_USAGE_WITH_ONLY_BASE_ATTACHED_" = "TRUE")
        Sys.setenv("_R_CHECK_PACKAGE_DATASETS_SUPPRESS_NOTES_" = "TRUE")
        prev <- Sys.getenv("_R_CHECK_PACKAGES_USED_IGNORE_UNUSED_IMPORTS_",
                           NA_character_)
        if(is.na(prev))
            Sys.setenv("_R_CHECK_PACKAGES_USED_IGNORE_UNUSED_IMPORTS_" = "FALSE")
        Sys.setenv("_R_CHECK_NATIVE_ROUTINE_REGISTRATION_" = "TRUE")
        Sys.setenv("_R_CHECK_NO_STOP_ON_TEST_ERROR_" = "TRUE")
        Sys.setenv("_R_CHECK_PRAGMAS_" = "TRUE")
        Sys.setenv("_R_CHECK_COMPILATION_FLAGS_" = "TRUE")
        ## BUG FIX: was Sys.setenv1(), which does not exist and would
        ## error at runtime whenever '--as-cran' was used.
        Sys.setenv("_R_CHECK_R_DEPENDS_" = "warn")
        ## until this is tested on Windows
        Sys.setenv("_R_CHECK_R_ON_PATH_" = if(WINDOWS) "FALSE" else "TRUE")
        Sys.setenv("_R_CHECK_PACKAGES_USED_IN_TESTS_USE_SUBDIRS_" = "TRUE")
        Sys.setenv("_R_CHECK_CONNECTIONS_LEFT_OPEN_" = "TRUE")
        Sys.setenv("_R_CHECK_SHLIB_OPENMP_FLAGS_" = "TRUE")
        Sys.setenv("_R_CHECK_FUTURE_FILE_TIMESTAMPS_" = "TRUE")
        Sys.setenv("_R_CHECK_RD_CONTENTS_KEYWORDS_" = "TRUE")
        Sys.setenv("_R_CHECK_CODOC_VARIABLES_IN_USAGES_" = "TRUE")
        Sys.setenv("_R_CHECK_DATALIST_" = "TRUE")
        if(!WINDOWS) Sys.setenv("_R_CHECK_BASHISMS_" = "TRUE")
        Sys.setenv("_R_CHECK_ORPHANED_" = "TRUE")
        Sys.setenv("_R_CHECK_EXCESSIVE_IMPORTS_" = "20")
        Sys.setenv("_R_CHECK_DEPENDS_ONLY_DATA_" = "TRUE")
##        Sys.setenv("_R_CHECK_XREFS_PKGS_ARE_DECLARED_" = "TRUE")
##        Sys.setenv("_R_CHECK_XREFS_MIND_SUSPECT_ANCHORS_" = "TRUE")
        ## allow this to be overridden if there is a problem elsewhere
        prev <- Sys.getenv("_R_CHECK_MATRIX_DATA_",  NA_character_)
        if(is.na(prev)) Sys.setenv("_R_CHECK_MATRIX_DATA_" = "TRUE")
##        Sys.setenv("_R_NO_S_TYPEDEFS_" = "TRUE")
        Sys.setenv("_R_CHECK_NEWS_IN_PLAIN_TEXT_" = "TRUE")
        Sys.setenv("_R_CHECK_BROWSER_NONINTERACTIVE_" = "TRUE")
        Sys.setenv("_R_CHECK_RD_NOTE_LOST_BRACES_" = "TRUE")
        Sys.setenv("_R_CHECK_MBCS_CONVERSION_FAILURE_" = "TRUE")
        Sys.setenv("_R_CHECK_VALIDATE_UTF8_" = "TRUE")
## next two are the default as from R 4.5.0
##        Sys.setenv("_R_CXX_USE_NO_REMAP_" = "TRUE")
##        Sys.setenv("_R_USE_STRICT_R_HEADERS_" = "TRUE")
        Sys.setenv("_R_CHECK_S3_METHODS_SHOW_POSSIBLE_ISSUES_" = "TRUE")
        Sys.setenv("_R_CHECK_XREFS_NOTE_MISSING_PACKAGE_ANCHORS_" = "TRUE")
        Sys.setenv("_R_CHECK_PACKAGES_USED_IN_DEMO_" = "TRUE")
        ## local (non-environment) check flags also tightened for CRAN
        R_check_vc_dirs <- TRUE
        R_check_executables_exclusions <- FALSE
        R_check_doc_sizes2 <- TRUE
        R_check_suggests_only <- TRUE
        R_check_code_assign_to_globalenv <- TRUE
        R_check_code_attach <- TRUE
        R_check_code_data_into_globalenv <- TRUE
        R_check_depr_def <- TRUE
        R_check_Rd_line_widths <- TRUE
        R_check_FF <- "registration"
        do_timings <- TRUE
        R_check_toplevel_files <- TRUE
        R_check_vignettes_skip_run_maybe <- TRUE
        R_check_serialization <- TRUE
        R_check_things_in_check_dir <- TRUE
        R_check_things_in_temp_dir <- TRUE
        R_check_vignette_titles <- TRUE
        R_check_bogus_return <- TRUE
        R_check_code_class_is_string <- TRUE
        if(is.na(R_check_Rd_validate_Rd2HTML))
            R_check_Rd_validate_Rd2HTML <- TRUE
        R_check_Rd_math_rendering <- TRUE
        R_check_use_log_info <- TRUE

    } else {
        ## do it this way so that INSTALL produces symbols.rds
        ## when called from check but not in general.
        if(is.na(Sys.getenv("_R_SHLIB_BUILD_OBJECTS_SYMBOL_TABLES_",
                            NA_character_)))
            Sys.setenv("_R_SHLIB_BUILD_OBJECTS_SYMBOL_TABLES_" = "TRUE")
    }

    ## needs to be after --as-cran
    td0 <- as.numeric(Sys.getenv("_R_CHECK_TIMINGS_"))
    if (is.na(td0)) td0 <- Inf

    if (extra_arch) {
        R_check_Rd_contents <- R_check_all_non_ISO_C <-
            R_check_Rd_xrefs <- R_check_use_codetools <- R_check_Rd_style <-
                R_check_executables <- R_check_permissions <-
                    R_check_dot_internal <- R_check_bogus_return <- R_check_ascii_code <-
                        R_check_ascii_data <- R_check_compact_data <-
                            R_check_pkg_sizes <- R_check_doc_sizes <-
                                R_check_doc_sizes2 <-
                                    R_check_unsafe_calls <-
                                        R_check_toplevel_files <- FALSE
        R_check_Rd_line_widths <- FALSE
        R_check_code_class_is_string <- FALSE
        R_check_Rd_validate_Rd2HTML <- FALSE
    }

    startdir <- getwd()
    if (is.null(startdir))
        stop("current working directory cannot be ascertained")
    if (!nzchar(outdir)) outdir <- startdir
    setwd(outdir)
    outdir <- getwd()
    setwd(startdir)

    sessdir <- ""
    if (R_check_things_in_temp_dir) {
        ## tempdir() should be unique, so don't need a special name within it
        sessdir <- file.path(tempdir(), "working_dir")
        if (!dir.create(sessdir))
            stop("unable to create working directory for subprocesses",
                 domain = NA)
        Sys.setenv(TMPDIR = sessdir)
    }

    snap <- if (R_check_things_in_others) snapshot() else NULL
    R_LIBS <- Sys.getenv("R_LIBS")
    arg_libdir <- libdir
    if (nzchar(libdir)) {
        setwd(libdir)
        libdir <- getwd()
        Sys.setenv(R_LIBS = path_and_libPath(libdir, R_LIBS))
        setwd(startdir)
    }

    ## all the analysis code is run with --no-echo
    ## examples and tests are not.
    R_opts <- "--vanilla"
    R_opts2 <- "--vanilla --no-echo"
    ## do run Renviron.site for some multiarch runs
    ## We set R_ENVIRON_USER to skip .Renviron files.
    R_opts3 <- "--no-site-file --no-init-file --no-save --no-restore"
    R_opts4 <- "--no-site-file --no-init-file --no-save --no-restore --no-echo"
    env0 <- if(WINDOWS) "R_ENVIRON_USER='no_such_file'" else "R_ENVIRON_USER=''"

    msg_DESCRIPTION <-
        c("See section 'The DESCRIPTION file' in the 'Writing R Extensions' manual.\n")

    if (!length(pkgs)) {
        message("Error: no packages were specified")
        do_exit(1L)
    }

    ## This is the main loop over all packages to be checked.
    for (pkg in pkgs) {
        ## pkg should be the path to the package root source
        ## directory, either absolute or relative to startdir.
        ## As from 2.1.0 it can also be a tarball

        ## The previous package may have set do_install to FALSE
        do_install <- do_install_arg
        no_examples <- FALSE

        ## $pkgdir is the corresponding absolute path.
        ## pkgname0 is the name of the top-level directory
        ## (and often the name of the package).
        setwd(startdir)
        pkg <- sub("/$", "", pkg)       # strip any trailing '/'
        pkgname0 <- basename(pkg)
        is_ascii <- FALSE

        thispkg_subdirs <- check_subdirs
        ## is this a tar archive?
        if (dir.exists(pkg)) {
            is_tar <- FALSE
            if (thispkg_subdirs == "default") thispkg_subdirs <- "no"
        } else if (file.exists(pkg)) {
            is_tar <- TRUE
            if (thispkg_subdirs == "default") thispkg_subdirs <- "yes-maybe"
            pkgname0 <- sub("\\.(tar|tar\\.gz|tgz|tar\\.bz2|tar\\.xz|tar\\.zstd)$", "", pkgname0)
            pkgname0 <- sub("_[0-9.-]*$", "", pkgname0)
        } else {
            warning(sQuote(pkg), " is neither a file nor directory, skipping\n",
                    domain = NA, call. = FALSE, immediate. = TRUE)
            next
        }
        pkgoutdir <- file.path(outdir, paste0(pkgname0, ".Rcheck"))
        if (clean && dir.exists(pkgoutdir)) {
            unlink(pkgoutdir, recursive = TRUE)
            if(WINDOWS) Sys.sleep(0.5) # allow for antivirus interference
        }
        dir.create(pkgoutdir, mode = "0755")
        if (!dir.exists(pkgoutdir)) {
            message(sprintf("ERROR: cannot create check dir %s", sQuote(pkgoutdir)))
            do_exit(1L)
        }

        Log <- newLog(file.path(pkgoutdir, "00check.log"))

        messageLog(Log, "using log directory ", sQuote(pkgoutdir))
        messageLog(Log, "using ", R.version.string)
        sp <- 8*.Machine$sizeof.pointer
        if (sp != 64)
            messageLog(Log, "using platform: ", R.version$platform,
                       " (", sp, "-bit)")
        else
            messageLog(Log, "using platform: ", R.version$platform)
        vers <- R_compiled_by()
        if (any(nzchar(vers))) {
            messageLog(Log, "R was compiled by")
            printLog(Log, paste("   ", vers, collapse = "\n"), "\n")
        }
        osV <- utils::osVersion
        if(!is.null(osV))
            messageLog(Log, "running under: ", osV)
        charset <-
            if (l10n_info()[["UTF-8"]]) "UTF-8" else utils::localeToCharset()
        messageLog(Log, "using session charset: ", charset)
        is_ascii <- charset == "ASCII"

        if(config_val_to_logical(Sys.getenv("_R_CHECK_R_ON_PATH_", "FALSE")))
            add_dummies(file_path_as_absolute(pkgoutdir), Log)

        if (is_tar) {
            dir <- file.path(pkgoutdir, "00_pkg_src")
            dir.create(dir, mode = "0755")
            if (!dir.exists(dir)) {
                checkingLog(Log, "whether tarball can be unpacked")
                errorLog(Log, sprintf("cannot create %s", sQuote(dir)))
                summaryLog(Log)
                do_exit(1L)
            }
            ## force the use of internal untar unless over-ridden
            ## so e.g. .tar.xz works everywhere
            if (utils::untar(pkg, exdir = dir,
                             tar = Sys.getenv("R_INSTALL_TAR", "internal"))) {
                checkingLog(Log, "whether tarball can be unpacked")
                errorLog(Log, sprintf("cannot unpack %s", sQuote(pkg)))
                summaryLog(Log)
                do_exit(1L)
            }
            pkg_size <- file.info(pkg)$size
            ## this assumes foo_x.y.tar.gz unpacks to foo, but we are about
            ## to test that.
            pkg <- file.path(dir, pkgname0)
        } else
            pkg_size <- NA
        if (!dir.exists(pkg)) {
            checkingLog(Log, "package directory")
            errorLog(Log,
                     gettextf("package directory %s does not exist",
                              sQuote(pkg)))
            summaryLog(Log)
            do_exit(1L)
        }
        setwd(pkg)
        pkgdir <- getwd()

        thispkg_src_subdirs <- thispkg_subdirs
        if (thispkg_src_subdirs == "yes-maybe") {
            ## now see if there is a 'configure' file
            ## configure files are only used if executable, but
            ## -x is always false on Windows.
            if (WINDOWS) {
                if (file_test("-f", "configure")) thispkg_src_subdirs <- "no"
            } else {
                if (file_test("-x", "configure")) thispkg_src_subdirs <- "no"
            }
        }
        setwd(startdir)

        .unpack.time <- Sys.time()

        ## report options used
        opts <- opts0
        if (!do_codoc) opts <- c(opts, "--no-codoc")
        if (!do_examples && !spec_install) opts <- c(opts, "--no-examples")
        if (!do_tests && !spec_install) opts <- c(opts, "--no-tests")
        if (!do_manual && !spec_install) opts <- c(opts, "--no-manual")
        if (ignore_vignettes) opts <- c(opts, "--ignore-vignettes")
        else {
            if (!do_vignettes && !spec_install)
                opts <- c(opts, "--no-vignettes")
            if (!do_build_vignettes && !spec_install)
                opts <- c(opts, "--no-build-vignettes")
        }
        if (use_gct) opts <- c(opts, "--use-gct")
        if (use_valgrind) opts <- c(opts, "--use-valgrind")
        if (!stop_on_test_error) opts <- c(opts, "--no-stop-on-test-error")
        if (as_cran) opts <- c(opts, "--as-cran")
        if (length(opts) > 1L)
            messageLog(Log, "using options ", sQuote(paste(opts, collapse=" ")))
        else if (length(opts) == 1L)
            messageLog(Log, "using option ", sQuote(opts))

        if(isTRUE(config_val_to_logical(Sys.getenv("_R_CHECK_NO_STOP_ON_TEST_ERROR_",
                                                   "FALSE"))))
            stop_on_test_error <- FALSE

        if (!nzchar(libdir)) { # otherwise have set R_LIBS above
            libdir <- pkgoutdir
            Sys.setenv(R_LIBS = path_and_libPath(libdir, R_LIBS))
        }
        if (WINDOWS && grepl(" ", libdir)) # need to avoid spaces in libdir
            libdir <- gsub("\\", "/", utils::shortPathName(libdir), fixed = TRUE)

        ## Package sources from the R distribution are special.  They
        ## have a 'DESCRIPTION.in' file (instead of 'DESCRIPTION'),
        ## with Version and License fields containing '@VERSION@' for
        ## substitution by configure.  Earlier bundles had packages
        ## containing DESCRIPTION.in, hence the extra check for
        ## Makefile.in.

        is_base_pkg <- is_rec_pkg <- FALSE
        if (file.exists(f <- file.path(pkgdir, "DESCRIPTION.in")) &&
            file.exists(file.path(pkgdir, "Makefile.in"))) {
            desc <- try(read.dcf(f))
            if (inherits(desc, "try-error") || !length(desc)) {
                errorLog(Log, "File DESCRIPTION exists but is not in correct format")
                summaryLog(Log)
                do_exit(1L)
            }
            desc <- desc[1L, ]
            if (identical(desc["Priority"], c(Priority = "base"))) {    # Priority might be missing
                messageLog(Log, "looks like ", sQuote(pkgname0),
                           " is a base package")
                is_base_pkg <- TRUE
                pkgname <- desc["Package"] # should be same as pkgname0
                if (pkgname == "tcltk" && !capabilities("tcltk")) {
                    do_install <- FALSE
                    messageLog(Log, "stub package: skipping tests requiring installation")
                } else
                    messageLog(Log, "skipping installation test")
            }
        }

        this_multiarch <- multiarch
        elibs <- elibs_tests <- elibs_cdo <- character()
        if (!is_base_pkg) {
            desc <- check_description()
            pkgname <- desc["Package"]
            is_rec_pkg <- identical(desc["Priority"], c(Priority = "recommended"))

            ## Check if we have any hope of installing
            OS_type <- desc["OS_type"]
            if (do_install && !is.na(OS_type)) {
                if (WINDOWS && OS_type != "windows") {
                    messageLog(Log, "will not attempt to install this package on Windows")
                    do_install <- FALSE
                }
                if (!WINDOWS && OS_type == "windows") {
                    messageLog(Log, "this is a Windows-only package, skipping installation")
                    do_install <- FALSE
                }
            } else OS_type <- NA

            check_incoming <- Sys.getenv("_R_CHECK_CRAN_INCOMING_", "NA")
            check_incoming <- if(check_incoming == "NA") as_cran else {
                config_val_to_logical(check_incoming)
            }
            check_incoming_remote <- Sys.getenv("_R_CHECK_CRAN_INCOMING_REMOTE_", "NA")
            check_incoming_remote <- if(check_incoming_remote == "NA") as_cran else {
                config_val_to_logical(check_incoming_remote)
            }
            if (check_incoming) check_CRAN_incoming(!check_incoming_remote, pkg_size)

            ## <NOTE>
            ## We want to check for dependencies early, since missing
            ## dependencies may make installation fail, and in any case we
            ## give up if they are missing.  But we don't check them if
            ## we are not going to install and hence not run any code.
            ## </NOTE>
            if (do_install) {
                topfiles0 <- dir(pkgdir)
                check_dependencies()
            } else topfiles0 <- NULL

            check_sources()
            checkingLog(Log, "if there is a namespace")
            ## careful: we need a case-sensitive match; on case-insensitive
            ## file systems file.exists() would match e.g. 'namespace'.
            if ("NAMESPACE" %in% dir(pkgdir))
                resultLog(Log, "OK")
            else  if (file.exists(file.path(pkgdir, "NAMESPACE"))) {
                ## FIX: corrected typo in user-facing message
                ## ("case-insenstiive" -> "case-insensitive").
                errorLog(Log,
                       "File NAMESPACE does not exist but there is a case-insensitive match.")
                summaryLog(Log)
                do_exit(1L)
            } else if (dir.exists(file.path(pkgdir, "R"))) {
                errorLog(Log)
                wrapLog("All packages need a namespace as from R 3.0.0.\n",
                        "R CMD build will produce a suitable starting point,",
                        "but it is better to handcraft a NAMESPACE file.")
                maybe_exit(1L)
            } else {
                noteLog(Log)
                wrapLog("Packages without R code can be installed without",
                        "a NAMESPACE file, but it is cleaner to add",
                        "an empty one.")
            }

            ## we need to do this before installation
            if (R_check_executables) check_executables()
            ## (Alternatively, could use .unpack.time.)

            check_dot_files(check_incoming)

            setwd(pkgdir)
            allfiles <- check_file_names()
            if (R_check_permissions) check_permissions(allfiles)
            if (!is_base_pkg && R_check_serialization) {
                ## We should not not do this if there is a dependence
                ## on R >= 3.5.0, and we have to check that on the sources.
                db <- .read_description("DESCRIPTION")
                Rver <-.split_description(db, verbose = TRUE)$Rdepends2
                if(length(Rver) && Rver[[1L]]$op == ">="
                   && Rver[[1L]]$version >= "3.5.0") {
                       ## skip
                } else check_serialization(allfiles)
            }
            setwd(startdir)

            ## record this before installation.
            ## <NOTE>
            ## Could also teach the code to check 'src/Makevars[.in]'
            ## files to use .unpack.time.
            ## (But we want to know if the sources contain
            ## 'src/Makevars' and INSTALL re-creates this.)
            ## </NOTE>
            makevars <-
                Sys.glob(file.path(pkgdir, "src",
                                   c("Makevars.in", "Makevars")))
            makevars <- basename(makevars)

            if (do_install) {
                check_install()
                if(R_check_pkg_sizes) check_install_sizes()
            }
            if (multiarch) {
                if (force_multiarch) inst_archs <- archs
                else {
                    ## check which architectures this package is installed for
                    if (dir.exists(dd <- file.path(libdir, pkgname, "libs"))) {
                        inst_archs <- dir(dd)
                        ## xlsReadWrite has spurious subdir 'template'
                        inst_archs <- inst_archs[inst_archs %in% archs]
                        if (!identical(inst_archs, archs)) {
                            if (length(inst_archs) > 1)
                                printLog0(Log,
                                          "NB: this package is only installed for sub-architectures ",
                                          paste(sQuote(inst_archs), collapse=", "), "\n")
                            else {
                                printLog0(Log,
                                          "NB: this package is only installed for sub-architecture ",
                                          sQuote(inst_archs), "\n")
                                if(inst_archs == .Platform$r_arch)
                                    this_multiarch <- FALSE
                            }
                        }
                    } else this_multiarch <- FALSE  # no compiled code
                }
                if (this_multiarch && length(R_check_skip_arch))
                    inst_archs <- inst_archs %w/o% R_check_skip_arch
            }

            ## prepare restricted library paths
            if(R_check_suggests_only)
                elibs <- setRlibs(pkgdir = pkgdir, libdir = libdir, suggests = TRUE)
            elibs_tests <- if(R_cdo_tests) {
                setRlibs(pkgdir = pkgdir, libdir = libdir, tests = TRUE)
            } else elibs
            if(R_cdo || R_cdo_examples || R_cdo_vignettes || R_cdo_data)
                elibs_cdo <- setRlibs(pkgdir = pkgdir, libdir = libdir)
        } else check_incoming <- FALSE  ## end of if (!is_base_pkg)

        setwd(startdir)
        check_pkg(pkgdir, pkgname, pkgoutdir, startdir, libdir, desc,
                  is_base_pkg, is_rec_pkg, thispkg_subdirs, extra_arch)
        if (!extra_arch && do_manual) {
            setwd(pkgoutdir)
            instdir <- file.path(libdir, pkgname)
            if (dir.exists(file.path(instdir, "help")))
                check_pkg_manual(instdir, desc["Package"])
            else
                check_pkg_manual(pkgdir, desc["Package"])
        }

        if(!extra_arch && do_manual &&
           (isTRUE(R_check_Rd_validate_Rd2HTML) ||
            isTRUE(R_check_Rd_math_rendering))) {
            if(do_install)
                check_Rd2HTML(file.path(if(is_base_pkg) .Library else libdir,
                                        pkgname),
                              installed = TRUE)
            else
                check_Rd2HTML(pkgdir)
        }

        if (!is_base_pkg && check_incoming && no_examples &&
            dir.exists(file.path(pkgdir, "R"))) {
            tests_dir <- file.path(pkgdir, test_dir)
            if (dir.exists(tests_dir) &&
                length(dir(tests_dir, pattern = "\\.(r|R|Rin)$")))
                no_examples <- FALSE
            vigns <- pkgVignettes(dir = pkgdir)
            if (!is.null(vigns) && length(vigns$docs)) no_examples <- FALSE
            if (no_examples) {
                ## figure out if the R code exercises anything
                ns <- parseNamespaceFile(basename(pkgdir), dirname(pkgdir))
                if(length(ns$exports) || length(ns$exportPatterns) ||
                   length(ns$exportMethods) || length(ns$S3methods)) {
                    checkingLog(Log, "for code which exercises the package")
                    warningLog(Log, "No examples, no tests, no vignettes")
                }
            }
        }

        if(R_check_things_in_check_dir) {
            checkingLog(Log,
                        "for non-standard things in the check directory")
            things <-
                setdiff(list.files(pkgoutdir, all.files = TRUE,
                                   include.dirs = TRUE, no.. = TRUE),
                        c("00check.log",
                          "00install.out",
                          "00package.dcf",
                          "00_pkg_src",
                          pkgname,
                          sprintf("%s-Ex.%s",
                                  pkgname,
                                  c("R", "Rout", "pdf", "timings")),
                          sprintf("%s-manual.%s",
                                  pkgname,
                                  c("log", "pdf")),
                          "Rdlatex.log",
                          "R_check_bin",
                          "build_vignettes.log",
                          "tests", "vign_test",
                          if (do_demo) "demo",
                          if(this_multiarch)
                              c(paste0("examples_", inst_archs),
                                paste0(pkgname, "-Ex_", inst_archs, ".Rout"),
                                if (do_demo) paste0("demo_", inst_archs),
                                paste0("tests_", inst_archs))
                          ))
            ## Examples calling dev.new() give files Rplots*.pdf,
            ## building vignettes give *.log files: be nice ...
            things <- things[!grepl("^Rplots.*[.]pdf$|[.]log$", things)]
            if(length(things)) {
                noteLog(Log)
                msg <- c("Found the following files/directories:",
                         strwrap(paste(sQuote(things), collapse = " "),
                                 indent = 2L, exdent = 2L))
                printLog0(Log, paste(msg, collapse = "\n"), "\n")
            } else
                resultLog(Log, "OK")
        }

        if (R_check_things_in_temp_dir) {
            checkingLog(Log, "for detritus in the temp directory")
            ff <- list.files(sessdir, include.dirs = TRUE)
            ## Exclude session temp dirs from crashed subprocesses
            dir <- file.info(ff)$isdir
            poss <- grepl("^Rtmp[A-Za-z0-9.]{6}$", ff, useBytes = TRUE)
            ff <- ff[!(poss & dir)]
            patt <- Sys.getenv("_R_CHECK_THINGS_IN_TEMP_DIR_EXCLUDE_")
            if (nzchar(patt)) ff <- ff[!grepl(patt, ff, useBytes = TRUE)]
            ff <- ff[!is.na(ff)]
            if (length(ff)) {
                noteLog(Log)
                msg <- c("Found the following files/directories:",
                         strwrap(paste(sQuote(ff), collapse = " "),
                                 indent = 2L, exdent = 2L))
                printLog0(Log, paste(msg, collapse = "\n"), "\n")
            } else
                resultLog(Log, "OK")
            ## clean up of this process would also do this
            unlink(sessdir, recursive = TRUE)
        }

        if (R_check_things_in_others) {
            checkingLog(Log, "for new files in some other directories")
            snap2 <- snapshot()
            ff <- character()
            for(i in seq_along(snap))
                ff <- c(ff, setdiff(snap2[[i]], snap[[i]]))
            if (length(ff)) {
                ## add trailing / to indicate a directory
                isdir <- isTRUE(file.info(ff)$isdir)
                ff[isdir] <- paste0(ff[isdir], "/")
                ff <- sub(paste0("^", normalizePath("~")), "~" , ff)
                patt <- Sys.getenv("_R_CHECK_THINGS_IN_OTHER_DIRS_EXCLUDE_")
                if (nzchar(patt)) {
                    if (startsWith(patt, "@")) {
                        patt <- readLines(substring(patt, 2L))
                        patt <- paste(patt, collapse = "|")
                    }
                    ff <- ff[!grepl(patt, ff, useBytes = TRUE)]
                }
            }
            ## Precautionary clean up
            if (length(ff)) {
                ff <- ff[!is.na(ff)]
                ff <- ff[ff != "NA"]
            }
            if (length(ff)) {
                noteLog(Log)
                msg <- c("Found the following files/directories:",
                         strwrap(paste(sQuote(ff), collapse = " "),
                                 indent = 2L, exdent = 2L))
                printLog0(Log, paste(msg, collapse = "\n"), "\n")
            } else
                 resultLog(Log, "OK")
        }

        summaryLog(Log)

        if(config_val_to_logical(Sys.getenv("_R_CHECK_CRAN_STATUS_SUMMARY_",
                                            "FALSE"))) {
            s <- summarize_CRAN_check_status(pkgname)
            if(nzchar(s)) {
                writeLines(c("", s), Log$con)
            }
        }

        if(Log$errors > 0L)
            do_exit(1L)

        closeLog(Log)

    } ## end for (pkg in pkgs)
}
###--- end{ .check_packages }

.format_lines_with_indent <-
function(x)
{
    ## Prefix each element of 'x' with a fixed two-space indent and
    ## join into one newline-separated string.
    ## Hard-wire indent of 2 for now.
    paste(paste0("  ", x), collapse = "\n")
}

### Local variables:
### mode: R
### page-delimiter: "^###[#-]"
### End:
#  File src/library/tools/R/checktools.R
#  Part of the R package, https://www.R-project.org
#
#  Copyright (C) 2013-2023 The R Core Team
#
#  This program is free software; you can redistribute it and/or modify
#  it under the terms of the GNU General Public License as published by
#  the Free Software Foundation; either version 2 of the License, or
#  (at your option) any later version.
#
#  This program is distributed in the hope that it will be useful,
#  but WITHOUT ANY WARRANTY; without even the implied warranty of
#  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
#  GNU General Public License for more details.
#
#  A copy of the GNU General Public License is available at
#  https://www.R-project.org/Licenses/

### ** check_packages_in_dir

check_packages_in_dir <-
function(dir,
         pfiles = Sys.glob("*.tar.gz"),
         check_args = character(), check_args_db = list(),
         reverse = NULL,
         check_env = character(),
         xvfb = FALSE,
         Ncpus = getOption("Ncpus", 1L),
         clean = TRUE,
         install_args = list(),
         parallel_args = list(),
         ...)
{
    ## Run 'R CMD check' on the source package tarballs 'pfiles' found
    ## in 'dir', optionally (argument 'reverse') also downloading and
    ## checking their reverse dependencies.  Forward dependencies are
    ## installed into dir/Library; install and check outputs are
    ## collected below dir/Outputs and in the per-package *.Rcheck
    ## directories.  Returns, invisibly, an object of class
    ## "check_packages_in_dir" recording the check dir and the names of
    ## the source packages and reverse dependencies checked.
    owd <- getwd()
    dir <- normalizePath(dir)
    setwd(dir)
    on.exit(setwd(owd))

    ## Constructor for the return value: the tarball paths with the
    ## check dir and package/reverse-dependency names as attributes.
    .check_packages_in_dir_retval <-
    function(dir,
             pfiles,
             pnames = character(),
             rnames = character()) {
        structure(pfiles,
                  dir = dir,
                  pnames = pnames,
                  rnames = rnames,
                  class = "check_packages_in_dir")
    }

    if(!length(pfiles)) {
        message("no packages to check")
        return(invisible(.check_packages_in_dir_retval(dir, pfiles)))
    }

    ## Package names from tarball names ('name_version.tar.gz').
    pnames <- sub("_.*", "", pfiles)

    os_type <- .Platform$OS.type

    ## Xvfb usage and options.
    ## We do not use Xvfb on Windows.
    ## Otherwise, if argument 'xvfb' is
    ## * a logical, Xvfb is used only if identical to TRUE;
    ## * something else, then as.character(xvfb) gives the Xvfb options.
    xvfb_options <- "-screen 0 1280x1024x24"
    if(os_type == "windows")
        xvfb <- FALSE
    else if(is.logical(xvfb)) {
        if(!isTRUE(xvfb))
            xvfb <- FALSE
    } else {
        xvfb_options <- as.character(xvfb)
        xvfb <- TRUE
    }

    ## file:// URL of the local package repository rooted at 'dir'.
    curl <- if(os_type == "windows")
        sprintf("file:///%s", dir)
    else
        sprintf("file://%s", dir)

    libdir <- file.path(dir, "Library")
    dir.create(libdir, showWarnings = FALSE)
    outdir <- file.path(dir, "Outputs")
    dir.create(outdir, showWarnings = FALSE)

    ## Determine packages using fake/no install for checking.
    ## Handle these as follows:
    ## * For packages using '--install=no', forward dependencies do not
    ##   need to be installed, and reverse dependencies do not need to
    ##   be checked.
    ## * For packages using '--install=fake', forward dependencies must
    ##   be available for checking, and checking reverse dependencies
    ##   makes sense (e.g., to spot missing Rd xrefs).
    pnames_using_install_no <- character()
    pnames_using_install_fake <- character()
    check_args_db <- as.list(check_args_db)
    if(length(check_args_db) &&
       !is.null(nms <- names(check_args_db))) {
        ## Split each db entry into individual command line arguments.
        args <- lapply(check_args_db,
                       function(e)
                       scan(text = e, what = character(), quiet = TRUE))
        pnames_using_install_no <-
            nms[vapply(args, function(e) any(e == "--install=no"), NA)]
        pnames_using_install_fake <-
            nms[vapply(args, function(e) any(e == "--install=fake"), NA)]
    } else {
        ## If check_args_db has no names it is useless.
        ## Perhaps complain?
        check_args_db <- list()
    }

    ## Build a package db from the source packages in the working
    ## directory.
    write_PACKAGES(dir, type = "source")
    if(dir.exists(depdir <- file.path(dir, "Depends"))) {
        write_PACKAGES(depdir, type = "source")
        curl <- c(curl, paste0(curl, "/Depends"))
    }
    ## Determine packages available locally (for checking) and in the
    ## repositories, and merge the information giving preference to the
    ## former.
    localones <- utils::available.packages(contriburl = curl,
                                           type = "source")
    curls <- utils::contrib.url(getOption("repos"), type = "source")
    available <- utils::available.packages(contriburl = curls,
                                           type = "source")
    available <- rbind(localones, available)
    available <-
        available[!duplicated(available[, "Package"]), , drop = FALSE]
    curls <- c(curl, curls)

    ## As of c52164, packages with OS_type different from the current
    ## one are *always* checked with '--install=no'.
    ## These packages are also filtered out by default (via the OS_type
    ## filter) from the repository package computations.
    ## Hence move packages in the install=fake list not listed by
    ## available.packages() to the install=no list.
    pnames_using_install_no <-
        c(pnames_using_install_no,
          setdiff(pnames_using_install_fake, available[, "Package"]))
    pnames_using_install_fake <-
        intersect(pnames_using_install_fake, available[, "Package"])

    if(!is.null(reverse) && !isFALSE(reverse)) {
        ## Determine and download reverse dependencies to be checked as
        ## well.

        ## 'reverse' may be a character vector of package names, or a
        ## list of options merged into these defaults.
        defaults <- list(which = c("Depends", "Imports", "LinkingTo"),
                         recursive = FALSE,
                         repos = getOption("repos"))
        if(!is.character(reverse)) {
            reverse <- as.list(reverse)
            ## Merge with defaults, using partial name matching.
            pos <- pmatch(names(reverse), names(defaults), nomatch = 0L)
            defaults[pos] <- reverse[pos > 0L]
        }

        subset_reverse_repos <- !identical(defaults$repos, getOption("repos"))
        if(subset_reverse_repos &&
           !all(defaults$repos %in% getOption("repos")))
            stop("'reverse$repos' should be a subset of getOption(\"repos\")")

        rnames <- if(is.character(reverse)) {
            reverse
        } else if(is.list(defaults$which)) {
            ## A list 'which' gives one dependency computation per
            ## element, each with its own 'recursive' setting.
            ## No recycling of repos for now.
            defaults$recursive <- rep_len(as.list(defaults$recursive),
                                          length(defaults$which))
            unlist(Map(function(w, r)
                       package_dependencies(setdiff(pnames,
                                                    pnames_using_install_no),
                                            available,
                                            which = w,
                                            recursive = r,
                                            reverse = TRUE),
                       defaults$which,
                       defaults$recursive),
                   use.names = FALSE)
        } else {
            package_dependencies(setdiff(pnames,
                                         pnames_using_install_no),
                                 available,
                                 which = defaults$which,
                                 recursive = defaults$recursive,
                                 reverse = TRUE)
        }

        add_recommended_maybe <-
            config_val_to_logical(Sys.getenv("_R_TOOLS_C_P_I_D_ADD_RECOMMENDED_MAYBE_",
                                             "FALSE"))
        if(add_recommended_maybe) {
            ## Add all recommended packages with any dependency on the
            ## packages to be checked.
            rnames <-
                c(rnames,
                  names(Filter(length,
                               lapply(package_dependencies(.get_standard_package_names()$recommended,
                                                           available,
                                                           which = "all"),
                                      intersect,
                                      pnames))))
        }

        ## Keep only available reverse dependencies not already among
        ## the packages to be checked.
        rnames <- intersect(unlist(rnames, use.names = FALSE),
                            available[, "Package"])
        rnames <- setdiff(rnames, pnames)

        pos <- match(rnames, available[, "Package"], nomatch = 0L)
        if(subset_reverse_repos) {
            pos <- split(pos[pos > 0L], available[pos, "Repository"])
            ## Only want the reverse dependencies for which Repository
            ## starts with an entry in defaults$repos.
            nms <- names(pos)
            ind <- (rowSums(outer(nms, defaults$repos, startsWith)) > 0)
            pos <- unlist(pos[ind], use.names = FALSE)
        }
        rnames <- available[pos, "Package"]
        rfiles <- sprintf("%s_%s.tar.gz",
                          rnames,
                          available[pos, "Version"])

        if(length(rfiles)) {
            message("downloading reverse dependencies ...")
            rfurls <- sprintf("%s/%s",
                              available[pos, "Repository"],
                              rfiles)
            for(i in seq_along(rfiles)) {
                message(sprintf("downloading %s ... ", rfiles[i]),
                        appendLF = FALSE)
                ## download.file() returns 0 on success.
                status <- if(!utils::download.file(rfurls[i], rfiles[i],
                                                   quiet = TRUE))
                    "ok" else "failed"
                message(status)
            }
            message("")
        }

    } else {
        rfiles <- rnames <- character()
    }

    pnames <- c(pnames, rnames)

    ## Install what is needed.

    if(xvfb) {
        pid <- start_virtual_X11_fb(xvfb_options)
        on.exit(close_virtual_X11_db(pid), add = TRUE)
    }

    ## All non-base packages needed to check pnames.
    depends <-
        package_dependencies(pnames, available, which = "most")
    depends <- setdiff(unique(unlist(depends, use.names = FALSE)),
                       .get_standard_package_names()$base)

    ## Need to install depends which are not installed or installed but
    ## old.
    libs <- c(libdir, .libPaths())
    installed <- utils::installed.packages(libs)
    installed <- installed[!duplicated(installed[, "Package"]), ,
                           drop = FALSE]
    outofdate <- utils::old.packages(instPkgs = installed,
                                     available = available)[, "Package"]
    installed <- installed[, "Package"]
    depends <- c(setdiff(depends, installed),
                 intersect(intersect(depends, installed), outofdate))
    if(length(depends)) {
        message(paste(strwrap(sprintf("installing dependencies %s",
                                      paste(sQuote(sort(depends)),
                                            collapse = ", ")),
                              exdent = 2L),
                      collapse = "\n"), domain = NA)
        ## <NOTE>
        ## Ideally we would capture stdout and stderr in e.g.
        ##   outdir/install_stdout.txt
        ##   outdir/install_stderr.txt
        ## But using several CPUs uses Make to install, which seems to
        ## write to stdout/stderr "directly" ... so using sink() will
        ## not work.  Hence, use 'keep_outputs' to capture "outputs"
        ## (combining install stdout and stderr into one file).
        message("")
        ## Install packages checked with '--install=fake' with
        ## '--fake' as well.
        iflags <- as.list(rep.int("--fake",
                                  length(pnames_using_install_fake)))
        names(iflags) <- pnames_using_install_fake
        tmpdir <- tempfile(tmpdir = outdir)
        dir.create(tmpdir)
        do.call(utils::install.packages, c(
                            list(pkgs = depends, lib = libdir,
                                contriburl = curls,
                                available = available,
                                dependencies = NA,
                                INSTALL_opts = iflags,
                                keep_outputs = tmpdir,
                                Ncpus = Ncpus,
                                type = "source"),
                            install_args))

        ## Move the per-package install outputs into outdir under
        ## install_<pkg>.out names, then drop the temporary dir.
        outfiles <- Sys.glob(file.path(tmpdir, "*.out"))
        file.rename(outfiles,
                    file.path(outdir,
                              sprintf("install_%s",
                                      basename(outfiles))))
        unlink(tmpdir, recursive = TRUE)
        message("")
        ## </NOTE>
    }

    ## Merge check_args and check_args_db into check_args_db used for
    ## checking.
    ## A list check_args gives separate args for source packages
    ## (first element) and reverse dependencies (second element).
    check_args <- if(is.list(check_args)) {
        c(rep.int(list(check_args[[1L]]), length(pfiles)),
          rep.int(list(check_args[[2L]]), length(rfiles)))
    } else {
        rep.int(list(check_args), length(pnames))
    }
    check_args_db <- check_args_db[pnames]
    check_args_db <- Map(c, check_args, check_args_db)
    names(check_args_db) <- pnames

    ## Same convention for check_env: a list gives separate settings
    ## for source packages and reverse dependencies.
    check_env <- if(is.list(check_env)) {
        c(rep.int(list(check_env[[1L]]), length(pfiles)),
          rep.int(list(check_env[[2L]]), length(rfiles)))
    } else {
        rep.int(list(check_env), length(pnames))
    }
    ## No user level check_env_db for now.
    check_env_db <- as.list(check_env)
    names(check_env_db) <- pnames

    pfiles <- c(pfiles, rfiles)

    ## sub-R processes need to use libdir + current library trees
    oldrlibs <- Sys.getenv("R_LIBS")
    Sys.setenv(R_LIBS = paste(libs, collapse = .Platform$path.sep))
    on.exit(Sys.setenv(R_LIBS = oldrlibs), add = TRUE)

    ## Run 'R CMD check --timings' on one tarball, with per-package
    ## args/env from the dbs; stdout/stderr go to files in outdir.
    ## Returns the proc_time timing of the check run.
    check_package <- function(pfile, args_db = NULL, env_db = NULL) {
        message(sprintf("checking %s ...", pfile))
        pname <- sub("_.*", "", basename(pfile))
        out <- file.path(outdir,
                         sprintf("check_%s_stdout.txt", pname))
        err <- file.path(outdir,
                         sprintf("check_%s_stderr.txt", pname))
        lim <- get_timeout(Sys.getenv("_R_CHECK_ELAPSED_TIMEOUT_"))
        system.time(system2(file.path(R.home("bin"), "R"),
                            c("CMD",
                              "check",
                              "--timings",
                              args_db[[pname]],
                              pfile),
                            stdout = out,
                            stderr = err,
                            env = env_db[[pname]],
                            timeout = lim))
    }

    ## Check in parallel where possible: fork-based on Unix,
    ## cluster-based on Windows.
    if(Ncpus > 1L) {
        if(os_type != "windows") {
            timings <- do.call(parallel::mclapply, c(
                                        list(X = pfiles,
                                          FUN = check_package,
                                          args_db = check_args_db,
                                          env_db = check_env_db,
                                          mc.cores = Ncpus),
                                        parallel_args))
        } else {
            cl <- parallel::makeCluster(Ncpus)
            on.exit(parallel::stopCluster(cl), add = TRUE)
            timings <- do.call(parallel::parLapply, c(
                                         list(cl = cl,
                                           X = pfiles,
                                           fun = check_package,
                                           args_db = check_args_db,
                                           env_db = check_env_db),
                                         parallel_args))

        }
    } else {
        timings <- lapply(pfiles,
                          check_package,
                          check_args_db,
                          check_env_db)
    }

    ## Record per-package check timings in dir/timings.tab.
    timings <- do.call(rbind, lapply(timings, summary))
    rownames(timings) <- pnames
    utils::write.table(timings, "timings.tab")

    ## Mark reverse-dependency results via the 'rdepends_' prefix.
    file.rename(sprintf("%s.Rcheck", rnames),
                sprintf("rdepends_%s.Rcheck", rnames))

    if(clean) {
        file.remove(rfiles)
    } else {
        file.rename(rfiles, sprintf("rdepends_%s", rfiles))
    }

    invisible(.check_packages_in_dir_retval(dir,
                                            pfiles,
                                            setdiff(pnames, rnames),
                                            rnames))
}

### ** print.check_packages_in_dir

print.check_packages_in_dir <-
function(x, ...)
{
    ## One-line overview of a check_packages_in_dir() result: the check
    ## dir and the counts of source packages and reverse dependencies.
    if(length(x) == 0L) {
        writeLines("No packages checked.")
        return(invisible(x))
    }

    dir <- attr(x, "dir")
    header <- strwrap(sprintf("Check results for packages in dir '%s':",
                              dir))
    counts <- sprintf("Package sources: %d, Reverse depends: %d",
                      length(attr(x, "pnames")),
                      length(attr(x, "rnames")))
    writeLines(c(header,
                 counts,
                 "Use summary() for more information."))
    invisible(x)
}

### ** summary.check_packages_in_dir

summary.check_packages_in_dir <-
function(object, all = TRUE, full = FALSE, ...)
{
    ## Print the check result summaries recorded below the check dir.
    ## With full = TRUE, per-check details for non-OK results are shown
    ## as well (see summarize_check_packages_in_dir_results()).
    if(length(object) == 0L) {
        writeLines("No packages checked.")
        return(invisible(object))
    }

    dir <- attr(object, "dir")
    header <- strwrap(sprintf("Check results for packages in dir '%s':",
                              dir))
    writeLines(c(header, ""))
    ## The summarizer returns TRUE when details exist but were not
    ## printed.
    details <- summarize_check_packages_in_dir_results(dir)
    if(details && !full)
        writeLines("\nUse summary(full = TRUE) for details.")
    invisible(object)
}

### ** start_virtual_X11_fb

start_virtual_X11_fb <-
function(options)
{
    ## Start an Xvfb virtual frame buffer with the given options and
    ## point DISPLAY at it.  Returns the Xvfb pid (as obtained from the
    ## launching shell), with the previous DISPLAY setting attached as
    ## attribute "display" so close_virtual_X11_db() can restore it.

    ## Determine the display number from the options, or the PID of the
    ## current R process (alternatively, could mimic xvfb-run).
    args <- scan(text = options, what = character(), quiet = TRUE)
    has_num <- grepl("^:[[:digit:]]+$", args)
    if(any(has_num)) {
        num <- args[has_num][1L]
    } else {
        num <- paste0(":", Sys.getpid())
        options <- c(num, options)
    }

    dis <- Sys.getenv("DISPLAY", unset = NA_character_)

    ## We need to start Xvfb with the given options and obtain its pid
    ## so that we can terminate it when done checking.
    ## This could be done via
    ##   system2("Xvfb", options, stdout = FALSE, stderr = FALSE,
    ##           wait = FALSE)
    ## and then determine the pid by grepping 'ps auxw' output.
    ## A better approach (suggested by BDR) is to create a shell script
    ## containing the call to start Xvfb in the background and display
    ## the pid of this as available in the shell's $! parameter.
    tf <- tempfile()
    on.exit(unlink(tf))
    script <- c(paste(c(shQuote("Xvfb"), options, ">/dev/null 2>&1 &"),
                      collapse = " "),
                "echo ${!}")
    writeLines(script, tf)
    pid <- system2("sh", tf, stdout = TRUE)
    Sys.setenv("DISPLAY" = num)

    ## Propagate both pid and original setting of DISPLAY so that the
    ## latter can be restored when Xvfb is closed.
    attr(pid, "display") <- dis
    pid
}

### ** close_virtual_X11_db

close_virtual_X11_db <-
function(pid)
{
    ## Terminate the Xvfb process started by start_virtual_X11_fb()
    ## and restore the DISPLAY setting recorded in attribute "display"
    ## (unset DISPLAY when there was none originally).
    pskill(pid)
    dis <- attr(pid, "display")
    if(is.na(dis)) {
        Sys.unsetenv("DISPLAY")
    } else {
        Sys.setenv("DISPLAY" = dis)
    }
}

### ** R_check_outdirs

R_check_outdirs <-
function(dir, all = FALSE, invert = FALSE)
{
    ## Full paths of the *.Rcheck output directories below 'dir'.
    ## Reverse-dependency results carry an 'rdepends_' prefix; by
    ## default only the source package results are returned, with
    ## all = TRUE adding the reverse dependencies.  invert = TRUE
    ## swaps the roles (reverse dependencies selected, source package
    ## results added only with all = TRUE), keeping reverse
    ## dependencies last in either case.
    dir <- normalizePath(dir)
    outdirs <- dir(dir, pattern = "\\.Rcheck")
    is_rdep <- startsWith(basename(outdirs), "rdepends_")
    outdirs <- if(invert) {
        c(if(all) outdirs[!is_rdep], outdirs[is_rdep])
    } else {
        c(outdirs[!is_rdep], if(all) outdirs[is_rdep])
    }
    file.path(dir, outdirs)
}

### ** summarize_check_packages_in_dir_depends

summarize_check_packages_in_dir_depends <-
function(dir, all = FALSE, which = c("Depends", "Imports", "LinkingTo"))
{
    ## List the dependency fields given by 'which' for each package
    ## checked in 'dir', read from the DESCRIPTION files preserved in
    ## the 00_pkg_src copies of the package sources.

    ## Expand the "all"/"most" shorthands as package_dependencies()
    ## does: should perhaps separate this out.
    if(identical(which, "all")) {
        which <- c("Depends", "Imports", "LinkingTo", "Suggests",
                   "Enhances")
    } else if(identical(which, "most")) {
        which <- c("Depends", "Imports", "LinkingTo", "Suggests")
    }

    for(d in R_check_outdirs(dir, all = all)) {
        dfile <- Sys.glob(file.path(d, "00_pkg_src", "*",
                                    "DESCRIPTION"))[1L]
        if(file_test("-f", dfile)) {
            meta <- .read_description(dfile)
            package <- meta["Package"]
            ## Keep only the requested fields, in 'which' order.
            meta <- meta[match(which, names(meta), nomatch = 0L)]
            if(length(meta)) {
                fields <- unlist(Map(function(tag, val) {
                    strwrap(sprintf("%s: %s", tag, val),
                            indent = 2L, exdent = 4L)
                },
                                     names(meta),
                                     meta))
                writeLines(c(sprintf("Package: %s", package), fields))
            }
        }
    }

    invisible()
}

### ** summarize_check_packages_in_dir_results

summarize_check_packages_in_dir_results <-
function(dir, all = TRUE, full = FALSE, ...)
{
    ## Print a status cross-tabulation and a per-package results
    ## summary for the 00check.log files found in the *.Rcheck
    ## directories below 'dir'.  With full = TRUE and at least one
    ## non-OK result, the per-check details are printed too.
    ## Returns TRUE (invisibly) iff details were printed.
    dir <- normalizePath(dir)
    logs <- file.path(R_check_outdirs(dir, all = all), "00check.log")
    logs <- logs[file_test("-f", logs)]

    results <- check_packages_in_dir_results(logs = logs, ...)

    writeLines("Check status summary:")
    tab <- check_packages_in_dir_results_summary(results)
    rownames(tab) <- paste0("  ", rownames(tab))
    print(tab)
    writeLines("")

    writeLines("Check results summary:")
    Map(function(p, r) {
        writeLines(c(sprintf("%s ... %s", p, r$status), r$lines))
    },
        names(results),
        results)

    ## Details are only worth showing when something was not OK.
    statuses <- as.character(unlist(lapply(results, `[[`, "status")))
    if(full && !all(statuses == "OK")) {
        writeLines(c("", "Check results details:"))
        details <- check_packages_in_dir_details(logs = logs, ...)
        writeLines(paste(format(details), collapse = "\n\n"))
        invisible(TRUE)
    } else {
        invisible(FALSE)
    }
}

### ** summarize_check_packages_in_dir_timings

summarize_check_packages_in_dir_timings <-
function(dir, all = FALSE, full = FALSE)
{
    ## Print the per-package check timings recorded in dir/timings.tab
    ## by check_packages_in_dir(); with full = TRUE, also print the
    ## per-example timings from the *-Ex.timings files in the check
    ## output directories.  'all' includes the reverse dependencies.
    dir <- normalizePath(dir)
    tfile <- file.path(dir, "timings.tab")
    if(file_test("-f", tfile)) {
        timings <- utils::read.table(tfile)
        ## Should we store the information about reverse dependencies in
        ## some place (rather than rely on the naming convention)?
        if(!all) {
            ## Drop rows whose package name matches an
            ## rdepends_<name>.Rcheck output directory.
            rdepends <- Sys.glob(file.path(dir, "rdepends_*.Rcheck"))
            timings <- timings[is.na(match(rownames(timings),
                                           sub("rdepends_(.*).Rcheck",
                                               "\\1",
                                               basename(rdepends)))),
                               ]
        }
        print(timings)
    }
    if(full) {
        tfiles <- Sys.glob(file.path(R_check_outdirs(dir, all = all),
                                     "*-Ex.timings"))
        if(length(tfiles)) message("")
        timings <- lapply(tfiles, utils::read.table, header = TRUE)
        ## Order by CPU time.
        timings <- lapply(timings,
                          function(x)
                          x[order(x$user, decreasing = TRUE), ])
        ## This looks silly, but we want a common alignment.
        ## rbind all tables, format() the combined columns, then split
        ## back into per-package chunks named after the .Rcheck dirs.
        timings <- split(as.data.frame(lapply(do.call(rbind, timings),
                                              format)),
                         rep.int(sub("\\.Rcheck$", "",
                                     basename(dirname(tfiles))),
                                 vapply(timings, nrow, 0L)))
        invisible(Map(function(x, y) {
            writeLines(sprintf("Example timings for package '%s':", x))
            ## Interleave a leading space with each row's fields.
            cat(rbind(" ", t(as.matrix(y))),
                sep = c(" ", " ", " ", " ", "\n"))
        },
                      names(timings), timings))
    }

    invisible()
}

### ** check_packages_in_dir_results

## <FIXME>
## For new-style logs from successful check runs (a '* DONE' line
## followed by a 'Status: ' line), we could simply get the status from
## the 'Status: ' line.
## Change to preferably rely on the new format eventually.
## Note that check logs can end up incomplete in which case there is no
## final status line ...
## </FIXME>

check_packages_in_dir_results <-
function(dir, logs = NULL, ...)
{
    ## Parse 00check.log files (those given in 'logs', or all found in
    ## the *.Rcheck directories below 'dir') into a named list with one
    ## element per package, each a list with the overall check 'status'
    ## (FAILURE/ERROR/WARNING/NOTE/OK) and the non-OK result 'lines'.
    if(is.null(logs))
        logs <- Sys.glob(file.path(dir, "*.Rcheck", "00check.log"))

    ## <NOTE>
    ## Perhaps make the individual non-OK check values more readily
    ## available?
    ## </NOTE>

    results <- lapply(logs, function(log, ...) {
        lines <- read_check_log(log, ...)

        ## Re-encode to UTF-8 using the session charset info.
        re <- "^\\* using session charset: "
        pos <- grep(re, lines, perl = TRUE, useBytes = TRUE)
        enc <- if(length(pos))
                   sub(re, "", lines[pos[1L]], useBytes = TRUE)
               else ""
        lines <- iconv(lines, enc, "UTF-8", sub = "byte")
        ## Fall back to ASCII for anything iconv left invalid.
        if(any(bad <- !validUTF8(lines)))
            lines[bad] <- iconv(lines[bad], to = "ASCII", sub = "byte")

        ## See analyze_lines() inside analyze_check_log():
        ## drop single-arch '* ...' headers which are immediately
        ## followed by the corresponding per-arch '** ...' lines.
        pos <- which(startsWith(lines, "* loading checks for arch"))
        pos <- pos[pos < length(lines)]
        pos <- pos[startsWith(lines[pos + 1L], "** checking")]
        if(length(pos))
            lines <- lines[-pos]
        pos <- which(startsWith(lines, "* checking examples"))
        pos <- pos[pos < length(lines)]
        pos <- pos[startsWith(lines[pos + 1L],
                              "** running examples for arch")]
        if(length(pos))
            lines <- lines[-pos]
        pos <- which(startsWith(lines, "* checking tests"))
        pos <- pos[pos < length(lines)]
        pos <- pos[startsWith(lines[pos + 1L],
                              "** running tests for arch")]
        if(length(pos))
            lines <- lines[-pos]
        ## Match check result lines; capture group 6 holds the
        ## severity tag (empty for an incomplete/failed check line).
        re <- "^\\*\\*? ((checking|creating|running examples for arch|running tests for arch) .*) \\.\\.\\.( (\\[[^ ]*\\]))?( (NOTE|WARNING|ERROR)|)$"
        m <- regexpr(re, lines, perl = TRUE)
        ind <- (m > 0L)
        ## Note that we use WARN instead of WARNING for the summary.
        ## Overall status is the worst severity seen:
        ## FAILURE > ERROR > WARNING > NOTE > OK.
        status <-
            if(any(ind)) {
                status <- sub(re, "\\6", lines[ind], perl = TRUE)
                if(any(status == "")) "FAILURE"
                else if(any(status == "ERROR")) "ERROR"
                else if(any(status == "WARNING")) "WARNING"
                else "NOTE"
            } else {
                "OK"
            }
        list(status = status, lines = lines[ind])
    }, ...)
    ## Name results by package (the <pkg>.Rcheck directory name).
    names(results) <- sub("\\.Rcheck$", "", basename(dirname(logs)))

    results
}

### ** check_packages_in_dir_results_summary

check_packages_in_dir_results_summary <-
function(results)
{
    ## Tabulate the summary statuses of check results, split into the
    ## source packages and their reverse dependencies (identified by an
    ## "rdepends_" prefix on the result names).  Returns NULL for empty
    ## input, and otherwise a table with rows in the order
    ## source/reverse-depends and columns ordered by severity.
    if(!length(results)) return()
    status <- vapply(results, `[[`, "", "status")
    group <- ifelse(startsWith(names(results), "rdepends_"),
                    "Reverse depends", "Source packages")
    tab <- table(group, status, deparse.level = 0L)
    rows <- match(c("Source packages", "Reverse depends"),
                  rownames(tab), nomatch = 0L)
    cols <- match(c("FAILURE", "ERROR", "WARNING", "NOTE", "OK"),
                  colnames(tab), nomatch = 0L)
    tab <- tab[rows, cols, drop = FALSE]
    names(dimnames(tab)) <- NULL
    tab
}

### ** read_check_log

read_check_log <-
function(log, drop = TRUE, ...)
{
    ## Read the lines of an 'R CMD check' log file.
    ##
    ## If 'drop' is true, drop a trailing CRAN check status footer of
    ## the form
    ##   <empty line>
    ##   Current CRAN status: ...
    ##   ...
    ## Further arguments '...' are passed to readLines().
    lines <- readLines(log, warn = FALSE, ...)

    if(drop) {
        ## Drop CRAN check status footer.
        ## Ideally, we would have a more general mechanism to detect
        ## footer information to be skipped (e.g., a line consisting of
        ## a single non-printing control character?)
        pos <- grep("^Current CRAN status:", lines,
                    perl = TRUE, useBytes = TRUE)
        ## Only drop when there actually is a preceding (empty)
        ## separator line: a match on the very first line used to make
        ## the zero-length subscript 'lines[0] == ""' blow up in '&&'.
        if(length(pos) && (pos <- pos[1L]) > 1L && lines[pos - 1L] == "") {
            lines <- lines[seq_len(pos - 2L)]
        }
    }

    lines
}

### ** analyze_check_log

## <FIXME>
## New-style check logs should have a '* DONE' line followed by a
## 'Status:' line.  If not, a check failure occurred.
## Change to fully rely on the new format eventually.
## </FIXME>

analyze_check_log <-
function(log, drop_ok = TRUE, ...)
{
    ## Analyze a single 'R CMD check' log file, splitting it into
    ## per-check chunks with an associated status tag.  Returns NULL
    ## when the log is too malformed to analyze (no session charset
    ## line, or neither a package header nor any '* checking' line),
    ## and otherwise a list with elements Package, Version, Flags and
    ## Chunks, where each chunk is a list with elements check, status
    ## and output.
    ##
    ## 'drop_ok' controls which chunks are reported:
    ##   TRUE      - drop chunks with an "all ok" status tag
    ##               (OK/NONE/SKIPPED/INFO)
    ##   FALSE     - keep all chunks
    ##   NA        - drop the ok chunks only when there is no
    ##               ERROR/FAILURE chunk
    ##   character - the tags to consider ok (and proceed as for TRUE)
    ## Further arguments '...' are passed to read_check_log().
    make_results <- function(package, version, flags, chunks)
        list(Package = package, Version = version,
             Flags = flags, Chunks = chunks)

    ## Alternatives for left and right quotes.
    lqa <- "'|\u2018"
    rqa <- "'|\u2019"
    ## Group when used ...

    if(is.character(drop_ok)) {
        drop_ok_status_tags <- drop_ok
        drop_ok <- TRUE
    } else {
        drop_ok_status_tags <- c("OK", "NONE", "SKIPPED", "INFO")
    }

    ## Start by reading in.
    lines <- read_check_log(log, ...)

    ## Re-encode to UTF-8 using the session charset info.
    re <- "^\\* using session charset: "
    pos <- grep(re, lines, perl = TRUE, useBytes = TRUE)
    if(length(pos)) {
        enc <- sub(re, "", lines[pos[1L]], useBytes = TRUE)
        lines <- iconv(lines, enc, "UTF-8", sub = "byte")
        ## If the check log uses ASCII, there should be no non-ASCII
        ## characters in the message lines: could check for this.
        if(any(bad <- !validUTF8(lines)))
            lines[bad] <- iconv(lines[bad], to = "ASCII", sub = "byte")
    } else return()

    package <- "???"
    version <- ""

    ## Get header.
    header <- lines
    re <- sprintf("^\\* this is package (%s)(.*)(%s) version (%s)(.*)(%s)$",
                  lqa, rqa, lqa, rqa)
    pos <- grep(re, lines, perl = TRUE)
    if(length(pos)) {
        pos <- pos[1L]
        txt <- lines[pos]
        package <- sub(re, "\\2", txt, perl = TRUE)
        version <- sub(re, "\\5", txt, perl = TRUE)
        header <- lines[seq_len(pos - 1L)]
        lines <- lines[-seq_len(pos)]
    } else {
        ## If there was no 'this is package %s version %s' line, then
        ## either there was a fundamental immediate problem, or an error
        ## in check_description().  In the latter case there should be a
        ## line like
        ##   * checking for file '%s/DESCRIPTION'
        ## with %s the package name implied by the invocation, but not
        ## necessarily the one recorded in DESCRIPTION: let's use that
        ## package name nevertheless, as it is better than nothing.
        re <- sprintf("^\\* checking for file (%s)(.*)/DESCRIPTION(%s).*$",
                      lqa, rqa)
        pos <- grep(re, lines, perl = TRUE)
        if(length(pos)) {
            pos <- pos[1L]
            txt <- lines[pos]
            package <- sub(re, "\\2", txt, perl = TRUE)
            header <- lines[seq_len(pos - 1L)]
        } else if(!any(startsWith(lines, "* checking ")))
            return()
    }
    ## Get check options from header.
    re <- sprintf("^\\* using options? (%s)(.*)(%s)$", lqa, rqa)
    flags <- if(length(pos <- grep(re, header, perl = TRUE))) {
                 sub(re, "\\2", header[pos[1L]], perl = TRUE)
             } else ""

    ## Get footer.  New-style logs end with '* DONE' followed by a
    ## 'Status:' summary line; otherwise try the older footer variants
    ## below.
    len <- length(lines)
    pos <- which(lines == "* DONE")
    if(length(pos) &&
       ((pos <- pos[length(pos)]) < len) &&
       startsWith(lines[pos + 1L], "Status: "))
        lines <- lines[seq_len(pos - 1L)]
    else {
        ## Not really new style, or failure ... argh.
        ## Some check systems explicitly record the elapsed time in the
        ## last line:
        if(startsWith(lines[len], "* elapsed time ")) {
            lines <- lines[-len]
            len <- len - 1L
            ## ... possibly preceded by blank padding lines.
            while(grepl("^[[:space:]]*$", lines[len])) {
                lines <- lines[-len]
                len <- len - 1L
            }
        }
        ## Summary footers.
        if(startsWith(lines[len], "Status: ")) {
            ## New-style status summary.
            lines <- lines[-len]
            len <- len - 1L
        } else {
            ## Old-style status summary: up to two trailing
            ## '(NOTE|WARNING): There ...' lines.
            num <- length(grep("^(NOTE|WARNING): There",
                               lines[c(len - 1L, len)]))
            if(num > 0L) {
                pos <- seq.int(len - num + 1L, len)
                lines <- lines[-pos]
                len <- len - num
            }
        }
        if(lines[len] == "* DONE")
            lines <- lines[-len]
    }

    analyze_lines <- function(lines) {
        ## Windows has
        ##   * loading checks for arch
        ##   * checking examples ...
        ##   * checking tests ...
        ## headers: drop these unless not followed by the appropriate
        ## 'subsection', which indicates failure.
        pos <- which(startsWith(lines, "* loading checks for arch"))
        pos <- pos[pos < length(lines)]
        pos <- pos[startsWith(lines[pos + 1L], "** checking")]
        if(length(pos))
            lines <- lines[-pos]
        pos <- which(startsWith(lines, "* checking examples"))
        pos <- pos[pos < length(lines)]
        pos <- pos[startsWith(lines[pos + 1L],
                              "** running examples for arch")]
        if(length(pos))
            lines <- lines[-pos]
        pos <- which(startsWith(lines, "* checking tests"))
        pos <- pos[pos < length(lines)]
        pos <- pos[startsWith(lines[pos + 1L],
                              "** running tests for arch")]
        if(length(pos))
            lines <- lines[-pos]
        ## Get info about compilers used into the "whether package can
        ## be installed" output without leading star.
        pos <- which(startsWith(lines, "* used"))
        if(length(pos))
            lines[pos] <- paste("Used", substring(lines[pos], 8L))
        ## We might still have
        ##   * package encoding:
        ## entries for packages declaring a package encoding.
        ## Hopefully all other log entries we still have are
        ##   * checking
        ##   * creating
        ## ones ... apparently, with the exception of
        ##   ** running examples for arch
        ##   ** running tests for arch
        ## So let's drop everything up to the first such entry.
        re <- "^\\*\\*? ((checking|creating|running examples for arch|running tests for arch) .*) \\.\\.\\.( (\\[[^ ]*\\]))?( (.*)|)$"
        ind <- grepl(re, lines, perl = TRUE)
        csi <- cumsum(ind)
        ind <- (csi > 0)
        ## Split into chunks, one per check entry: the entry line gives
        ## the check name and status tag, the remaining lines of the
        ## chunk give the check output.
        chunks <-
            lapply(split(lines[ind], csi[ind]),
                   function(s) {
                       ## Note that setting
                       ##   _R_CHECK_TEST_TIMING_=yes
                       ##   _R_CHECK_VIGNETTE_TIMING_=yes
                       ## will result in a different chunk format ...
                       line <- s[1L]
                       check <- sub(re, "\\1", line, perl = TRUE)
                       status <- sub(re, "\\6", line, perl = TRUE)
                       ## An entry line without a status tag means the
                       ## check did not complete.
                       if(status == "") status <- "FAILURE"
                       list(check = check,
                            status = status,
                            output = paste(s[-1L], collapse = "\n"))
                   })

        status <- vapply(chunks, `[[`, "", "status")
        if(isTRUE(drop_ok) ||
           (is.na(drop_ok)
               && all(is.na(match(c("ERROR", "FAILURE"), status)))))
            chunks <- chunks[is.na(match(status, drop_ok_status_tags))]

        chunks
    }

    chunks <- analyze_lines(lines)
    ## When everything was dropped as ok, keep a single all-ok stub so
    ## that the log still shows up in the details.
    if(!length(chunks) && !isFALSE(drop_ok)) {
        chunks <- list(list(check = "*", status = "OK", output = ""))
    }

    make_results(package, version, flags, chunks)
}

### ** check_packages_in_dir_details

check_packages_in_dir_details <-
function(dir, logs = NULL, drop_ok = TRUE, ...)
{
    ## Analyze the 00check.log files in the *.Rcheck directories below
    ## 'dir' (or the log files given directly via 'logs') using
    ## analyze_check_log(), and build a data frame with columns
    ##   Package Version Check Status Output Flags
    ## and some optimizations.
    ## 'drop_ok' and '...' are passed on to analyze_check_log().

    db_from_logs <- function(logs, drop_ok, ...) {
        out <- lapply(logs, analyze_check_log, drop_ok, ...)
        ## Drop logs which could not be analyzed (NULL results).
        out <- out[lengths(out) > 0L]
        if(!length(out))
            return(matrix(character(), ncol = 6L))
        chunks <- lapply(out, `[[`, "Chunks")
        package <- vapply(out, `[[`, "", "Package")
        lens <- lengths(chunks)
        ## Each chunk is a list(check, status, output) of length 3, so
        ## unlisting all chunks fills a 3-column matrix row by row;
        ## the per-log Package/Version/Flags get replicated across the
        ## corresponding chunk rows.
        cbind(rep.int(package, lens),
              rep.int(vapply(out, `[[`, "", "Version"), lens),
              matrix(as.character(unlist(chunks)), ncol = 3L,
                     byrow = TRUE),
              rep.int(vapply(out, `[[`, "", "Flags"),
                      lens))
    }

    if(is.null(logs)) {
        ## Allow passing a check_packages_in_dir() result instead of
        ## the directory it used.
        if(inherits(dir, "check_packages_in_dir"))
            dir <- attr(dir, "dir")
        logs <- Sys.glob(file.path(dir, "*.Rcheck", "00check.log"))
    }

    db <- db_from_logs(logs, drop_ok, ...)
    colnames(db) <- c("Package", "Version", "Check", "Status",
                      "Output", "Flags")

    ## Now some cleanups.

    ## Alternatives for left and right quotes.
    lqa <- "'|\u2018"
    rqa <- "'|\u2019"
    ## Group when used ...

    ## Canonicalize check names which embed package-specific names or
    ## paths, so that details can be compared across packages and
    ## check runs.
    checks <- db[, "Check"]
    checks <- sub(sprintf("checking whether package (%s).*(%s) can be installed",
                          lqa, rqa),
                  "checking whether package can be installed",
                  checks, perl = TRUE)
    checks <- sub("creating .*-Ex.R", "checking examples creation",
                  checks, perl = TRUE)
    checks <- sub("creating .*-manual\\.tex", "checking manual creation",
                  checks, perl = TRUE)
    checks <- sub("checking .*-manual\\.tex", "checking manual",
                  checks, perl = TRUE)
    checks <- sub(sprintf("checking package vignettes in (%s)inst/doc(%s)",
                          lqa, rqa),
                  "checking package vignettes",
                  checks, perl = TRUE)
    checks <- sub("^checking *", "",
                  checks, perl = TRUE)
    db[, "Check"] <- checks
    ## In fact, for tabulation purposes it would even be more convenient
    ## to shorten the check names ...

    db[, "Output"] <-
        sub("[[:space:]]+$", "", db[, "Output"], perl = TRUE)

    db <- as.data.frame(db, stringsAsFactors = FALSE)
    class(db) <- c("check_details", "data.frame")

    db
}

format.check_details <-
function(x, ...)
{
    ## Format each row of a "check_details" db as a text chunk
    ##   Package: <package> <version>
    ##   Flavor: <flavor>
    ##   Flags: <flags>
    ##   Check: <check>, Result: <status>
    ##     <output, indented by two>
    ## where the Flavor and Flags lines are omitted when empty.
    ## Details from check_packages_in_dir_details() have no Flavor
    ## column, so x$Flavor is NULL there: nzchar(NULL), and hence the
    ## ifelse(), give zero-length results, which paste0() omits
    ## (zero-length arguments are treated as "" with the default
    ## recycle0 = FALSE, see ?paste) -- so no Flavor line is produced
    ## for any row in that case.
    flags <- x$Flags
    flavor <- x$Flavor
    paste0(sprintf("Package: %s %s\n",
                   x$Package, x$Version),
           ifelse(nzchar(flavor),
                  sprintf("Flavor: %s\n", flavor),
                  ""),
           ifelse(nzchar(flags),
                  sprintf("Flags: %s\n", flags),
                  ""),
           sprintf("Check: %s, Result: %s\n",
                   x$Check, x$Status),
           sprintf("  %s",
                   gsub("\n", "\n  ", x$Output, perl = TRUE))
           )
}

print.check_details <-
function(x, ...)
{
    ## Show the formatted entries, separated by blank lines;
    ## '...' is passed on to format().
    out <- format(x, ...)
    writeLines(paste(out, collapse = "\n\n"))
    invisible(x)
}

### ** check_packages_in_dir_changes

check_packages_in_dir_changes <-
function(dir, old, outputs = FALSE, sources = FALSE, ...)
{
    ## Compare the current check results in 'dir' (a directory used
    ## for check_packages_in_dir(), or the object this returned)
    ## against the 'old' ones (a directory to be analyzed the same
    ## way, or an already available "check_details" object).
    ## 'outputs' is passed on to check_details_changes(); 'sources' is
    ## passed as 'all' to R_check_outdirs().
    ## (The former code had a redundant inner 'dir <-' assignment in
    ## the if() expression below.)
    dir <- if(inherits(dir, "check_packages_in_dir"))
        attr(dir, "dir")
    else
        normalizePath(dir)

    outdirs <- R_check_outdirs(dir, all = sources, invert = TRUE)
    logs <- file.path(outdirs, "00check.log")
    logs <- logs[file_test("-f", logs)]
    new <- check_packages_in_dir_details(logs = logs, drop_ok = FALSE, ...)

    ## Use
    ##   old = tools:::CRAN_check_details(FLAVOR)
    ## to compare against the results/details of a CRAN check flavor.

    if(!inherits(old, "check_details"))
        old <- check_packages_in_dir_details(old, drop_ok = FALSE, ...)

    check_details_changes(new, old, outputs)
}

### ** check_details_changes

check_details_changes <-
function(new, old, outputs = FALSE)
{
    ## Compute the changes between two "check_details" objects (as
    ## obtained from check_packages_in_dir_details()), giving a data
    ## frame of class "check_details_changes" with columns Package,
    ## Check, Old and New.  If 'outputs' is true, the check outputs
    ## are included in the comparison and shown (indented) below the
    ## status tags in Old/New; otherwise only the statuses are
    ## compared.
    check_details_changes_classes <-
        c("check_details_changes", "data.frame")

    if(!inherits(new, "check_details")) stop("wrong class")
    if(!inherits(old, "check_details")) stop("wrong class")

    ## Simplify matters by considering only "changes" in *available*
    ## results/details.

    packages <- intersect(old$Package, new$Package)

    if(!length(packages)) {
        ## No packages in common: return an empty changes object.
        db <- list2DF(list(Package = character(),
                           Check = character(),
                           Old = character(),
                           New = character()))
        class(db) <- check_details_changes_classes
        return(db)
    }

    ## Merge with all = TRUE so that checks present in only one of the
    ## two dbs are kept (with NA entries from the other side); the
    ## '.x' columns hold the old results, '.y' the new ones.
    db <- merge(old[!is.na(match(old$Package, packages)), ],
                new[!is.na(match(new$Package, packages)), ],
                by = c("Package", "Check"), all = TRUE)

    ## Complete possibly missing version information.
    chunks <-
        lapply(split(db, db$Package),
               function(e) {
                   len <- nrow(e)
                   if(length(pos <- which(!is.na(e$Version.x))))
                       e$Version.x <-
                           rep.int(e[pos[1L], "Version.x"], len)
                   if(length(pos <- which(!is.na(e$Version.y))))
                       e$Version.y <-
                           rep.int(e[pos[1L], "Version.y"], len)
                   e
               })
    db <- do.call(rbind, chunks)

    ## Drop checks that are OK in both versions
    x.issue <- !is.na(match(db$Status.x,
                            c("NOTE", "WARNING", "ERROR", "FAILURE")))
    y.issue <- !is.na(match(db$Status.y,
                            c("NOTE", "WARNING", "ERROR", "FAILURE")))
    db <- db[x.issue | y.issue,]

    ## Even with the above simplification, missing entries do not
    ## necessarily indicate "OK" (checks could have been skipped).
    ## Hence leave as missing and show as empty in the diff.

    sx <- as.character(db$Status.x)
    sy <- as.character(db$Status.y)
    if(outputs) {
        ## Append the check outputs, indented by two, to the statuses.
        ind <- nzchar(ox <- db$Output.x)
        sx[ind] <- sprintf("%s\n  %s", sx[ind],
                           gsub("\n", "\n  ", ox[ind], fixed = TRUE))
        ind <- nzchar(oy <- db$Output.y)
        sy[ind] <- sprintf("%s\n  %s", sy[ind],
                           gsub("\n", "\n  ", oy[ind], fixed = TRUE))
    }
    sx[is.na(db$Status.x)] <- ""
    sy[is.na(db$Status.y)] <- ""
    ## Keep only the rows which actually changed; when comparing
    ## outputs, differences in quoting style are ignored.
    ind <- if(outputs)
        (.canonicalize_quotes(sx) != .canonicalize_quotes(sy))
    else
        (sx != sy)

    db <- cbind(db[ind, ], Old = sx[ind], New = sy[ind],
                stringsAsFactors = FALSE)

    ## Add information about possible version changes.
    ind <- (db$Version.x != db$Version.y)
    if(any(ind))
        db$Package[ind] <-
            sprintf("%s [Old version: %s, New version: %s]",
                    db$Package[ind],
                    db$Version.x[ind],
                    db$Version.y[ind])

    db <- db[c("Package", "Check", "Old", "New")]

    class(db) <- check_details_changes_classes

    db
}

`[.check_details_changes` <-
function(x, i, j, drop = FALSE)
{
    ## Subscripting a changes db with a comparison operator name
    ## selects the rows where old and new status compare accordingly,
    ## e.g. x["<", ] gives the rows where the status got worse.
    ## Statuses are ordered by severity as
    ##   "" < OK < NOTE < WARNING < ERROR < FAILURE
    ## using only the first line of each entry (outputs may have been
    ## appended below the status), with unknown tags mapped to "".
    ## The nargs() test ensures this kicks in only for two-index calls
    ## x[i, ] / x[i, j] (possibly with drop), not for column
    ## extraction x[i].
    if(((nargs() - !missing(drop)) == 3L)
       && (length(i) == 1L)
       && any(!is.na(match(i, c("==", "!=", "<", "<=", ">", ">="))))) {
        levels <- c("", "OK", "NOTE", "WARNING", "ERROR", "FAILURE")
        encode <- function(s) {
            ## Keep only the status tag on the first line.
            s <- sub("\n.*", "", s)
            s[is.na(match(s, levels))] <- ""
            ordered(s, levels)
        }
        old <- encode(x$Old)
        new <- encode(x$New)
        ## Replace the operator name by the logical row index it
        ## implies, then fall through to data frame subscripting.
        i <- do.call(i, list(old, new))
    }
    NextMethod()
}

format.check_details_changes <-
function(x, ...)
{
    ## One text chunk per row; the Old/New result lines are omitted
    ## when the corresponding entry is empty.
    if(!nrow(x)) return(character())
    old <- x$Old
    new <- x$New
    old_part <- ifelse(nzchar(old), sprintf("\nOld result: %s", old), "")
    new_part <- ifelse(nzchar(new), sprintf("\nNew result: %s", new), "")
    sprintf("Package: %s\nCheck: %s%s%s",
            x$Package, x$Check, old_part, new_part)
}

print.check_details_changes <-
function(x, ...)
{
    ## Print the formatted changes separated by blank lines; print
    ## nothing at all (not even an empty line) when there are none.
    ## Forward '...' to format(), consistently with
    ## print.check_details() (the former code silently dropped it).
    if(length(y <- format(x, ...)))
        writeLines(paste(y, collapse = "\n\n"))
    invisible(x)
}

### Local variables: ***
### mode: outline-minor ***
### outline-regexp: "### [*]+" ***
### End: ***
#  File src/library/tools/R/citation.R
#  Part of the R package, https://www.R-project.org
#
#  Copyright (C) 1995-2014 The R Core Team
#
#  This program is free software; you can redistribute it and/or modify
#  it under the terms of the GNU General Public License as published by
#  the Free Software Foundation; either version 2 of the License, or
#  (at your option) any later version.
#
#  This program is distributed in the hope that it will be useful,
#  but WITHOUT ANY WARRANTY; without even the implied warranty of
#  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
#  GNU General Public License for more details.
#
#  A copy of the GNU General Public License is available at
#  https://www.R-project.org/Licenses/

## Tools for computing on CITATION info.

.parse_CITATION_file <-
function(cfile, encoding = NULL)
{
    ## Parse the CITATION file 'cfile', taking the declared package
    ## encoding 'encoding' into account (NULL, treated as ASCII, when
    ## there is none), and return the parsed expressions.
    if(is.null(encoding))
        encoding <- "ASCII"

    ## The parser can only read valid strings, but single-byte locales
    ## can mark their encoding.  The following allows latin1 and UTF-8
    ## citation files to be read in UTF-8 and any single-byte locale
    ## (including C).
    ##
    ## FIXME: if parse() could be told to read strings bytewise,
    ## we could simply convert to UTF-8.
    if(encoding %in% c("latin1", "UTF-8") && !l10n_info()$MBCS) {
        ## NOTE: in an MBCS locale, calling parse() with a "UTF-8" or
        ## "latin1" encoding argument in order to get character strings
        ## marked as UTF-8 or latin1 does not work, as such encoding
        ## arguments are ignored with a warning.  So one needs to use a
        ## file connection to re-encode as below, and cannot get strings
        ## marked.  One might consider this a bug and not a feature ...
        parse(file = cfile, encoding = encoding)
    } else if(encoding %in% c("C", "ASCII")) {
        ## We do want to make sure this is ASCII: in single-byte
        ## locales 8-bit chars are likely to be parsed as bytes.
        ## Based on showNonASCII()
        x <- readLines(cfile, warn = FALSE)
        asc <- iconv(x, "latin1", "ASCII")
        if (any(is.na(asc) | asc != x))
            stop("non-ASCII input in a CITATION file without a declared encoding")
        parse(file = cfile)
    } else {
        ## Any other declared encoding (or MBCS locale): re-encode via
        ## a file connection.
        con <- file(cfile, encoding = encoding)
        on.exit(close(con))
        parse(con)
    }
}

.parse_CITATION_file_in_package <-
function(cfile, installed = FALSE)
{
    ## Parse a package CITATION file, picking up the package encoding
    ## (if any) from the corresponding DESCRIPTION file.  For installed
    ## packages DESCRIPTION sits next to CITATION; in a source package,
    ## CITATION lives one directory level below DESCRIPTION (inst/).
    cfile <- file_path_as_absolute(cfile)
    pkgdir <- dirname(cfile)
    if(!installed)
        pkgdir <- dirname(pkgdir)
    meta <- .read_description(file.path(pkgdir, "DESCRIPTION"))
    encoding <- meta["Encoding"]
    .parse_CITATION_file(cfile, if(is.na(encoding)) NULL else encoding)
}
    
## Database of the required fields for each supported BibTeX entry
## type.  Alternatives are separated by "|" (e.g., a Book needs an
## author or an editor); see find_missing_required_BibTeX_fields()
## for how this is interpreted.
BibTeX_entry_field_db <-
    list(Article = c("author", "title", "journal", "year"),
         Book = c("author|editor", "title", "publisher", "year"),
         Booklet = c("title"),
         InBook =
         c("author|editor", "title", "chapter", "publisher", "year"),
         InCollection =
         c("author", "title", "booktitle", "publisher", "year"),
         InProceedings = c("author", "title", "booktitle", "year"),
         Manual = c("title"),
         MastersThesis = c("author", "title", "school", "year"),
         Misc = character(),
         PhdThesis = c("author", "title", "school", "year"),
         Proceedings = c("title", "year"),
         TechReport = c("author", "title", "institution", "year"),
         Unpublished = c("author", "title", "note")
         )
## See e.g. lisp/textmodes/bibtex.el in the GNU Emacs sources.

get_CITATION_entry_fields <-
function(file, encoding = "ASCII")
{
    ## Extract the BibTeX entry types and the names of the non-empty
    ## fields from all top-level bibentry()/citEntry() calls in a
    ## CITATION file.  Returns NULL if there are none, and otherwise a
    ## data frame with columns File and Entry plus a list variable
    ## Fields of character vectors.
    exprs <- .parse_CITATION_file(file, encoding)

    ## Assume that bibentry() or citEntry() only occur at top level.

    ## Try to detect entry type and field names from the calls.
    ## FOO1/FOO2 get the formals of citEntry()/bibentry() but simply
    ## return the matched call, so evaluating a call rewritten to use
    ## them yields the matched arguments without running the real
    ## functions.
    FOO1 <- FOO2 <- function() match.call(expand.dots = FALSE)
    formals(FOO1) <- formals(utils::citEntry)
    formals(FOO2) <- formals(utils::bibentry)
    ## Could also hard-wire this, of course.
    get_names_of_nonempty_fields <- function(x) {
        ## A field counts as non-empty if it has positive length and,
        ## when character, is not entirely whitespace.
        names(x)[vapply(x,
                        function(e) {
                            length(e) &&
                            !(is.character(e) &&
                              all(grepl("^[[:space:]]*$", e)))
                        },
                        NA)]
    }

    out <- lapply(exprs,
           function(e) {
               nm <- as.character(e[[1L]])
               if(nm == "citEntry") {
                   e[[1L]] <- as.name("FOO1")
                   e <- as.list(eval(e))
                   entry <- e$entry
                   fields <- get_names_of_nonempty_fields(e$...)
               }
               else if(nm == "bibentry") {
                   e[[1L]] <- as.name("FOO2")
                   e <- as.list(eval(e))
                   entry <- e$bibtype
                   ## For bibentry(), extra fields may also come via
                   ## the 'other' argument.
                   fields <- get_names_of_nonempty_fields(c(e$...,
                                                            as.list(e$other)[-1L]))
               }
               else return()
               ## Use NA for non-character (e.g., computed) entry
               ## types.
               entry <- if(!is.character(entry)) NA_character_ else entry[1L]
               list(entry = entry, fields = as.character(fields))
           })

    out <- Filter(Negate(is.null), out)
    ## If we found nothing return nothing ...
    if(!length(out)) return(NULL)
    entries <- sapply(out, `[[`, 1L)
    fields <- lapply(out, `[[`, 2L)
    out <- data.frame(File = file,
                      Entry = entries,
                      stringsAsFactors = FALSE)
    out$Fields <- fields
    out
}


find_missing_required_BibTeX_fields <-
function(entry, fields)
{
    ## Return the required BibTeX fields for the given entry type
    ## which are missing from 'fields', or NA_character_ for an
    ## unknown entry type.  Matching is case-insensitive, and an
    ## alternative such as "author|editor" is satisfied by any one of
    ## its parts.
    idx <- match(tolower(entry),
                 tolower(names(BibTeX_entry_field_db)))
    if(is.na(idx)) {
        ## Invalid entry.
        return(NA_character_)
    }
    required <- BibTeX_entry_field_db[[idx]]
    if(!length(required)) return(character())
    ## Go for legibility/generality rather than efficiency.
    have <- tolower(fields)
    satisfied <- vapply(strsplit(required, "|", fixed = TRUE),
                        function(alternatives) any(alternatives %in% have),
                        NA)
    required[!satisfied]
}
#  File src/library/utils/R/code2html.R
#  Part of the R package, https://www.R-project.org
#
#  Copyright (C) 2022 The R Core Team
#
#  This program is free software; you can redistribute it and/or modify
#  it under the terms of the GNU General Public License as published by
#  the Free Software Foundation; either version 2 of the License, or
#  (at your option) any later version.
#
#  This program is distributed in the hope that it will be useful,
#  but WITHOUT ANY WARRANTY; without even the implied warranty of
#  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
#  GNU General Public License for more details.
#
#  A copy of the GNU General Public License is available at
#  https://www.R-project.org/Licenses/

## Similar in spirit to example() and demo(), but instead of running
## in the console, run it through knitr to produce HTML output.


## Decisions to be made:
## - knitr opts to use; being minimal lets users decide
## - whether to evaluate globally or locally

.code2html_payload_console <- function(type, topic, package, enhancedHTML = TRUE, Rhome = "")
{
    ## Build the HTML payload shown when the demo/example code was run
    ## in the console instead of being rendered via knitr, explaining
    ## what happened and what would enable browser output.
    snippet <- sprintf("%s(%s, package = \"%s\")", type, topic, package)
    ran <- gettextf("<p><code class='language-R'>%s</code> was run in the console.</p>", snippet)
    hint <-
        if (enhancedHTML)
            gettext("<p>To view output in the browser, the <a href='https://CRAN.R-project.org/package=knitr'>knitr</a> package must be installed.</p>")
        else
            gettext("<p>To view output in the browser, the <a href='https://CRAN.R-project.org/package=knitr'>knitr</a> package must be installed and the environment variable <code>_R_HELP_ENABLE_ENHANCED_HTML_</code> must be set to TRUE.</p>")
    page <- c(HTMLheader("R example", Rhome = Rhome,
                         logo = FALSE, up = NULL, top = NULL),
              ran, hint,
              "\n</div></body></html>")
    list(payload = paste(page, collapse = "\n"))
}

.code2html_payload_browser <- function(type, codelines, topic, package,
                                       Rhome = "", header.info = NULL,
                                       env = NULL)
{
    ## Build an Rhtml document around 'codelines' (demo or example
    ## code) and knit it to HTML with knitr, returning a list with
    ## component 'payload'.  'type' is "demo" or "example";
    ## 'header.info' is an optional list (with components Name, Title,
    ## Aliases and possibly Keywords) used to generate a header for
    ## example pages.  Figures are embedded as base64 data URIs, so
    ## the resulting page is self-contained.
    ## For examples, attach the package so its code can run.
    if (type == "example" && package != "base") library(package, character.only = TRUE)
    header.lines <-
        if (is.null(header.info)) character(0)
        else # Assumes Name, Title, Aliases present without checking (which should be OK)
        {
            header.info$Aliases <- strsplit(header.info$Aliases, "[[:space:]]+")[[1]]
            header.info$Keywords <- if (is.null(header.info$Keywords)) character(0)
                                    else strsplit(header.info$Keywords, "[[:space:]]+")[[1]]
            with(header.info,
            {
                ## Note: sprintf() returns 0-length output with 0-length input
                c(sprintf("<h2>%s</h2>", Title),
                  sprintf("<p>Aliases: %s</p>",
                          paste(sprintf("<a href='../help/%s'>%s</a>",
                                        vapply(Aliases, urlify, "",
                                               reserved = TRUE),
                                        vapply(Aliases, shtmlify, "")),
                                collapse = " ")),
                  sprintf("<p>Keywords: %s</p>",
                          paste(sprintf("<a href='/doc/html/Search?category=%s'>%s</a>",
                                        Keywords, Keywords),
                                collapse = " "))
                  )
            })
        }
    ## Not really important, but to be consistent with help pages
    pkgversion <- utils::packageDescription(package, fields = "Version")
    footer.lines <-
        sprintf('<hr><div style="text-align: center;">[Package <em>%s</em> version %s <a href="../html/00Index.html">Index</a>]</div>', package, pkgversion)
    ## All of the code goes into one begin.rcode/end.rcode chunk.
    rhtml <-
        c(HTMLheader(title = sprintf("%s '%s::%s'",
                                     switch(type, demo = "Demo for", example = "Examples for"),
                                     package, topic),
                     Rhome = Rhome,
                     logo = FALSE, up = NULL, top = NULL),
          header.lines,
          "\n\n<!--begin.rcode\n",
          codelines,
          "\nend.rcode-->\n\n",
          footer.lines,
          "</div></body></html>")
    figdir <- tempfile(pattern = package, fileext = topic)
    on.exit(unlink(figdir, recursive = TRUE), add = TRUE)
    ## Record old knitr / chunk options and restore on exit
    old_opts_knit <- knitr::opts_knit$get()
    old_opts_chunk <- knitr::opts_chunk$get()
    on.exit(knitr::opts_knit$restore(old_opts_knit), add = TRUE)
    on.exit(knitr::opts_chunk$restore(old_opts_chunk), add = TRUE)
    ## upload.fun embeds any generated figures as data: URIs.
    knitr::opts_knit$set(upload.fun = function(x) paste0("data:", mime_type(x), ";base64,", xfun::base64_encode(x)),
                         unnamed.chunk.label = sprintf("%s-%s-%s", type, package, topic))
    knitr::opts_chunk$set(comment = "", warning = TRUE, message = TRUE, error = TRUE,
                          fig.path = file.path(figdir, "fig-"),
                          fig.width = 9, fig.height = 7,
                          dpi = 96)
    out <- knitr::knit(text = rhtml, quiet = TRUE,
                       envir = env %||% new.env(parent = .GlobalEnv))
    ## the paste() doesn't seem necessary, but just to be safe
    list(payload = paste(out, collapse = "\n"))
}

example2html <- function(topic, package, Rhome = "", env = NULL)
{
    ## Render the examples of help 'topic' in 'package' as HTML for
    ## the dynamic help system, returning a list with component
    ## 'payload'.  When enhanced HTML help is disabled or knitr is not
    ## installed, the examples are instead run in the console and an
    ## explanatory stub page is returned.
    ## topic must be character (no NSE), and package must be specified
    enhancedHTML <-
        config_val_to_logical(Sys.getenv("_R_HELP_ENABLE_ENHANCED_HTML_", "TRUE"))
    if (!enhancedHTML || !requireNamespace("knitr", quietly = TRUE)) {
        ## Don't display in HTML (run in console instead)
        utils::example(topic, package = package, character.only = TRUE, ask = FALSE)
        .code2html_payload_console("example", topic, package,
                                   enhancedHTML = enhancedHTML, Rhome = Rhome)
    }
    else {
        ecode <- utils::example(topic, package = package, character.only = TRUE, give.lines = TRUE)
        ## Parse initial lines starting with ### (but not ###*): these
        ## carry "### Field: value" metadata in DCF form.
        hlines <- grep("^###[ ][^*]", ecode)
        ## Only use the leading consecutive run of such lines.
        wskip <- which(diff(hlines) != 1)
        if (length(wskip)) hlines <- hlines[seq_len(wskip[1])]
        if (length(hlines))
        {
            ## Strip the "### " prefix and parse the remainder as DCF.
            header.info <-
                as.list(read.dcf(textConnection(substring(ecode[hlines], 5)))[1, , drop = TRUE])
            ecode <- ecode[-hlines]
        }
        else header.info <- NULL
        .code2html_payload_browser("example", ecode, topic, package,
                                   Rhome = Rhome, header.info = header.info,
                                   env = env)
    }
}

demo2html <- function(topic, package, Rhome = "", env = NULL)
{
    ## Render a package demo as HTML (via knitr) when enhanced HTML
    ## help is enabled and knitr is available; otherwise run the demo
    ## in the console and return an explanatory stub payload.
    enhancedHTML <-
        config_val_to_logical(Sys.getenv("_R_HELP_ENABLE_ENHANCED_HTML_", "TRUE"))
    useBrowser <- enhancedHTML && requireNamespace("knitr", quietly = TRUE)
    if (useBrowser) {
        ## Assumes that the demo file is named topic.R
        dcode <- readLines(system.file("demo", paste0(topic, ".R"), package = package))
        .code2html_payload_browser("demo", dcode, topic, package, Rhome = Rhome, env = env)
    }
    else {
        ## Don't display in HTML (run in console instead)
        utils::demo(topic, package = package, character.only = TRUE, ask = FALSE)
        .code2html_payload_console("demo", topic, package, enhancedHTML = enhancedHTML, Rhome = Rhome)
    }
}

#  File src/library/tools/R/CRANtools.R
#  Part of the R package, https://www.R-project.org
#
#  Copyright (C) 2014-2022 The R Core Team
#
#  This program is free software; you can redistribute it and/or modify
#  it under the terms of the GNU General Public License as published by
#  the Free Software Foundation; either version 2 of the License, or
#  (at your option) any later version.
#
#  This program is distributed in the hope that it will be useful,
#  but WITHOUT ANY WARRANTY; without even the implied warranty of
#  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
#  GNU General Public License for more details.
#
#  A copy of the GNU General Public License is available at
#  https://www.R-project.org/Licenses/

## Summarize the CRAN check status of the given packages as one plain
## text summary per package, combining the overall results, additional
## issues, and per-check details.  The results/details/issues dbs
## default to NULL and are then fetched from CRAN via
## CRAN_check_results(), CRAN_check_details() and CRAN_check_issues().
## Returns a named character vector of class
## "summarize_CRAN_check_status" (see the format/print methods below).
summarize_CRAN_check_status <-
function(packages, results = NULL, details = NULL, issues = NULL)
{
    if(is.null(results))
        results <- CRAN_check_results()
    ## Keep only results for the requested packages with a known status.
    results <-
        results[!is.na(match(results$Package, packages)) & !is.na(results$Status), ]

    ## No results at all: return empty summaries, one per package.
    if(!NROW(results)) {
        s <- character(length(packages))
        names(s) <- packages
        return(s)
    }

    if(any(results$Status != "OK")) {
        if(is.null(details))
            details <- CRAN_check_details()
        details <- details[!is.na(match(details$Package, packages)), ]
        ## Remove all ok stubs.
        details <- details[details$Check != "*", ]
        ## Remove trailing white space from outputs ... remove eventually
        ## when this is done on CRAN.
        details$Output <- sub("[[:space:]]+$", "", details$Output)

    } else {
        ## Create empty details directly to avoid the cost of reading
        ## and subscripting the actual details db.
        details <- as.data.frame(matrix(character(), ncol = 7L),
                                 stringsAsFactors = FALSE)
        names(details) <-
            c("Package", "Version", "Flavor", "Check", "Status", "Output",
              "Flags")
    }

    if(is.null(issues))
        issues <- CRAN_check_issues()

    ## "Current CRAN status": counts by status category, plus a pointer
    ## to the package's check results page.
    summarize_results <- function(p, r) {
        if(!NROW(r)) return(character())
        tab <- table(r$Status)[c("OK", "NOTE", "WARNING", "ERROR", "FAILURE")]
        tab <- tab[!is.na(tab) & (tab > 0)]
        paste(c(sprintf("Current CRAN status: %s",
                        paste(sprintf("%s: %s", names(tab), tab),
                              collapse = ", ")),
                sprintf("See: <https://CRAN.R-project.org/web/checks/check_results_%s.html>",
                        p)),
              collapse = "\n")
    }

    ## Per-check details for one package, de-duplicated across flavors:
    ## flavors whose (normalized) details agree are reported together.
    summarize_details <- function(p, d) {
        if(!NROW(d)) return(character())

        pof <- which(names(d) == "Flavor")
        poo <- which(names(d) == "Output")
        ## Outputs from checking "whether package can be installed" will
        ## have a machine-dependent final line
        ##    See ....... for details.
        ind <- d$Check == "whether package can be installed"
        if(any(ind)) {
            d[ind, poo] <-
                sub("\nSee[^\n]*for details[.]$", "", d[ind, poo])
        }
        ## Fingerprint each detail row (minus the flavor) so equal rows
        ## from different flavors can be grouped below.
        txt <- apply(d[-pof], 1L, paste, collapse = "\r")
        ## Outputs from checking "installed package size" will vary
        ## according to system.
        ind <- d$Check == "installed package size"
        if(any(ind)) {
            txt[ind] <-
                apply(d[ind, - c(pof, poo)],
                      1L, paste, collapse = "\r")
        }

        ## Canonicalize fancy quotes.
        ## Could also try using iconv(to = "ASCII//TRANSLIT"))
        txt <- .canonicalize_quotes(txt)
        ## Group rows with identical fingerprints: report the first
        ## row's fields once, together with all flavors showing it.
        out <-
            lapply(split(seq_len(NROW(d)), match(txt, unique(txt))),
                   function(e) {
                       tmp <- d[e[1L], ]
                       flags <- tmp$Flags
                       flavors <- d$Flavor[e]
                       c(sprintf("Version: %s", tmp$Version),
                         if(nzchar(flags)) sprintf("Flags: %s", flags),
                         sprintf("Check: %s, Result: %s", tmp$Check, tmp$Status),
                         sprintf("  %s",
                                 gsub("\n", "\n  ", tmp$Output,
                                      perl = TRUE)),
                         sprintf("See: %s",
                                 paste(sprintf("<https://www.r-project.org/nosvn/R.check/%s/%s-00check.html>",
                                               flavors,
                                               p),
                                       collapse = ",\n     ")))
                   })
        paste(unlist(lapply(out, paste, collapse = "\n")),
              collapse = "\n\n")
    }

    summarize_issues <- function(i) {
        if(!length(i)) return(character())
        ## In principle the hyperrefs can be obtained from the package
        ## check results page already pointed to by summarize_results(),
        ## but this is not convenient for plain text processing ...
        paste(c("Additional issues:",
                sprintf("  %s <%s>", i$kind, i$href)),
              collapse = "\n")
    }

    summarize <- function(p, r, d, i) {
        paste(c(summarize_results(p, r),
                summarize_issues(i),
                summarize_details(p, d)),
              collapse = "\n\n")
    }

    ## Split according to package.
    issues <- split(issues[-1L], issues[[1L]])

    s <- if(length(packages) == 1L) {
        summarize(packages, results, details, issues[[packages]])
    } else {
        results <- split(results, factor(results$Package, packages))
        details <- split(details, factor(details$Package, packages))
        unlist(lapply(packages,
                      function(p) {
                          summarize(p,
                                    results[[p]],
                                    details[[p]],
                                    issues[[p]])
                      }))
    }

    names(s) <- packages
    class(s) <- "summarize_CRAN_check_status"
    s
}

format.summarize_CRAN_check_status <-
function(x, header = NA, ...)
{
    ## Format per-package check status summaries.  When 'header' is
    ## TRUE (by default, only when there is more than one summary),
    ## prefix each summary with an underlined "Package: <name>" title.
    add_header <- if(is.na(header)) length(x) > 1L else header
    if(!add_header)
        return(x)
    titles <- sprintf("Package: %s", names(x))
    ## gsub(".", "*", title) yields an "underline" of asterisks of the
    ## same width as the title ("." matches any single character).
    sprintf("%s\n%s\n\n%s", titles, gsub(".", "*", titles), x)
}

print.summarize_CRAN_check_status <-
function(x, ...)
{
    ## Print the formatted summaries separated by blank lines, and
    ## return the object invisibly.
    txt <- paste(format(x, ...), collapse = "\n\n")
    writeLines(txt)
    invisible(x)
}

## Summarize complete CRAN check status according to maintainer.

summarize_CRAN_check_status_according_to_maintainer <-
function(db = CRAN_package_db(),
         results = CRAN_check_results(),
         details = CRAN_check_details(),
         issues  = CRAN_check_issues())
{
    ## Group the formatted per-package check status summaries by the
    ## (lower-cased) maintainer email address.
    addresses <- tolower(sub(".*<(.*)>.*", "\\1", db[, "Maintainer"]))
    keep <- !duplicated(db[, "Package"])
    summaries <-
        format(summarize_CRAN_check_status(db[keep, "Package"],
                                           results, details, issues),
               header = TRUE)
    split(summaries, addresses[keep])
}

CRAN_baseurl_for_src_area <-
function()
{
    ## Base URL for the CRAN source area, overridable via the
    ## R_CRAN_SRC environment variable.
    Sys.getenv("R_CRAN_SRC", unset = .get_CRAN_repository_URL())
}

## This allows for partial local mirrors, or to look at a
## more-frequently-updated mirror.  Exposed as utils::findCRANmirror.
CRAN_baseurl_for_web_area <-
function()
{
    ## Base URL for the CRAN web area, overridable via the R_CRAN_WEB
    ## environment variable.
    Sys.getenv("R_CRAN_WEB", unset = .get_CRAN_repository_URL())
}

read_CRAN_object <-
function(cran, path)
{
    ## Download and deserialize a (gzip-compressed) RDS object living
    ## at <cran>/<path>, closing the connection on exit.
    target <- sprintf("%s/%s", cran, path)
    con <- gzcon(url(target, open = "rb"))
    on.exit(close(con))
    readRDS(con)
}

CRAN_check_results <-
function(flavors = NULL)
{
    ## CRAN check results db, optionally restricted to the given check
    ## flavors.
    db <- read_CRAN_object(CRAN_baseurl_for_web_area(),
                           "web/checks/check_results.rds")
    if(is.null(flavors))
        db
    else
        db[!is.na(match(db$Flavor, flavors)), ]
}

CRAN_check_details <-
function(flavors = NULL)
{
    ## CRAN check details db, optionally restricted to the given check
    ## flavors.
    db <- read_CRAN_object(CRAN_baseurl_for_web_area(),
                           "web/checks/check_details.rds")
    if(is.null(flavors))
        db
    else
        db[!is.na(match(db$Flavor, flavors)), ]
}

## Deprecated in 3.4.1, removed in 4.3.0
## CRAN_memtest_notes <-
## function()
## {
##     .Deprecated("CRAN_check_issues")
##     read_CRAN_object(CRAN_baseurl_for_web_area(),
##                      "web/checks/memtest_notes.rds")
## }

CRAN_check_issues <-
function()
{
    ## Db of additional CRAN check issues.
    read_CRAN_object(CRAN_baseurl_for_web_area(),
                     "web/checks/check_issues.rds")
}

CRAN_package_db <-
function()
{
    ## CRAN package metadata db, as a plain data frame.
    db <- read_CRAN_object(CRAN_baseurl_for_web_area(),
                           "web/packages/packages.rds")
    as.data.frame(db, stringsAsFactors = FALSE)
}

CRAN_aliases_db <-
function()
{
    ## Rd aliases db from the CRAN source area metadata.
    read_CRAN_object(CRAN_baseurl_for_src_area(),
                     "src/contrib/Meta/aliases.rds")
}

CRAN_archive_db <-
function()
{
    ## Db of archived packages from the CRAN source area metadata.
    read_CRAN_object(CRAN_baseurl_for_src_area(),
                     "src/contrib/Meta/archive.rds")
}

CRAN_authors_db <-
function()
{
    ## Package authors db from the CRAN source area metadata.
    read_CRAN_object(CRAN_baseurl_for_src_area(),
                     "src/contrib/Meta/authors.rds")
}

CRAN_current_db <-
function()
{
    ## Db of current package tarballs from the CRAN source area
    ## metadata.
    read_CRAN_object(CRAN_baseurl_for_src_area(),
                     "src/contrib/Meta/current.rds")
}

CRAN_rdxrefs_db <-
function()
{
    ## Rd cross-references db from the CRAN source area metadata.
    read_CRAN_object(CRAN_baseurl_for_src_area(),
                     "src/contrib/Meta/rdxrefs.rds")
}

## Compare CRAN mirrors against the CRAN master: differences in the
## PACKAGES index and timestamp lags for the main area and the two
## Windows binary areas.  Returns a per-mirror list of results; a
## mirror that could not be checked at all yields an error object.
check_CRAN_mirrors <-
function(mirrors = NULL, verbose = FALSE)
{
    ## Evaluate expr, retrying up to n times upon error.
    ## NOTE(review): 'expr' is a promise; retries rely on R
    ## re-evaluating a promise whose evaluation was interrupted by an
    ## error -- confirm this is the intended mechanism.
    retry_upon_error <- function(expr, n = 3L) {
        i <- 1L
        repeat {
            y <- tryCatch(expr, error = identity)
            if(!inherits(y, "error") || (i >= n))
                break
            i <- i + 1L
        }
        y
    }

    ## Lines of the src/contrib PACKAGES index of a repository area, or
    ## a simpleError recording the failure.
    read_package_db <- function(baseurl) {
        path <- sprintf("%ssrc/contrib/PACKAGES.gz", baseurl)
        db <- retry_upon_error({
            con <- gzcon(url(path, "rb"))
            on.exit(close(con))
            readLines(con)
        })
        if(inherits(db, "error")) {
            msg <- sprintf("Reading %s failed with message: %s",
                           path, conditionMessage(db))
            return(simpleError(msg))
        }
        db
    }

    ## Read a TIME* stamp file (seconds since the epoch) as POSIXct, or
    ## a simpleError recording the failure.
    read_timestamp <- function(baseurl, path) {
        path <- sprintf("%s%s", baseurl, path)
        ts <- retry_upon_error(readLines(path))
        if(inherits(ts, "error")) {
            msg <- sprintf("Reading %s failed with message: %s",
                           path, conditionMessage(ts))
            return(simpleError(msg))
        }
        as.POSIXct(as.numeric(ts), origin = "1970-01-01")
    }

    ## Propagate an earlier read error, otherwise use the result v.
    if_ok <- function(u, v) if(inherits(u, "error")) u else v

    ## One mirror's report: package index deltas in both directions and
    ## lags behind the master for each timestamp file.
    check_mirror <- function(mirror) {
        mirror_packages <- read_package_db(mirror)
        mirror_ts1 <- read_timestamp(mirror, path_ts1)
        mirror_ts2 <- read_timestamp(mirror, path_ts2)
        mirror_ts3 <- read_timestamp(mirror, path_ts3)

        list("PACKAGES" =
             if_ok(mirror_packages,
                   c("Delta_master_mirror" =
                         sprintf("%d/%d",
                                 length(setdiff(master_packages,
                                                mirror_packages)),
                                 length(master_packages)),
                     "Delta_mirror_master" =
                         sprintf("%d/%d",
                                 length(setdiff(mirror_packages,
                                                master_packages)),
                                 length(mirror_packages)))),
             "TIME" =
             if_ok(mirror_ts1, difftime(master_ts1, mirror_ts1)),
             "TIME_r-release" =
             if_ok(mirror_ts2, difftime(master_ts2, mirror_ts2)),
             "TIME_r-old-release" =
             if_ok(mirror_ts3, difftime(master_ts3, mirror_ts3))
             )
    }

    ## Reference data from the CRAN master.
    master <- "https://CRAN.R-project.org/"
    path_ts1 <- "TIME"
    path_ts2 <- "bin/windows/contrib/r-release/TIME_r-release"
    path_ts3 <- "bin/windows/contrib/r-old-release/TIME_r-old-release"

    master_packages <- read_package_db(master)
    master_ts1 <- read_timestamp(master, path_ts1)
    master_ts2 <- read_timestamp(master, path_ts2)
    master_ts3 <- read_timestamp(master, path_ts3)

    ## By default, check all registered CRAN mirrors.
    if(is.null(mirrors)) {
        mirrors <- as.character(utils::getCRANmirrors(all = TRUE)$URL)
    }

    results <- lapply(mirrors,
                      function(m) {
                          if(verbose)
                              message(sprintf("Checking %s", m))
                          suppressWarnings(tryCatch(check_mirror(m),
                                                    error = identity))
                      })
    names(results) <- mirrors

    results
}

CRAN_mirror_maintainers_info <-
function(mirrors, db = NULL, collapse = TRUE)
{
    ## Build mail header fields and body text for a message to the
    ## maintainers of the given CRAN mirrors.  Addresses come from the
    ## mirrors db (utils::getCRANmirrors() by default), where '#' is
    ## used in place of '@'.
    if(is.null(db))
        db <- utils::getCRANmirrors(all = TRUE)
    mirrors <- sort(unique(mirrors))
    addresses <- db[match(mirrors, as.character(db$URL)), "Maintainer"]
    ## Undo the '#'-for-'@' obfuscation.
    addresses <- gsub("[[:space:]]*#[[:space:]]*", "@", addresses)
    recipients <-
        unique(unlist(strsplit(addresses,
                               "[[:space:]]*,[[:space:]]*")))
    head <- list("To" = "CRAN@R-project.org",
                 "Bcc" = recipients,
                 "Subject" = "CRAN mirrors maintained by you",
                 "Reply-To" = "CRAN@R-project.org")
    if(collapse) {
        head$Bcc <- paste(head$Bcc, collapse = ",\n    ")
        head <- sprintf("%s: %s", names(head), unlist(head))
    }
    n_addr <- length(addresses)
    salutation <-
        if(n_addr > 1L) "Dear maintainers," else "Dear maintainer,"
    intro <-
        strwrap(paste(if(length(mirrors) > 1L)
                          "This concerns the following CRAN mirrors"
                      else
                          "This concerns the following CRAN mirror",
                      "maintained by",
                      if(n_addr > 1L) "one of",
                      "you:"))
    body <- c(salutation,
              "",
              intro,
              "",
              paste0("  ", formatDL(mirrors, addresses, style = "list")))
    list(head = head, body = body)
}

## Read the mirmon state files for the CRAN mirror monitoring service
## and combine them into one data frame, adding which timestamp file
## each row refers to and the age of the mirror in days.
CRAN_mirror_mirmon_status <-
function()
{
    ## See
    ## <http://www.projects.science.uu.nl/csg/mirmon/mirmon.html#state_file_format>.

    ## Column names of a mirmon state file, per the format above.
    fields <-
        c("url",
          "age",
          "status_last_probe",
          "time_last_successful_probe",
          "probe_history",
          "state_history",
          "last_probe")
    ## Epoch seconds to POSIXct; non-numeric entries give NA (warnings
    ## suppressed).
    ts_to_POSIXct <- function(ts) {
        suppressWarnings(as.POSIXct(as.numeric(as.character(ts)),
                                    origin = "1970-01-01"))
    }
    ## Read one state file and convert the timestamp columns; 'delta'
    ## is the mirror's age relative to now, in days.
    read_mirmon_state_file <- function(con) {
        db <- utils::read.table(con, header = FALSE, col.names = fields)
        db$url <- as.character(db$url)
        db$age <- ts_to_POSIXct(db$age)
        db$time_last_successful_probe <-
            ts_to_POSIXct(db$time_last_successful_probe)
        db$last_probe <- ts_to_POSIXct(db$last_probe)
        db$delta <- difftime(Sys.time(), db$age, units = "days")
        db
    }
    ## One state file per monitored timestamp area.
    state_files <-
        c("TIME" = "mirror.state",
          "TIME_r-release" = "mirror_release.state",
          "TIME_r-old-release" = "mirror_old_release.state")

    ## Need to always use master for now (the mirrors do not have the
    ## state files).
    do.call(rbind,
            c(Map(function(u, v) {
                      u <- paste0("https://cran.r-project.org/mirmon/state/", u)
                      cbind(read_mirmon_state_file(u),
                            timestamp = v,
                            stringsAsFactors = FALSE)
                  },
                  state_files,
                  names(state_files)),
              list(make.row.names = FALSE)))
}


## Flattened CRAN Rd cross-reference db with the source package added
## per row and the anchored-xref expansions appended as extra columns.
CRAN_Rd_xref_db_with_expansions <-
function()
{
    db <- CRAN_rdxrefs_db()
    ## Flatten:
    ## db is a per-package list of xref tables; rbind them and record
    ## the source package of each row.  NOTE(review): the column naming
    ## below assumes the flattened table then has the source file as
    ## its third column -- confirm against the layout of
    ## src/contrib/Meta/rdxrefs.rds.
    db <- cbind(do.call(rbind, db),
                rep.int(names(db), vapply(db, NROW, 0L)))
    colnames(db) <- c(colnames(db)[1L : 2L], "S_File", "S_Package")
    ## Expansions add the target package/file (T_Package, T_File).
    unique(cbind(db, .expand_anchored_Rd_xrefs(db)))
}

CRAN_Rd_xref_available_target_ids <-
function()
{
    ## Ids ("package::target") of all Rd xref targets available on
    ## CRAN, computed from the aliases db.
    targets <- lapply(CRAN_aliases_db(), .Rd_available_xref_targets)
    pkgs <- rep.int(names(targets), lengths(targets))
    .Rd_object_id(pkgs, unlist(targets, use.names = FALSE))
}

CRAN_Rd_xref_reverse_dependencies <-
function(packages, db = NULL, details = FALSE)
{
    ## Packages whose Rd files cross-reference the given target
    ## packages.  With details = TRUE, return the matching xref db rows
    ## instead of just the source package names.
    if(is.null(db))
        db <- CRAN_Rd_xref_db_with_expansions()
    ## split.data.frame() also splits matrices row-wise.
    chunks <- split.data.frame(db, db[, "T_Package"])[packages]
    if(details)
        chunks
    else
        lapply(chunks, function(rows) unique(rows[, "S_Package"]))
}

## Find problems with Rd cross-references in CRAN packages: anchored
## xrefs to current CRAN packages which do not resolve, and xrefs to
## packages apparently archived on CRAN and not available elsewhere.
CRAN_Rd_xref_problems <-
function()
{
    y <- list()

    db <- CRAN_Rd_xref_db_with_expansions()
    ## Only keep xrefs which name a target package.
    db <- db[nzchar(db[, "T_Package"]), , drop = FALSE]
    ## Add ids:
    db <- cbind(db,
                T_ID = .Rd_object_id(db[, "T_Package"], db[, "T_File"]))

    ## Do we have Rd xrefs to current CRAN packages which no longer work?
    ## Package names are tarball names with the "_<version>" part
    ## dropped.
    current <- sub("_.*", "", rownames(CRAN_current_db()))
    db1 <- db[!is.na(match(db[, "T_Package"], current)), , drop = FALSE]
    y$broken_xrefs_to_current_CRAN_packages <-
        db1[is.na(match(db1[, "T_ID"],
                        CRAN_Rd_xref_available_target_ids())), ,
            drop = FALSE]

    ## Do we have Rd xrefs "likely" to archived CRAN packages?
    ## This is a bit tricky because packages could have been archived on
    ## CRAN but still be available from somewhere else.  The code below
    ## catches availability in standard repositories, but not in
    ## additional repositories.
    repos <- .get_standard_repository_URLs() # CRAN and BioC
    ## Previous versions used getOption("repos").
    ## "Archived": in the archive db but neither currently available
    ## from a standard repository nor a standard (base/recommended)
    ## package.
    archived <-
        setdiff(names(CRAN_archive_db()),
                c(rownames(utils::available.packages(filters = list(),
                                                     repos = repos)),
                  unlist(.get_standard_package_names(),
                         use.names = FALSE)))
    y$xrefs_likely_to_archived_CRAN_packages <-
        db[!is.na(match(db[, "T_Package"], archived)), , drop = FALSE]

    y
}

.Rd_available_xref_targets <-
function(aliases)
{
    ## Argument aliases as obtained from Rd_aliases(), or directly by
    ## calling
    ##   lapply(rddb, .Rd_get_metadata, "alias")
    ## on an Rd db.
    ## Available xref targets are all aliases together with the base
    ## names of the Rd files.
    ## Anchor the extension pattern at the end of the file name: the
    ## unanchored pattern would strip an interior ".Rd"/".rd" substring
    ## (e.g., "data.rdata.Rd" -> "dataata.Rd") instead of the
    ## extension.
    unique(c(unlist(aliases, use.names = FALSE),
             sub("\\.[Rr]d$", "", basename(names(aliases)))))
}

.Rd_object_id <-
function(package, nora)
{
    ## Id of an Rd object: "package::nora", where 'nora' is the file
    ## Name OR an Alias.
    fmt <- "%s::%s"
    sprintf(fmt, package, nora)
}

CRAN_package_maintainers_db <-
function(db = CRAN_package_db())
{
    ## Data frame with one row per package, giving the maintainer's
    ## (lower-cased) email address and full display form.
    raw <- db[, "Maintainer"]
    list2DF(list(Package = db[, "Package"],
                 ## Address: the text between the angle brackets.
                 Address = tolower(sub(".*<(.*)>.*", "\\1", raw)),
                 ## Maintainer: full field with newlines flattened.
                 Maintainer = gsub("\n", " ", raw, fixed = TRUE)))
}

CRAN_package_maintainers_info <-
function(packages, db = NULL, collapse = TRUE)
{
    ## Build mail header fields and body text for a message to the
    ## maintainers of the given CRAN packages.
    if(is.null(db))
        db <- CRAN_package_maintainers_db()
    ind <- match(packages, db[, "Package"])
    addresses <- db[ind, "Address"]
    head <- list("To" = "CRAN@R-project.org",
                 "Bcc" = sort(unique(addresses)),
                 "Subject" = "CRAN packages maintained by you",
                 "Reply-To" = "CRAN@R-project.org")
    if(collapse) {
        head$Bcc <- paste(head$Bcc, collapse = ",\n    ")
        head <- sprintf("%s: %s", names(head), unlist(head))
    }
    ## Packages grouped by maintainer display form, for the body.
    by_maintainer <- split(db[ind, "Package"], db[ind, "Maintainer"])
    n_addr <- length(addresses)
    salutation <-
        if(n_addr > 1L) "Dear maintainers," else "Dear maintainer,"
    concerns <-
        if(length(packages) > 1L)
            "This concerns the CRAN packages"
        else
            "This concerns the CRAN package"
    pkg_lines <-
        paste(strwrap(paste(sort(packages), collapse = " "),
                      indent = 2L, exdent = 2L),
              collapse = "\n")
    body <- c(salutation,
              "",
              concerns,
              "",
              pkg_lines,
              "",
              paste("maintained by",
                    if(n_addr > 1L) "one of",
                    "you:"),
              "",
              paste0("  ",
                     formatDL(vapply(by_maintainer, paste, "",
                                     collapse = " "),
                              style = "list")))
    list(head = head, body = body)
}

## For each given package, summarize its reverse dependencies (direct,
## additional recursive, additional recursive via suggests), its
## reverse Rd xref dependencies, and the CRAN task views containing
## it.  Returns a data frame of class
## "CRAN_package_reverse_dependencies_and_views" (see the format/print
## methods below).
CRAN_package_reverse_dependencies_and_views <-
function(packages)
{
    repos <- getOption("repos")
    ## Alternatively, use .get_standard_repository_URLs()

    a <- utils::available.packages(filters = list(), repos = repos)

    ## Task views db: build a map from package name to the views
    ## containing it.
    v <- read_CRAN_object(CRAN_baseurl_for_src_area(),
                          "src/contrib/Views.rds")
    v <- do.call(rbind,
                 mapply(cbind,
                        Package =
                        lapply(v, function(e) e$packagelist$name),
                        View = vapply(v, `[[`, "name", FUN.VALUE = "")))
    v <- split(v[, 2L], v[, 1L])

    ## Direct, recursive, and recursive-strong-via-suggests reverse
    ## dependencies.
    r <- package_dependencies(packages, a, reverse = TRUE)
    rr <- package_dependencies(packages, a,
                               reverse = TRUE, recursive = TRUE)
    rrs <- package_dependencies(packages, a, "Suggests",
                                reverse = TRUE, recursive = "strong")

    ## For formatting reverse dependencies, for now indicate non-CRAN
    ## ones by adding a '*'.
    expansions <- unique(c(unlist(r, use.names = FALSE),
                           unlist(rr, use.names = FALSE),
                           unlist(rrs, use.names = FALSE)))
    names(expansions) <- expansions
    if("CRAN" %in% names(repos)) {
        ind <- !startsWith(a[match(expansions, a[, "Package"]),
                             "Repository"],
                           repos["CRAN"])
        expansions[ind] <- paste0(expansions[ind], "*")
    }

    rxrefs <- CRAN_Rd_xref_reverse_dependencies(packages)

    ## Empty entries become NA so the format method can drop them.
    fmt <- function(x) {
        if(length(x)) paste(sort(x), collapse = " ") else NA_character_
    }

    y <- lapply(packages,
                function(p) {
                    c(Package = p,
                      "Reverse depends" =
                          fmt(expansions[r[[p]]]),
                      "Additional recursive reverse depends" =
                          fmt(expansions[setdiff(rr[[p]], r[[p]])]),
                      "Additional recursive reverse depends of suggests" =
                          fmt(expansions[setdiff(rrs[[p]], rr[[p]])]),
                      "Reverse Rd xref depends" =
                          fmt(rxrefs[[p]]),
                      "Views" =
                          fmt(v[[p]]))
                })
    y <- as.data.frame(do.call(rbind, y), stringsAsFactors = FALSE)
    class(y) <- c("CRAN_package_reverse_dependencies_and_views",
                  class(y))
    y
}

format.CRAN_package_reverse_dependencies_and_views <-
function(x, ...)
{
    ## One text chunk per row: a "name: value" description list of the
    ## non-missing fields, with continuation lines indented by 2.
    format_row <- function(fields) {
        fields <- fields[!is.na(fields)]
        paste(formatDL(fields, style = "list", indent = 2L),
              collapse = "\n")
    }
    apply(x, 1L, format_row)
}

print.CRAN_package_reverse_dependencies_and_views <-
function(x, ...)
{
    ## Print the formatted per-package entries separated by blank
    ## lines, returning the object invisibly.
    txt <- paste(format(x, ...), collapse = "\n\n")
    writeLines(txt)
    invisible(x)
}

CRAN_package_reverse_dependencies_with_maintainers <-
function(packages, which = "strong", recursive = FALSE,
         db = CRAN_package_db())
{
    ## Package/Version/Maintainer rows for all reverse dependencies of
    ## the given packages.
    rdeps <- package_dependencies(packages, db, which,
                                  recursive = recursive,
                                  reverse = TRUE)
    rdeps <- sort(unique(unlist(rdeps)))
    ## nomatch = 0 simply drops reverse dependencies not in the db.
    rows <- match(rdeps, db[, "Package"], nomatch = 0L)
    db[rows, c("Package", "Version", "Maintainer")]
}

## For each given package, list its dependencies together with their
## publication dates, most recent first.  Dependencies found only in
## Bioconductor are marked with a trailing '*'; base packages (which
## have no publication date) are dropped.
CRAN_package_dependencies_with_dates <-
function(packages, which = "most", recursive = FALSE,
         db = CRAN_package_db())
{
    repos <- .get_standard_repository_URLs() # CRAN and BioC
    a <- utils::available.packages(filters = list(), repos = repos)

    pb <- NULL                          # Compute if necessary ...
    d <- package_dependencies(packages, a, which = which,
                              recursive = recursive)
    ## We currently keep the base packages dependencies, which have no
    ## date.  Hence, filter these out ...
    base_packages <- .get_standard_package_names()[["base"]]
    lapply(d,
           function(e) {
               e <- setdiff(as.character(e), base_packages)
               i <- match(e, db[, "Package"])
               ## 'd' here shadows the outer dependencies list: it now
               ## holds the publication dates.
               d <- db[i, "Published"]
               ## Dependencies not found in the CRAN db: look up their
               ## publication date in the BioC db (fetched lazily via
               ## <<-, at most once) and mark them with '*'.
               if(any(j <- is.na(i))) {
                   eb <- e[j]
                   if(is.null(pb))
                       pb <<- BioC_package_db()
                   ib <- match(eb, pb[, "Package"])
                   d[j] <- pb[ib, "Date/Publication"]
                   e[j] <- paste0(eb, "*")
               }
               d <- as.Date(d)
               o <- order(d, decreasing = TRUE)
               list2DF(list(Package = e[o], Date = d[o]))
           })
}

CRAN_packages_with_maintainer_matching <-
function(pattern, db = CRAN_package_db(), ...)
{
    ## Names of packages whose Maintainer field matches 'pattern';
    ## further arguments (e.g. fixed, ignore.case) are passed to grep().
    hits <- grep(pattern, db[, "Maintainer"], ...)
    db[hits, "Package"]
}

write_texts_to_dir <-
function(lst, dir, verbose = FALSE)
{
    ## Write each element of 'lst' (a named list of character vectors)
    ## to '<dir>/<name>.txt', joining the chunks by blank lines.
    dir.create(dir, showWarnings = FALSE, recursive = FALSE)

    nms <- names(lst)
    for(i in seq_along(lst)) {
        if(verbose)
            message(sprintf("Processing %s ...", nms[[i]]))
        writeLines(paste(lst[[i]], collapse = "\n\n"),
                   file.path(dir, sprintf("%s.txt", nms[[i]])))
    }

    invisible()
}

CRAN_package_URL <- function(p)
{
    ## Canonical CRAN landing page URL(s) for package name(s) 'p'.
    prefix <- "https://CRAN.R-project.org/package="
    paste0(prefix, p)
}

CRAN_package_check_URL <- function(p)
{
    ## CRAN check results page URL(s) for package name(s) 'p'.
    fmt <- "https://CRAN.R-project.org/web/checks/check_results_%s.html"
    sprintf(fmt, p)
}

## Package metadata from the configured Bioconductor repositories,
## read from their VIEWS files and merged into one data frame.
## Returns NULL when no BioC repositories are configured.
BioC_package_db <-
function()
{
    urls <- .get_standard_repository_URLs()
    urls <- urls[startsWith(names(urls), "BioC")]
    if(!length(urls)) return(NULL)
    info <- lapply(urls, function(u) {
                       con <- url(paste0(u, "/VIEWS"))
                       on.exit(close(con))
                       read.dcf(con)
                   })
    ## Merge with all = TRUE so fields present in only some of the
    ## repositories are kept (filled with NA elsewhere).
    Reduce(function(u, v) merge(u, v, all = TRUE),
           lapply(info,
                  as.data.frame,
                  stringsAsFactors = FALSE))
}
##  File src/library/tools/R/doitools.R
##  Part of the R package, https://www.R-project.org
##
##  Copyright (C) 2015-2023 The R Core Team
##
##  This program is free software; you can redistribute it and/or modify
##  it under the terms of the GNU General Public License as published by
##  the Free Software Foundation; either version 2 of the License, or
##  (at your option) any later version.
##
##  This program is distributed in the hope that it will be useful,
##  but WITHOUT ANY WARRANTY; without even the implied warranty of
##  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
##  GNU General Public License for more details.
##
##  A copy of the GNU General Public License is available at
##  https://www.R-project.org/Licenses/

doi_db <-
function(dois, parents)
{
    ## Minimal DOI db: one row per DOI, recording the part of the
    ## package it was found in (e.g. "DESCRIPTION", "inst/CITATION",
    ## "man/<file>").
    structure(list2DF(list(DOI = trimws(as.character(dois)),
                           Parent = as.character(parents))),
              class = c("doi_db", "data.frame"))
}

doi_db_from_package_metadata <-
function(meta)
{
    ## Extract <DOI:...> / <doi:...> names from the Description field
    ## of the package metadata.
    pattern <- "<(DOI|doi):([^>]*)>"
    description <- meta["Description"]
    dois <- if(is.na(description))
        character()
    else {
        matches <- gregexpr(pattern, description)
        .gregexec_at_pos(pattern, description, matches, 3L)
    }
    ## DOI names may contain ">", but we need this as a delimiter when
    ## writing the names in <doi:name> style.  So at least ">" and
    ## hence also "%" must be percent encoded ... decode here.
    doi_db(utils::URLdecode(dois), rep.int("DESCRIPTION", length(dois)))
}

doi_db_from_package_citation <-
function(dir, meta, installed = FALSE)
{
    ## DOIs recorded in a package's CITATION file (under 'inst' for
    ## source packages, top-level for installed ones).
    path <- if(installed) "CITATION" else file.path("inst", "CITATION")
    cfile <- file.path(dir, path)
    read_dois <- function(cfile) {
        cinfo <- .read_citation_quietly(cfile, meta)
        if(inherits(cinfo, "error"))
            character()
        else
            trimws(unique(unlist(cinfo$doi, use.names = FALSE)))
    }
    dois <- if(file.exists(cfile)) read_dois(cfile) else character()
    doi_db(dois, rep.int(path, length(dois)))
}

## \doi is a user-defined macro (from system.Rd) which gets expanded by
## parse_Rd().  To extract programmatically, we try to find the user
## macros with the (current) expansion.
## Alternatively, we could call .build_Rd_db() on the package Rd sources
## with e.g. macros = c("\\newcommand{\\doi}{<DOI:#1>}") and look for
## TEXT nodes matching the expansion.  However, we cannot necessarily
## safely process build-time Sexprs ...

doi_db_from_package_Rd_db <-
function(db)
{
    ## DOIs used via \doi{} in an Rd db, with parents "man/<file>".
    per_file <- lapply(db, .get_dois_from_Rd)
    per_file <- per_file[lengths(per_file) > 0L]
    doi_db(.canonicalize_doi(unlist(per_file, use.names = FALSE)),
           rep.int(file.path("man", names(per_file)),
                   lengths(per_file)))
}

## Collect the arguments of \doi user macro invocations in a single
## parsed Rd object.
.get_dois_from_Rd <-
function(x)
{
    dois <- character()
    recurse <- function(e) {
        if(identical(attr(e, "Rd_tag"), "USERMACRO") &&
           identical(attr(e, "macro"), "\\doi"))
            ## e[2L] holds the macro argument (the DOI name).
            dois <<- c(dois, e[2L])
        else if(is.list(e))
            lapply(e, recurse)
    }
    ## Only scan Rd objects flagged for \Sexpr dynamic content --
    ## NOTE(review): presumably user macro nodes are only retained for
    ## these; confirm against getDynamicFlags().
    if(getDynamicFlags(x)["\\Sexpr"])
        lapply(x, recurse)
    dois
}

doi_db_from_package_sources <-
function(dir, add = FALSE, Rd = FALSE)
{
    ## Combined DOI db from a source package's DESCRIPTION, CITATION
    ## and (optionally) Rd files.  With add = TRUE, parents are
    ## prefixed by the package directory name.
    meta <- .get_package_metadata(dir, FALSE)
    parts <- list(doi_db_from_package_metadata(meta),
                  doi_db_from_package_citation(dir, meta))
    if(Rd)
        parts <- c(parts,
                   list(doi_db_from_package_Rd_db(Rd_db(dir = dir))))
    db <- do.call(rbind, parts)
    if(add)
        db$Parent <- file.path(basename(dir), db$Parent)
    db
}

doi_db_from_installed_packages <-
function(packages, lib.loc = NULL, verbose = FALSE, Rd = FALSE)
{
    ## Combined DOI db for a set of installed packages; parents are
    ## prefixed with the package name.  Packages which cannot be found
    ## contribute nothing; an empty 'packages' gives NULL.
    if(!length(packages)) return()
    db_for <- function(p) {
        if(verbose)
            message(sprintf("processing %s", p))
        dir <- system.file(package = p, lib.loc = lib.loc)
        if(dir == "") return()
        meta <- .read_description(file.path(dir, "DESCRIPTION"))
        db <- rbind(doi_db_from_package_metadata(meta),
                    doi_db_from_package_citation(dir, meta,
                                                 installed = TRUE),
                    if(Rd)
                        doi_db_from_package_Rd_db(Rd_db(p, lib.loc = dirname(dir))))
        db$Parent <- file.path(p, db$Parent)
        db
    }
    do.call(rbind,
            c(lapply(packages, db_for),
              list(make.row.names = FALSE)))
}

## Check a db of DOIs (as built by the doi_db_from_* functions, or the
## result of a previous check) by resolving them via the doi.org
## handle API.  Returns a "check_doi_db" data frame of the problematic
## DOIs with their parents, HTTP status and message.
check_doi_db <-
function(db, verbose = FALSE, parallel = FALSE, pool = NULL)
{
    if(parallel && is.null(pool))
        pool <- curl::new_pool()

    ## Assemble check result rows (DOI, parents, status, message).
    .gather <- function(d = character(),
                        p = list(),
                        s = rep.int("", length(d)),
                        m = rep.int("", length(d))) {
        y <- list2DF(list(DOI = d, From = p, Status = s, Message = m))
        class(y) <- c("check_doi_db", "data.frame")
        y
    }

    ## Fetch response headers for the given URLs: in parallel via curl,
    ## or sequentially via base R.
    .fetch_headers <-
        if(parallel)
            function(urls, dois)
                .fetch_headers_via_curl(urls, verbose, pool)
        else
            function(urls, dois)
                .fetch_headers_via_base(urls, verbose, dois)

    ## Status code and message for one header set (or fetch error).
    .check <- function(h) {
        if(inherits(h, "error")) {
            s <- "Error"
            m <- sub("[[:space:]]*$", "", conditionMessage(h))
        } else {
            s <- as.character(attr(h, "status"))
            m <- table_of_HTTP_status_codes[s]
        }
        c(s, m)
    }

    bad <- .gather()

    if(!NROW(db)) return(bad)

    if(inherits(db, "check_doi_db")) {
        ## Allow re-checking check results.
        parents <- db$From
        dois <- db$DOI
    } else {
        ## Group the parents by DOI so each DOI is checked only once.
        parents <- split(db$Parent, db$DOI)
        dois <- names(parents)
    }

    ## <FIXME>
    ## According to <https://www.iana.org/assignments/urn-formal/doi>,
    ##   The 2022 edition of ISO 26324 has amended the syntax of the
    ##   prefix by removing the requirement for the directory indicator
    ##   to be "10" and allow also DOI names without a registrant code.
    ## (ISO 26324 is the DOI standard).
    ## As of 2023-06-06, this is not yet reflected in the DOI Handbook
    ## (<https://doi.org/10.1000/182>) last updated on 2019-12-19, which
    ## still says in
    ## <https://www.doi.org/the-identifier/resources/handbook/2_numbering#2.2>
    ## that
    ##   The DOI prefix shall be composed of a directory indicator
    ##   followed by a registrant code. These two components shall be
    ##   separated by a full stop (period).
    ##   The directory indicator shall be "10".
    ## Nevertheless, let us drop the check below:
    ## <CODE>
    ## ind <- !startsWith(dois, "10")
    ## </CODE>
    ## But do at least minimal tests for formal validity (could do
    ## more):
    ind <- !grepl("/", dois, fixed = TRUE)
    if(any(ind)) {
        len <- sum(ind)
        bad <- rbind(bad,
                     .gather(dois[ind], parents[ind],
                             m = rep.int("Invalid DOI", len)))
    }
    pos <- which(!ind)
    ## </FIXME>

    ## See <https://www.doi.org/the-identifier/resources/handbook/3_resolution#3.8.3>:
    ##   Ideally we would perform GET requests and would look at the
    ##   responseCode in the JSON response.  However, we cannot do this
    ##   with base, and at least for now we can also check using HEAD
    ##   requests and looking at the status code (200 vs 404).
    if(length(pos)) {
        doispos <- dois[pos]
        urlspos <- paste0("https://doi.org/api/handles/",
                          vapply(doispos, urlify_doi, ""))
        ## Do we need to percent encode parts of the DOI name?
        headers <- .fetch_headers(urlspos, doispos)
        results <- do.call(rbind, lapply(headers, .check))
        status <- results[, 1L]
        ## Anything but HTTP 200 is reported as a problem.
        ind <- (status != "200")
        if(any(ind)) {
            pos <- pos[ind]
            s <- status[ind]
            m <- results[ind, 2L]
            m[is.na(m)] <- ""
            bad <- rbind(bad,
                         .gather(dois[pos], parents[pos], s, m))
        }
    }

    bad
}

format.check_doi_db <-
function(x, ...)
{
    ## Format check results as one multi-line text entry per DOI,
    ## giving the DOI, the file(s) it came from, and -- where non-empty
    ## -- the status code and message from the check.  Returns a
    ## character vector with one element per row of 'x' (empty results
    ## give character()).
    if(!NROW(x)) return(character())

    ## An optional "\n<Label>: <value>" line, omitted for empty values.
    optional_line <- function(label, values)
        ifelse(values == "",
               "",
               paste0("\n", label, ": ", values))

    ## Collapse multi-file origins onto continuation lines aligned
    ## under "From: ".
    froms <- vapply(x$From, paste, "", collapse = "\n      ")

    paste0("DOI: ", x$DOI,
           "\nFrom: ", froms,
           optional_line("Status", x$Status),
           optional_line("Message", x$Message))
}

print.check_doi_db <-
function(x, ...)
{
    ## Print the formatted entries separated by blank lines; empty
    ## results print nothing.  Returns 'x' invisibly, as print methods
    ## should.
    if(NROW(x) > 0L) {
        entries <- format(x)
        cat(paste(entries, collapse = "\n\n"), "\n", sep = "")
    }
    invisible(x)
}

check_package_dois <-
function(dir, verbose = FALSE)
{
    ## Validate all DOIs found in the sources (including Rd files) of
    ## the package rooted at 'dir', resolving them in parallel.
    ## Returns a "check_doi_db" object with the problematic DOIs.
    check_doi_db(doi_db_from_package_sources(dir, Rd = TRUE),
                 verbose = verbose, parallel = TRUE)
}
#  File src/library/tools/R/dynamicHelp.R
#  Part of the R package, https://www.R-project.org
#
#  Copyright (C) 1995-2022 The R Core Team
#
#  This program is free software; you can redistribute it and/or modify
#  it under the terms of the GNU General Public License as published by
#  the Free Software Foundation; either version 2 of the License, or
#  (at your option) any later version.
#
#  This program is distributed in the hope that it will be useful,
#  but WITHOUT ANY WARRANTY; without even the implied warranty of
#  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
#  GNU General Public License for more details.
#
#  A copy of the GNU General Public License is available at
#  https://www.R-project.org/Licenses/


## Helper function used to declare mime-type for files served by
## dynamic help, and for base64-encoded files embedded in example
## output (see code2html.R).

mime_type <- function(path, ext = NULL)
{
    ## Declare the mime-type for a file served by dynamic help, or for
    ## a base64-encoded file embedded in example output (see
    ## code2html.R).  The type is chosen by file extension; callers may
    ## pass 'ext' directly to bypass the file_ext(path) lookup.
    ## Unknown (or empty) extensions fall back to "text/plain".
    stopifnot(length(path) == 1L)
    if (missing(ext)) ext <- file_ext(path)

    types <- c(
        ## text / markup
        "css"    = "text/css",
        "js"     = "text/javascript",  # for katex etc
        "sgml"   = "text/sgml",        # in RGtk2
        "xml"    = "text/xml",         # in RCurl (RFC 7303 recommends "application/xml")
        "html"   = "text/html",
        "htm"    = "text/html",
        "xhtml"  = "application/xhtml+xml",
        "php"    = "application/x-httpd-php",
        "epub"   = "application/epub+zip",
        "csv"    = "text/csv",
        "json"   = "application/json",
        "jsonld" = "application/ld+json",
        "mjs"    = "text/javascript",

        ## common <img> types (see https://developer.mozilla.org/en-US/docs/Web/Media/Formats/Image_types)
        "gif"    = "image/gif",        # in R2HTML
        "jpg"    = "image/jpeg",
        "jpeg"   = "image/jpeg",
        "png"    = "image/png",
        "svg"    = "image/svg+xml",
        "apng"   = "image/apng",
        "avif"   = "image/avif",
        "webp"   = "image/webp",
        "bmp"    = "image/bmp",
        "ico"    = "image/x-icon",
        "tiff"   = "image/tiff",
        "tif"    = "image/tiff",

        "pdf"    = "application/pdf",
        "eps"    = "application/postscript", # in GLMMGibbs, mclust
        "ps"     = "application/postscript",

        ## fonts
        "eot"    = "application/vnd.ms-fontobject",
        "otf"    = "font/otf",
        "ttf"    = "font/ttf",
        "woff"   = "font/woff",
        "woff2"  = "font/woff2",

        ## media
        "aac"    = "audio/aac",
        "avi"    = "video/x-msvideo",
        "cda"    = "application/x-cdf",
        "mid"    = "audio/x-midi",
        "midi"   = "audio/x-midi",
        "mp3"    = "audio/mpeg",
        "mp4"    = "video/mp4",
        "mpeg"   = "video/mpeg",
        "oga"    = "audio/ogg",
        "ogv"    = "video/ogg",
        "ogx"    = "application/ogg",
        "opus"   = "audio/opus",
        "3gp"    = "video/3gpp",
        "3g2"    = "video/3gpp2",
        "wav"    = "audio/wav",
        "weba"   = "audio/webm",
        "webm"   = "video/webm",

        ## archive / compression
        "bz"     = "application/x-bzip",
        "bz2"    = "application/x-bzip2",
        "gz"     = "application/gzip",
        "rar"    = "application/vnd.rar",
        "zip"    = "application/zip",
        "7z"     = "application/x-7z-compressed",
        "tar"    = "application/x-tar")

    idx <- match(ext, names(types))
    if (is.na(idx)) "text/plain" else types[[idx]]
}


## This may be asked for
##  R.css, favicon.ico
##  searches with path = "/doc/html/Search"
##  documentation with path = "/doc/....", possibly updated under tempdir()/.R
##  demos with path "/demo/*"
##  Running demos, using path "/Demo/*"
##  html help, either by topic, /library/<pkg>/help/<topic> (pkg=NULL means any)
##             or by file, /library/<pkg>/html/<file>.html
##
##  As any R function, httpd() needs to produce R strings valid in their
##  declared encoding (or valid in the native encoding if they have no
##  encoding flag).  The C code of the server converts the response strings
##  which are given as R strings to UTF-8, and hence the Content-type
##  charset specified in the responses returned by httpd() must also be UTF-8
##  (for errors and results passed as strings inside a list, this must be in
##  sync with Rhttpd.c). 
httpd <- function(path, query, ...)
{
    logHelpRequests <-
        config_val_to_logical(Sys.getenv("_R_HTTPD_LOG_MESSAGES_", "FALSE"))
    if (logHelpRequests) {
        message(sprintf("HTTPD-REQUEST %s%s", path,
                        if (is.null(query)) ""
                        else { # query is a named chr vector 
                            paste(paste(names(query), query, sep = "="),
                                  collapse = ",")
                        }))
    }
    linksToTopics <-
        config_val_to_logical(Sys.getenv("_R_HELP_LINKS_TO_TOPICS_", "TRUE"))
    ## Render an HTML page listing the (non-hidden) files in 'dir',
    ## each linked relative to 'base', with an "up" navigation link.
    .HTMLdirListing <- function(dir, base, up) {
        files <- list.files(dir)    # note, no hidden files are listed
        out <- HTMLheader(paste0("Listing of directory<br>", dir),
                          headerTitle = paste("R:", dir), logo = FALSE,
                          up = up)
        if (length(files)) {
            ## Re-encode file names to UTF-8 for the payload.
            urls <- paste0('<a href="', base, '/', files, '">', files, '</a>')
            out <- c(out, "<dl>",
                     paste0("<dd>", mono(iconv(urls, "", "UTF-8")), "</dd>"),
                     "</dl>")
        } else {
            out <- c(out, gettext("No files in this directory"))
        }
        out <- c(out, "<hr>\n</div></body></html>")
        list(payload = paste(out, collapse = "\n"))
    }

    ## Render the "R User Manuals" page: one vignette table per
    ## standard package that ships vignettes.
    .HTMLusermanuals <- function() {
        ## FIXME: recommended packages might not be available
        pkgs <- unlist(.get_standard_package_names())
        out <- HTMLheader("R User Manuals")
        for (pkg in pkgs) {
            vinfo <- getVignetteInfo(pkg)
            if (nrow(vinfo)) {
                tbl <- makeVignetteTable(
                    cbind(Package = pkg,
                          vinfo[, c("File", "Title", "PDF", "R"),
                                drop = FALSE]))
                out <- c(out,
                         paste0('<h2>Manuals in package ', sQuote(pkg), '</h2>'),
                         tbl)
            }
        }
        out <- c(out, "<hr>\n</div></body></html>")
        list(payload = paste(out, collapse = "\n"))
    }

    ## Handle /doc/html/Search requests: run utils::help.search() (or
    ## fetch another session's objects via its port) according to the
    ## query parameters, and render the matches as HTML sections of
    ## vignettes, demos and help pages.
    .HTMLsearch <- function(query) {
        ## Query values arrive as strings "0"/"1"; map them to logical.
    	bool <- function(x) as.logical(as.numeric(x))
        res <- if(identical(names(query), "category")) {
            utils::help.search(keyword = query, verbose = 1L, use_UTF8 = TRUE)
        } else if(identical(names(query), c("objects", "port"))) {
            .httpd_objects(query["port"])
        } else {
            ## Translate the query parameters into help.search()
            ## arguments; unknown parameters only produce a warning.
            fields <- types <- NULL
            args <- list(pattern = ".")
            for (i in seq_along(query))
            	switch(names(query)[i],
                       pattern = args$pattern <- query[i],
                       fields.alias =
                           if(bool(query[i]))
                               fields <- c(fields, "alias"),
                       fields.title =
                           if(bool(query[i]))
                               fields <- c(fields, "title"),
                       fields.concept =
                           if(bool(query[i]))
                               fields <- c(fields, "concept"),
                       fields.keyword =
                           if(bool(query[i]))
                               fields <- c(fields, "keyword"),
                       ignore.case =
                           args$ignore.case <- bool(query[i]),
                       agrep =
                           args$agrep <- bool(query[i]),
                       types.help =
                           if(bool(query[i]))
                               types <- c(types, "help"),
                       types.vignette =
                           if(bool(query[i]))
                               types <- c(types, "vignette"),
                       types.demo =
                           if(bool(query[i]))
                               types <- c(types, "demo"),
                       ## Possibly passed from utils:::printhsearchInternal().
                       ## Multiple packages/paths are ";"-separated.
                       package = args$package <- strsplit(query[i], ";")[[1L]],
                       lib.loc = args$lib.loc <- strsplit(query[i], ";")[[1L]],
                       warning("Unrecognized search field: ", names(query)[i],
                               domain = NA)
                       )
            args$fields <- fields
            args$use_UTF8 <- TRUE
            args$types <- types
            do.call(utils::help.search, args)
        }
        types <- res$types
        res <- res$matches
        title <- "Search Results"
        out <- c(HTMLheader(title),
                 if ("pattern" %in% names(query) && nchar(query["pattern"]))
                     paste0('The search string was <b>"', query["pattern"], '"</b>'),
                 '<hr>\n')

        if(!NROW(res))
            out <- c(out, gettext("No results found"))
        else {
            ## Cache the per-library vignette DB across loop iterations
            ## (vigfile0 remembers which file vigDB was read from).
            vigfile0 <- ""
            vigDB <- NULL
            ## One HTML section per result type, skipping empty ones
            ## and de-duplicating matches by ID.
            for (type in types) {
                if(NROW(temp <- res[res[,"Type"] == type, , drop=FALSE]) > 0) {
                    temp <- temp[!duplicated(temp[, "ID"]), , drop = FALSE]
                    switch(type,
                    vignette = {
                        out <- c(out, paste0("<h3>", gettext("Vignettes:"), "</h3>"), "<dl>")
                        n <- NROW(temp)
                        vignettes <- matrix("", n, 5L)
                        colnames(vignettes) <-
                            c("Package", "File", "Title", "PDF", "R")
                        for (i in seq_len(NROW(temp))) {
                            topic <- temp[i, "Topic"]
                            pkg <- temp[i, "Package"]
                            vigfile <- file.path(temp[i, "LibPath"], "Meta", "vignette.rds")
                            if (!identical(vigfile, vigfile0)) {
                                vigDB <- readRDS(vigfile)
                                vigfile0 <- vigfile
                            }
                            vignette <- vigDB[topic == file_path_sans_ext(vigDB$PDF),]
                            # There should be exactly one row in the result, but
                            # bad packages might have more, e.g. vig.Snw and vig.Rnw
                            vignettes[i,] <- c(pkg, unlist(vignette[1,c("File", "Title", "PDF", "R")]))
                         }
                         out <- c(out, makeVignetteTable(vignettes))
                    },
                    demo = {
                        out <- c(out, paste0("<h3>", gettext("Code demonstrations:"), "</h3>"))
                        out <- c(out, makeDemoTable(temp))
                    },
                    help = {
                        out <- c(out, paste0("<h3>", gettext("Help pages:"), "</h3>"))
                        out <- c(out, makeHelpTable(temp))
                    })
                }
            }
        }
        out <- c(out, "<hr>\n</div></body></html>")
        list(payload = paste(out, collapse="\n"))
    }

    ## Render a table of all concepts in the help search database, each
    ## linking back to a search on that (percent-encoded, exact-match)
    ## concept, with its frequency and package counts.
    .HTML_hsearch_db_concepts <- function() {
        concepts <- utils::hsearch_db_concepts()
        s <- concepts$Concept
        out <-
            c(HTMLheader("Help search concepts"),
              c("",
                "<table>",
                "<tr><th style=\"text-align: left\">Concept</th><th>Frequency</th><th>Packages</th></tr>",
                paste0("<tr><td>",
                       "<a href=\"/doc/html/Search?pattern=",
                       ## Regex-quote, then percent-encode the concept
                       ## for use as a search pattern in the URL.
                       utils::URLencode(reQuote(s), reserved = TRUE),
                       "&amp;fields.concept=1&amp;agrep=0\">",
                       ## Truncate long concepts for display.
                       shtmlify(substr(s, 1L, 80L)),
                       "</a>",
                       "</td><td style=\"text-align: right\">",
                       concepts$Frequency,
                       "</td><td style=\"text-align: right\">",
                       concepts$Packages,
                       "</td></tr>"),
                "</table>",
                "</div></body>",
                "</html>"))
        list(payload = paste(out, collapse = "\n"))
    }

    ## Render a table of all keywords in the help search database, each
    ## linking to a category search, with the keyword's concept text
    ## and frequency/package counts.
    .HTML_hsearch_db_keywords <- function() {
        keywords <- utils::hsearch_db_keywords()
        out <-
            c(HTMLheader("Help search keywords"),
              c("",
                "<table>",
                "<tr><th style=\"text-align: left\">Keyword</th><th style=\"text-align: left\">Concept</th><th>Frequency</th><th>Packages</th></tr>",
                paste0("<tr><td>",
                       "<a href=\"/doc/html/Search?category=",
                       ## NOTE(review): Keyword is interpolated into the
                       ## URL and HTML without encoding/escaping --
                       ## presumably safe as keywords come from the
                       ## controlled KEYWORDS database; confirm.
                       keywords$Keyword,
                       "\">",
                       keywords$Keyword,
                       "</a>",
                       "</td><td>",
                       ## Truncate long concepts for display.
                       shtmlify(substr(keywords$Concept, 1L, 80L)),
                       "</td><td style=\"text-align: right\">",
                       keywords$Frequency,
                       "</td><td style=\"text-align: right\">",
                       keywords$Packages,
                       "</td></tr>"),
                "</table>",
                "</div></body>",
                "</html>"))
        list(payload = paste(out, collapse = "\n"))
    }

    ## Render a package's DESCRIPTION file as an HTML page; the package
    ## name is taken from the parent directory of 'descfile'.
    .HTML_package_description <- function(descfile) {
        pkg <- basename(dirname(descfile))
        out <- c(HTMLheader(sprintf("Package &lsquo;%s&rsquo;", pkg)),
                 .DESCRIPTION_to_HTML(descfile, dynamic = TRUE),
                 "</div></body></html>")
        list(payload = paste(out, collapse = "\n"))
    }

    ## "; charset=<enc>" suffix for a Content-type header, taken from
    ## the package's declared DESCRIPTION Encoding field, or "" if the
    ## package declares none.
    charsetSetting <- function(pkg) {
        enc <- read.dcf(system.file("DESCRIPTION", package = pkg),
                        "Encoding")
        if (is.na(enc)) "" else paste0("; charset=", enc)
    }

    ## HTML typographic quoting and monospace markup for payload text;
    ## note that this sQuote masks base::sQuote inside httpd().
    sQuote <- function(text)
        paste0("&lsquo;", text, "&rsquo;")
    mono <- function(text)
        paste0('<span class="samp">', text, "</span>")

    ## Produce an HTML error payload for requests that cannot be
    ## served, logging the failing request first when logging is on.
    error_page <- function(msg) {
        if (logHelpRequests) {
            message(sprintf("HTTPD-ERROR %s %s", path, paste(msg, collapse = " ")))
        }
        list(payload =
             paste(c(HTMLheader("httpd error"), msg, "\n</div></body></html>"), collapse = "\n"))
    }
        
    cssRegexp <- "^/library/([^/]*)/html/R.css$"
    if (grepl("R\\.css$", path) && !grepl(cssRegexp, path)) {
        if (isTRUE(getOption("help.htmltoc")))
            return(list(file = file.path(R.home("doc"), "html", "R-nav.css"),
                        "content-type" = "text/css"))
        else
            return(list(file = file.path(R.home("doc"), "html", "R.css"),
                        "content-type" = "text/css"))
    }
    else if(path == "/favicon.ico")
        return(list(file = file.path(R.home("doc"), "html", "favicon.ico"),
                    "content-type" = "image/x-icon"))
    else if(path == "/NEWS")
         return(list(file = file.path(R.home("doc"), "html", "NEWS.html"),
                     "content-type" = "text/html"))
    else if(grepl("^/NEWS[.][[:digit:]]$", path))
    	return(list(file = file.path(R.home("doc"), sub("/", "", path, fixed=TRUE)),
    	            "content-type" = "text/plain; charset=utf-8"))
    else if((path == "/doc/html/NEWS.html") &&
            identical(names(query), c("objects", "port"))) {
        news <- .httpd_objects(query["port"])
    	formatted <- toHTML(news, title = "R News")
        return( list(payload = paste(formatted, collapse="\n")) )
    }
    else if(grepl("^/licenses/([^/.]*)$", path) &&
            file.exists(file <- file.path(R.home("share"), "licenses",
                                          basename(path))))
        return(list(file = file,
                    "content-type" = "text/plain; charset=utf-8"))
    else if(!grepl("^/(doc|library|session)/", path))
        return(error_page(paste("Only NEWS and URLs under", mono("/doc"),
                                "and", mono("/library"), "are allowed")))
    else if(path == "/doc/html/UserManuals.html")
    	return(.HTMLusermanuals())
    else if(path == "/doc/html/hsearch_db_concepts.html")
        return(.HTML_hsearch_db_concepts())
    else if(path == "/doc/html/hsearch_db_keywords.html")
        return(.HTML_hsearch_db_keywords())

    ## ----------------------- per-package documentation ---------------------
    ## seems we got ../..//<pkg> in the past
    fileRegexp <- "^/library/+([^/]*)/html/([^/]*)\\.html$"
    topicRegexp <- "^/library/+([^/]*)/help/(.*)$"
    docRegexp <- "^/library/([^/]*)/doc(.*)"
    demoRegexp <- "^/library/([^/]*)/demo$"
    demosRegexp <- "^/library/([^/]*)/demo/([^/]*)$"
    DemoRegexp <- "^/library/([^/]*)/Demo/([^/]*)$"
    ExampleRegexp <- "^/library/([^/]*)/Example/([^/]*)$"
    newsRegexp <- "^/library/([^/]*)/NEWS([.](Rd|md))?$"
    figureRegexp <- "^/library/([^/]*)/(help|html)/figures/([^/]*)$"
    sessionRegexp <- "^/session/"
    packageIndexRegexp <- "^/library/([^/]*)$"
    packageLicenseFileRegexp <- "^/library/([^/]*)/(LICEN[SC]E$)"

    file <- NULL
    if (grepl(topicRegexp, path)) {
        ## ----------------------- package help by topic ---------------------
    	pkg <- sub(topicRegexp, "\\1", path)
    	if (pkg == "NULL") pkg <- NULL  # There were multiple hits in the console
    	topic <- sub(topicRegexp, "\\2", path)
        ## If a package is specified, look there first. If not found,
        ## search in other packages. This is used to search for
        ## off-package links where the target package is not specified
        ## (they are nominally links to topics in the same package)

        ## However, if pkg is specified but not installed, give an
        ## error message.
    	if (!is.null(pkg)) { # () avoids deparse here
            if (!nzchar(system.file(package = pkg))) {
                msg <- gettextf("No package named %s could be found",
                                mono(pkg))
                return(error_page(msg))
            }
    	    file <- utils::help(topic, package = (pkg), help_type = "text")
            ## Before searching other packages, check if topic.Rd is
            ## available as a file in the package.
            if (!length(file) && linksToTopics) {
                helppath <- system.file("help", package = pkg)
                if (nzchar(helppath)) {
                    contents <- readRDS(sub("/help$", "/Meta/Rd.rds", helppath, fixed = FALSE))
                    helpfiles <- sub("\\.[Rr]d$", "", contents$File)
                    if (topic %in% helpfiles) file <- file.path(helppath, topic)
                }
            }
        }
        ## Next, search for topic in all installed packages
    	if (!length(file))
            file <- utils::help(topic, help_type = "text", try.all.packages = TRUE)
	if (!length(file)) {
            msg <- gettextf("No help found for topic %s in any package.",
                            mono(topic))
	    return(error_page(msg))
	} else if (length(file) == 1L) {
	    path <- dirname(dirname(file))
	    file <- paste0('../../', basename(path), '/html/',
                           basename(file), '.html')
            ## cat("redirect to", file, "\n")
            ## We need to do this because there are static HTML pages
            ## with links to "<file>.html" for topics in the same
            ## package, and if we served one of such a page as a link from
            ## a different package those links on the page would not work.
	    return(list(payload = paste0('Redirect to <a href="', file, '">"',
                                         basename(file), '"</a>'),
	    		"content-type" = 'text/html',
	    		header = paste0('Location: ', file),
	    		"status code" = 302L)) # temporary redirect
	} else if (length(file) > 1L) {
            paths <- dirname(dirname(file))
            fp <- file.path(paths, "Meta", "Rd.rds")
            tp <- basename(file)
            titles <- tp
            for (i in seq_along(fp)) {
                tmp <- try(readRDS(fp[i]))
                titles[i] <- if(inherits(tmp, "try-error"))
                    "unknown title" else
                    tmp[file_path_sans_ext(tmp$File) == tp[i], "Title"]
            }
            packages <- paste0('<dt><a href="../../',
                               basename(paths), '/html/',
                               basename(file), '.html">', titles,
                               '</a></dt><dd> (in package <a href="../../',
                               basename(paths),
                               '/html/00Index.html">', basename(paths),
                               '</a> in library ', dirname(paths), ")</dd>",
                               collapse = "\n")

            return(list(payload =
                        paste0("<!DOCTYPE html>",
                               "<html>",
                               "<head>",
                               "<title>R: help</title>",
                               "<meta http-equiv=\"Content-Type\" content=\"text/html; charset=utf-8\">",
                               "</head>",
                               "<body>",
                                "<p>",
                               ## for languages with multiple plurals ....
                               sprintf(ngettext(length(paths),
                                                "Help on topic '%s' was found in the following package:",
                                                "Help on topic '%s' was found in the following packages:"
                                                ), topic),
                               "</p><dl>\n",
                               packages, "</dl>",
                               "</body>",
                               "</html>",
                               collapse = "\n")
                        ))
        }
    } else if (grepl(fileRegexp, path)) {
        ## ----------------------- package help by file ---------------------
    	pkg <- sub(fileRegexp, "\\1", path)
    	helpdoc <- sub(fileRegexp, "\\2", path)
        if (helpdoc == "00Index") {
            ## ------------------- package listing ---------------------
            file <- system.file("html", "00Index.html", package = pkg)
            if(!nzchar(file) || !file.exists(file)) {
                msg <- if(nzchar(system.file(package = pkg)))
                    gettextf("No package index found for package %s",
                             mono(pkg))
                else
                    gettextf("No package named %s could be found",
                             mono(pkg))
                return(error_page(msg))
            } else {
                return(list(file = file))
            }
    	}
        ## ----------------------- package help file ---------------------
        path <- system.file("help", package = pkg)
        if (!nzchar(path)) {
            msg <- if(nzchar(system.file(package = pkg)))
                gettextf("No help found for package %s", mono(pkg) )
            else
                gettextf("No package named %s could be found", mono(pkg))
            return(error_page(msg))
        }
        ## if 'topic' is not a help doc, try it as an alias in the package
        contents <- readRDS(sub("/help$", "/Meta/Rd.rds", path, fixed = FALSE))
        files <- sub("\\.[Rr]d$", "", contents$File)
        if(helpdoc %notin% files) {
            ## or call help()
            aliases <- contents$Aliases
            lens <- lengths(aliases)
            aliases <- structure(rep.int(contents$File, lens),
                                 names = unlist(aliases))
            tmp <- sub("\\.[Rr]d$", "", aliases[helpdoc])
            if(is.na(tmp)) {
                msg <- gettextf("Link %s in package %s could not be located",
                                mono(helpdoc), mono(pkg))
                files <- utils::help(helpdoc, help_type = "text",
                                     try.all.packages = TRUE)
                if (length(files)) {
                    path <- dirname(dirname(files))
                    files <- paste0('/library/', basename(path), '/html/',
                                    basename(files), '.html')
                    msg <- c(msg, "<br>",
                             "However, you might be looking for one of",
                             "<p></p>",
                             paste0('<p><a href="', files, '">',
                                    mono(files), "</a></p>")
                             )
                }
                return(error_page(paste(msg, collapse = "\n")))
            }
            helpdoc <- tmp
        }

        ## Now we know which document we want in which package
	dirpath <- dirname(path)
##	pkgname <- basename(dirpath)
##	RdDB <- file.path(path, pkgname)
        outfile <- tempfile("Rhttpd")
        Rd2HTML(utils:::.getHelpFile(file.path(path, helpdoc)),
                out = outfile, package = dirpath,
                dynamic = TRUE, outputEncoding = "UTF-8")
        on.exit(unlink(outfile))
        return(list(payload = paste(readLines(file(outfile, encoding="UTF-8")),
                                    collapse = "\n")))
    } else if (grepl(docRegexp, path)) {
        ## ----------------------- package doc directory ---------------------
    	pkg <- sub(docRegexp, "\\1", path)
    	rest <- sub(docRegexp, "\\2", path)
        docdir <- system.file("doc", package = pkg)
        up <- paste0("/library/", pkg, "/html/00Index.html")
        if(!nzchar(docdir))
            return(error_page(gettextf("No docs found for package %s",
                                       mono(pkg))))
        if(nzchar(rest) && rest != "/") {
            file <- paste0(docdir, rest)
            exists <- file.exists(file)
            if (!exists && rest == "/index.html") {
                rest <- ""
            	file <- docdir
            }
            if(dir.exists(file))
                return(.HTMLdirListing(file,
                                       paste0("/library/", pkg, "/doc", rest),
                                       up))
            else if (exists)
                return(list(file = file, "content-type" = mime_type(rest)))
            else
            	return(error_page(gettextf("URL %s was not found", mono(path))))
        } else {
            ## request to list <pkg>/doc
            return(.HTMLdirListing(docdir,
                                   paste("/library", pkg, "doc", sep="/"),
                                   up))
        }
    } else if (grepl(demoRegexp, path)) {
    	pkg <- sub(demoRegexp, "\\1", path)

    	url <- paste0("http://127.0.0.1:", httpdPort(),
                      "/doc/html/Search?package=",
                      pkg, "&agrep=0&types.demo=1&pattern=")
    	return(list(payload = paste0('Redirect to <a href="', url,
    				'">help.search()</a>'),
		    		"content-type" = 'text/html',
		    		header = paste0('Location: ', url),
	    		"status code" = 302L)) # temporary redirect
    } else if (grepl(demosRegexp, path)) {
	    pkg <- sub(demosRegexp, "\\1", path)
	    demo <- sub(demosRegexp, "\\2", path)
	    file <- system.file(file.path("demo", demo), package=pkg)
	    return(list(file = file, "content-type" = mime_type(demo)))

    } else if (grepl(DemoRegexp, path)) {
    	pkg <- sub(DemoRegexp, "\\1", path)
    	demo <- sub(DemoRegexp, "\\2", path)
        if (logHelpRequests) {
            message(sprintf("HTTPD-DEMO %s::%s", pkg, demo))
        }
        else return(demo2html(demo, pkg))
    } else if (grepl(ExampleRegexp, path)) {
    	pkg <- sub(ExampleRegexp, "\\1", path)
    	topic <- sub(ExampleRegexp, "\\2", path)
        if (logHelpRequests) {
            message(sprintf("HTTPD-EXAMPLE %s::%s", pkg, topic))
        }
        else return(example2html(topic, pkg,
                                 env = if (identical(query["local"], "FALSE")) .GlobalEnv
                                       else NULL))
    } else if (grepl(newsRegexp, path)) {
    	pkg <- sub(newsRegexp, "\\1", path)
        if(identical(names(query), c("objects", "port")))
            news <- .httpd_objects(query["port"])
        else {
            ## <FIXME>
            ## This should no longer be used ...
            if (!is.null(query) && !is.na(subset <- query["subset"])) {
                ## See utils:::print.news_db for the encoding of the
                ## subset 
                rle <- strsplit(subset, "_")[[1L]]
                rle <- structure(list(lengths = as.numeric(rle),
                                      values = rep_len(c(TRUE, FALSE),
                                                       length(rle))),
                                 class = "rle")
                news <- news(inverse.rle(rle)[-1L], package = pkg)
            ## </FIXME>
            } else
                news <- news(package = pkg)
        }
        if(!inherits(news, "news_db"))
            return(error_page(gettextf("No NEWS found for package %s",
                                       mono(pkg))))
    	formatted <- toHTML(news,
    		            title=paste("NEWS in package", sQuote(pkg)),
    			    up="html/00Index.html")
        if (length(formatted))
    	    return( list(payload = paste(formatted, collapse="\n")) )
    	else
    	    return( list(file = system.file("NEWS", package = pkg),
    	                 "content-type" = paste0("text/plain", charsetSetting(pkg) ) ) )
    } else if (grepl(figureRegexp, path)) {
        pkg <- sub(figureRegexp, "\\1", path)
        fig <- sub(figureRegexp, "\\3", path)
        file <- system.file("help", "figures", fig, package=pkg)
        return( list(file=file, "content-type" = mime_type(fig)) )
    } else if (grepl(sessionRegexp, path)) {
        tail <- sub(sessionRegexp, "", path)
    	file <- file.path(tempdir(), tail)
    	return( list(file=file, "content-type" = mime_type(tail)) )
    } else if (grepl(cssRegexp, path)) {
    	pkg <- sub(cssRegexp, "\\1", path)
        return( list(file = system.file("html", "R.css", package = pkg),
                     "content-type" = "text/css") )
    } else if(grepl(packageIndexRegexp, path)) {
        ## <FIXME>
        ## Can we do this better?
        url <- paste0(path, "/html/00Index.html")
        return(list(payload = paste0('Redirect to <a href="', url, '">"',
                                     url, '"</a>'),
                    "content-type" = 'text/html',
                    header = paste0('Location: ', url),
                    "status code" = 302L)) # temporary redirect
        ## </FIXME>
    } else if(grepl(packageLicenseFileRegexp, path) &&
              file.exists(file <- system.file(basename(path),
                                              package =
                                                  basename(dirname(path))))) {
        return(list(file = file,
                    "content-type" = "text/plain; charset=utf-8"))
    } else if (startsWith(path, "/library/")) {
        descRegexp <- "^/library/+([^/]+)/+DESCRIPTION$"
        if(grepl(descRegexp, path)) {
            pkg <- sub(descRegexp, "\\1", path)
            file <- system.file("DESCRIPTION", package = pkg)
            return(.HTML_package_description(file))
        } else
            return(error_page(gettextf("Only help files, %s, %s and files under %s and %s in a package can be viewed", mono("NEWS"),
                              mono("DESCRIPTION"), mono("doc/"), mono("demo/"))))
    }

    ## ----------------------- R docs ---------------------
    if(path == "/doc/html/Search.html") {
        ## redirect to the page that has search enabled
        list(file = file.path(R.home("doc"), "html/SearchOn.html"))
    } else if(path == "/doc/html/Search") {
        .HTMLsearch(query)
    } else if(path == "/doc/html/packages.html") {
        ## remake as needed
        utils::make.packages.html(temp = TRUE)
        list(file = file.path(tempdir(), ".R", path))
    } else if(path == "/doc/html/rw-FAQ.html") {
        file <- file.path(R.home("doc"), sub("^/doc", "", path))
        if(file.exists(file))
            list(file = file, "content-type" = mime_type(path))
        else {
            url <- "https://cran.r-project.org/bin/windows/base/rw-FAQ.html"
	    return(list(payload = paste0('Redirect to <a href="', url, '">"',
                                         url, '"</a>'),
	    		"content-type" = 'text/html',
	    		header = paste0('Location: ', url),
	    		"status code" = 302L)) # temporary redirect
         }
    } else if(grepl("doc/html/.*html$" , path) &&
              file.exists(tmp <- file.path(tempdir(), ".R", path))) {
        ## use updated version, e.g. of packages.html
        list(file = tmp)
    } else if(grepl("doc/manual/.*html$" , path)) {
        file <- file.path(R.home("doc"), sub("^/doc", "", path))
        if(file.exists(file))
            list(file = file, "content-type" = mime_type(path))
        else if(file.exists(file <- sub("/manual/", "/html/", file, fixed=TRUE))) {
            ## tarball has pre-built version of R-admin.html
            list(file = file, "content-type" = mime_type(path))
        } else {
            ## url <- "https://cran.r-project.org/manuals.html"
            version <-
                if(grepl("unstable", R.version$status)) "r-devel" else "r-patched"
            url <- file.path("https://cran.r-project.org/doc/manuals",
                             version, basename(path))
	    return(list(payload = paste0('Redirect to <a href="', url, '">"',
                                         url, '"</a>'),
	    		"content-type" = 'text/html',
	    		header = paste0('Location: ', url),
	    		"status code" = 302L)) # temporary redirect
        }
    } else {
        if(startsWith(path, "/doc/")) {
            ## /doc/AUTHORS and so on.
            file <- file.path(R.home("doc"), sub("^/doc", "", path))
        } else return(error_page(gettextf("unsupported URL %s", mono(path))))
        if(!file.exists(file))
            error_page(gettextf("URL %s was not found", mono(path)))
        else
            list(file = file, "content-type" = mime_type(path))
    }
}

## 0 = untried, < 0 = failed to start,  > 0 = actual port
httpdPort <- local({
    ## Shared state: 0 = untried, < 0 = failed to start, > 0 = port in use.
    port <- 0L
    ## Called with no argument: query the current state.
    ## Called with 'new': record a new state (returned invisibly, as the
    ## value of the assignment).
    function(new) {
        if(missing(new))
            port
        else
            port <<- new
    }
})

startDynamicHelp <- function(start = TRUE)
{
    ## Start or stop the internal httpd server used for dynamic help.
    ##
    ## start: TRUE to start (error if already running, or if an earlier
    ##   attempt failed), FALSE to stop (error if not running), NA to
    ##   start only if not already running.
    ##
    ## Returns the port state invisibly: 0 = untried/stopped,
    ## < 0 = failed to start, > 0 = the port actually in use
    ## (see httpdPort() above).

    ## Explicit opt-out via environment variable: mark as failed and
    ## warn immediately.
    if(nzchar(Sys.getenv("R_DISABLE_HTTPD"))) {
        httpdPort(-1L)
        warning("httpd server disabled by R_DISABLE_HTTPD", immediate. = TRUE)
        utils::flush.console()
        return(invisible(httpdPort()))
    }

    port <- httpdPort()
    ## NA means: start unless a server is already running.
    if (is.na(start)) {
        if(port <= 0L) return(startDynamicHelp(TRUE))
        return(invisible(port))
    }
    if (start && port) {
        if(port > 0L) stop("server already running")
        else stop("server could not be started on an earlier attempt")
    }
    if(!start && (port <= 0L))
        stop("no running server to stop")
    if (start) {
        utils::flush.console()
        OK <- FALSE
        ## Candidate ports may be fixed via option "help.ports";
        ## an all-zero value means "do not start" (early return below).
        ports <- getOption("help.ports")
        if (is.null(ports)) {
	    ## Choose 10 random port numbers between 10000 and 32000.
	    ## The random seed might match
	    ## on multiple instances, so add the time as well.  But the
	    ## time may only be accurate to seconds, so rescale it to
	    ## 5 minute units.
            ports <- 10000 + 22000*((stats::runif(10) + unclass(Sys.time())/300) %% 1)
        }
        ports <- as.integer(ports)
	if (all(ports == 0))
	    return(invisible(0))
        message("starting httpd help server ...", appendLF = FALSE)
        ## Try the candidate ports in turn until one can be bound.
        for(i in seq_along(ports)) {
            ## the next can throw an R-level error,
            ## so do not assign port unless it succeeds.
	    status <- .Call(C_startHTTPD, "127.0.0.1", ports[i])
	    if (status == 0L) {
                OK <- TRUE
                httpdPort(ports[i])
                break
            }
            ## Any failure other than "port in use" aborts the whole
            ## attempt.
            if (status != -2L) break
            ## so status was -2, which means port in use
	}
        if (OK) {
            message(" done")
            utils::flush.console()
            ## FIXME: actually test the server
        } else {
            warning("failed to start the httpd server", immediate. = TRUE)
            utils::flush.console()
            httpdPort(-1L)
        }
    } else {
        ## Not really tested
        .Call(C_stopHTTPD)
    	httpdPort(0L)
    }
    invisible(httpdPort())
}

dynamicHelpURL <-
function(path, port = httpdPort())
{
    ## Construct the URL under which 'path' is served by the local
    ## dynamic help HTTP server (see startDynamicHelp()).
    sprintf("http://127.0.0.1:%s%s", port, path)
}

## environment holding potential custom httpd handlers
## (NOTE(review): registration and lookup of these handlers happen
## elsewhere; presumably packages assign handler functions here by
## name -- confirm against the httpd() "/custom/" dispatch code)
.httpd.handlers.env <- new.env()

.httpd_objects <-
local({
    ## Per-port store of objects, keyed by the port number as a string.
    store <- list()
    ## Called with one argument: retrieve (NULL if nothing stored).
    ## Called with two arguments: store 'new' under that port.
    function(port, new) {
        key <- as.character(port)
        if(missing(new))
            store[[key]]
        else
            store[[key]] <<- new
    }
})
#  File src/library/tools/R/encodings.R
#  Part of the R package, https://www.R-project.org
#
#  Copyright (C) 1995-2012 The R Core Team
#
#  This program is free software; you can redistribute it and/or modify
#  it under the terms of the GNU General Public License as published by
#  the Free Software Foundation; either version 2 of the License, or
#  (at your option) any later version.
#
#  This program is distributed in the hope that it will be useful,
#  but WITHOUT ANY WARRANTY; without even the implied warranty of
#  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
#  GNU General Public License for more details.
#
#  A copy of the GNU General Public License is available at
#  https://www.R-project.org/Licenses/

get_IANA_character_sets <-
function(file = NULL)
{
    ## Parse the IANA character sets registry (format as in
    ##   https://www.iana.org/assignments/character-sets)
    ## into a data frame with components Name, MIBenum, Source, Aliases
    ## and MIME (the preferred MIME name, if marked in the registry).
    ## By default the copy shipped in R's share/encodings is used.
    if(is.null(file))
        file <- file.path(R.home("share"), "encodings",
                          "character-sets")
    txt <- readLines(file)
    ## The useful part runs from the first Name: entry up to (but not
    ## including) the REFERENCES section.
    from <- min(which(startsWith(txt, "Name:")))
    to <- min(which(startsWith(txt, "REFERENCES"))) - 1
    txt <- txt[from : to]
    ## Drop 'Alias: None' and similar placeholder lines.
    drop <- grep("^[[:alnum:]]+:[[:space:]]+None[[:space:]]*$", txt)
    if(length(drop))
        txt <- txt[-drop]
    ## Drop stray non-blank lines without a colon (the registry version
    ## last updated 2007-05-14 was invalid DCF).
    drop <- grep("^[^[:blank:]][^:]*$", txt)
    if(length(drop))
        txt <- txt[-drop]
    ## What remains is DCF, with possibly repeated fields per record.
    con <- textConnection(paste(txt, collapse = "\n"))
    on.exit(close(con))
    out <- read.dcf(con,
                    fields = c("Name", "MIBenum", "Source", "Alias"),
                    all = TRUE)
    ## Prefer 'Aliases' for historical reasons.
    names(out)[names(out) == "Alias"] <- "Aliases"
    ## Preferred MIME names: marked inside a name/alias entry; take the
    ## first whitespace-delimited token of the marked entry.
    out$MIME <- sapply(mapply(c, out$Name, out$Aliases),
                       function(entries) {
                           hit <- grep("preferred MIME name", entries)
                           if(length(hit))
                               sapply(strsplit(entries[hit], " +"), `[[`, 1L)
                           else
                               character()
                       })
    ## Strip trailing annotations (references etc.) from names/aliases.
    out$Name <- sub(" +.*", "", out$Name)
    out$Aliases <- lapply(out$Aliases, function(s) sub(" +.*", "", s))
    out$MIBenum <- as.integer(out$MIBenum)

    out
}

charset_to_Unicode <- local({
    ## Translation tables from single-byte character sets to Unicode.
    ## Each table is a length-256 vector: position i (1-based) gives the
    ## Unicode code point for byte value i - 1.  0 is used for bytes with
    ## no mapping here, and 0xfffd (REPLACEMENT CHARACTER) where a table
    ## explicitly maps a byte to "undefined".
    ## ISO 8859-1 (Latin-1): coincides with Unicode over its assigned
    ## range; bytes 0x80-0x9f are mapped to 0 here.
    ISOLatin1 <- c(0:127, rep.int(0, 32), 160:255)
    ## ISO 8859-2 (Latin-2, Central European).
    ISOLatin2 <- c(0:127, rep.int(0, 32),
  0x00a0, 0x0104, 0x02d8, 0x0141, 0x00a4, 0x013d, 0x015a, 0x00a7,
  0x00a8, 0x0160, 0x015e, 0x0164, 0x0179, 0x00ad, 0x017d, 0x017b,
  0x00b0, 0x0105, 0x02db, 0x0142, 0x00b4, 0x013e, 0x015b, 0x02c7,
  0x00b8, 0x0161, 0x015f, 0x0165, 0x017a, 0x02dd, 0x017e, 0x017c,
  0x0154, 0x00c1, 0x00c2, 0x0102, 0x00c4, 0x0139, 0x0106, 0x00c7,
  0x010c, 0x00c9, 0x0118, 0x00cb, 0x011a, 0x00cd, 0x00ce, 0x010e,
  0x0110, 0x0143, 0x0147, 0x00d3, 0x00d4, 0x0150, 0x00d6, 0x00d7,
  0x0158, 0x016e, 0x00da, 0x0170, 0x00dc, 0x00dd, 0x0162, 0x00df,
  0x0155, 0x00e1, 0x00e2, 0x0103, 0x00e4, 0x013a, 0x0107, 0x00e7,
  0x010d, 0x00e9, 0x0119, 0x00eb, 0x011b, 0x00ed, 0x00ee, 0x010f,
  0x0111, 0x0144, 0x0148, 0x00f3, 0x00f4, 0x0151, 0x00f6, 0x00f7,
  0x0159, 0x016f, 0x00fa, 0x0171, 0x00fc, 0x00fd, 0x0163, 0x02d9)
    ## ISO 8859-13 (Latin-7, Baltic Rim).
    ISOLatin7 <- c(0:127, rep.int(0, 32),
  0x00a0, 0x201d, 0x00a2, 0x00a3, 0x00a4, 0x201e, 0x00a6, 0x00a7,
  0x00d8, 0x00a9, 0x0156, 0x00ab, 0x00ac, 0x00ad, 0x00ae, 0x00c6,
  0x00b0, 0x00b1, 0x00b2, 0x00b3, 0x201c, 0x00b5, 0x00b6, 0x00b7,
  0x00f8, 0x00b9, 0x0157, 0x00bb, 0x00bc, 0x00bd, 0x00be, 0x00e6,
  0x0104, 0x012e, 0x0100, 0x0106, 0x00c4, 0x00c5, 0x0118, 0x0112,
  0x010c, 0x00c9, 0x0179, 0x0116, 0x0122, 0x0136, 0x012a, 0x013b,
  0x0160, 0x0143, 0x0145, 0x00d3, 0x014c, 0x00d5, 0x00d6, 0x00d7,
  0x0172, 0x0141, 0x015a, 0x016a, 0x00dc, 0x017b, 0x017d, 0x00df,
  0x0105, 0x012f, 0x0101, 0x0107, 0x00e4, 0x00e5, 0x0119, 0x0113,
  0x010d, 0x00e9, 0x017a, 0x0117, 0x0123, 0x0137, 0x012b, 0x013c,
  0x0161, 0x0144, 0x0146, 0x00f3, 0x014d, 0x00f5, 0x00f6, 0x00f7,
  0x0173, 0x0142, 0x015b, 0x016b, 0x00fc, 0x017c, 0x017e, 0x2019)
    ## ISO 8859-15 (Latin-9): Latin-1 with EURO SIGN and a few letters.
    ISOLatin9 <- c(0:127, rep.int(0, 32),
  0x00a0, 0x00a1, 0x00a2, 0x00a3, 0x20ac, 0x00a5, 0x0160, 0x00a7,
  0x0161, 0x00a9, 0x00aa, 0x00ab, 0x00ac, 0x00ad, 0x00ae, 0x00af,
  0x00b0, 0x00b1, 0x00b2, 0x00b3, 0x017d, 0x00b5, 0x00b6, 0x00b7,
  0x017e, 0x00b9, 0x00ba, 0x00bb, 0x0152, 0x0153, 0x0178, 0x00bf,
                   192:255)
    ## ISO 8859-5 (Cyrillic).
    Cyrillic <- c(0:127, rep.int(0, 32),
  0x00a0, 0x0401, 0x0402, 0x0403, 0x0404, 0x0405, 0x0406, 0x0407,
  0x0408, 0x0409, 0x040a, 0x040b, 0x040c, 0x00ad, 0x040e, 0x040f,
  0x0410, 0x0411, 0x0412, 0x0413, 0x0414, 0x0415, 0x0416, 0x0417,
  0x0418, 0x0419, 0x041a, 0x041b, 0x041c, 0x041d, 0x041e, 0x041f,
  0x0420, 0x0421, 0x0422, 0x0423, 0x0424, 0x0425, 0x0426, 0x0427,
  0x0428, 0x0429, 0x042a, 0x042b, 0x042c, 0x042d, 0x042e, 0x042f,
  0x0430, 0x0431, 0x0432, 0x0433, 0x0434, 0x0435, 0x0436, 0x0437,
  0x0438, 0x0439, 0x043a, 0x043b, 0x043c, 0x043d, 0x043e, 0x043f,
  0x0440, 0x0441, 0x0442, 0x0443, 0x0444, 0x0445, 0x0446, 0x0447,
  0x0448, 0x0449, 0x044a, 0x044b, 0x044c, 0x044d, 0x044e, 0x044f,
  0x2116, 0x0451, 0x0452, 0x0453, 0x0454, 0x0455, 0x0456, 0x0457,
  0x0458, 0x0459, 0x045a, 0x045b, 0x045c, 0x00a7, 0x045e, 0x045f)
    ## ISO 8859-7 (Greek).
    Greek <- c(0:127, rep.int(0, 32),
  0x00a0, 0x2018, 0x2019, 0x00a3, 0x20ac, 0x20af, 0x00a6, 0x00a7,
  0x00a8, 0x00a9, 0x037a, 0x00ab, 0x00ac, 0x00ad, 0xfffd, 0x2015,
  0x00b0, 0x00b1, 0x00b2, 0x00b3, 0x0384, 0x0385, 0x0386, 0x00b7,
  0x0388, 0x0389, 0x038a, 0x00bb, 0x038c, 0x00bd, 0x038e, 0x038f,
  0x0390, 0x0391, 0x0392, 0x0393, 0x0394, 0x0395, 0x0396, 0x0397,
  0x0398, 0x0399, 0x039a, 0x039b, 0x039c, 0x039d, 0x039e, 0x039f,
  0x03a0, 0x03a1, 0xfffd, 0x03a3, 0x03a4, 0x03a5, 0x03a6, 0x03a7,
  0x03a8, 0x03a9, 0x03aa, 0x03ab, 0x03ac, 0x03ad, 0x03ae, 0x03af,
  0x03b0, 0x03b1, 0x03b2, 0x03b3, 0x03b4, 0x03b5, 0x03b6, 0x03b7,
  0x03b8, 0x03b9, 0x03ba, 0x03bb, 0x03bc, 0x03bd, 0x03be, 0x03bf,
  0x03c0, 0x03c1, 0x03c2, 0x03c3, 0x03c4, 0x03c5, 0x03c6, 0x03c7,
  0x03c8, 0x03c9, 0x03ca, 0x03cb, 0x03cc, 0x03cd, 0x03ce, 0xfffd)
    ## KOI8-R (Russian): 0x80-0xbf are box-drawing/symbols.
    KOI8R <- c(0:127,
  0x2500, 0x2502, 0x250c, 0x2510, 0x2514, 0x2518, 0x251c, 0x2524,
  0x252c, 0x2534, 0x253c, 0x2580, 0x2584, 0x2588, 0x258c, 0x2590,
  0x2591, 0x2592, 0x2593, 0x2320, 0x25a0, 0x2219, 0x221a, 0x2248,
  0x2264, 0x2265, 0x00a0, 0x2321, 0x00b0, 0x00b2, 0x00b7, 0x00f7,
  0x2550, 0x2551, 0x2552, 0x0451, 0x2553, 0x2554, 0x2555, 0x2556,
  0x2557, 0x2558, 0x2559, 0x255a, 0x255b, 0x255c, 0x255d, 0x255e,
  0x255f, 0x2560, 0x2561, 0x0401, 0x2562, 0x2563, 0x2564, 0x2565,
  0x2566, 0x2567, 0x2568, 0x2569, 0x256a, 0x256b, 0x256c, 0x00a9,
  0x044e, 0x0430, 0x0431, 0x0446, 0x0434, 0x0435, 0x0444, 0x0433,
  0x0445, 0x0438, 0x0439, 0x043a, 0x043b, 0x043c, 0x043d, 0x043e,
  0x043f, 0x044f, 0x0440, 0x0441, 0x0442, 0x0443, 0x0436, 0x0432,
  0x044c, 0x044b, 0x0437, 0x0448, 0x044d, 0x0449, 0x0447, 0x044a,
  0x042e, 0x0410, 0x0411, 0x0426, 0x0414, 0x0415, 0x0424, 0x0413,
  0x0425, 0x0418, 0x0419, 0x041a, 0x041b, 0x041c, 0x041d, 0x041e,
  0x041f, 0x042f, 0x0420, 0x0421, 0x0422, 0x0423, 0x0416, 0x0412,
  0x042c, 0x042b, 0x0417, 0x0428, 0x042d, 0x0429, 0x0427, 0x042a)
    ## KOI8-U (Ukrainian): KOI8-R with some box-drawing cells replaced
    ## by Ukrainian letters.
    KOI8U <- c(0:127,
  0x2500, 0x2502, 0x250c, 0x2510, 0x2514, 0x2518, 0x251c, 0x2524,
  0x252c, 0x2534, 0x253c, 0x2580, 0x2584, 0x2588, 0x258c, 0x2590,
  0x2591, 0x2592, 0x2593, 0x2320, 0x25a0, 0x2219, 0x221a, 0x2248,
  0x2264, 0x2265, 0x00a0, 0x2321, 0x00b0, 0x00b2, 0x00b7, 0x00f7,
  0x2550, 0x2551, 0x2552, 0x0451, 0x0454, 0x2554, 0x0456, 0x0457,
  0x2557, 0x2558, 0x2559, 0x255a, 0x255b, 0x0491, 0x255d, 0x255e,
  0x255f, 0x2560, 0x2561, 0x0401, 0x0404, 0x2563, 0x0406, 0x0407,
  0x2566, 0x2567, 0x2568, 0x2569, 0x256a, 0x0490, 0x256c, 0x00a9,
  0x044e, 0x0430, 0x0431, 0x0446, 0x0434, 0x0435, 0x0444, 0x0433,
  0x0445, 0x0438, 0x0439, 0x043a, 0x043b, 0x043c, 0x043d, 0x043e,
  0x043f, 0x044f, 0x0440, 0x0441, 0x0442, 0x0443, 0x0436, 0x0432,
  0x044c, 0x044b, 0x0437, 0x0448, 0x044d, 0x0449, 0x0447, 0x044a,
  0x042e, 0x0410, 0x0411, 0x0426, 0x0414, 0x0415, 0x0424, 0x0413,
  0x0425, 0x0418, 0x0419, 0x041a, 0x041b, 0x041c, 0x041d, 0x041e,
  0x041f, 0x042f, 0x0420, 0x0421, 0x0422, 0x0423, 0x0416, 0x0412,
  0x042c, 0x042b, 0x0417, 0x0428, 0x042d, 0x0429, 0x0427, 0x042a)
    ## Windows code page 1250 (Central European).
    CP1250 <- c(0:127,
  0x20ac, 0xfffd, 0x201a, 0xfffd, 0x201e, 0x2026, 0x2020, 0x2021,
  0xfffd, 0x2030, 0x0160, 0x2039, 0x015a, 0x0164, 0x017d, 0x0179,
  0xfffd, 0x2018, 0x2019, 0x201c, 0x201d, 0x2022, 0x2013, 0x2014,
  0xfffd, 0x2122, 0x0161, 0x203a, 0x015b, 0x0165, 0x017e, 0x017a,
  0x00a0, 0x02c7, 0x02d8, 0x0141, 0x00a4, 0x0104, 0x00a6, 0x00a7,
  0x00a8, 0x00a9, 0x015e, 0x00ab, 0x00ac, 0x00ad, 0x00ae, 0x017b,
  0x00b0, 0x00b1, 0x02db, 0x0142, 0x00b4, 0x00b5, 0x00b6, 0x00b7,
  0x00b8, 0x0105, 0x015f, 0x00bb, 0x013d, 0x02dd, 0x013e, 0x017c,
  0x0154, 0x00c1, 0x00c2, 0x0102, 0x00c4, 0x0139, 0x0106, 0x00c7,
  0x010c, 0x00c9, 0x0118, 0x00cb, 0x011a, 0x00cd, 0x00ce, 0x010e,
  0x0110, 0x0143, 0x0147, 0x00d3, 0x00d4, 0x0150, 0x00d6, 0x00d7,
  0x0158, 0x016e, 0x00da, 0x0170, 0x00dc, 0x00dd, 0x0162, 0x00df,
  0x0155, 0x00e1, 0x00e2, 0x0103, 0x00e4, 0x013a, 0x0107, 0x00e7,
  0x010d, 0x00e9, 0x0119, 0x00eb, 0x011b, 0x00ed, 0x00ee, 0x010f,
  0x0111, 0x0144, 0x0148, 0x00f3, 0x00f4, 0x0151, 0x00f6, 0x00f7,
  0x0159, 0x016f, 0x00fa, 0x0171, 0x00fc, 0x00fd, 0x0163, 0x02d9)
    ## Windows code page 1251 (Cyrillic).
    CP1251 <- c(0:127,
  0x0402, 0x0403, 0x201a, 0x0453, 0x201e, 0x2026, 0x2020, 0x2021,
  0x20ac, 0x2030, 0x0409, 0x2039, 0x040a, 0x040c, 0x040b, 0x040f,
  0x0452, 0x2018, 0x2019, 0x201c, 0x201d, 0x2022, 0x2013, 0x2014,
  0xfffd, 0x2122, 0x0459, 0x203a, 0x045a, 0x045c, 0x045b, 0x045f,
  0x00a0, 0x040e, 0x045e, 0x0408, 0x00a4, 0x0490, 0x00a6, 0x00a7,
  0x0401, 0x00a9, 0x0404, 0x00ab, 0x00ac, 0x00ad, 0x00ae, 0x0407,
  0x00b0, 0x00b1, 0x0406, 0x0456, 0x0491, 0x00b5, 0x00b6, 0x00b7,
  0x0451, 0x2116, 0x0454, 0x00bb, 0x0458, 0x0405, 0x0455, 0x0457,
  0x0410, 0x0411, 0x0412, 0x0413, 0x0414, 0x0415, 0x0416, 0x0417,
  0x0418, 0x0419, 0x041a, 0x041b, 0x041c, 0x041d, 0x041e, 0x041f,
  0x0420, 0x0421, 0x0422, 0x0423, 0x0424, 0x0425, 0x0426, 0x0427,
  0x0428, 0x0429, 0x042a, 0x042b, 0x042c, 0x042d, 0x042e, 0x042f,
  0x0430, 0x0431, 0x0432, 0x0433, 0x0434, 0x0435, 0x0436, 0x0437,
  0x0438, 0x0439, 0x043a, 0x043b, 0x043c, 0x043d, 0x043e, 0x043f,
  0x0440, 0x0441, 0x0442, 0x0443, 0x0444, 0x0445, 0x0446, 0x0447,
  0x0448, 0x0449, 0x044a, 0x044b, 0x044c, 0x044d, 0x044e, 0x044f)
    ## Windows code page 1252 (Western European): Latin-1 with extras
    ## in the 0x80-0x9f range.
    CP1252 <- c(0:127,
  0x20ac, 0xfffd, 0x201a, 0x0192, 0x201e, 0x2026, 0x2020, 0x2021,
  0x02c6, 0x2030, 0x0160, 0x2039, 0x0152, 0xfffd, 0x017d, 0xfffd,
  0xfffd, 0x2018, 0x2019, 0x201c, 0x201d, 0x2022, 0x2013, 0x2014,
  0x02dc, 0x2122, 0x0161, 0x203a, 0x0153, 0xfffd, 0x017e, 0x0178,
                160:255)
    ## Windows code page 1253 (Greek).
    CP1253 <- c(0:127,
  0x20ac, 0xfffd, 0x201a, 0x0192, 0x201e, 0x2026, 0x2020, 0x2021,
  0xfffd, 0x2030, 0xfffd, 0x2039, 0xfffd, 0xfffd, 0xfffd, 0xfffd,
  0xfffd, 0x2018, 0x2019, 0x201c, 0x201d, 0x2022, 0x2013, 0x2014,
  0xfffd, 0x2122, 0xfffd, 0x203a, 0xfffd, 0xfffd, 0xfffd, 0xfffd,
  0x00a0, 0x0385, 0x0386, 0x00a3, 0x00a4, 0x00a5, 0x00a6, 0x00a7,
  0x00a8, 0x00a9, 0xfffd, 0x00ab, 0x00ac, 0x00ad, 0x00ae, 0x2015,
  0x00b0, 0x00b1, 0x00b2, 0x00b3, 0x0384, 0x00b5, 0x00b6, 0x00b7,
  0x0388, 0x0389, 0x038a, 0x00bb, 0x038c, 0x00bd, 0x038e, 0x038f,
  0x0390, 0x0391, 0x0392, 0x0393, 0x0394, 0x0395, 0x0396, 0x0397,
  0x0398, 0x0399, 0x039a, 0x039b, 0x039c, 0x039d, 0x039e, 0x039f,
  0x03a0, 0x03a1, 0xfffd, 0x03a3, 0x03a4, 0x03a5, 0x03a6, 0x03a7,
  0x03a8, 0x03a9, 0x03aa, 0x03ab, 0x03ac, 0x03ad, 0x03ae, 0x03af,
  0x03b0, 0x03b1, 0x03b2, 0x03b3, 0x03b4, 0x03b5, 0x03b6, 0x03b7,
  0x03b8, 0x03b9, 0x03ba, 0x03bb, 0x03bc, 0x03bd, 0x03be, 0x03bf,
  0x03c0, 0x03c1, 0x03c2, 0x03c3, 0x03c4, 0x03c5, 0x03c6, 0x03c7,
  0x03c8, 0x03c9, 0x03ca, 0x03cb, 0x03cc, 0x03cd, 0x03ce, 0xfffd)
    ## Windows code page 1257 (Baltic).
    CP1257 <- c(0:127,
  0x20ac, 0xfffd, 0x201a, 0xfffd, 0x201e, 0x2026, 0x2020, 0x2021,
  0xfffd, 0x2030, 0xfffd, 0x2039, 0xfffd, 0x00a8, 0x02c7, 0x00b8,
  0xfffd, 0x2018, 0x2019, 0x201c, 0x201d, 0x2022, 0x2013, 0x2014,
  0xfffd, 0x2122, 0xfffd, 0x203a, 0xfffd, 0x00af, 0x02db, 0xfffd,
  0x00a0, 0xfffd, 0x00a2, 0x00a3, 0x00a4, 0xfffd, 0x00a6, 0x00a7,
  0x00d8, 0x00a9, 0x0156, 0x00ab, 0x00ac, 0x00ad, 0x00ae, 0x00c6,
  0x00b0, 0x00b1, 0x00b2, 0x00b3, 0x00b4, 0x00b5, 0x00b6, 0x00b7,
  0x00f8, 0x00b9, 0x0157, 0x00bb, 0x00bc, 0x00bd, 0x00be, 0x00e6,
  0x0104, 0x012e, 0x0100, 0x0106, 0x00c4, 0x00c5, 0x0118, 0x0112,
  0x010c, 0x00c9, 0x0179, 0x0116, 0x0122, 0x0136, 0x012a, 0x013b,
  0x0160, 0x0143, 0x0145, 0x00d3, 0x014c, 0x00d5, 0x00d6, 0x00d7,
  0x0172, 0x0141, 0x015a, 0x016a, 0x00dc, 0x017b, 0x017d, 0x00df,
  0x0105, 0x012f, 0x0101, 0x0107, 0x00e4, 0x00e5, 0x0119, 0x0113,
  0x010d, 0x00e9, 0x017a, 0x0117, 0x0123, 0x0137, 0x012b, 0x013c,
  0x0161, 0x0144, 0x0146, 0x00f3, 0x014d, 0x00f5, 0x00f6, 0x00f7,
  0x0173, 0x0142, 0x015b, 0x016b, 0x00fc, 0x017c, 0x017e, 0x02d9)
    ## Adobe Symbol encoding (as used for the symbol font); the 0xF8xx
    ## values are in the Unicode private use area.
    AdobeSymbol <- c(0:31,
       0x0020, 0x0021, 0x2200, 0x0023, 0x2203, 0x0025, 0x0026, 0x220D,
       0x0028, 0x0029, 0x2217, 0x002B, 0x002C, 0x2212, 0x002E, 0x002F,
       0x0030, 0x0031, 0x0032, 0x0033, 0x0034, 0x0035, 0x0036, 0x0037,
       0x0038, 0x0039, 0x003A, 0x003B, 0x003C, 0x003D, 0x003E, 0x003F,
       0x2245, 0x0391, 0x0392, 0x03A7, 0x0394, 0x0395, 0x03A6, 0x0393,
       0x0397, 0x0399, 0x03D1, 0x039A, 0x039B, 0x039C, 0x039D, 0x039F,
       0x03A0, 0x0398, 0x03A1, 0x03A3, 0x03A4, 0x03A5, 0x03C2, 0x03A9,
       0x039E, 0x03A8, 0x0396, 0x005B, 0x2234, 0x005D, 0x22A5, 0x005F,
       0xF8E5, 0x03B1, 0x03B2, 0x03C7, 0x03B4, 0x03B5, 0x03C6, 0x03B3,
       0x03B7, 0x03B9, 0x03D5, 0x03BA, 0x03BB, 0x03BC, 0x03BD, 0x03BF,
       0x03C0, 0x03B8, 0x03C1, 0x03C3, 0x03C4, 0x03C5, 0x03D6, 0x03C9,
       0x03BE, 0x03C8, 0x03B6, 0x007B, 0x007C, 0x007D, 0x223C, rep.int(0, 33),
       0x20AC, 0x03D2, 0x2032, 0x2264, 0x2044, 0x221E, 0x0192, 0x2663,
       0x2666, 0x2665, 0x2660, 0x2194, 0x2190, 0x2191, 0x2192, 0x2193,
       0x00B0, 0x00B1, 0x2033, 0x2265, 0x00D7, 0x221D, 0x2202, 0x2022,
       0x00F7, 0x2260, 0x2261, 0x2248, 0x2026, 0xF8E6, 0xF8E7, 0x21B5,
       0x2135, 0x2111, 0x211C, 0x2118, 0x2297, 0x2295, 0x2205, 0x2229,
       0x222A, 0x2283, 0x2287, 0x2284, 0x2282, 0x2286, 0x2208, 0x2209,
       0x2220, 0x2207, 0xF6DA, 0xF6D9, 0xF6DB, 0x220F, 0x221A, 0x22C5,
       0x00AC, 0x2227, 0x2228, 0x21D4, 0x21D0, 0x21D1, 0x21D2, 0x21D3,
       0x25CA, 0x2329, 0xF8E8, 0xF8E9, 0xF8EA, 0x2211, 0xF8EB, 0xF8EC,
       0xF8ED, 0xF8EE, 0xF8EF, 0xF8F0, 0xF8F1, 0xF8F2, 0xF8F3, 0xF8F4,
       0,      0x232A, 0x222B, 0x2320, 0xF8F5, 0x2321, 0xF8F6, 0xF8F7,
       0xF8F8, 0xF8F9, 0xF8FA, 0xF8FB, 0xF8FC, 0xF8FD, 0xF8FE, 0)
    ## Combine into a 256 x ncharsets integer matrix, rows labelled by
    ## byte value in hex, printed unquoted in hex via the class below.
    M <- cbind(ISOLatin1, ISOLatin2, ISOLatin7, ISOLatin9, Cyrillic, Greek,
               KOI8R, KOI8U,
               CP1250, CP1251, CP1252, CP1253, CP1257, AdobeSymbol)
    rownames(M) <- format.hexmode(0:255)
    storage.mode(M) <- "integer"
    class(M) <- c("noquote", "hexmode")
    M
})

Adobe_glyphs <- local({
    ## Adobe glyph list shipped with R: semicolon-separated pairs of
    ## glyph name and Unicode value, '#' comment lines skipped.
    glyphs <- scan(file.path(R.home("share"), "encodings", "Adobe-glyphlist"),
                   what = list(adobe = "", unicode = ""), quiet = TRUE,
                   sep = ";", comment.char = "#")
    glyphs <- as.data.frame(glyphs, stringsAsFactors = FALSE)
    ## Sort by Unicode value, then by glyph name.
    glyphs[order(glyphs$unicode, glyphs$adobe), ]
})
#  File src/library/tools/R/htmltools.R
#  Part of the R package, https://www.R-project.org
#
#  Copyright (C) 2022-2024 The R Core Team
#
#  This program is free software; you can redistribute it and/or modify
#  it under the terms of the GNU General Public License as published by
#  the Free Software Foundation; either version 2 of the License, or
#  (at your option) any later version.
#
#  This program is distributed in the hope that it will be useful,
#  but WITHOUT ANY WARRANTY; without even the implied warranty of
#  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
#  GNU General Public License for more details.
#
#  A copy of the GNU General Public License is available at
#  https://www.R-project.org/Licenses/

tidy_validate <-
function(f, tidy = "tidy") {
    ## Run HTML Tidy on file 'f' and return its diagnostics as a
    ## character matrix with columns line, col, msg and txt (the
    ## offending source line), plus concordance information when the
    ## file carries '<!-- concordance:' comments.  NULL if Tidy is
    ## silent.
    ##
    ## HTML Tidy complains about empty spans, which may be "ok" (KaTeX
    ## uses these for vertical alignment); "--drop-empty-elements no"
    ## would silence that but also complaints about all other empty
    ## elements.  So by default we pass no extra options and let callers
    ## filter "Warning: trimming empty <span>" messages; extra command
    ## line options can be supplied via the environment variable
    ## _R_CHECK_RD_VALIDATE_RD2HTML_OPTS_.
    out <- suppressWarnings(system2(tidy,
                                    c("-language en", "-qe",
                                      Sys.getenv("_R_CHECK_RD_VALIDATE_RD2HTML_OPTS_",
                                                 ""),
                                      f),
                                    stdout = TRUE, stderr = TRUE))
    if(!length(out)) return(NULL)
    ## Strip trailing \r from HTML Tidy output on Windows.
    out <- trimws(out, which = "right")
    src <- readLines(f, warn = FALSE)
    ## Parse "line L column C - message" diagnostics.
    hits <- regmatches(out,
                       regexec("^line ([0-9]+) column ([0-9]+) - (.+)$",
                               out))
    hits <- unique(do.call(rbind, hits[lengths(hits) == 4L]))
    lineno <- hits[, 2L]
    concordance <-
        as.Rconcordance(grep("^<!-- concordance:", src, value = TRUE))
    res <- cbind(line = lineno, col = hits[, 3L], msg = hits[, 4L],
                 txt = src[as.numeric(lineno)])
    if(!is.null(concordance))
        res <- cbind(res, matchConcordance(lineno, concordance = concordance))
    res
}

tidy_validate_db <-
function(x, paths = NULL, ignore = character()) {
    ## Combine a list of per-file tidy_validate() results into one
    ## matrix with a leading 'path' column.  Elements which are error
    ## objects are collected into an "errors" attribute; messages listed
    ## in 'ignore' are dropped.  Returns NULL when there is nothing to
    ## report.
    if(!is.null(paths))
        names(x) <- paths
    is_err <- vapply(x, inherits, NA, "error")
    errs <- x[is_err]
    good <- Filter(length, x[!is_err])
    if(!length(good) && !length(errs)) return(NULL)
    db <- do.call(rbind, good)
    if(is.null(db)) {
        db <- list() # cannot set an attr on NULL
    } else {
        db <- cbind(path = rep.int(names(good), vapply(good, nrow, 0)), db)
        if(length(ignore))
            db <- db[db[, "msg"] %notin% ignore, , drop = FALSE]
    }
    if(length(errs))
        attr(db, "errors") <- errs
    db
}

tidy_validate_files <-
function(files, verbose = interactive()) {
    ## Validate several HTML files with HTML Tidy, reporting progress
    ## when 'verbose', and combine the results via tidy_validate_db().
    check_one <- function(f) {
        if(verbose)
            message(sprintf("Processing %s ...", f))
        tidy_validate(f)
    }
    tidy_validate_db(lapply(files, check_one), files)
}

tidy_validate_R_httpd_path <-
function(path) {
    ## Fetch a dynamic-help path via httpd() and validate the served
    ## HTML with HTML Tidy.  Returns the error object if httpd() fails,
    ## the validation results for HTML responses, and NULL otherwise.
    res <- tryCatch(httpd(path, query = NULL), error = identity)
    if(inherits(res, "error"))
        return(res)
    if(!is.null(f <- res$file)) {
        ## Should only do this for appropriate content types: responses
        ## with an explicit content type are not validated here.
        if(is.null(res$"content-type"))
            tidy_validate(f)
        else
            NULL
    } else if(!is.null(payload <- res$payload)) {
        ## Inline payloads are written to a temporary file first.
        tmp <- tempfile()
        on.exit(unlink(tmp))
        writeLines(payload, tmp)
        tidy_validate(tmp)
    } else NULL
}

tidy_validate_package_Rd_files <-
function(package, dir, lib.loc = NULL, auto = NA, verbose = interactive())
{
    ## Validate the HTML rendered from package Rd files with HTML Tidy.
    ##
    ## package: character vector of installed package names, whose help
    ##   pages are fetched through the dynamic help system.
    ## dir: alternatively, path(s) to package source directories; when
    ##   given this takes precedence and 'lib.loc' is not used.
    ## lib.loc: library paths passed to Rd_db().
    ## auto: only used for the 'dir' variant; NA takes all Rd files,
    ##   TRUE only roxygen2-generated ones, FALSE only hand-written
    ##   ones.
    ## verbose: report progress via message()?
    ##
    ## Returns a combined results matrix (see tidy_validate_db()), or
    ## NULL.
    ##
    ## Fixed: removed an unused local ('n <- 3L') and superseded
    ## commented-out code.
    if(!missing(dir))
        return(tidy_validate_package_Rd_files_from_dir(dir, auto, verbose))

    if(!length(package)) return(NULL)

    one <- function(p) {
        if(verbose)
            message(sprintf("* Package: %s", p))
        db <- Rd_db(p, lib.loc = lib.loc)
        ## Help pages are served as <name>.html for <name>.Rd.
        files <- sub("[Rr]d$", "html", basename(names(db)))
        results <-
            lapply(files,
                   function(f) {
                       if(verbose)
                           message(sprintf("Processing %s ...", f))
                       path <- sprintf("/library/%s/html/%s", p, f)
                       tryCatch(tidy_validate_R_httpd_path(path),
                                error = identity)
                   })
        tidy_validate_db(results, sprintf("%s/%s", p, files))
    }

    do.call(rbind, lapply(package, one))
}

tidy_validate_package_Rd_files_from_dir <- function(dir, auto = NA, verbose) {

    ## Validate the HTML rendered (via Rd2HTML()) from the Rd files in
    ## package source directories 'dir'.  'auto' selects roxygen2
    ## generated Rd files (TRUE), hand-written ones (FALSE), or all
    ## (NA).  Returns a combined results matrix (see tidy_validate_db()),
    ## or NULL.
    if(!length(dir)) return(NULL)

    ## Each Rd file is rendered into the same scratch file in turn.
    htmlfile <- tempfile()
    on.exit(unlink(htmlfile))

    check_pkg_dir <- function(d) {
        if(verbose)
            message(sprintf("* Package: %s", basename(d)))
        db <- Rd_db(dir = d)
        if(!is.na(auto)) {
            ## Detect roxygen2 output from the generator comment kept in
            ## the Rd metadata.
            generated <-
                vapply(db,
                       function(rd) {
                           gen <- attr(rd, "meta")$generator
                           (is.character(gen) &&
                            (length(gen) == 1L) &&
                            startsWith(gen, "% Generated by roxygen2"))
                       },
                       NA)
            db <- db[if(auto) generated else !generated]
        }
        results <-
            lapply(db,
                   function(rd) {
                       tryCatch({
                           Rd2HTML(rd, htmlfile, concordance = TRUE)
                           tidy_validate(htmlfile)
                       },
                       error = identity)
                   })
        tidy_validate_db(results,
                         sprintf("%s::%s", basename(d), names(db)))
    }

    do.call(rbind, lapply(dir, check_pkg_dir))
}


tidy_validate_urls <-
function(urls, verbose = interactive()) {
    ## Download each URL to a scratch file and validate it with HTML
    ## Tidy, combining the results via tidy_validate_db().
    destfile <- tempfile("tidy_validate")
    on.exit(unlink(destfile))
    fetch_and_check <- function(u) {
        if(verbose)
            message(sprintf("Processing %s ...", u))
        utils::download.file(u, destfile, quiet = TRUE)
        tidy_validate(destfile)
    }
    tidy_validate_db(lapply(urls, fetch_and_check), urls)
}
#  File src/library/tools/R/index.R
#  Part of the R package, https://www.R-project.org
#
#  Copyright (C) 1995-2021 The R Core Team
#
#  This program is free software; you can redistribute it and/or modify
#  it under the terms of the GNU General Public License as published by
#  the Free Software Foundation; either version 2 of the License, or
#  (at your option) any later version.
#
#  This program is distributed in the hope that it will be useful,
#  but WITHOUT ANY WARRANTY; without even the implied warranty of
#  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
#  GNU General Public License for more details.
#
#  A copy of the GNU General Public License is available at
#  https://www.R-project.org/Licenses/

### Miscellaneous indexing functions.

## <NOTE>
## Currently indices are represented as 2-column character matrices.
## To 'merge' indices in the sense of using the values from index B for
## all keys in index A also present in index B, we currently use
##   idx <- match(indA[ , 1L], indB[ , 1L], 0L)
##   indA[which(idx != 0L), 2L] <- indB[idx, 2L]
## which could be abstracted into a function .mergeIndexEntries().
## </NOTE>

### * .build_data_index

.build_data_index <-
function(dir, contents)
{
    ## Build an index with information about all available data sets.
    ## See .build_demo_index() for an explanation of what we do here.
    ##
    ## 'dir' is the package directory; 'contents' is the package's Rd
    ## contents db (rows with Aliases and Title, possibly 0 rows).
    ## Returns a 2-column character matrix: first column the data set,
    ## shown as "object (topic)" when object and topic names differ,
    ## second column the corresponding Rd title (or "" if undocumented).

    dataTopics <- list_data_in_pkg(dir = dir)
    if(!length(dataTopics)) return(matrix("", 0L, 2L))
    ## Append "/" to the topic names so that unlist() below produces
    ## names of the form "<topic>/..." which we can strip back down to
    ## the topic.
    names(dataTopics) <- paste0(names(dataTopics), "/")
    datasets <- unlist(dataTopics)
    ## it is possible to have topics that create no object:
    ## BioC's makecdfenv did.
    if(!length(datasets)) return(matrix("", 0L, 2L))
    ## Recover the topic each data set came from (names(datasets)).
    names(datasets) <- sub("/[^/]*$", "", names(datasets))
    datasets <- sort(datasets)
    dataIndex <- cbind(datasets, "", deparse.level = 0L)
    dimnames(dataIndex) <- NULL
    ## Note that NROW(contents) might be 0.
    if(length(datasets) && NROW(contents)) {
        ## aliasIndices maps each alias (in unlist(contents$Aliases)
        ## order) back to its row in 'contents'; 0-valued non-matches in
        ## 'idx' are silently dropped by the subscript, which keeps the
        ## two sides of the assignment aligned.
        aliasIndices <-
            rep.int(seq_len(NROW(contents)), lengths(contents$Aliases))
        idx <- match(datasets, unlist(contents$Aliases), 0L)
        dataIndex[which(idx != 0L), 2L] <-
            contents[aliasIndices[idx], "Title"]
    }
    ## Display "object (topic)" when the object name differs from the
    ## topic it is loaded from.
    if(length(datasets))
        dataIndex[, 1L] <-
            as.vector(ifelse(datasets == names(datasets), datasets,
                             paste0(datasets, " (", names(datasets), ")")))
    dataIndex
}

### * .build_demo_index

.build_demo_index <-
function(demoDir)
{
    ## Build an index with information about all available demos.
    ##
    ## Both the contents of 00Index (if readable) and the demo files
    ## actually present are used, so that demo() really lists all
    ## *available* demos even if some are 'undocumented' (no index
    ## entry).  Use .check_demo_index() to check whether available demo
    ## code and docs are in sync.
    ##
    ## Returns a 2-column character matrix (topic, description), with
    ## "" for topics missing from 00Index.

    if(!dir.exists(demoDir))
        stop(gettextf("directory '%s' does not exist", demoDir),
             domain = NA)
    demoFiles <- list_files_with_type(demoDir, "demo")
    topics <- unique(basename(file_path_sans_ext(demoFiles)))
    if(!length(topics)) return(matrix("", 0L, 2L))
    demoIndex <- cbind(topics, "")
    INDEX <- file.path(demoDir, "00Index")
    if(file_test("-f", INDEX)) {
        entries <- tryCatch(read.00Index(INDEX), error = identity)
        if(inherits(entries, "error")) {
            warning(gettextf("cannot read index information in file '%s'",
                             INDEX),
                    domain = NA)
        } else {
            ## Copy descriptions for topics present in the index
            ## (0-valued non-matches are dropped by the subscript).
            pos <- match(topics, entries[ , 1L], 0L)
            demoIndex[which(pos != 0L), 2L] <- entries[pos, 2L]
        }
    }
    dimnames(demoIndex) <- NULL
    demoIndex
}

### * .check_demo_index

.check_demo_index <-
function(demoDir)
{
    ## Compare the demos actually available with the entries of the
    ## 00Index file, reporting demos lacking index information and
    ## index entries lacking a corresponding demo.
    if(!dir.exists(demoDir))
        stop(gettextf("directory '%s' does not exist", demoDir),
             domain = NA)
    built <- .build_demo_index(demoDir)
    indexFile <- file.path(demoDir, "00Index")
    declared <-
        tryCatch(read.00Index(indexFile),
                 error = function(e)
                     stop(gettextf("cannot read index information in file '%s'",
                                   indexFile),
                          domain = NA))
    ## Demos with an all-whitespace (or empty) title have no usable
    ## index information.
    res <-
        list(missing_from_index =
                 built[grep("^[[:space:]]*$", built[ , 2L]), 1L],
             missing_from_demos =
                 declared[declared[ , 1L] %notin% built[ , 1L], 1L])
    class(res) <- "check_demo_index"
    res
}

print.check_demo_index <-
function(x, ...)
{
    ## Print method for .check_demo_index() results: one section per
    ## kind of problem, silent when there is nothing to report.
    headers <- c(missing_from_index =
                     "Demos with missing or empty index information:",
                 missing_from_demos =
                     "Demo index entries without corresponding demo:")
    for (what in names(headers)) {
        entries <- x[[what]]
        if (length(entries))
            writeLines(c(headers[[what]], paste0("  ", entries)))
    }
    invisible(x)
}

### * .build_hsearch_index

.build_hsearch_index <-
function(contents, packageName, defaultEncoding = NULL)
{
    ## Build an index of the Rd contents in 'contents', of a package
    ## named 'packageName' in a form useful for help.search().
    ## As from 2.3.0 the installation directory is no longer recorded,
    ## but the format is kept for back-compatibility.
    ##
    ## Returns a list of four character matrices (base data, aliases,
    ## keywords, concepts) whose column names are taken from
    ## 'hsearch_index_colnames'.  'defaultEncoding', if non-NULL, is
    ## used for Rd objects without an encoding of their own.

    ## Zero-row defaults for the three "vector data" matrices, kept
    ## when 'contents' is empty or lacks the corresponding entries.
    dbAliases <- dbConcepts <- dbKeywords <-
        matrix(character(), ncol = 3L)

    if((nr <- NROW(contents)) > 0L) {
        ## IDs are used for indexing the Rd objects in the help.search
        ## db.
        IDs <- seq_len(nr)
        if(!is.data.frame(contents)) {
            colnames(contents) <-
                c("Name", "Aliases", "Title", "Keywords")
            base <- contents[, c("Name", "Title"), drop = FALSE]
            ## If the contents db is not a data frame, then it has the
            ## aliases collapsed.  Split again as we need the first
            ## alias as the help topic to indicate for matching Rd
            ## objects.
            aliases <- strsplit(contents[, "Aliases"], " +")
            ## Don't do this for keywords though, as these might be
            ## non-standard (and hence contain white space ...).
            encoding <- NULL
        }
        else {
            base <- as.matrix(contents[, c("Name", "Title")])
            aliases <- contents[, "Aliases"]
            encoding <- contents$Encoding # may not be there ...
        }
        if(is.null(encoding))
            encoding <- character(length = nr)
        if(!is.null(defaultEncoding))
            encoding[!nzchar(encoding)] <- defaultEncoding
        keywords <- contents[, "Keywords"]
        ## We create 4 character matrices (cannot use data frames for
        ## efficiency reasons): 'dbBase' holds all character string
        ## data; 'dbAliases', 'dbConcepts' and 'dbKeywords' hold
        ## character vector data in a 3-column character matrix format
        ## with entry, ID of the Rd object the entry comes from, and the
        ## package the object comes from.  The latter is useful when
        ## subscripting the help.search db according to package.
        dbBase <- cbind(packageName, "", IDs, base,
                        topic = unlist(Map(.Rd_topic_for_display,
                                           contents[, "Name"],
                                           aliases)),
                        encoding)
        ## If there are no aliases at all, cbind() below would give
        ## matrix(packageName, ncol = 1L).  (Of course, Rd objects
        ## without aliases are useless ...)
        if(length(tmp <- unlist(aliases)))
            dbAliases <-
                cbind(tmp, rep.int(IDs, lengths(aliases)),
                      packageName)
        ## And similarly if there are no keywords at all.
        if(length(tmp <- unlist(keywords)))
            dbKeywords <-
                cbind(tmp, rep.int(IDs, lengths(keywords)),
                      packageName)
        ## Finally, concepts are a feature added in R 1.8 ...
        ## (the matrix branch above renames the columns, so this only
        ## triggers for data-frame contents carrying a Concepts column)
        if("Concepts" %in% colnames(contents)) {
            concepts <- contents[, "Concepts"]
            if(length(tmp <- unlist(concepts)))
                dbConcepts <-
                    cbind(tmp, rep.int(IDs, lengths(concepts)),
                          packageName)
        }
    }
    else
        dbBase <- matrix(character(), ncol = 7L)

    colnames(dbBase) <- hsearch_index_colnames$Base
    colnames(dbAliases) <- hsearch_index_colnames$Aliases
    colnames(dbKeywords) <- hsearch_index_colnames$Keywords
    colnames(dbConcepts) <- hsearch_index_colnames$Concepts

    list(dbBase, dbAliases, dbKeywords, dbConcepts)
}

## Column names of the four matrices returned by .build_hsearch_index().
hsearch_index_colnames <-
    list(Base     = c("Package", "LibPath", "ID", "Name", "Title",
                      "Topic", "Encoding"),
         Aliases  = c("Alias",   "ID", "Package"),
         Keywords = c("Keyword", "ID", "Package"),
         Concepts = c("Concept", "ID", "Package"))

### * .build_links_index

.build_links_index <-
function(contents, package)
{
    ## Map every help alias in 'contents' to the relative path of the
    ## HTML rendering of the Rd file documenting it, for use by the
    ## HTML help system.
    ##
    ## contents: Rd contents db with components 'Aliases' (list of
    ##   character vectors) and 'File' (Rd file names, parallel to
    ##   'Aliases').
    ## package: package name used in the "../../<pkg>/html/" paths.
    ## Returns a named character vector (alias -> HTML path); empty
    ## character vector when 'contents' is empty.
    if(!length(contents)) return(character())
    aliases <- contents$Aliases
    lens <- lengths(aliases)
    ## Fix: the replacement string was "\\.html" -- a redundant escape,
    ## as backslash-dot has no meaning in a replacement; use ".html".
    files <- sub("\\.[Rr]d$", ".html", contents$File)
    structure(file.path("../..", package, "html", rep.int(files, lens)),
              names = unlist(aliases))
}


### mode: outline-minor ***
### outline-regexp: "### [*]+" ***
### End: ***
#  File src/library/tools/R/install.R
#  Part of the R package, https://www.R-project.org
#
#  Copyright (C) 1995-2025 The R Core Team
#
# NB: also copyright dates in Usages.
#
#  This program is free software; you can redistribute it and/or modify
#  it under the terms of the GNU General Public License as published by
#  the Free Software Foundation; either version 2 of the License, or
#  (at your option) any later version.
#
#  This program is distributed in the hope that it will be useful,
#  but WITHOUT ANY WARRANTY; without even the implied warranty of
#  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
#  GNU General Public License for more details.
#
#  A copy of the GNU General Public License is available at
#  https://www.R-project.org/Licenses/

#### R based engine for  R CMD INSTALL SHLIB Rprof
####

## R developers can use this to debug the function by running it
## directly as tools:::.install_packages(args), where the args should
## be what commandArgs(TRUE) would return, that is a character vector
## of (space-delimited) terms that would be passed to R CMD INSTALL.  E.g.
##
if(FALSE) {
    ## Typical direct invocation for debugging (never executed here):
    tools:::.install_packages(c("--preclean", "--no-multiarch",
				"tree"))
    ## or

    ## Capture the numeric exit status instead of quitting R
    ## (no.q = TRUE makes failures stop() with the status in the message):
    status <- tryCatch(
	tools:::.install_packages(c("--no-clean-on-error", "--no-multiarch",
				    "tree"), no.q = TRUE)
      , error = function(e) as.numeric(sub(".* exit status *", "",
					   conditionMessage(e))))
    ## or

    ## Step through the installation interactively:
    debugonce(tools:::.install_packages)
    tools:::.install_packages(c("-c", "--debug", "--no-clean-on-error", "--no-multiarch",
                                "tree"))
    ## and then (after about 40 x [Enter]) when do_install is defined:
    debug(do_install) ## and 'c'
}



.install_packages <- function(args = NULL, no.q = interactive(), warnOption = 1)
{
    ## R-level engine for R CMD INSTALL.  'args' are the command-line
    ## arguments as from commandArgs(TRUE); 'no.q' makes errors stop()
    ## instead of quitting the session (handy for interactive
    ## debugging); 'warnOption' is the minimum warning level to use.

    ## calls system() on Windows for
    ## sh (configure.win/cleanup.win) make zip

    ## global variables, shared (via <<-) with the nested helpers below
    curPkg <- character() # list of packages in current pkg
    lockdir <- ""
    is_first_package <- TRUE
    stars <- "*"
    user.tmpdir <- Sys.getenv("PKG_BUILD_DIR")
    keep.tmpdir <- nzchar(user.tmpdir)

    ## Need these here in case of an early error, e.g. missing etc/Makeconf
    tmpdir <- ""
    clean_on_error <- TRUE

    R_runR_deps_only <- function(cmd, deps_only_env, multiarch = FALSE, ...) {
        ## Run 'cmd' in a child R process; when _R_CHECK_INSTALL_DEPENDS_
        ## is true, pass 'deps_only_env' so only declared dependencies
        ## are visible to the child.
        deps_only <-
            config_val_to_logical(Sys.getenv("_R_CHECK_INSTALL_DEPENDS_",
                                             "FALSE"))
        ## R_TESTS= is needed for some packages (AnnotationDbi) that
        ## install other packages during their tests (otherwise the
        ## system profile fails because it cannot find the tests
        ## startup file).
        env <- paste(if (deps_only) deps_only_env else "", "R_TESTS=")
        opts <- "--no-save --no-restore --no-echo"
        if (deps_only) {
            opts <- paste(opts, "--no-init-file --no-site-file")
            ## do not use --no-environ with multiarch, because Renviron
            ## may include architecture-specific settings that may differ
            ## from settings of the host process architecture
            if (!multiarch)
                opts <- paste(opts, "--no-environ")
        }
        R_runR(cmd = cmd, Ropts = opts, env = env, ...)
    }

    ## How to leave: in no.q mode signal an error carrying the status
    ## (useful when run inside an interactive session); otherwise quit
    ## the R session with that status.
    do_exit <- if (no.q) {
        function(status) stop(".install_packages() exit status ", status)
    } else {
        function(status) q("no", status = status, runLast = FALSE)
    }

    ## Abort the installation: remove the partially installed package,
    ## restore a previous installation saved in the lock directory (if
    ## any), run the final cleanup and exit with 'status'.
    do_exit_on_error <- function(status = 1L)
    {
        ## If we are not yet processing a package, we will not have
        ## set curPkg
        if(clean_on_error && length(curPkg)) {
            pkgdir <- file.path(lib, curPkg)
            ## is_subdir() guards against malicious package names
            ## escaping the library tree
            if (nzchar(pkgdir) && dir.exists(pkgdir) &&
                is_subdir(pkgdir, lib)) {
                starsmsg(stars, "removing ", sQuote(pkgdir))
                unlink(pkgdir, recursive = TRUE)
            }

            ## restore the previous installation moved aside into the
            ## lock directory before installation started
            if (nzchar(lockdir) &&
                dir.exists(lp <- file.path(lockdir, curPkg)) &&
                is_subdir(lp, lockdir)) {
                starsmsg(stars, "restoring previous ", sQuote(pkgdir))
                if (WINDOWS) {
                    file.copy(lp, dirname(pkgdir), recursive = TRUE,
                              copy.date = TRUE)
                    unlink(lp, recursive = TRUE)
                } else {
                    ## some shells require that they be run in a known dir
                    setwd(startdir)
                    if(system(paste("mv -f", shQuote(lp), shQuote(pkgdir))))
                        message("  restoration failed\n")
                }
            }
        }

        do_cleanup()
        do_exit(status=status)
    }

    ## Cleanup run on every exit path: remove the temporary build
    ## directory and the lock directory, and refresh the HTML package
    ## listing when something was installed into the system library.
    do_cleanup <- function()
    {
        if(!keep.tmpdir && nzchar(tmpdir)) do_cleanup_tmpdir()
        if (!is_first_package) {
            ## Only need to do this in case we successfully installed
            ## at least one package
            if (lib == .Library && "html" %in% build_help_types)
                utils::make.packages.html(.Library, docdir = R.home("doc"))
        }
        if (nzchar(lockdir)) unlink(lockdir, recursive = TRUE)
    }

    ## Remove the temporary build directory (unless the user supplied
    ## one via PKG_BUILD_DIR).
    do_cleanup_tmpdir <- function()
    {
        ## Solaris will not remove any directory in the current path,
        ## so move back to the start directory first.
        setwd(startdir)
        if (dir.exists(tmpdir) && !keep.tmpdir)
            unlink(tmpdir, recursive = TRUE)
    }

    # Produce a (by default single-) quoted string for use in a command
    # sent to another R process: backslashes and embedded quote
    # characters are escaped with a backslash, then the whole path is
    # wrapped in the quote character.
    quote_path <- function(path, quote = "'") {
        escaped <- gsub("\\", "\\\\", path, fixed = TRUE)
        escaped <- gsub(quote, paste0("\\", quote), escaped, fixed = TRUE)
        paste0(quote, escaped, quote)
    }

    # Escape backslashes in a replacement string for gsub etc.
    # To be used when the replacement is a path name which may include
    # backslashes, e.g. with UNC paths on Windows.
    # Fix: dropped the redundant paste0() wrapper -- gsub() already
    # returns the character vector unchanged in shape.
    quote_replacement <- function(r)
        gsub("\\", "\\\\", r, fixed = TRUE)

    ## Ensure cleanup runs however we leave; the success paths call
    ## do_exit() before this handler can trigger.
    on.exit(do_exit_on_error())
    WINDOWS <- .Platform$OS.type == "windows"
    ## Cross-building Windows binaries from Unix (R_CROSS_BUILD).
    cross <- Sys.getenv("R_CROSS_BUILD")
    have_cross <- nzchar(cross)
    if(have_cross && !cross %in% c("x64","singlearch"))
        stop("invalid value ", sQuote(cross), " for R_CROSS_BUILD")
    if (have_cross) {
        WINDOWS <- TRUE
        Sys.setenv(R_OSTYPE = "windows")
    }

    if (WINDOWS) MAKE <- "make"
    else MAKE <- Sys.getenv("MAKE") # FIXME shQuote, default?
    rarch <- Sys.getenv("R_ARCH") # unix only
    if (WINDOWS && nzchar(.Platform$r_arch))
        rarch <- paste0("/", .Platform$r_arch)
    test_archs <- rarch
    if (have_cross) {
        ## fix: was 'rarch = ...' -- use the standard assignment operator
        rarch <- if (cross == "singlearch") "" else paste0("/", cross)
        test_archs <- c()
    }


    SHLIB_EXT <- if (WINDOWS) ".dll" else {
        ## can we do better?
        mconf <- file.path(R.home(), paste0("etc", rarch), "Makeconf")
        ## PCRE needed for Debian arm* platforms
        sub(".*= ", "", grep("^SHLIB_EXT", readLines(mconf), value = TRUE,
                             perl = TRUE))
    }

    ## Raise (never lower) the warning level, restoring it on exit.
    if(getOption("warn") < warnOption) {
        op <- options(warn = warnOption)
        on.exit(options(op), add = TRUE)
    }
    invisible(Sys.setlocale("LC_COLLATE", "C")) # discard output

    if (WINDOWS) {
        rhome <- chartr("\\", "/", R.home())
        ## These might be needed for configure.win and Make{file,vars}.win
        ## Some people have *assumed* that R_HOME uses /
        Sys.setenv(R_HOME = rhome)
        if (nzchar(rarch)) Sys.setenv(R_ARCH = rarch, R_ARCH_BIN = rarch)
    }

    ## Print the R CMD INSTALL usage text to stdout.  NB: several of
    ## the option strings contain embedded TAB characters for column
    ## alignment; they are part of the emitted text and must not be
    ## reformatted.
    Usage <- function() {
        cat("Usage: R CMD INSTALL [options] pkgs",
            "",
            "Install the add-on packages specified by pkgs.  The elements of pkgs can",
            "be relative or absolute paths to directories with the package",
            "sources, or to gzipped package 'tar' archives.  The library tree",
            "to install to can be specified via '--library'.  By default, packages are",
            "installed in the library tree rooted at the first directory in",
            ".libPaths() for an R session run in the current environment.",
            "",
            "Options:",
            "  -h, --help		print short help message and exit",
            "  -v, --version		print INSTALL version info and exit",
            "  -c, --clean		remove files created during installation",
            "      --preclean	remove files created during a previous run",
	    "  -d, --debug		turn on debugging messages",
            if(WINDOWS) "			and build a debug DLL",
            "  -l, --library=LIB	install packages to library tree LIB",
            "      --no-configure    do not use the package's configure script",
            "      --no-docs		do not install HTML, LaTeX or examples help",
            "      --html		build HTML help",
            "      --no-html		do not build HTML help",
            "      --latex      	install LaTeX help",
            "      --example		install R code for help examples",
            "      --fake		do minimal install for testing purposes",
            "      --no-lock		install on top of any existing installation",
            "			without using a lock directory",
            "      --lock		use a per-library lock directory (default)",
            "      --pkglock		use a per-package lock directory",
            "      			(default for a single package)",
            "      --build    	build binaries of the installed package(s)",
            "      --install-tests	install package-specific tests (if any)",
            "      --no-R, --no-libs, --no-data, --no-help, --no-demo, --no-exec,",
            "      --no-inst",
            "			suppress installation of the specified part of the",
            "			package for testing or other special purposes",
            "      --no-multiarch	build only the main architecture",
            "      --libs-only	only install the libs directory",
            "      --data-compress=	none, gzip (default), bzip2 or xz compression",
            "			to be used for lazy-loading of data",
            "      --resave-data	re-save data files as compactly as possible",
            "      --compact-docs	re-compress PDF files under inst/doc",
            "      --with-keep.source",
            "      --without-keep.source",
            "			use (or not) 'keep.source' for R code",
            "      --with-keep.parse.data",
            "      --without-keep.parse.data",
            "			use (or not) 'keep.parse.data' for R code",
            "      --byte-compile	byte-compile R code",
            "      --no-byte-compile	do not byte-compile R code",
            "      --staged-install	install to a temporary directory and then move",
            "                   	to the target directory (default)",
            "      --no-staged-install	install directly to the target directory",
            "      --no-test-load	skip test of loading installed package",
            "      --no-clean-on-error	do not remove installed package on error",
            "      --merge-multiarch	multi-arch by merging (from a single tarball only)",
            "      --use-vanilla	do not read any Renviron or Rprofile files",
            "      --use-LTO         use Link-Time Optimization",
            "      --no-use-LTO      do not use Link-Time Optimization",
            "      --use-C17         use a C standard at most C17 (also C90, C99)",
            "      --use-C23         use a C standard at least C23",
            "\nfor Unix",
            "      --configure-args=ARGS",
            "			set arguments for the configure scripts (if any)",
            "      --configure-vars=VARS",
            "			set variables for the configure scripts (if any)",
            "      --strip           strip shared object(s)",
            "      --strip-lib       strip static/dynamic libraries under lib/",
            "      --dsym            (macOS only) generate dSYM directory",
            "      --built-timestamp=STAMP",
            "                   set timestamp for Built: entry in DESCRIPTION",
            ## "\nand on Windows only",
            ## "      --force-biarch	attempt to build both architectures",
            ## "			even if there is a non-empty configure.win",
            ## "      --compile-both	compile both architectures on 32-bit Windows",
            "",
            "Which of --html or --no-html is the default depends on the build of R:",
            paste0("for this one it is ",
		   if(static_html) "--html" else "--no-html", "."),
            "",
            "Report bugs at <https://bugs.R-project.org>.", sep = "\n")
    }


    ## Check whether dir is a subdirectory of parent, to protect
    ## against malicious package names like ".." below.
    ## Assumes that both directories exist.  A symbolic link (POSIX
    ## only; Sys.readlink() gives NA on Windows) is accepted as-is.
    is_subdir <- function(dir, parent) {
        link <- Sys.readlink(dir)
        if (!is.na(link) && nzchar(link)) return(TRUE)
        normalizePath(parent) == normalizePath(file.path(dir, ".."))
    }

    ## Absolute path of an existing directory, resolved by visiting it
    ## (the original working directory is restored afterwards).
    fullpath <- function(dir)
    {
        prev <- setwd(dir)
        absolute <- getwd()
        setwd(prev)
        absolute
    }

    ## Parse a logical-valued DESCRIPTION field (used for LazyData,
    ## KeepSource, ByteCompile, Biarch, StagedInstall), falling back to
    ## 'default' when the field is absent and aborting the install on
    ## an invalid value.
    parse_description_field <- function(desc, field, default) {
        str_parse_logic(desc[field], default = default,
                        otherwise = quote(
                            errmsg("invalid value of ", field,
                                   " field in DESCRIPTION")))
    }

    ## Emit a progress line prefixed by the given star marker.
    starsmsg <- function(stars, ...) {
        message(stars, " ", ..., domain = NA)
    }

    ## Report an installation error and abort via do_exit_on_error()
    ## (which also cleans up any partial installation).
    errmsg <- function(...) {
        message("ERROR: ", ..., domain = NA)
        do_exit_on_error()
    }

    ## Package-specific convenience wrapper around errmsg().
    pkgerrmsg <- function(msg, pkg, ...) {
        errmsg(msg, " for package ", sQuote(pkg), ...)
    }

    ## 'pkg' is the absolute path to package sources.
    ## Top-level per-package driver: validates DESCRIPTION, creates the
    ## installation directory, dispatches to the source/binary
    ## installer, then optionally re-packages the result (tarball on
    ## Unix, zip on Windows).
    do_install <- function(pkg)
    {
        ## A .zip on Windows is a pre-built binary package, handled
        ## entirely by unpackPkgZip().
        if (WINDOWS && endsWith(pkg, ".zip")) {
            pkg_name <- basename(pkg)
            pkg_name <- sub("\\.zip$", "", pkg_name)
            pkg_name <- sub("_[0-9.-]+$", "", pkg_name)
            reuse_lockdir <- lock && !pkglock
            if (pkglock)
                lock <- "pkglock"
            utils:::unpackPkgZip(pkg, pkg_name, lib, libs_only, lock,
                                 reuse_lockdir = reuse_lockdir)
            return()
        }

        setwd(pkg)
        ## We checked this exists, but not that it is readable
        desc <- tryCatch(read.dcf(fd <- file.path(pkg, "DESCRIPTION")),
                         error = identity)
        if(inherits(desc, "error") || !length(desc))
            stop(gettextf("error reading file '%s'", fd),
                 domain = NA, call. = FALSE)
        desc <- desc[1L,]
        ## Let's see if we have a bundle
        if (!is.na(desc["Bundle"])) {
            stop("this seems to be a bundle -- and they are defunct")
        } else {
            pkg_name <- desc["Package"]
            if (is.na(pkg_name)) errmsg("no 'Package' field in 'DESCRIPTION'")
            ## record for cleanup/restoration by do_exit_on_error()
            curPkg <<- pkg_name
        }

        instdir <- file.path(lib, pkg_name) # = <library>/<pkg>
        Sys.setenv(R_PACKAGE_NAME = pkg_name, R_PACKAGE_DIR = instdir)
        ## check the package's R-version dependencies
        status <- .Rtest_package_depends_R_version()
        if (status) do_exit_on_error()

        dir.create(instdir, recursive = TRUE, showWarnings = FALSE)
        if (!dir.exists(instdir)) {
            # This allows a package to be installed if a broken symbolic
            # link (or a regular file) is in place (PR#18262)
            unlink(instdir, recursive = FALSE)
            dir.create(instdir, recursive = TRUE, showWarnings = FALSE)
        }
        if (!dir.exists(instdir)) {
            message("ERROR: unable to create ", sQuote(instdir), domain = NA)
            do_exit_on_error()
        }

        ## guards against names like ".." escaping the library tree
        if (!is_subdir(instdir, lib)) {
            message("ERROR: ", sQuote(pkg_name), " is not a legal package name",
                    domain = NA)
            do_exit_on_error()
        }

        ## Make sure we do not attempt installing to srcdir.
        owd <- setwd(instdir)
        if (owd == getwd()) pkgerrmsg("cannot install to srcdir", pkg_name)
        setwd(owd)

        ## Figure out whether this is a source or binary package.
        is_source_package <- is.na(desc["Built"])

        if (is_source_package) {
            ## Find out if any C++ or C standard is requested in DESCRIPTION file
            sys_requires <- desc["SystemRequirements"]
            if (!is.na(sys_requires)) {
                sys_requires <- unlist(strsplit(sys_requires, ","))
                for (i in cxx_standards) {
                    pattern <- paste0("^[[:space:]]*C[+][+]",i,"[[:space:]]*$")
                    if(any(grepl(pattern, sys_requires, ignore.case=TRUE))) {
                        Sys.setenv("R_PKG_CXX_STD"=i)
                        on.exit(Sys.unsetenv("R_PKG_CXX_STD"))
                        break
                    }
                }
                ## a --use-C* command-line option takes precedence
                if(is.na(use_C)) {
                    if(any(grepl("USE_C17", sys_requires))) use_C <<- 17
                    if(any(grepl("USE_C23", sys_requires))) use_C <<- 23
                    if(any(grepl("USE_C90", sys_requires))) use_C <<- 90
                    if(any(grepl("USE_C99", sys_requires))) use_C <<- 99
                }
            }
        }

        if (!is_first_package) cat("\n")

        if (is_source_package)
            do_install_source(pkg_name, instdir, pkg, desc)
        else
            do_install_binary(pkg_name, instdir, desc)

        ## Add read permission to all, write permission to owner
        ## If group-write permissions were requested, set them
        .Call(C_dirchmod, instdir, group.writable)
        is_first_package <<- FALSE

        if (tar_up) { # Unix only
            starsmsg(stars, "creating tarball")
            version <- desc["Version"]
            filename <- if (!grepl("darwin", R.version$os)) {
                paste0(pkg_name, "_", version, "_R_",
                       Sys.getenv("R_PLATFORM"), ".tar.gz")
            } else {
                paste0(pkg_name, "_", version,".tgz")
            }
            filepath <- file.path(startdir, filename)
            owd <- setwd(lib)
            res <- utils::tar(filepath, curPkg, compression = "gzip",
                              compression_level = 9L,
                              tar = Sys.getenv("R_INSTALL_TAR"))
            if (res)
                errmsg(sprintf("packaging into %s failed", sQuote(filename)))
            message("packaged installation of ",
                    sQuote(pkg_name), " as ", sQuote(filename),
                    domain = NA)
            setwd(owd)
        }

        if (zip_up) { # Windows only
            starsmsg(stars, "MD5 sums")
            .installMD5sums(instdir)
            ## we could use utils::zip() here.
            ZIP <- "zip"                # Windows only
            version <- desc["Version"]
            filename <- paste0(pkg_name, "_", version, ".zip")
            filepath <- shQuote(file.path(startdir, filename))
            ## system(paste("rm -f", filepath))
            unlink(filepath)
            owd <- setwd(lib)
            res <- system(paste(shQuote(ZIP), "-r9Xq", filepath,
                                paste(curPkg, collapse = " ")))
            setwd(owd)
            if (res)
                message("running 'zip' failed", domain = NA)
            else
                message("packaged installation of ",
                        sQuote(pkg_name), " as ", filename, domain = NA)
        }
        if (Sys.getenv("_R_INSTALL_NO_DONE_") != "yes") {
            ## message("", domain = NA)  # ensure next starts on a new line, for R CMD check
            starsmsg(stars, "DONE (", pkg_name, ")")
        }

        ## package completed: nothing left to clean up on error
        curPkg <<- character()
    }


    ## Unix only.  Install an already-built binary package: move any
    ## existing installation aside into the lock directory, then copy
    ## the unpacked binary tree into 'instdir'.
    do_install_binary <- function(pkg, instdir, desc)
    {
        starsmsg(stars, "installing *binary* package ", sQuote(pkg), " ...")

        if (file.exists(file.path(instdir, "DESCRIPTION"))) {
            ## save the previous installation so it can be restored on error
            if (nzchar(lockdir))
                system(paste("mv -f", shQuote(instdir),
                             shQuote(file.path(lockdir, pkg))))
            dir.create(instdir, recursive = TRUE, showWarnings = FALSE)
        }
        TAR <- Sys.getenv("TAR", "tar")
        ## Copy the tree with cp -R; if that fails, fall back to piping
        ## a tar archive into the destination directory.
        ## Fix: the fallback used invalid flags "cd - ." (and no space
        ## before the pipe); "cf - ." writes the archive to stdout.
        res <- system(paste("cp -R .", shQuote(instdir),
                            "|| (", TAR, "cf - . | (cd", shQuote(instdir), "&&", TAR, "-xf -))"
                            ))
        if (res) errmsg("installing binary package failed")

        if (tar_up) {
            starsmsg(stars, sQuote(pkg),
                     " was already a binary package and will not be rebuilt")
            ## Fix: must superassign -- a local 'tar_up <- FALSE' would
            ## not stop do_install() from re-packaging the tarball.
            tar_up <<- FALSE
        }
    }

    ## to be run from package source directory
    ## Clean a source tree after building: remove per-architecture src
    ## copies, run the package's own clean targets/scripts, and revert
    ## any install-time patches.
    run_clean <- function()
    {
        if (dir.exists("src") && length(dir("src", all.files = TRUE)) > 2L) {
            ## remove the per-architecture copies of src (src-<arch>)
            if (WINDOWS) archs <- c("i386", "x64")
            else {
                ## installed sub-architectures are the entries of bin/exec
                wd2 <- setwd(file.path(R.home("bin"), "exec"))
                archs <- Sys.glob("*")
                setwd(wd2)
            }
            if(length(archs))
                for(arch in archs) {
                    ss <- paste0("src-", arch)
                    ## it seems fixing permissions is sometimes needed
                    .Call(C_dirchmod, ss, group.writable)
                    unlink(ss, recursive = TRUE)
                }

            owd <- setwd("src")
            if (WINDOWS) {
                if (file.exists("Makefile.ucrt"))
                    system(paste(MAKE, "-f Makefile.ucrt clean"))
                else if (file.exists("Makefile.win"))
                    system(paste(MAKE, "-f Makefile.win clean"))
                else
                    unlink(c("Makedeps",
                             Sys.glob("*_res.rc"),
                             Sys.glob("*.[do]")))
                    # system("rm -f *_res.rc *.o *.d Makedeps")
            } else {
                if (file.exists("Makefile")) system(paste(MAKE, "clean"))
                else ## we will be using SHLIB --preclean
                    unlink(Sys.glob(paste0("*", SHLIB_EXT)))
            }
            setwd(owd)
        }
        ## run the package's own cleanup script, if any
        if (WINDOWS) {
            if (file.exists("cleanup.ucrt"))
                system("sh ./cleanup.ucrt")
            else if (file.exists("cleanup.win"))
                system("sh ./cleanup.win")
        } else if (file_test("-x", "cleanup")) system("./cleanup")
        else if (file.exists("cleanup"))
            warning("'cleanup' exists but is not executable -- see the 'R Installation and Administration Manual'", call. = FALSE)
        revert_install_time_patches()
    }

    do_install_source <- function(pkg_name, instdir, pkg_dir, desc)
    {
        Sys.setenv("R_INSTALL_PKG" = pkg_name)
        on.exit(Sys.unsetenv("R_INSTALL_PKG"))
        ## Copy the built shared objects into <instdir>/libs<arch>,
        ## honouring a package-supplied install.libs.R script when
        ## present, optionally stripping the libraries and (on macOS)
        ## generating dSYM debug symbols.
        shlib_install <- function(instdir, arch)
        {
            ## install.libs.R allows customization of the libs installation process
            if (file.exists("install.libs.R")) {
                message("installing via 'install.libs.R' to ", instdir,
                        domain = NA)
                ## the following variables are defined to be available,
                ## and to prevent abuse we don't expose anything else
                local.env <- local({ SHLIB_EXT <- SHLIB_EXT
                                     R_PACKAGE_DIR <- instdir
                                     R_PACKAGE_NAME <- pkg_name
                                     R_PACKAGE_SOURCE <- pkg_dir
                                     R_ARCH <- arch
                                     WINDOWS <- WINDOWS
                                     environment()})
                parent.env(local.env) <- .GlobalEnv
                source("install.libs.R", local = local.env)
                return(TRUE)
            }
            ## otherwise proceed with the default which is to just copy *${SHLIB_EXT}
            files <- Sys.glob(paste0("*", SHLIB_EXT))
            if (length(files)) {
                libarch <- if (nzchar(arch)) paste0("libs", arch) else "libs"
                dest <- file.path(instdir, libarch)
                message('installing to ', dest, domain = NA)
                dir.create(dest, recursive = TRUE, showWarnings = FALSE)
                file.copy(files, dest, overwrite = TRUE)
                ## strip the installed libraries when requested (via
                ## --strip or _R_SHLIB_STRIP_) and a strip command is set
                if((do_strip || config_val_to_logical(Sys.getenv("_R_SHLIB_STRIP_",
                                                                 "false"))) &&
                   nzchar(strip_cmd <- Sys.getenv("R_STRIP_SHARED_LIB"))) {
                    system(paste(c(strip_cmd,
                                   shQuote(file.path(dest, files))),
                                 collapse = " "))
                }
                ## not clear if this is still necessary, but sh version did so
		if (!WINDOWS)
		    Sys.chmod(file.path(dest, files), dmode)
		## macOS does not keep debugging symbols in binaries
		## anymore so optionally we can create dSYMs. This is
		## important since we will blow away .o files so there
		## is no way to create it later.

		if (dsym && startsWith(R.version$os, "darwin")) {
		    starsmsg(stars, gettextf("generating debug symbols (%s)", "dSYM"))
		    dylib <- Sys.glob(paste0(dest, "/*", SHLIB_EXT))
                    for (d in dylib) system(paste0("dsymutil ", d))
		}

                ## keep the symbols.rds table alongside the libs for
                ## later checks, unless disabled via the environment
                if(config_val_to_logical(Sys.getenv("_R_SHLIB_BUILD_OBJECTS_SYMBOL_TABLES_",
                                                    "TRUE"))
                   && file_test("-f", "symbols.rds")) {
                    file.copy("symbols.rds", dest)
                }
            }
        }

        ## Build the shared library for one sub-architecture via
        ## 'R CMD SHLIB' and install it.  This is only called for
        ## Makevars[.win], so assume it does create a shlib: not so
        ## reliably reported on Windows.  Note though that it may not
        ## create pkg_name.dll, and graph does not.
        ##
        ## Returns TRUE when the build (or, on Windows, DLL creation)
        ## failed, FALSE on success -- callers treat TRUE as an error.
        run_shlib <- function(pkg_name, srcs, instdir, arch, use_LTO = NA)
        {
            ## map the LTO request onto a SHLIB option (NA => no option)
            lto_opt <- if (isTRUE(use_LTO)) "--use-LTO"
                       else if (isFALSE(use_LTO)) "--no-use-LTO"
            ## map the requested C standard onto a SHLIB option
            cstd_opt <- if (isTRUE(use_C == "17")) "--use-C17"
                        else if (isTRUE(use_C == "23")) "--use-C23"
                        else if (isTRUE(use_C == "90")) "--use-C90"
                        else if (isTRUE(use_C == "99")) "--use-C99"
            args <- c(shargs, lto_opt, cstd_opt,
                      "-o", paste0(pkg_name, SHLIB_EXT), srcs)
            if (WINDOWS && debug) args <- c(args, "--debug")
            if (debug)
                message("about to run ",
                        "R CMD SHLIB ", paste(args, collapse = " "),
                        domain = NA)
            if (.shlib_internal(args) != 0L) return(TRUE)
            ## Windows: SHLIB can succeed without producing any DLL;
            ## unless an install.libs.R script takes over, report it.
            if (WINDOWS && !file.exists("install.libs.R") &&
                !length(Sys.glob("*.dll"))) {
                message("no DLL was created")
                return(TRUE)
            }
            shlib_install(instdir, arch)
            FALSE
        }

        ## Patch hardcoded paths in shared objects/dynamic libraries
        ## so that they can be moved to a different directory.
        ## Not used on WINDOWS.
        ##
        ## Rewrites every occurrence of the temporary installation
        ## directory 'instdir' to 'final_instdir' in library ids,
        ## dependency (DT_NEEDED) paths and rpaths, using whichever of
        ## elfedit, otool/install_name_tool, patchelf or chrpath is
        ## available.  Presumably used for staged installs, where
        ## 'final_instdir' is the eventual library location -- see the
        ## staged-install code later in this file.  Calls errmsg()
        ## (an error) if hard-coded temporary paths cannot be fixed.
        patch_rpaths <- function()
        {
            ## candidate files: everything under instdir with a
            ## shared-library extension
            slibs <- list.files(instdir, recursive = TRUE, all.files = TRUE,
                                full.names = TRUE)
            slibs <- grep("(\\.sl$)|(\\.so$)|(\\.dylib$)|(\\.dll$)", slibs,
                          value = TRUE)
            if (!length(slibs)) return()

            have_file <- nzchar(Sys.which("file"))
            ## file reports macOS dylibs as 'dynamically linked shared library'
            if (have_file) {
                ## RcppParallel has .so files containing ASCII text
                ## (linker script) which make the tools below produce
                ## a lot of error messages. However, some docker
                ## installations do not have "file" utility.
                ## Solaris' "file" does not use 'shared'.
                ##
                ## On macOS, a single "dylib" file can have a shared object
                ## for multiple architectures, so multiple lines with
                ## "shared"/"dynamically linked"
                are_shared <- vapply(slibs,
                    function(l) any(grepl("(shared|dynamically linked)",
                                    system(paste("file", shQuote(l)), intern = TRUE))),
                    NA)
                slibs <- slibs[are_shared]
                if (!length(slibs)) return()
            }

            starsmsg(stars, "checking absolute paths in shared objects and dynamic libraries")

            ## identify the OS and the available binary-editing tools
            uname <- system("uname -a", intern = TRUE)
            os <- sub(" .*", "", uname)
            have_chrpath <- nzchar(Sys.which("chrpath"))
            have_patchelf <- nzchar(Sys.which("patchelf"))
            have_readelf <- nzchar(Sys.which("readelf"))
            have_macos_clt <- identical(os, "Darwin") &&
                              nzchar(Sys.which("otool")) &&
                              nzchar(Sys.which("install_name_tool"))
            have_solaris_elfedit <- identical(os, "SunOS") &&
                                    nzchar(Sys.which("elfedit"))

            hardcoded_paths <- FALSE  # found at least one path to rewrite
            failed_fix <- FALSE       # a rewrite did not take effect

            # paths below are protected from the shell, but special
            # characters/spaces should not be used, because they cannot be
            # reproduced correctly in the outputs from external tools

            if (have_solaris_elfedit) {
                ## Solaris only
                ## changes both rpath and DT_NEEDED paths
                for (l in slibs) {
                    ## dump the dynamic section; keep "[idx] tag value path"
                    ## lines and extract the index and the path fields
                    out <- suppressWarnings(
                        system(paste("elfedit -re dyn:value", shQuote(l)), intern = TRUE))
                    out <- grep("^[ \t]*\\[[0-9]+\\]", out, value = TRUE)
                    re <- "^[ \t]*\\[([0-9]+)\\][ \t]+([^ \t]+)[ \t]+([^ \t]+)[ \t]*(.*)"
                    paths <- gsub(re, "\\4", out)
                    idxs <- gsub(re, "\\1", out)
                    old_paths <- paths
                    # "\\$ORIGIN/.."
                    # fixed = TRUE: instdir is matched literally;
                    # quote_replacement is a helper defined elsewhere in
                    # this file
                    paths <- gsub(instdir, quote_replacement(final_instdir),
                                  paths, fixed = TRUE)
                    changed <- paths != old_paths
                    paths <- paths[changed]
                    old_paths <- old_paths[changed]
                    idxs <- idxs[changed]
                    for (i in seq_along(paths)) {
                        hardcoded_paths <- TRUE
                        ## escape for use inside the double-quoted -e argument
                        qp <- gsub('([" \\])', "\\\\\\1", paths[i])
                        qp <- gsub("'", "\\\\'", qp)
                        cmd <- paste0("elfedit -e \"dyn:value -dynndx -s ",
                                     idxs[i], " ", qp, "\" ", shQuote(l))
                        message(cmd)
                        ret <- suppressWarnings(system(cmd, intern = FALSE))
                        if (ret == 0)
                            message("NOTE: fixed path ", sQuote(old_paths[i]))
                    }
                    ## re-read and verify nothing still points at instdir
                    out <- suppressWarnings(
                        system(paste("elfedit -re dyn:value", shQuote(l)), intern = TRUE))
                    out <- grep("^[ \t]*\\[", out, value = TRUE)
                    paths <- gsub(re, "\\4", out)
                    if (any(grepl(instdir, paths, fixed = TRUE)))
                        failed_fix <- TRUE
                }
            } else if (have_macos_clt) {
                ## macOS only
                for (l in slibs) {
                    ## change identification name of the library
                    out <- suppressWarnings(
                        system(paste("otool -D", shQuote(l)), intern = TRUE))
                    out <- out[-1L] # first line is l (includes instdir)
                    oldid <- out
                    if (length(oldid) == 1 &&
                        grepl(instdir, oldid, fixed = TRUE)) {

                        hardcoded_paths <- TRUE
                        newid <- gsub(instdir, quote_replacement(final_instdir),
                                      oldid, fixed = TRUE)
                        cmd <- paste("install_name_tool -id", shQuote(newid),
                                     shQuote(l))
                        message(cmd)
                        ret <- suppressWarnings(system(cmd, intern = FALSE))
                        if (ret == 0)
                            ## NOTE: install_name does not signal an error in
                            ## some cases
                            message("NOTE: fixed library identification name ",
                                    sQuote(oldid))
                    }

                    ## change paths to other libraries
                    out <- suppressWarnings(
                        system(paste("otool -L", shQuote(l)), intern = TRUE))
                    paths <- grep("\\(compatibility", out, value = TRUE)
                    paths <- gsub("^[ \t]*(.*) \\(compatibility.*", "\\1",
                                  paths)
                    old_paths <- paths
                    # "@loader_path/.."
                    paths <- gsub(instdir, quote_replacement(final_instdir),
                                  paths, fixed = TRUE)
                    changed <- paths != old_paths
                    paths <- paths[changed]
                    old_paths <- old_paths[changed]
                    for(i in seq_along(paths)) {
                        hardcoded_paths <- TRUE
                        cmd <- paste("install_name_tool -change",
                                     shQuote(old_paths[i]), shQuote(paths[i]),
                                     shQuote(l))
                        message(cmd)
                        ret <- suppressWarnings(system(cmd, intern = FALSE))
                        if (ret == 0)
                            ## NOTE: install_name does not signal an error in
                            ## some cases
                            message("NOTE: fixed library path ",
                                    sQuote(old_paths[i]))
                    }
                    ## re-read and verify the dependency paths were fixed
                    out <- suppressWarnings(
                        system(paste("otool -L", shQuote(l)), intern = TRUE))
                    out <- grep("\\(compatibility", out, value = TRUE)
                    if (any(grepl(instdir, out, fixed = TRUE)))
                        failed_fix <- TRUE

                    ## change rpath entries
                    out <- suppressWarnings(
                        system(paste("otool -l", shQuote(l)), intern = TRUE))
                    out <- grep("(^[ \t]*cmd )|(^[ \t]*path )", out,
                                value = TRUE)
                    rpidx <- grep("cmd LC_RPATH$", out)
                    if (length(rpidx)) {
                        ## the 'path' line directly follows its LC_RPATH 'cmd'
                        ## line in the filtered output
                        paths <- gsub("^[ \t]*path ", "", out[rpidx+1])
                        paths <- gsub("(.*) \\(offset .*", "\\1", paths)
                        old_paths <- paths
                        # "@loader_path/.."
                        paths <- gsub(instdir, quote_replacement(final_instdir),
                                      paths, fixed = TRUE)
                        changed <- paths != old_paths
                        paths <- paths[changed]
                        old_paths <- old_paths[changed]
                        for(i in seq_along(paths)) {
                            hardcoded_paths <- TRUE
                            cmd <- paste("install_name_tool -rpath",
                                         shQuote(old_paths[i]),
                                         shQuote(paths[i]),
                                         shQuote(l))
                            message(cmd)
                            ret <- suppressWarnings(system(cmd))
                            if (ret == 0)
                                message("NOTE: fixed rpath ",
                                        sQuote(old_paths[i]))
                        }
                    }

                    ## check no hard-coded paths are left
                    out <- suppressWarnings(
                        system(paste("otool -l", shQuote(l)), intern = TRUE))
                    out <- out[-1L] # first line is l (includes instdir)
                    if (any(grepl(instdir, out, fixed = TRUE)))
                        failed_fix <- TRUE
                }
            } else if (have_patchelf) {
                ## probably Linux
                for(l in slibs) {
                    # fix rpath
                    rpath <- suppressWarnings(
                        system(paste("patchelf --print-rpath", shQuote(l)),
                               intern = TRUE))
                    old_rpath <- rpath
                    # "\\$ORIGIN/.."
                    rpath <- gsub(instdir, quote_replacement(final_instdir),
                                  rpath, fixed = TRUE)
                    if (length(rpath) && nzchar(rpath) && old_rpath != rpath) {
                        hardcoded_paths <- TRUE
                        cmd <- paste("patchelf", "--set-rpath",
                                         shQuote(rpath), shQuote(l))
                        message(cmd)
                        ret <- suppressWarnings(system(cmd))
                        if (ret == 0)
                            message("NOTE: fixed rpath ", sQuote(old_rpath))
                        ## re-read and verify the rewrite took effect
                        rpath <- suppressWarnings(
                            system(paste("patchelf --print-rpath",
                                         shQuote(l)), intern = TRUE))
                        if (any(grepl(instdir, rpath, fixed = TRUE)))
                            failed_fix <- TRUE
                    }
                    # fix DT_NEEDED
                    if (have_readelf) {
                        out <- suppressWarnings(
                            system(paste("readelf -d", shQuote(l)), intern = TRUE))
                        re0 <- "0x.*\\(NEEDED\\).*Shared library:"
                        out <- grep(re0, out, value = TRUE)
                        re <- "^[ \t]*0x[0-9]+[ \t]+\\(NEEDED\\)[ \t]+Shared library:[ \t]*\\[(.*)\\]"
                        paths <- gsub(re, "\\1", out)
                        old_paths <- paths
                        # "\\$ORIGIN/.."
                        paths <- gsub(instdir, quote_replacement(final_instdir),
                                      paths, fixed = TRUE)
                        changed <- paths != old_paths
                        paths <- paths[changed]
                        old_paths <- old_paths[changed]
                        for(i in seq_along(paths)) {
                            cmd <- paste("patchelf --replace-needed",
                                         shQuote(old_paths[i]),
                                         shQuote(paths[i]),
                                         shQuote(l))
                            message(cmd)
                            ret <- suppressWarnings(system(cmd))
                            if (ret == 0)
                                message("NOTE: fixed library path ",
                                        sQuote(old_paths[i]))
                        }
                        ## re-read and verify the rewrite took effect
                        out <- suppressWarnings(
                            system(paste("readelf -d", shQuote(l)), intern = TRUE))
                        out <- grep(re0, out, value = TRUE)
                        if (any(grepl(instdir, out, fixed = TRUE)))
                            failed_fix <- TRUE
                    }
                }
            } else if (have_chrpath) {
                ## Linux (possibly Solaris, but there elfedit should be
                ## available, instead); only fixes rpaths, not DT_NEEDED
                for(l in slibs) {
                    out <- suppressWarnings(
                        system(paste("chrpath", shQuote(l)), intern = TRUE))

                    # when multiple rpaths are present, there is a single
                    # RUNPATH= line with the paths separated by :
                    rpath <- grep(".*PATH=", out, value=TRUE)
                    rpath <- gsub(".*PATH=", "", rpath)
                    old_rpath <- rpath
                    # "\\$ORIGIN/.."
                    rpath <- gsub(instdir, quote_replacement(final_instdir),
                                  rpath, fixed = TRUE)
                    if (length(rpath) && nzchar(rpath) && old_rpath != rpath) {
                        hardcoded_paths <- TRUE
                        cmd <- paste("chrpath", "-r", shQuote(rpath),
                                     shQuote(l))
                        message(cmd)
                        ret <- suppressWarnings(system(cmd))
                        if (ret == 0)
                            message("NOTE: fixed rpath ", sQuote(old_rpath))
                        ## re-read and verify the rewrite took effect
                        out <- suppressWarnings(
                            system(paste("chrpath", shQuote(l)), intern = TRUE))
                        rpath <- grep(".*PATH=", out, value = TRUE)
                        rpath <- gsub(".*PATH=", "", rpath)
                        if (any(grepl(instdir, rpath, fixed = TRUE)))
                            failed_fix <- TRUE
                    }
                }
            }
            if (hardcoded_paths)
                message("WARNING: shared objects/dynamic libraries with hard-coded temporary installation paths")
            if (failed_fix)
                errmsg("some hard-coded temporary paths could not be fixed")

            if (have_readelf) {
                ## check again, needed mostly on Linux (chrpath may not be
                ## available or there may be DT_NEEDED entries with absolute
                ## paths); ldd is not suitable because it interprets $ORIGIN
                for(l in slibs) {
                    out <- suppressWarnings(
                        system(paste("readelf -d", shQuote(l)), intern = TRUE))
                    out <- grep("^[ \t]*0x", out, value = TRUE)
                    if (any(grepl(instdir, out, fixed = TRUE))) {
                        ## give path relative to installation dir
                        ll <- sub(file.path(instdir, ""), "", l, fixed = TRUE)
                        errmsg("absolute paths in ",
                               sQuote(ll),
                               " include the temporary installation directory:",
                               " please report to the package maintainer",
                               " and use ", sQuote("--no-staged-install"))
                    }
                }
            }
        } # patch_rpaths()

        ## Make the destination directories available to the developer's
        ## installation scripts (e.g. configure)
        Sys.setenv(R_LIBRARY_DIR = lib)

        if (nzchar(lib0)) {
            ## FIXME: is this needed?
            ## set R_LIBS to include the current installation directory
            rlibs <- Sys.getenv("R_LIBS")
            rlibs <- if (nzchar(rlibs)) paste(lib, rlibs, sep = .Platform$path.sep) else lib
            Sys.setenv(R_LIBS = rlibs)
            ## This is needed
            .libPaths(c(lib, .libPaths()))
        }

        ## 'Frontend' packages just run configure/make (and clean) in the
        ## source directory and return early -- nothing is installed into
        ## the library.
        Type <- desc["Type"]
        if (!is.na(Type) && Type == "Frontend") {
            if (WINDOWS) errmsg("'Frontend' packages are Unix-only")
            starsmsg(stars, "installing *Frontend* package ", sQuote(pkg_name), " ...")
            if (preclean) system(paste(MAKE, "clean"))
            if (use_configure) {
                if (file_test("-x", "configure")) {
                    res <- system(paste(paste(configure_vars, collapse = " "),
                                        "./configure",
                                        paste(configure_args, collapse = " ")))
                    if (res) pkgerrmsg("configuration failed", pkg_name)
                } else if (file.exists("configure"))
                    errmsg("'configure' exists but is not executable -- see the 'R Installation and Administration Manual'")
            }
            if (file.exists("Makefile"))
                if (system(MAKE)) pkgerrmsg("make failed", pkg_name)
            if (clean) system(paste(MAKE, "clean"))
            return()
        }

        if (!is.na(Type) && Type == "Translation")
            errmsg("'Translation' packages are defunct")

        ## refuse to install a package declared for the other OS family
        ## (unless this is a --fake install)
        OS_type <- desc["OS_type"]
        if (WINDOWS) {
            if ((!is.na(OS_type) && OS_type == "unix") && !fake)
                errmsg(" Unix-only package")
        } else {
            if ((!is.na(OS_type) && OS_type == "windows") && !fake)
                errmsg(" Windows-only package")
        }

        ## permission modes used for installed files/directories below
	if(group.writable) { ## group-write modes if requested:
	    fmode <- "664"
	    dmode <- "775"
	} else {
	    fmode <- "644"
	    dmode <- "755"
	}

        ## At this point we check that we have the dependencies we need.
        ## We cannot use installed.packages() as other installs might be
        ## going on in parallel

        pkgInfo <- .split_description(.read_description("DESCRIPTION"))
        R_install_force_depends_imports <- config_val_to_logical(Sys.getenv(
                "_R_INSTALL_LIBS_ONLY_FORCE_DEPENDS_IMPORTS_", "TRUE"))
        ## With --libs-only (and the env var above set to false) only the
        ## LinkingTo packages are needed; otherwise Depends and Imports too.
        ## BUG FIX: this used names(names(pkgInfo$LinkingTo)), which is
        ## always NULL (names() of an already-extracted name vector), so
        ## the libs-only dependency check was silently skipped.
        if (libs_only && isFALSE(R_install_force_depends_imports))
            pkgs <- unique(names(pkgInfo$LinkingTo))
        else
            pkgs <- unique(c(names(pkgInfo$Depends), names(pkgInfo$Imports),
                             names(pkgInfo$LinkingTo)))
        if (length(pkgs)) {
            ## packages not found in any current library path
            miss <- pkgs[!vapply(pkgs,
                                 function(p)
                                     length(find.package(p, quiet = TRUE)) > 0L,
                                 NA)]
            if (length(miss) > 1L)
                pkgerrmsg(sprintf("dependencies %s are not available",
                                  paste(sQuote(miss), collapse = ", ")),
                          pkg_name,
                          sprintf("\nPerhaps try a variation of:\ninstall.packages(c(%s))",
                                  paste(sQuote(miss, FALSE), collapse = ", ")))
            else if (length(miss))
                pkgerrmsg(sprintf("dependency %s is not available",
                                  sQuote(miss)),
                          pkg_name,
                          sprintf("\nPerhaps try a variation of:\ninstall.packages(%s)",
                                  sQuote(miss, FALSE)))
        }

        starsmsg(stars, "installing *source* package ",
                 sQuote(pkg_name), " ...")

        ## subsequent progress messages use a two-star prefix
        stars <- "**"

        starsmsg(stars,
                 sprintf("this is package %s version %s",
                         sQuote(desc["Package"]),
                         sQuote(desc["Version"])))

        ## checkMD5sums() returns NA when there is no MD5 file to check
        res <- checkMD5sums(pkg_name, getwd())
        if(!is.na(res) && res) {
            starsmsg(stars,
                     gettextf("package %s successfully unpacked and MD5 sums checked",
                              sQuote(pkg_name)))
        }

        if (file.exists(file.path(instdir, "DESCRIPTION"))) {
            ## Back up a previous version (into lockdir when locking is
            ## in use, so it can be restored if this install fails)
            if (nzchar(lockdir)) {
                if (debug) starsmsg(stars, "backing up earlier installation")
                if(WINDOWS) {
                    ## on Windows move may fail for in-use files, so copy
                    ## and then remove
                    file.copy(instdir, lockdir, recursive = TRUE,
                              copy.date = TRUE)
                    if (more_than_libs) unlink(instdir, recursive = TRUE)
                } else if (more_than_libs)
                    system(paste("mv -f ", shQuote(instdir),
                                 shQuote(file.path(lockdir, pkg_name))))
                else
                    file.copy(instdir, lockdir, recursive = TRUE,
                              copy.date = TRUE)
            } else if (more_than_libs) unlink(instdir, recursive = TRUE)
            if (more_than_libs && dir.exists(instdir))
                # On Windows, a DLL cannot be unlinked if in use
                errmsg("cannot remove earlier installation, is it in use?")
            dir.create(instdir, recursive = TRUE, showWarnings = FALSE)
        }

        ## Decide whether to use a staged install: the package's own
        ## StagedInstall field wins, otherwise the command-line/default
        ## setting; cross-compilation, --libs-only and no-locking all
        ## force a non-staged install.
        pkg_staged_install <- SI <-
            parse_description_field(desc, "StagedInstall", default = NA)
        if (is.na(pkg_staged_install)) pkg_staged_install <- staged_install
        if (have_cross) pkg_staged_install <- FALSE
        if (pkg_staged_install && libs_only) {
            pkg_staged_install <- FALSE
            message("not using staged install with --libs-only")
        }
        if (pkg_staged_install && !lock) {
            pkg_staged_install <- FALSE
            message("staged installation is only possible with locking")
        }

        if (pkg_staged_install) {
            starsmsg(stars, "using staged installation")
            ## remember the final locations, then redirect the install
            ## into <lockdir>/00new; the result is moved into place later
            final_instdir <- instdir
            final_lib <- lib
            final_rpackagedir <- Sys.getenv("R_PACKAGE_DIR")
            final_rlibs <- Sys.getenv("R_LIBS")
            final_libpaths <- .libPaths()

            instdir <- file.path(lockdir, "00new", pkg_name)
            Sys.setenv(R_PACKAGE_DIR = instdir)
            dir.create(instdir, recursive = TRUE, showWarnings = FALSE)
            lib <- file.path(lockdir, "00new")

            ## make the staging library visible to R code run during install
            rlibs <- if (nzchar(final_rlibs))
                         paste(lib, final_rlibs, sep = .Platform$path.sep)
                     else
                         lib
            Sys.setenv(R_LIBS = rlibs)
            .libPaths(c(lib, final_libpaths))
        } else {
            if(isFALSE(SI))
                starsmsg(stars,
                         "using non-staged installation via StagedInstall field")
            else if (Sys.getenv("_R_INSTALL_SUPPRESS_NO_STAGED_MESSAGE_") != "yes")
                starsmsg(stars, "using non-staged installation")
        }

        if (preclean) run_clean()

        if (WINDOWS) {
            # Installation-time patching was enabled as a temporary measure
            # during the transition from MSVCRT to UCRT, when packages with
            # many reverse dependencies had to be updated to link.

            # URL or a local directory with patches: ("no" to no patching)
            it_patches_base <- Sys.getenv("_R_INSTALL_TIME_PATCHES_", "no")

            # The patches are identified by package name. An index is used
            # to map the name to a directory with patches for a given
            # package. There may be multiple patches for a single package,
            # but that hasn't been used. The patches applied to a package
            # are copied into directory install_time_patches inside the
            # package installation (so in library, but also in binary
            # package build) for reference.

            # The patches are automatically reverted on cleanup, see
            # revert_install_time_patches, also during R CMD build (see
            # build.R). The latter is needed to ensure that unpatched source
            # package tarball is produced when the package native code is
            # compiled during R CMD build, e.g. to build vignettes.

            # This feature is experimental, it may be completely removed in
            # the future.

	    if (!it_patches_base %in% c("no", "disabled", "false", "FALSE")) {

                # read the index (package name -> patch paths relative to
                # it_patches_base); any read failure disables patching
                patches_idx <- tryCatch({
                        idxfile <- file(paste0(it_patches_base, "/",
                                               "patches_idx.rds"))
                        patches_idx <- readRDS(idxfile)
                        close(idxfile)
                        patches_idx
                    },
                    error = function(e) NULL)

                if (is.null(patches_idx))
                    message("WARNING: installation-time patches will not be applied, could not get the patches index")
                else {
                    patches_msg <- FALSE
                    for(p in patches_idx[[pkg_name]]) {
                        if (!patches_msg) {
                            patches_msg <- TRUE
                            starsmsg(stars, "applying installation-time patches")
                        }
                        purl <- paste0(it_patches_base, "/", p)
                        have_patch <- nzchar(Sys.which("patch"))
                        if (!have_patch)
                            stop("patch utility is needed for installation-time patching")
                        # keep a copy of the patch inside the installation
                        dir.create("install_time_patches", recursive=TRUE)
                        fname <- paste0("install_time_patches/", basename(p))
                        if (grepl("^http", purl))
                            utils::download.file(purl, destfile = fname, mode = "wb")
    		        else
                            file.copy(purl, fname)

                        # dry-run first so a non-applying patch is diagnosed
                        # without leaving partial changes behind
                        if (system2("patch", args = c("--dry-run", "-p2", "--binary", "--force"),
                                    stdin = fname, stdout = NULL, stderr = NULL) != 0) {
                            ## the patch cannot be applied, check if it might
                            ## be reversed
                            if (system2("patch", args = c("--dry-run", "-R", "-p2", "--binary",
                                                          "--force"), stdin = fname) == 0)
                                message("NOTE: Skipping installation-time patch ", purl,
                                        " which seems to be already applied.\n")
                            else
                                message("WARNING: failed to apply patch ", purl, "\n")
                        } else {
                            if (system2("patch", args = c("-p2", "--binary", "--force"),
                                        stdin = fname) != 0)
                                ## should not happen as dry-run succeeded
                                message("WARNING: failed to apply patch ", p, "\n")
                            else
                                message("Applied installation-time patch ", purl,
                                        " and saved it as ", fname,
                                        " in package installation\n")
                        }
                    }
                }
            }
	}

        ## Run the package's configure script (configure.ucrt/configure.win
        ## on Windows, executable ./configure elsewhere), exporting the
        ## standard compiler/flag variables from 'R CMD config' for any
        ## that are not already set in the environment.
        if (use_configure) {
            if (WINDOWS) {
                if (file.exists(f <- "./configure.ucrt") ||
                    file.exists(f <- "./configure.win")) {
                    ## an approach with less quoting hell
                    ev <- c("CC", "CFLAGS", "CXX", "CXXFLAGS", "CPPFLAGS",
                            "LDFLAGS", "FC", "FCFLAGS")
                    ## skip any which are already set.
                    ev <- ev[!nzchar(Sys.getenv(ev))]
                    ev1 <- ev
                    ## with a C standard request, query CC<std>/C<std>FLAGS
                    ## instead of CC/CFLAGS
                    if (!is.na(use_C))
                        ev1 <- c(sprintf(c("CC%s", "C%sFLAGS"), use_C),
                                 ev[!(ev %in% c("CC", "CFLAGS"))])
                    ev2 <- lapply(ev1, function(x)
                        system2(file.path(R.home("bin"), "Rcmd.exe"),
                                c("config", x), stdout = TRUE))
                    names(ev2) <- ev1
                    do.call(Sys.setenv, ev2)
                    res <- system(paste("sh", f))
                    ## as we skipped those already set, unsetting is safe.
                    ## BUG FIX: was Sys.unsetenv(ev2) -- ev2 holds the
                    ## *values*, so the variables set above (named by ev1)
                    ## were never unset and leaked into later build steps.
                    Sys.unsetenv(ev1)
                    if (res) pkgerrmsg("configuration failed", pkg_name)
                } else if (file.exists("configure"))
                    message("\n",
                            "   **********************************************\n",
                            "   WARNING: this package has a configure script\n",
                            "         It probably needs manual configuration\n",
                            "   **********************************************\n\n", domain = NA)
            } else {
                ## FIXME: should these be quoted?
                if (file_test("-x", "configure")) {
                    cmd <- paste(paste(configure_vars, collapse = " "),
                                 "./configure",
                                 paste(configure_args, collapse = " "))
                    if (debug) message("configure command: ", sQuote(cmd),
                                       domain = NA)
                    ## in case the configure script calls SHLIB (some do)
                    cmd <- paste("_R_SHLIB_BUILD_OBJECTS_SYMBOL_TABLES_=false",
                                 cmd)
                    ev <- c("CC", "CFLAGS", "CXX", "CXXFLAGS", "CPPFLAGS",
                            "LDFLAGS", "FC", "FCFLAGS")
                    ## skip any which are already set.
                    ev <- ev[!nzchar(Sys.getenv(ev))]
                    ev1 <- ev
                    if (!is.na(use_C))
                        ev1 <- c(sprintf(c("CC%s", "C%sFLAGS"), use_C),
                                 ## NOTE(review): the Windows branch drops
                                 ## CC/CFLAGS by name; this drops positions
                                 ## 1:2 of the already-filtered 'ev', which
                                 ## differs when CC or CFLAGS was preset in
                                 ## the environment -- confirm intended.
                                 ev[-(1:2)])
                    ev2 <- sapply(ev1, function(x)
                        system2(file.path(R.home("bin"), "R"), c("CMD", "config", x),
                                stdout = TRUE))
                    ## prefix the command with VAR=value assignments: values
                    ## queried under the ev1 names, exported as the ev names
                    ## (e.g. CC set to the value of 'R CMD config CC17')
                    ev3 <- paste0(ev, "=", shQuote(ev2))
                    ## skip any which are empty, possible for CXX)
                    ev3 <- ev3[nzchar(ev2)]
                    cmd <- paste(c(ev3, cmd), collapse = " ")
                    res <- system(cmd)
                    if (res) pkgerrmsg("configuration failed", pkg_name)
                }  else if (file.exists("configure"))
                    errmsg("'configure' exists but is not executable -- see the 'R Installation and Administration Manual'")
            }
        }


        if (more_than_libs) {
            for (f in c("NAMESPACE", "LICENSE", "LICENCE", "NEWS", "NEWS.md"))
                if (file.exists(f)) {
                    file.copy(f, instdir, TRUE)
		    Sys.chmod(file.path(instdir, f), fmode)
                }

            res <- try(.install_package_description('.', instdir, built_stamp))
            if (inherits(res, "try-error"))
                pkgerrmsg("installing package DESCRIPTION failed", pkg_name)
            if (!file.exists(namespace <- file.path(instdir, "NAMESPACE")) ) {
                if(dir.exists("R"))
                    errmsg("a 'NAMESPACE' file is required")
                else writeLines("## package without R code", namespace)
            }
        }

        ## Compile and install native code from src/ (if any real files are
        ## present beyond '.' and '..'), handling LinkingTo include flags,
        ## Makefile vs SHLIB builds, and (on Windows) multiple architectures.
        if (install_libs && dir.exists("src") &&
            length(dir("src", all.files = TRUE)) > 2L) {
            starsmsg(stars, "libs")
            if (!file.exists(file.path(R.home("include"), "R.h")))
                ## maybe even an error?  But installing Fortran-based packages should work
                warning("R include directory is empty -- perhaps need to install R-devel.rpm or similar", call. = FALSE)
            has_error <- FALSE
            ## Export -I flags for the include directories of packages named
            ## in LinkingTo via the CLINK_CPPFLAGS environment variable.
            linkTo <- pkgInfo$LinkingTo
            if (!is.null(linkTo)) {
                lpkgs <- sapply(linkTo, function(x) x[[1L]])
                ## we checked that these were all available earlier,
                ## but be cautious in case this changed.
                paths <- find.package(lpkgs, quiet = TRUE)
                bpaths <- basename(paths)
                if (length(paths)) {
                    ## check any version requirements
                    have_vers <-
                        (lengths(linkTo) > 1L) & lpkgs %in% bpaths
                    for (z in linkTo[have_vers]) {
                        p <- z[[1L]]
                        path <- paths[bpaths %in% p]
                        current <- readRDS(file.path(path, "Meta", "package.rds"))$DESCRIPTION["Version"]
                        target <- as.numeric_version(z$version)
                        if (!do.call(z$op, list(as.numeric_version(current), target)))
                            stop(gettextf("package %s %s was found, but %s %s is required by %s",
                                          sQuote(p), current, z$op,
                                          target, sQuote(pkgname)),
                                 call. = FALSE, domain = NA)
                    }
                    clink_cppflags <- paste(paste0("-I'", paths, "/include'"),
                                            collapse = " ")
                    Sys.setenv(CLINK_CPPFLAGS = clink_cppflags)
                }
            } else clink_cppflags <- ""
            libdir <- file.path(instdir, paste0("libs", rarch))
            dir.create(libdir, showWarnings = FALSE)
            if (WINDOWS) {
                owd <- setwd("src")
                ## A package-supplied Makefile.ucrt/Makefile.win takes
                ## precedence over the standard SHLIB machinery.
                if (file.exists(f <- "Makefile.ucrt") || file.exists(f <- "Makefile.win")) {

                    system_makefile <-
                        file.path(R.home(), paste0("etc", rarch), "Makeconf")
                    makefiles <- c(system_makefile,
                                   makevars_site(),
                                   f,
                                   makevars_user())

                    message(paste0("  running 'src/", f, "' ..."), domain = NA)
                    p1 <- function(...) paste(..., collapse = " ")
                    makeargs <-
                        if (!is.na(use_C))
                            sprintf(c("CC='$(CC%s)'", "CFLAGS='$(C%sFLAGS)'"), use_C)
                        else character()
                    cmd <- paste("make --no-print-directory",
                                 p1("-f", shQuote(makefiles)),
                                 p1(makeargs))
                    res <- system(cmd)
                    if (res == 0L) shlib_install(instdir, rarch)
                    else has_error <- TRUE
                } else { ## no src/Makefile.win
                    srcs <- dir(pattern = "\\.([cfmM]|cc|cpp|f90|f95|mm)$",
                                all.files = TRUE)
                    ## Determine which sub-architectures to build for: the
                    ## cross target if cross-building, else whatever is
                    ## installed under R.home()/bin.
                    archs <- if(have_cross) {
                        if (cross == "singlearch") "" else cross
                    ## else if (!force_both && !grepl(" x64 ", utils::win.version()))
                    ##     "i386"
                    } else {
                        ## see what is installed
                        ## NB, not R.home("bin")
                        f  <- dir(file.path(R.home(), "bin"))
                        f[f %in% c("i386", "x64")]
                    }
                    one_only <- !multiarch
                    ## A non-empty configure.ucrt/configure.win normally
                    ## restricts the build to the main architecture, unless
                    ## overridden by --force-biarch or DESCRIPTION's Biarch.
                    has_configure_ucrt <- file.exists("../configure.ucrt")
                    if(!one_only && (has_configure_ucrt || file.exists("../configure.win"))) {
                            one_only <- sum(nchar(readLines(
                                if(has_configure_ucrt) "../configure.ucrt" else "../configure.win",
                                warn = FALSE), "bytes")) > 0
                        if(one_only && !force_biarch) {
                            if(parse_description_field(desc, "Biarch", FALSE))
                                force_biarch <- TRUE
                            else if (length(archs) > 1L) {
                                if (has_configure_ucrt)
                                    warning("this package has a non-empty 'configure.ucrt' file,\nso building only the main architecture\n", call. = FALSE, domain = NA)
                                else
                                    warning("this package has a non-empty 'configure.win' file,\nso building only the main architecture\n", call. = FALSE, domain = NA)
                            }
                        }
                    }
                    if(force_biarch) one_only <- FALSE
                    if(one_only || length(archs) < 2L)
                        has_error <-
                            run_shlib(pkg_name, srcs, instdir, rarch, use_LTO)
                    else {
                        ## Build each architecture in its own copy of src/
                        ## (src-<arch>) so object files do not collide.
                        setwd(owd)
                        test_archs <- archs
                        for(arch in archs) {
                            message("", domain = NA) # a blank line
                            starsmsg("***", "arch - ", arch)
                            ss <- paste0("src-", arch)
                            dir.create(ss, showWarnings = FALSE)
                            file.copy(Sys.glob("src/*"), ss, recursive = TRUE)
                            ## avoid read-only files/dir such as nested .svn
			    .Call(C_dirchmod, ss, group.writable)
                            setwd(ss)

                            ra <- paste0("/", arch)
                            Sys.setenv(R_ARCH = ra, R_ARCH_BIN = ra)
                            has_error <-
                                run_shlib(pkg_name, srcs, instdir, ra, use_LTO)
                            setwd(owd)
                            if (has_error) break
                        }
                    }
                }
                setwd(owd)
            } else { # not WINDOWS
                if (file.exists("src/Makefile")) {
                    if (nzchar(rarch)) {
                        arch <- substr(rarch, 2, 1000)
                        starsmsg(stars, "arch - ", arch)
                    }
                    owd <- setwd("src")
                    system_makefile <-
                        file.path(paste0(R.home("etc"), rarch), "Makeconf")
                    makefiles <- c(system_makefile,
                                   makevars_site(),
                                   "Makefile",
                                   makevars_user())
                    makeargs <-
                        if (!is.na(use_C))
                            sprintf(c("CC='$(CC%s)'", "CFLAGS='$(C%sFLAGS)'"), use_C)
                        else character()
                    p1 <- function(...) paste(..., collapse = " ")
                    cmd <- paste(MAKE,
                                 p1("-f", shQuote(makefiles)),
                                 p1(makeargs))
                    res <- system(cmd)
                    if (res == 0L) shlib_install(instdir, rarch)
                    else has_error <- TRUE
                    setwd(owd)
                } else { ## no src/Makefile
                    owd <- setwd("src")
                    srcs <- dir(pattern = "\\.([cfmM]|cc|cpp|f90|f95|mm)$",
                                all.files = TRUE)
                    ## This allows Makevars to set OBJECTS or its own targets.
                    allfiles <- if (file.exists("Makevars")) c("Makevars", srcs) else srcs
                    ## Installed sub-architectures live under R.home("bin")/exec.
                    wd2 <- setwd(file.path(R.home("bin"), "exec"))
                    archs <- Sys.glob("*")
                    setwd(wd2)
                    if (length(allfiles)) {
                        use_LTO <-
                            if (!is.na(use_LTO)) use_LTO
                            else
                                parse_description_field(desc, "UseLTO", default = NA)
                        ## if there is an executable configure script we install only the main
                        ## sub-architecture
                        if (!multiarch || length(archs) <= 1 ||
                            file_test("-x", "../configure")) {
                            if (nzchar(rarch))
                                starsmsg("***", "arch - ",
                                         substr(rarch, 2, 1000))
                            has_error <- run_shlib(pkg_name, srcs, instdir, rarch, use_LTO)
                        } else {
                            setwd(owd)
                            test_archs <- archs
                            for(arch in archs) {
                                if (arch == "R") {
                                    ## top-level, so one arch without subdirs
                                    has_error <- run_shlib(pkg_name, srcs, instdir, "", use_LTO)
                                } else {
                                    starsmsg("***", "arch - ", arch)
                                    ss <- paste0("src-", arch)
                                    dir.create(ss, showWarnings = FALSE)
                                    file.copy(Sys.glob("src/*"), ss, recursive = TRUE)
                                    setwd(ss)
                                    ra <- paste0("/", arch)
                                    ## FIXME: do this lower down
                                    Sys.setenv(R_ARCH = ra)
                                    has_error <- run_shlib(pkg_name, srcs, instdir, ra, use_LTO)
                                    Sys.setenv(R_ARCH = rarch)
                                    setwd(owd)
                                    if (has_error) break
                                }
                            }
                        }
                    } else warning("no source files found", call. = FALSE)
                }
                setwd(owd)
            }
            if (has_error)
                pkgerrmsg("compilation failed", pkg_name)

            ## if we have subarchs, update DESCRIPTION
            fi <- file.info(Sys.glob(file.path(instdir, "libs", "*")))
            dirs <- basename(row.names(fi[fi$isdir %in% TRUE, ]))
            ## avoid DLLs installed by rogue packages
            if(WINDOWS) dirs <- dirs[dirs %in% c("i386", "x64")]
            if (length(dirs)) {
                ## Rewrite the installed DESCRIPTION with an Archs: field
                ## listing the sub-architecture directories, and refresh the
                ## cached metadata in Meta/package.rds to match.
                descfile <- file.path(instdir, "DESCRIPTION")
                olddesc <- readLines(descfile, warn = FALSE)
                olddesc <- filtergrep("^Archs:", olddesc, useBytes = TRUE)
                newdesc <- c(olddesc,
                             paste("Archs:", paste(dirs, collapse = ", "))
                             )
                writeLines(newdesc, descfile, useBytes = TRUE)
                saveRDS(.split_description(.read_description(descfile)),
                         file.path(instdir, "Meta", "package.rds"))
            }
        } else if (multiarch) {   # end of src dir
            ## No src/ to compile: still record which architectures are
            ## available so the test-load step can exercise each of them.
            if (WINDOWS) {
                wd2 <- setwd(file.path(R.home(), "bin")) # not R.home("bin")
                archs <- Sys.glob("*")
                setwd(wd2)
                test_archs <- archs[archs %in% c("i386", "x64")]
            } else {
                wd2 <- setwd(file.path(R.home("bin"), "exec"))
                test_archs <- Sys.glob("*")
                setwd(wd2)
            }
        }
        # if (WINDOWS && "x64" %in% test_archs) {
        #     ## we cannot actually test x64 unless this is 64-bit
        #    ## Windows, even if it is installed.
        #     if (!grepl(" x64 ", utils::win.version())) test_archs <- "i386"
        #}

        ## Undo any R_ARCH setting left over from the cross-build path above.
        if (have_cross) Sys.unsetenv("R_ARCH")

        ## Windows: carry install-time patches along into the installed tree.
        if (WINDOWS && dir.exists("install_time_patches"))
            file.copy("install_time_patches", instdir, recursive = TRUE)

        ## R files must start with a letter
	if (install_R && dir.exists("R") && length(dir("R"))) {
	    starsmsg(stars, "R")
	    dir.create(file.path(instdir, "R"), recursive = TRUE,
		       showWarnings = FALSE)
	    ## Collate and concatenate the R sources into instdir/R/<pkg>.
	    ## This cannot be done in a C locale
	    res <- try(.install_package_code_files(".", instdir))
	    if (inherits(res, "try-error"))
		pkgerrmsg("unable to collate and parse R files", pkg_name)

	    ## Convert R/sysdata.rda to a lazy-load DB; compression is taken
	    ## from the SysDataCompression field, defaulting to xz for files
	    ## over 1Mb and gzip otherwise.
	    if (file.exists(f <- file.path("R", "sysdata.rda"))) {
                comp <- TRUE
                ## (We set .libPaths)
                if(!is.na(lazycompress <- desc["SysDataCompression"])) {
                    comp <- switch(lazycompress,
                                   "none" = FALSE,
                                   "gzip" = TRUE,
                                   "bzip2" = 2L,
                                   "xz" = 3L,
                                   TRUE)  # default to gzip
                } else if(file.size(f) > 1e6) comp <- 3L # "xz"
		res <- try(sysdata2LazyLoadDB(f, file.path(instdir, "R"),
                                              compress = comp))
		if (inherits(res, "try-error"))
		    pkgerrmsg("unable to build sysdata DB", pkg_name)
	    }
	    if (fake) {
		## Fix up hook functions so they do not attempt to
		## (un)load missing compiled code, initialize ...
		## This does stop them being tested at all.
		if (file.exists("NAMESPACE")) {
		    cat("",
			'.onLoad <- .onAttach <- function(lib, pkg) NULL',
			'.onUnload <- function(libpaths) NULL',
			sep = "\n",
			file = file.path(instdir, "R", pkg_name), append = TRUE)
		    ## <NOTE>
		    ## Tweak fake installation to provide an 'empty'
		    ## useDynLib() for the time being.  Completely
		    ## removing the directive results in checkFF()
		    ## being too aggressive in the case where the
		    ## presence of the directive enables unambiguous
		    ## symbol resolution w/out 'PACKAGE' arguments.
		    ## However, empty directives are not really meant
		    ## to work ...

		    ## encoding issues ... so need useBytes = TRUE
		    ## FIXME: some packages have useDynLib()
		    ## spread over several lines.
		    writeLines(sub("useDynLib.*", 'useDynLib("")',
				   readLines("NAMESPACE", warn = FALSE),
				   perl = TRUE, useBytes = TRUE),
			       file.path(instdir, "NAMESPACE"))
		    ## </NOTE>
		} else {
		    cat("",
                        '.onLoad <- function (libname, pkgname) NULL',
                        '.onAttach <- function (libname, pkgname) NULL',
			'.onDetach <- function(libpath) NULL',
			'.onUnload <- function(libpath) NULL',
			'.Last.lib <- function(libpath) NULL',
			sep = "\n",
			file = file.path(instdir, "R", pkg_name), append = TRUE)
		}
	    }
	}                           # end of R

        ## data files must not be hidden: data() may ignore them
	if (install_data && dir.exists("data") && length(dir("data"))) {
	    starsmsg(stars, "data")
	    files <- Sys.glob(file.path("data", "*")) # ignores dotfiles
	    if (length(files)) {
		is <- file.path(instdir, "data")
		dir.create(is, recursive = TRUE, showWarnings = FALSE)
		file.remove(Sys.glob(file.path(instdir, "data", "*")))
		file.copy(files, is, TRUE)
		## LazyData in DESCRIPTION overrides the command-line default.
		thislazy <- parse_description_field(desc, "LazyData",
						    default = lazy_data)
		## Without lazy-loading, optionally re-save .rda/.RData files
		## with better compression.
		if (!thislazy && resave_data) {
		    paths <- Sys.glob(c(file.path(is, "*.rda"),
					file.path(is, "*.RData")))
		    if (length(paths)) {
			starsmsg(paste0(stars, "*"), "resaving rda files")
			resaveRdaFiles(paths, compress = "auto")
		    }
		}
		Sys.chmod(Sys.glob(file.path(instdir, "data", "*")), fmode)
		if (thislazy) {
		    starsmsg(paste0(stars, "*"),
                             "moving datasets to lazyload DB")
		    ## 'it is possible that data in a package will
		    ## make use of the code in the package, so ensure
		    ## the package we have just installed is on the
		    ## library path.'
		    ## (We set .libPaths)
                    lazycompress <- desc["LazyDataCompression"]
                    if(!is.na(lazycompress))
                        data_compress <- switch(lazycompress,
                                                "none" = FALSE,
                                                "gzip" = TRUE,
                                                "bzip2" = 2L,
                                                "xz" = 3L,
                                                ## perhaps error?
                                                TRUE)  # default to gzip
		    res <- try(data2LazyLoadDB(pkg_name, lib,
					       compress = data_compress))
		    if (inherits(res, "try-error"))
			pkgerrmsg("lazydata failed", pkg_name)
		}
	    } else warning("empty 'data' directory", call. = FALSE)
        }

        ## demos must start with a letter
	if (install_demo && dir.exists("demo") && length(dir("demo"))) {
	    starsmsg(stars, "demo")
	    dir.create(file.path(instdir, "demo"), recursive = TRUE,
		       showWarnings = FALSE)
	    ## Remove any demos from a previous installation before copying.
	    file.remove(Sys.glob(file.path(instdir, "demo", "*")))
	    res <- try(.install_package_demos(".", instdir))
	    if (inherits(res, "try-error"))
		pkgerrmsg("installing demos failed", pkg_name)
	    Sys.chmod(Sys.glob(file.path(instdir, "demo", "*")), fmode)
	}

        ## dotnames are ignored.
	if (install_exec && dir.exists("exec") && length(dir("exec"))) {
	    starsmsg(stars, "exec")
	    dir.create(file.path(instdir, "exec"), recursive = TRUE,
		       showWarnings = FALSE)
	    file.remove(Sys.glob(file.path(instdir, "exec", "*")))
	    files <- Sys.glob(file.path("exec", "*"))
	    if (length(files)) {
		file.copy(files, file.path(instdir, "exec"), TRUE)
                ## exec files need the executable mode bits (not on Windows).
                if (!WINDOWS)
		    Sys.chmod(Sys.glob(file.path(instdir, "exec", "*")), dmode)
	    }
	}

	## Copy the inst/ tree into the top level of the installed package,
	## honouring .Rinstignore patterns and skipping VCS directories and
	## common LaTeX/vignette by-products.
	if (install_inst && dir.exists("inst") &&
            length(dir("inst", all.files = TRUE)) > 2L) {
	    starsmsg(stars, "inst")
            i_dirs <- list.dirs("inst")[-1L] # not inst itself
            i_dirs <- filtergrep(.vc_dir_names_re, i_dirs)
            ## This ignores any restrictive permissions in the source
            ## tree, since the later .Call(C_dirchmod) call will
            ## fix the permissions.

            ## handle .Rinstignore:
            ignore_file <- ".Rinstignore"
            ignore <- if (file.exists(ignore_file)) {
                ignore <- readLines(ignore_file, warn = FALSE)
                ignore[nzchar(ignore)]
            } else character()
            for(e in ignore)
                i_dirs <- filtergrep(e, i_dirs, perl = TRUE, ignore.case = TRUE)
            lapply(gsub("^inst", quote_replacement(instdir), i_dirs),
                   function(p) dir.create(p, FALSE, TRUE)) # be paranoid
            i_files <- list.files("inst", all.files = TRUE,
                                  full.names = TRUE, recursive = TRUE)
            i_files <- filtergrep(.vc_dir_names_re, i_files)
            for(e in ignore)
                i_files <- filtergrep(e, i_files, perl = TRUE, ignore.case = TRUE)
            i_files <- i_files %w/o% c("inst/doc/Rplots.pdf",
                                       "inst/doc/Rplots.ps")
            i_files <- filtergrep("inst/doc/.*[.](log|aux|bbl|blg|dvi)$",
                                  i_files, perl = TRUE, ignore.case = TRUE)
            ## Temporary kludge
            if (!dir.exists("vignettes") && pkgname %notin% c("RCurl"))
                i_files <- filtergrep("inst/doc/.*[.](png|jpg|jpeg|gif|ps|eps)$",
                                      i_files, perl = TRUE, ignore.case = TRUE)
            i_files <- i_files %w/o% "Makefile"
            i2_files <- gsub("^inst", quote_replacement(instdir), i_files)
            file.copy(i_files, i2_files)
            if (!WINDOWS) {
                ## make executable if the source file was (for owner)
                modes <- file.mode(i_files)
                execs <- as.logical(modes & as.octmode("100"))
		Sys.chmod(i2_files[execs], dmode)
            }
            if (compact_docs) {
                ## Recompress installed PDFs; report only worthwhile savings.
                pdfs <- dir(file.path(instdir, "doc"), pattern="\\.pdf",
                            recursive = TRUE, full.names = TRUE,
                            all.files = TRUE)
                res <- compactPDF(pdfs, gs_quality = "none")
                ## print selectively
                print(res[res$old > 1e5, ])
            }
	}

        ## The R_ALWAYS_INSTALL_TESTS environment variable can force tests
        ## to be installed even without --install-tests.
        rait <- Sys.getenv("R_ALWAYS_INSTALL_TESTS", "FALSE")
        install_tests <- install_tests || config_val_to_logical(rait)

	if (install_tests && dir.exists("tests") &&
            length(dir("tests", all.files = TRUE)) > 2L) {
	    starsmsg(stars, "tests")
	    file.copy("tests", instdir, recursive = TRUE)
	}

	## LazyLoading/Compiling
	if (install_R && dir.exists("R") && length(dir("R"))) {
            ## Decide whether to byte-compile: command-line option first,
            ## then the ByteCompile field, then the R_COMPILE_PKGS
            ## environment variable as a final override.
            BC <- if (!is.na(byte_compile)) byte_compile
                  else
                      parse_description_field(desc, "ByteCompile", default = TRUE)
            rcps <- Sys.getenv("R_COMPILE_PKGS")
            rcp <- switch(rcps,
                          "TRUE"=, "true"=, "True"=, "yes"=, "Yes"= 1,
                          "FALSE"=,"false"=,"False"=, "no"=, "No" = 0,
                          as.numeric(rcps))
            if (!is.na(rcp))
                BC <- (rcp > 0)
            if (BC) {
                starsmsg(stars,
                         "byte-compile and prepare package for lazy loading")
                ## need to disable JIT
                cmd <- c("Sys.setenv(R_ENABLE_JIT = 0L)",
		    "invisible(compiler::enableJIT(0))",
                    "invisible(compiler::compilePKGS(1L))",
                    "compiler::setCompilerOptions(suppressAll = FALSE)",
                    "compiler::setCompilerOptions(suppressUndefined = TRUE)",
                    "compiler::setCompilerOptions(suppressNoSuperAssignVar = TRUE);")
            } else {
                starsmsg(stars, "preparing package for lazy loading")
                cmd <- ""
            }
            keep.source <-
                parse_description_field(desc, "KeepSource",
                                        default = keep.source)
            ## Some people change current directory in their R profile, but
            ##   at least .getRequiredPackages needs to find the DESCRIPTION
            ##   file
            cmd <- append(cmd, paste0("setwd(", quote_path(getwd()), ")"))
	    ## Something above, e.g. lazydata,  might have loaded the namespace
            cmd <- append(cmd,
                paste0("if (isNamespaceLoaded(\"",pkg_name, "\"))",
                           " unloadNamespace(\"", pkg_name, "\")"))
            cmd <- append(cmd,
                "suppressPackageStartupMessages(.getRequiredPackages(quietly = TRUE))")
            if (pkg_staged_install)
                set.install.dir <- paste0(", set.install.dir = ",
                                          quote_path(final_instdir))
            else
                set.install.dir <- ""
            ## Run makeLazyLoading in a child R process restricted to the
            ## package's declared dependencies.
            cmd <- append(cmd,
                paste0("tools:::makeLazyLoading(\"", pkg_name, "\", ",
                                              quote_path(lib), ", ",
                                "keep.source = ", keep.source, ", ",
                        "keep.parse.data = ", keep.parse.data,
                                              set.install.dir, ")"))
            cmd <- paste(cmd, collapse="\n")
            out <- R_runR_deps_only(cmd,
                                    setRlibs(LinkingTo = TRUE, quote = TRUE))
            if(length(out))
                cat(paste(c(out, ""), collapse = "\n"))
            if(length(attr(out, "status")))
		pkgerrmsg("lazy loading failed", pkg_name)
	}

	## Convert Rd sources to the installed help database, build indices,
	## optionally render other help formats, and copy man/figures.
	if (install_help) {
	    starsmsg(stars, "help")
	    if (!dir.exists("man") ||
	       !length(list_files_with_type("man", "docs")))
		cat("No man pages found in package ", sQuote(pkg_name), "\n")
	    encoding <- desc["Encoding"]
	    if (is.na(encoding)) encoding <- "unknown"
	    res <- try(.install_package_Rd_objects(".", instdir, encoding))
	    if (inherits(res, "try-error"))
		pkgerrmsg("installing Rd objects failed", pkg_name)


	    starsmsg(paste0(stars, "*"), "installing help indices")
	    ## always want HTML package index
	    .writePkgIndices(pkg_dir, instdir)
	    if (build_help) {
		## This is used as the default outputEncoding for latex
		outenc <- desc["Encoding"]
		if (is.na(outenc)) outenc <- "UTF-8"
		.convertRdfiles(pkg_dir, instdir,
				types = build_help_types,
				outenc = outenc)
	    }
	    if (dir.exists(figdir <- file.path(pkg_dir, "man", "figures"))) {
		starsmsg(paste0(stars, "*"), "copying figures")
		dir.create(destdir <- file.path(instdir, "help", "figures"))
		file.copy(Sys.glob(c(file.path(figdir, "*.png"),
		                     file.path(figdir, "*.jpg"),
		                     file.path(figdir, "*.jpeg"),
				     file.path(figdir, "*.svg"),
				     file.path(figdir, "*.pdf"))), destdir)
	    }
        }

	## pkg indices: this also tangles the vignettes (if installed)
	if (install_inst || install_demo || install_help) {
	    starsmsg(stars, "building package indices")
            ## Build indices in a child R process restricted to the
            ## package's declared dependencies.
            cmd <- c("tools:::.install_package_indices(\".\",",
                     quote_path(instdir), ")")
            cmd <- paste(cmd, collapse="\n")
            out <- R_runR_deps_only(cmd,
                                    setRlibs(LinkingTo = TRUE, quote = TRUE))
            if(length(out))
                cat(paste(c(out, ""), collapse = "\n"))
            if (length(attr(out, "status")))
		errmsg("installing package indices failed")
            if(dir.exists("vignettes")) {
                starsmsg(stars, "installing vignettes")
                enc <- desc["Encoding"]
                if (is.na(enc)) enc <- ""
		if (!fake &&
                    file_test("-f", file.path("build", "vignette.rds")))
		    installer <- .install_package_vignettes3
		else
		## handle pre-3.0.2 tarballs
		## and installation from package sources,
		## including by temp_install_pkg() during R CMD build
		    installer <- .install_package_vignettes2
                res <- try(installer(".", instdir, enc))
	    if (inherits(res, "try-error"))
		errmsg("installing vignettes failed")
            }
	}

	## Install a dump of the parsed NAMESPACE file
        ## For a fake install, use the modified NAMESPACE file we installed
	if (install_R && file.exists("NAMESPACE")) {
	    res <- try(.install_package_namespace_info(if(fake) instdir else ".", instdir))
	    if (inherits(res, "try-error"))
		errmsg("installing namespace metadata failed")
	}

        ## Honour --clean: run the package's cleanup machinery.
        if (clean) run_clean()

        ## Try to load the installed package; run in a separate R process so
        ## that a crash there cannot take down this installer process.
        ## 'extra_cmd' is additional R code to run after the load test.
        do_test_load <- function(extra_cmd = NULL) {
            ## FIXME: maybe the quoting as 'lib' is not quite good enough
            ## On a Unix-alike this calls system(input=)
            ## and that uses a temporary file and redirection.
            load_cmd <- paste0("tools:::.test_load_package('", pkg_name,
                               "', ", quote_path(lib), ")")
            if (!is.null(extra_cmd))
                load_cmd <- paste0(load_cmd, "\n", extra_cmd)
            ## R_LIBS was set already, but Rprofile/Renviron may change it
            tlimit <- get_timeout(Sys.getenv("_R_INSTALL_TEST_LOAD_ELAPSED_TIMEOUT_"))
            if (length(test_archs) <= 1L) {
                ## Single architecture: one load attempt, fatal on failure.
                res <- R_runR_deps_only(load_cmd,
                    deps_only_env = setRlibs(lib0, self = TRUE, quote = TRUE),
                    timeout = tlimit)
                if (length(res))
                    cat(paste(c(res, ""), collapse = "\n"))
                if (length(attr(res, "status")))
                    errmsg("loading failed") # does not return
            } else {
                ## Multiple architectures: try each one, collect failures,
                ## and report them all at once afterwards.
                failed_archs <- character()
                for (arch in test_archs) {
                    starsmsg("***", "arch - ", arch)
                    res <- R_runR_deps_only(load_cmd,
                        deps_only_env = setRlibs(lib0, self = TRUE, quote = TRUE),
                        arch = arch, timeout = tlimit, multiarch = TRUE)
                    if (length(attr(res, "status")))
                        failed_archs <- c(failed_archs, arch)
                    if (length(res))
                        cat(paste(c(res, ""), collapse = "\n"))
                }
                if (length(failed_archs)) {
                    errmsg(paste("loading failed for",
                                 paste(sQuote(failed_archs),
                                       collapse = ", "))) # does not return
                }
            }
        }

        ## Test-load the package now (skipped when cross-building, since a
        ## cross-compiled package cannot be loaded here).
        if (test_load && !have_cross) {
            if (pkg_staged_install)
	        starsmsg(stars,
                    "testing if installed package can be loaded from temporary location")
            else
	        starsmsg(stars, "testing if installed package can be loaded")
            do_test_load()
        }

        ## Staged install: move the package from its temporary installation
        ## directory to its final location, then re-test loading from there
        ## and check for a hard-coded temporary path.
        if (pkg_staged_install) {
            if (WINDOWS) {
                unlink(final_instdir, recursive = TRUE) # needed for file.rename
                if (!file.rename(instdir, final_instdir)) {
                    ## rename can fail across volumes; fall back to copying.
                    if (dir.exists(instdir) && !dir.exists(final_instdir)) {
                        message("WARNING: moving package to final location failed, copying instead")
                        ret <- file.copy(instdir, dirname(final_instdir),
                                         recursive = TRUE, copy.date = TRUE)
                        if (any(!ret))
                            errmsg("   copying to final location failed")
                        unlink(instdir, recursive = TRUE)
                    } else
                        errmsg("   moving to final location failed")
                }
            } else {
                patch_rpaths()

                unlink(final_instdir, recursive = TRUE)
		  # needed for mv on some file systems, even though
		  # according to POSIX mv should work when the target is an
		  # empty directory
                owd <- setwd(startdir)
                status <- system(paste("mv -f",
                                       shQuote(instdir),
                                       shQuote(dirname(final_instdir))))
                if (status) errmsg("  moving to final location failed")
                setwd(owd)
            }
            ## From here on refer to the final location.
            instdir <- final_instdir
            lib <- final_lib
            Sys.setenv(R_PACKAGE_DIR = final_rpackagedir)
            Sys.setenv(R_LIBS = final_rlibs)
	    .libPaths(final_libpaths)

            if (test_load) {
                starsmsg(stars,
                    "testing if installed package can be loaded from final location")

                # The test for hard-coded installation path is done together
                # with test loading to save time. The test is intentionally
                # run on a loaded package, to allow for paths to be fixed in
                # .onLoad and loadNamespace().

                ## Serialize the loaded namespace to a temp file and scan the
                ## bytes for the "00new" staging-directory marker.
                serf <- tempfile()
                cmd <- paste0("f <- base::file(", quote_path(serf),
                              ", \"wb\")")
                cmd <- append(cmd,
                paste0("base::invisible(base::suppressWarnings(base::serialize(",
                    "base::as.list(base::getNamespace(\"", pkg_name, "\"), all.names=TRUE), f)))"))
                cmd <- append(cmd, "base::close(f)")
                do_test_load(extra_cmd = paste(cmd, collapse = "\n"))
                starsmsg(stars,
                    "testing if installed package keeps a record of temporary installation path")
                r <- readBin(serf, "raw", n=file.size(serf))
                unlink(serf)
                if (length(grepRaw("00new", r, fixed = TRUE, all = FALSE,
                                   value = FALSE)))
                    errmsg("hard-coded installation path: ",
                           "please report to the package maintainer and use ",
                           sQuote("--no-staged-install"))
            }
        }

        if (do_strip_lib &&
            nzchar(strip_cmd <- Sys.getenv("R_STRIP_STATIC_LIB")) &&
            length(a_s <- Sys.glob(file.path(file.path(lib, curPkg),
                                             "lib", "*.a")))) {
            if(length(a_s) > 1L)
                starsmsg(stars, "stripping static libraries under lib")
            else
                starsmsg(stars, "stripping static library under lib")
            system(paste(c(strip_cmd, shQuote(a_s)), collapse = " "))
        }
        if (do_strip_lib &&
            nzchar(strip_cmd <- Sys.getenv("R_STRIP_SHARED_LIB")) &&
            length(so_s <- Sys.glob(file.path(file.path(lib, curPkg), "lib",
                                              paste0("*", SHLIB_EXT))))) {
            if(length(so_s) > 1L)
                starsmsg(stars, "stripping dynamic libraries under lib")
            else
                starsmsg(stars, "stripping dynamic library under lib")
            system(paste(c(strip_cmd, shQuote(so_s)), collapse = " "))
        }
    } ## do_install_source

    options(showErrorCalls = FALSE)
    pkgs <- character()
    if (is.null(args)) {
        args <- commandArgs(TRUE)
        ## it seems that splits on spaces, so try harder.
        args <- paste(args, collapse = " ")
        args <- strsplit(args,'nextArg', fixed = TRUE)[[1L]][-1L]
    }
    args0 <- args

    startdir <- getwd()
    if (is.null(startdir))
        stop("current working directory cannot be ascertained")
    lib <- lib0 <- ""
    clean <- FALSE
    preclean <- FALSE
    debug <- FALSE
    static_html <- nzchar(system.file("html", "mean.html", package="base"))
    build_html <- static_html
    build_latex <- FALSE
    build_example <- FALSE
    use_configure <- TRUE
    configure_args <- character()
    configure_vars <- character()
    fake <- FALSE
##    lazy <- TRUE
    lazy_data <- FALSE
    byte_compile <- NA # means take from DESCRIPTION file.
    staged_install <- NA # means not given by command line argument
    ## Next is not very useful unless R CMD INSTALL reads a startup file
    lock <- getOption("install.lock", NA) # set for overall or per-package
    pkglock <- FALSE  # set for per-package locking
    libs_only <- FALSE
    tar_up <- zip_up <- FALSE
    shargs <- character()
    multiarch <- TRUE
    force_biarch <- FALSE
    force_both <- FALSE
    test_load <- TRUE
    merge <- FALSE
    dsym <- nzchar(Sys.getenv("PKG_MAKE_DSYM"))
    get_user_libPaths <- FALSE
    data_compress <- TRUE # FALSE (none), TRUE (gzip), 2 (bzip2), 3 (xz)
    resave_data <- FALSE
    compact_docs <- FALSE
    keep.source <- getOption("keep.source.pkgs")
    keep.parse.data <- getOption("keep.parse.data.pkgs")
    use_LTO <- NA # means take from DESCRIPTION file.
    use_C <- NA # means take from DESCRIPTION file.
    built_stamp <- character()

    install_libs <- TRUE
    install_R <- TRUE
    install_data <- TRUE
    install_demo <- TRUE
    install_exec <- TRUE
    install_inst <- TRUE
    install_help <- TRUE
    install_tests <- FALSE
    do_strip <- do_strip_lib <- FALSE

    while(length(args)) {
        a <- args[1L]
        if (a %in% c("-h", "--help")) {
            Usage()
            do_exit(0)
        }
        else if (a %in% c("-v", "--version")) {
            cat("R add-on package installer: ",
                R.version[["major"]], ".",  R.version[["minor"]],
                " (r", R.version[["svn rev"]], ")\n", sep = "")
            cat("",
                .R_copyright_msg(2000),
                "This is free software; see the GNU General Public License version 2",
                "or later for copying conditions.  There is NO warranty.",
                sep = "\n")
	    do_exit(0)
        } else if (a %in% c("-c", "--clean")) {
            clean <- TRUE
            shargs <- c(shargs, "--clean")
        } else if (a == "--preclean") {
            preclean <- TRUE
            shargs <- c(shargs, "--preclean")
        } else if (a %in% c("-d", "--debug")) {
            debug <- TRUE
        } else if (a == "--no-configure") {
            use_configure <- FALSE
        } else if (a == "--no-docs") {
            build_html <- build_latex <- build_example <- FALSE
        } else if (a == "--no-html") {
            build_html <- FALSE
        } else if (a == "--html") {
            build_html <- TRUE
        } else if (a == "--latex") {
            build_latex <- TRUE
        } else if (a == "--example") {
            build_example <- TRUE
        } else if (a == "-l") {
            if (length(args) >= 2L) {lib <- args[2L]; args <- args[-1L]}
            else stop("-l option without value", call. = FALSE)
        } else if (substr(a, 1, 10) == "--library=") {
            lib <- substr(a, 11, 1000)
        } else if (substr(a, 1, 17) == "--configure-args=") {
            configure_args <- c(configure_args, substr(a, 18, 1000))
        } else if (substr(a, 1, 17) == "--configure-vars=") {
            configure_vars <- c(configure_vars, substr(a, 18, 1000))
        } else if (a == "--fake") {
            fake <- TRUE
        } else if (a == "--no-lock") {
            lock <- pkglock <- FALSE
        } else if (a == "--lock") {
            lock <- TRUE; pkglock <- FALSE
        } else if (a == "--pkglock") {
            lock <- pkglock <- TRUE
        } else if (a == "--libs-only") {
            libs_only <- TRUE
        } else if (a == "--no-multiarch") {
            multiarch <- FALSE
        ## } else if (a == "--force-biarch") {
        ##     force_biarch <- TRUE
        ## } else if (a == "--compile-both") {
        ##     force_both <- TRUE
        } else if (a == "--maybe-get-user-libPaths") {
            get_user_libPaths <- TRUE
        } else if (a == "--build") {
            if (WINDOWS) zip_up <- TRUE else tar_up <- TRUE
        } else if (substr(a, 1, 16) == "--data-compress=") {
            dc <- substr(a, 17, 1000)
            dc <- match.arg(dc, c("none", "gzip", "bzip2", "xz"))
            data_compress <- switch(dc,
                                    "none" = FALSE,
                                    "gzip" = TRUE,
                                    "bzip2" = 2,
                                    "xz" = 3)
        } else if (a == "--resave-data") {
            resave_data <- TRUE
        } else if (a == "--install-tests") {
            install_tests <- TRUE
        } else if (a == "--no-inst") {
            install_inst <- FALSE
        } else if (a == "--no-R") {
            install_R <- FALSE
        } else if (a == "--no-libs") {
            install_libs <- FALSE
        } else if (a == "--no-data") {
            install_data <- FALSE
        } else if (a == "--no-demo") {
            install_demo <- FALSE
        } else if (a == "--no-exec") {
            install_exec <- FALSE
        } else if (a == "--no-help") {
            install_help <- FALSE
        } else if (a == "--no-test-load") {
            test_load <- FALSE
        } else if (a == "--no-clean-on-error") {
            clean_on_error  <- FALSE
        } else if (a == "--merge-multiarch") {
            merge <- TRUE
        } else if (a == "--compact-docs") {
            compact_docs <- TRUE
        } else if (a == "--with-keep.source") {
            keep.source <- TRUE
        } else if (a == "--without-keep.source") {
            keep.source <- FALSE
        } else if (a == "--with-keep.parse.data") {
            keep.parse.data <- TRUE
        } else if (a == "--without-keep.parse.data") {
            keep.parse.data <- FALSE
        } else if (a == "--byte-compile") {
            byte_compile <- TRUE
        } else if (a == "--no-byte-compile") {
            byte_compile <- FALSE
        } else if (a == "--use-LTO") {
            use_LTO <- TRUE
        } else if (a == "--no-use-LTO") {
            use_LTO <- FALSE
        } else if (a == "--use-C17") {
            use_C <- 17
        } else if (a == "--use-C23") {
            use_C <- 23
        } else if (a == "--use-C90") {
            use_C <- 90
        } else if (a == "--use-C99") {
            use_C <- 99
        } else if (a == "--staged-install") {
            staged_install <- TRUE
        } else if (a == "--no-staged-install") {
            staged_install <- FALSE
        } else if (a == "--dsym") {
            dsym <- TRUE
        } else if (a == "--strip") {
            do_strip <- TRUE
        } else if (a == "--strip-lib") {
            do_strip_lib <- TRUE
        } else if (substr(a, 1, 18) == "--built-timestamp=") {
            built_stamp <- substr(a, 19, 1000)
        } else if (startsWith(a, "-")) {
            message("Warning: unknown option ", sQuote(a), domain = NA)
        } else pkgs <- c(pkgs, a)
        args <- args[-1L]
    }

    ## Create the temporary build directory: either a uniquely-named
    ## subdirectory of the user-supplied directory (when the caller asked
    ## to keep temporary files), or a fresh session tempfile.
    if (keep.tmpdir) {
        ## Try up to 'ntries' random alphanumeric names under 'prefix';
        ## a FALSE from dir.create() signals a name collision (or a
        ## permission problem), so retry with a new name.
        make_tmpdir <- function(prefix, nchars = 8, ntries = 100) {
            for (i in seq_len(ntries)) {
                name <- paste(sample(c(0:9, letters, LETTERS), nchars,
                                     replace = TRUE),
                              collapse = "")
                path <- file.path(prefix, name)
                ## use TRUE, not T: T is an ordinary binding and can be masked
                if (dir.create(path, showWarnings = FALSE, recursive = TRUE))
                    return(path)
            }
            stop("cannot create unique directory for build")
        }
        tmpdir <- make_tmpdir(user.tmpdir)
    } else {
        tmpdir <- tempfile("R.INSTALL")
        if (!dir.create(tmpdir))
            stop("cannot create temporary directory")
    }

    if (merge) {
        if (length(pkgs) != 1L || !file_test("-f", pkgs))
            stop("ERROR: '--merge-multiarch' applies only to a single tarball",
                 call. = FALSE)
        if (WINDOWS) {
            f  <- dir(file.path(R.home(), "bin"))
            archs <- f[f %in% c("i386", "x64")]
            if (length(archs) > 1L) {
                args <- args0 %w/o% c("--merge-multiarch", "--build")
                ## this will report '* DONE (foo)' if it works, which
                ## R CMD check treats as an indication of success.
                ## so use a backdoor to suppress it.
                Sys.setenv("_R_INSTALL_NO_DONE_" = "yes")
                for (arch in archs) {
                    cmd <- c(shQuote(file.path(R.home(), "bin", arch,
                                               "Rcmd.exe")),
                             "INSTALL", shQuote(args), "--no-multiarch")
                    if (arch == "x64") {
                        ## this will suppress stars message "using
                        ## non-staged installation", which could otherwise
                        ## be turned into R CMD check note
                        Sys.setenv("_R_INSTALL_SUPPRESS_NO_STAGED_MESSAGE_" = "yes")
                        cmd <- c(cmd, "--libs-only --no-staged-install",
                                 if(zip_up) "--build")
                        Sys.unsetenv("_R_INSTALL_NO_DONE_")
                    }
                    cmd <- paste(cmd, collapse = " ")
                    if (debug) message("about to run ", cmd, domain = NA)
                    message("\n", "install for ", arch, "\n", domain = NA)
                    res <- system(cmd)
                    if (arch == "x64")
                        Sys.unsetenv("_R_INSTALL_SUPPRESS_NO_STAGED_MESSAGE_")
                    if(res) break
                }
            }
        } else {
            archs  <- dir(file.path(R.home("bin"), "exec"))
            if (length(archs) > 1L) {
                args <- args0 %w/o% c("--merge-multiarch", "--build")
                ## this will report '* DONE (foo)' if it works, which
                ## R CMD check treats as an indication of success.
                ## so use a backdoor to suppress it.
                Sys.setenv("_R_INSTALL_NO_DONE_" = "yes")
                last <- archs[length(archs)]
                for (arch in archs) {
                    cmd <- c(shQuote(file.path(R.home("bin"), "R")),
                             "--arch", arch, "CMD",
                             "INSTALL", shQuote(args), "--no-multiarch")
                    if (arch != archs[1L]) {
                        ## this will suppress stars message "using
                        ## non-staged installation", which could otherwise
                        ## be turned into R CMD check note
                        Sys.setenv("_R_INSTALL_SUPPRESS_NO_STAGED_MESSAGE_" = "yes")
                        cmd <- c(cmd, "--libs-only --no-staged-install")
                    }
                    if (arch == last) {
                        Sys.unsetenv("_R_INSTALL_NO_DONE_")
                        if(tar_up) cmd <- c(cmd, "--build")
                    }
                    cmd <- paste(cmd, collapse = " ")
                    if (debug) message("about to run ", cmd, domain = NA)
                    message("\n", "install for ", arch, "\n", domain = NA)
                    res <- system(cmd)
                    if (arch != archs[1L])
                        Sys.unsetenv("_R_INSTALL_SUPPRESS_NO_STAGED_MESSAGE_")
                    if(res) break
                }
            }
        }
        if (length(archs) > 1L) {
            if (res) do_exit_on_error()
            do_cleanup()
            on.exit()
            return(invisible())
        }
        message("only one architecture so ignoring '--merge-multiarch'",
                domain = NA)
    }

    ## now unpack tarballs and do some basic checks
    allpkgs <- character()
    for(pkg in pkgs) {
        if (debug) message("processing ", sQuote(pkg), domain = NA)
        if (file_test("-f", pkg)) {
            if (WINDOWS && endsWith(pkg, ".zip")) {
                if (debug) message("a zip file", domain = NA)
                pkgname <- basename(pkg)
                pkgname <- sub("\\.zip$", "", pkgname)
                pkgname <- sub("_[0-9.-]+$", "", pkgname)
                allpkgs <- c(allpkgs, pkg)
                next
            }
            if (debug) message("a file", domain = NA)
            of <- dir(tmpdir, full.names = TRUE)
            ## force the use of internal untar unless over-ridden
            ## so e.g. .tar.xz works everywhere
            if (utils::untar(pkg, exdir = tmpdir,
                             tar =  Sys.getenv("R_INSTALL_TAR", "internal")))
                errmsg("error unpacking tarball")
            ## Now see what we got
            nf <- dir(tmpdir, full.names = TRUE)
            new <- nf %w/o% of
            if (!length(new))
                errmsg("cannot extract package from ", sQuote(pkg))
            if (length(new) > 1L)
                errmsg("extracted multiple files from ", sQuote(pkg))
            if (dir.exists(new)) pkgname <- basename(new)
            else errmsg("cannot extract package from ", sQuote(pkg))
            if (file.exists(file.path(tmpdir, pkgname, "DESCRIPTION"))) {
                allpkgs <- c(allpkgs, file.path(tmpdir, pkgname))
            } else errmsg("cannot extract package from ", sQuote(pkg))
        } else if (file.exists(file.path(pkg, "DESCRIPTION"))) {
            if (debug) message("a directory", domain = NA)
            pkgname <- basename(pkg)
            allpkgs <- c(allpkgs, fullpath(pkg))
        } else {
            warning("invalid package ", sQuote(pkg), call. = FALSE)
            next
        }
    }

    if (!length(allpkgs))
        stop("ERROR: no packages specified", call.=FALSE)


    if (!nzchar(lib)) {
        lib <- if (get_user_libPaths) { ## need .libPaths()[1L] *after* the site- and user-initialization
	    system(paste(shQuote(file.path(R.home("bin"), "Rscript")),
                         "-e 'cat(.libPaths()[1L])'"),
                   intern = TRUE)
        }
        else .libPaths()[1L]
        starsmsg(stars, "installing to library ", sQuote(lib))
    } else {
        lib0 <- lib <- path.expand(lib)
        ## lib is allowed to be a relative path.
        ## should be OK below, but be sure.
        cwd <- tryCatch(setwd(lib), error = function(e)
                        stop(gettextf("ERROR: cannot cd to directory %s", sQuote(lib)),
                             call. = FALSE, domain = NA))
        lib <- getwd()
        setwd(cwd)
    }
    ok <- dir.exists(lib)
    if (ok) {
        if (WINDOWS) {
            ## file.access is unreliable on Windows
            ## the only known reliable way is to try it
            fn <- file.path(lib, paste0("_test_dir_", Sys.getpid()))
            unlink(fn, recursive = TRUE) # precaution
            res <- try(dir.create(fn, showWarnings = FALSE))
            if (inherits(res, "try-error") || !res) ok <- FALSE
            else unlink(fn, recursive = TRUE)
        } else ok <- file.access(lib, 2L) == 0L
    }
    if (!ok)
        stop("ERROR: no permission to install to directory ",
             sQuote(lib), call. = FALSE)

    group.writable <- if(WINDOWS) FALSE else {
	## install package group-writable  iff  in group-writable lib
        d <-  as.octmode("020")
	(file.mode(lib) & d) == d ## TRUE  iff  g-bit is "w"
    }

    if (libs_only) {
	install_R <- FALSE
	install_data <- FALSE
	install_demo <- FALSE
	install_exec <- FALSE
	install_inst <- FALSE
	install_help <- FALSE
    }
    more_than_libs <- !libs_only
    ## if(!WINDOWS && !more_than_libs) test_load <- FALSE


    ## Create 'lockdir', aborting the installation (exit status 3) when it
    ## already exists or cannot be created.  A pre-existing lock directory
    ## usually means a previous installation was interrupted.
    mk_lockdir <- function(lockdir)
    {
        ## shared failure path: report, clean the temp dir, exit with status 3
        bail <- function(...) {
            message(..., domain = NA)
            do_cleanup_tmpdir()
            do_exit(status = 3)
        }
        if (file.exists(lockdir))
            bail("ERROR: failed to lock directory ", sQuote(lib),
                 " for modifying\nTry removing ", sQuote(lockdir))
        dir.create(lockdir, recursive = TRUE)
        if (!dir.exists(lockdir))
            bail("ERROR: failed to create lock directory ", sQuote(lockdir))
        if (debug) starsmsg(stars, "created lock directory ", sQuote(lockdir))
    }

    if (is.na(lock)) {
        lock <- TRUE
        pkglock <- length(allpkgs) == 1L
    }
    if (lock && !pkglock) {
        lockdir <- file.path(lib, "00LOCK")
        mk_lockdir(lockdir)
    }
    if (is.na(staged_install)) {
        # environment variable intended as temporary
        rsi <- Sys.getenv("R_INSTALL_STAGED")
        rsi <- switch(rsi,
                      "TRUE"=, "true"=, "True"=, "yes"=, "Yes"= 1,
                      "FALSE"=,"false"=,"False"=, "no"=, "No" = 0,
                      as.numeric(rsi))
        if (!is.na(rsi))
            staged_install <- (rsi > 0)
        else
            staged_install <- TRUE
    }
    if  ((tar_up || zip_up) && fake)
        stop("building a fake installation is disallowed")

    if (fake) {
        use_configure <- FALSE
        if("--html" %notin% args0)
            build_html <- FALSE
        build_latex <- FALSE
        build_example <- FALSE
	install_libs <- FALSE
	install_demo <- FALSE
	install_exec <- FALSE
#	install_inst <- FALSE
    }

    build_help_types <- character()
    if (build_html) build_help_types <- c(build_help_types, "html")
    if (build_latex) build_help_types <- c(build_help_types, "latex")
    if (build_example) build_help_types <- c(build_help_types, "example")
    build_help <- length(build_help_types) > 0L

    if (debug)
        starsmsg(stars, "build_help_types=",
                 paste(build_help_types, collapse = " "))

    if (debug)
        starsmsg(stars, "DBG: 'R CMD INSTALL' now doing do_install()")

    for(pkg in allpkgs) {
        if (pkglock) {
            lockdir <- file.path(lib, paste0("00LOCK-", basename(pkg)))
            mk_lockdir(lockdir)
        }
        do_install(pkg)
    }
    do_cleanup()
    on.exit()
    invisible()
} ## .install_packages()

## for R CMD SHLIB on all platforms
.SHLIB <- function()
{
    ## Run the shared-object build driver on the command-line arguments and
    ## quit with exit status 1 on any non-zero result, 0 otherwise.
    res <- .shlib_internal(commandArgs(TRUE))
    q("no", status = if (res != 0) 1L else 0L, runLast = FALSE)
}

## for .SHLIB and R CMD INSTALL on all platforms
.shlib_internal <- function(args)
{
    ## Print the 'R CMD SHLIB' usage/help message to stdout.
    Usage <- function()
        cat("Usage: R CMD SHLIB [options] files | linker options",
            "",
            "Build a shared object for dynamic loading from the specified source or",
            "object files (which are automagically made from their sources) or",
            "linker options.  If not given via '--output', the name for the shared",
            "object is determined from the first source or object file.",
            "",
            "Options:",
            "  -h, --help		print short help message and exit",
            "  -v, --version		print version info and exit",
            "  -o, --output=LIB	use LIB as (full) name for the built library",
            "  -c, --clean		remove files created during compilation",
            "  --preclean		remove files created during a previous run",
            "  -n, --dry-run		dry run, showing commands that would be used",
            "  --use-LTO		use Link-Time Optimization",
            "  --no-use-LTO		do not use Link-Time Optimization",
            "  --use-C17	        use a C standard at most C17 (also C90, C99)",
            "  --use-C23	        use a C standard at least C23",
            "",
            "Windows only:",
            "  -d, --debug		build a debug DLL",
            "",
            "Report bugs at <https://bugs.R-project.org>.",
            sep = "\n")

    ## FIXME shQuote here?
    ## Collapse all arguments into a single space-separated string (used to
    ## assemble make(1) command lines below).
    p1 <- function(...) paste(..., collapse = " ")

    WINDOWS <- .Platform$OS.type == "windows"
    cross <- Sys.getenv("R_CROSS_BUILD")
    if(nzchar(cross)) {
        if(!cross %in% c("x64", "singlearch"))
            stop("invalid value ", sQuote(cross), " for R_CROSS_BUILD")
        WINDOWS <- TRUE
        Sys.setenv(R_ARCH = if (cross == "singlearch") "" else paste0("/", cross))
    }

    if (!WINDOWS) {
        mconf <- readLines(file.path(paste0(R.home("etc"), Sys.getenv("R_ARCH")),
                                     "Makeconf"))
        SHLIB_EXT <- sub(".*= ", "", grep("^SHLIB_EXT", mconf, value = TRUE,
                                          perl = TRUE))
        SHLIB_LIBADD <- sub(".*= ", "", grep("^SHLIB_LIBADD", mconf,
                                             value = TRUE, perl = TRUE))
        MAKE <- Sys.getenv("MAKE")
        rarch <- Sys.getenv("R_ARCH")
    } else {
        rhome <- chartr("\\", "/", R.home())
        Sys.setenv(R_HOME = rhome)
        SHLIB_EXT <- ".dll"
        SHLIB_LIBADD <- ""
        MAKE <- "make"
        ## Formerly for winshlib.mk to pick up Makeconf
        rarch <- Sys.getenv("R_ARCH", NA_character_)
        if(is.na(rarch)) {
            if (nzchar(.Platform$r_arch)) {
                rarch <- paste0("/", .Platform$r_arch)
                Sys.setenv(R_ARCH = rarch)
            } else rarch <- ""
        }
    }

    OBJ_EXT <- ".o" # all currrent compilers, but not some on Windows

    ## The order of inclusion of Makefiles on a Unix-alike is
    ## package's src/Makevars
    ## etc/Makeconf
    ## site Makevars
    ## share/make/shlib.mk
    ## user Makevars
    ## and similarly elsewhere
    objs <- character()
    shlib <- ""
    makefiles <-
        c(file.path(paste0(R.home("etc"), rarch), "Makeconf"),
          makevars_site(),
          file.path(R.home("share"), "make",
                    if (WINDOWS) "winshlib.mk" else "shlib.mk"))
    shlib_libadd <- if (nzchar(SHLIB_LIBADD)) SHLIB_LIBADD else character()
    with_c <- FALSE
    with_cxx <- FALSE
    with_f77 <- FALSE
    with_f9x <- FALSE
    with_objc <- FALSE
    use_cxxstd <- NULL
    use_fc_link <- FALSE
    use_lto <- NA
    use_C <- ""
    pkg_libs <- character()
    clean <- FALSE
    preclean <- FALSE
    dry_run <- FALSE
    debug <- FALSE

    while(length(args)) {
        a <- args[1L]
        if (a %in% c("-h", "--help")) {
            Usage()
            return(0L)
        }
        else if (a %in% c("-v", "--version")) {
            cat("R shared object builder: ",
                R.version[["major"]], ".",  R.version[["minor"]],
                " (r", R.version[["svn rev"]], ")\n", sep = "")
            cat("",
                .R_copyright_msg(2000),
                "This is free software; see the GNU General Public License version 2",
                "or later for copying conditions.  There is NO warranty.",
                sep = "\n")
            return(0L)
        } else if (a %in% c("-n", "--dry-run")) {
            dry_run <- TRUE
        } else if (a %in% c("-d", "--debug")) {
            debug <- TRUE
        } else if (a %in% c("-c", "--clean")) {
            clean <- TRUE
        } else if (a == "--preclean") {
            preclean <- TRUE
        } else if (a == "--use-LTO") {
            use_lto <- TRUE
        } else if (a == "--no-use-LTO") {
            use_lto <- FALSE
        } else if (a == "--use-C17") {
            use_C <- 17
        } else if (a == "--use-C23") {
            use_C <- 23
        } else if (a == "--use-C90") {
            use_C <- 90
        } else if (a == "--use-C99") {
            use_C <- 99
        } else if (a == "-o") {
            if (length(args) >= 2L) {shlib <- args[2L]; args <- args[-1L]}
            else stop("-o option without value", call. = FALSE)
        } else if (substr(a, 1, 9) == "--output=") {
            shlib <- substr(a, 10, 1000)
        } else {
            ## a source file or something like -Ldir -lfoo
            base <- sub("\\.[[:alnum:]]*$", "", a)
            ext <- sub(paste0(base, "."),  "", a, fixed = TRUE)
            nobj <- ""
            if (nzchar(ext)) {
                ## This will not work if there are no source files in
                ## the top-level directory
                if (ext %in% c("cc", "cpp")) {
                    with_cxx <- TRUE
                    nobj <- base
                } else if (ext == "m") {
                    with_objc <- TRUE
                    nobj <- base
                } else if (ext %in% c("mm", "M")) {
                    ## ObjC++ implies ObjC because we need ObjC runtime
                    ## ObjC++ implies C++ because we use C++ linker
                    with_objc <- with_cxx <- TRUE
                    nobj <- base
                } else if (ext == "f") {
                    with_f77 <- TRUE
                    nobj <- base
                } else if (ext %in% c("f90", "f95")) {
                    with_f9x <- TRUE
                    nobj <- base
                } else if (ext == "c") {
                    with_c <- TRUE
                    nobj <- base
                } else if (ext == "o") {
                    nobj <- base
                }
                if (nzchar(nobj) && !nzchar(shlib))
                    shlib <- paste0(nobj, SHLIB_EXT)
            }
            if (nzchar(nobj)) objs <- c(objs, nobj)
            else pkg_libs <- c(pkg_libs, a)
        }
        args <- args[-1L]
    }

    if (length(objs)) objs <- paste0(objs, OBJ_EXT, collapse = " ")

    makefiles <- c(makefiles, makevars_user())

    makeobjs <- paste0("OBJECTS=", shQuote(objs))
    if (WINDOWS && (file.exists(fn <- "Makevars.ucrt") || file.exists(fn <- "Makevars.win"))) {
        makefiles <- c(fn, makefiles)
        lines <- readLines(fn, warn = FALSE)
        if (length(grep("^OBJECTS *=", lines, perl=TRUE, useBytes = TRUE)))
            makeobjs <- ""
        if (length(ll <- grep("^CXX_STD *=", lines, perl = TRUE,
                              value = TRUE, useBytes = TRUE)) == 1) {
            val <- gsub("^CXX_STD *= *CXX", "", ll)
            val <- gsub(" +$", "", val)
            if (val %in% cxx_standards) {
                use_cxxstd <- val
                with_cxx <- TRUE
            }
        }
        if (any(grepl("^USE_FC_TO_LINK", lines, perl=TRUE, useBytes = TRUE)))
            use_fc_link <- TRUE
    } else if (file.exists("Makevars")) {
        makefiles <- c("Makevars", makefiles)
        lines <- readLines("Makevars", warn = FALSE)
        if (length(grep("^OBJECTS *=", lines, perl = TRUE, useBytes = TRUE)))
            makeobjs <- ""
        if (length(ll <- grep("^CXX_STD *=", lines, perl = TRUE,
                              value = TRUE, useBytes = TRUE)) == 1) {
            val <- gsub("^CXX_STD *= *CXX", "", ll)
            val <- gsub(" +$", "", val)
            if (val %in% cxx_standards) {
                use_cxxstd <- val
                with_cxx <- TRUE
            }
        }
        if (any(grepl("^USE_FC_TO_LINK", lines, perl=TRUE, useBytes = TRUE)))
            use_fc_link <- TRUE
    }
    if (is.null(use_cxxstd)) {
        for (i in cxx_standards) {
            if (nzchar(Sys.getenv(paste0("USE_CXX", i)))) {
                use_cxxstd <- i
                break
            }
        }
    }
    if (is.null(use_cxxstd)) {
        val <- Sys.getenv("R_PKG_CXX_STD")
        if (val %in% cxx_standards) {
            use_cxxstd <- val
        }
    }

    if (with_cxx) {
        ## Return TRUE when some makefile (searched from last to first)
        ## assigns a non-empty value to CXX<cxxstd>; within a file the last
        ## assignment wins, mirroring make(1) semantics.
        checkCXX <- function(cxxstd) {
            pat <- paste0("^CXX", cxxstd, " *= *")
            for (mf in rev(makefiles)) {
                hits <- grep(pat, readLines(mf, warn = FALSE),
                             perl = TRUE, value = TRUE, useBytes = TRUE)
                if (length(hits))
                    return(nzchar(gsub(pat, "", hits[length(hits)])))
            }
            FALSE
        }
        if (!is.null(use_cxxstd)) {
            if (use_cxxstd == "98") {
                stop("C++98 standard requested but unsupported",
                     call. = FALSE, domain = NA)
            }
            if (!checkCXX(use_cxxstd)) {
                stop(paste0("C++", use_cxxstd, " standard requested but CXX",
                            use_cxxstd, " is not defined"),
                     call. = FALSE, domain = NA)
            }
        }
    }

    makeargs <- paste0("SHLIB=", shQuote(shlib))
    if (with_cxx) {
        if (!is.null(use_cxxstd)) {
            cxx_makeargs <- sprintf(c("CXX='$(CXX%s) $(CXX%sSTD)'",
                                      "CXXFLAGS='$(CXX%sFLAGS)'",
                                      "CXXPICFLAGS='$(CXX%sPICFLAGS)'",
                                      "SHLIB_LDFLAGS='$(SHLIB_CXX%sLDFLAGS)'",
                                      "SHLIB_LD='$(SHLIB_CXX%sLD)'"),
                                    use_cxxstd, use_cxxstd)
            makeargs <- c(cxx_makeargs, makeargs)
        }
        else {
            makeargs <-  c("SHLIB_LDFLAGS='$(SHLIB_CXXLDFLAGS)'",
                           "SHLIB_LD='$(SHLIB_CXXLD)'", makeargs)
        }
    } else if (use_fc_link && (with_f77 || with_f9x))
        makeargs <- c("SHLIB_LDFLAGS='$(SHLIB_FCLDFLAGS)'",
                      "SHLIB_LD='$(SHLIB_FCLD)'",
                      ## avoid $(LIBINTL) and $(LIBR)
                      "ALL_LIBS='$(PKG_LIBS) $(SHLIB_LIBADD) $(SAN_LIBS)'",
                      makeargs)
    if (with_objc) shlib_libadd <- c(shlib_libadd, "$(OBJC_LIBS)")
    if (with_f77 || with_f9x) {
        if (use_fc_link)
            shlib_libadd <- c(shlib_libadd, "$(FCLIBS_XTRA)")
        else
            shlib_libadd <- c(shlib_libadd, "$(FLIBS) $(FCLIBS_XTRA)")
    }
    if (nzchar(use_C)) {
        ## Return TRUE when some makefile (searched from last to first)
        ## assigns a non-empty value to CC<cstd>; within a file the last
        ## assignment wins, mirroring make(1) semantics.
        checkC <- function(cstd) {
            pat <- paste0("^CC", cstd, " *= *")
            for (mf in rev(makefiles)) {
                hits <- grep(pat, readLines(mf, warn = FALSE),
                             perl = TRUE, value = TRUE, useBytes = TRUE)
                if (length(hits))
                    return(nzchar(gsub(pat, "", hits[length(hits)])))
            }
            FALSE
        }
        if (!checkC(use_C)) {
            stop(paste0("C", use_C, " standard requested but CC", use_C,
                        " is not defined"),
                 call. = FALSE, domain = NA)
        }
        c_makeargs <- sprintf(c("CC='$(CC%s)'", "CFLAGS='$(C%sFLAGS)'"), use_C)
        makeargs <- c(c_makeargs, makeargs)
    }
    if (length(pkg_libs))
        makeargs <- c(makeargs,
                      paste0("PKG_LIBS='", p1(pkg_libs), "'"))
    if (length(shlib_libadd))
        makeargs <- c(makeargs,
                      paste0("SHLIB_LIBADD='", p1(shlib_libadd), "'"))
    if (with_f9x && file.exists("Makevars") &&
        length(grep("^\\s*PKG_FCFLAGS", lines, perl = TRUE, useBytes = TRUE)))
        makeargs <- c(makeargs, "P_FCFLAGS='$(PKG_FCFLAGS)'")

    if (WINDOWS && debug) makeargs <- c(makeargs, "DEBUG=T")
    ## TCLBIN is needed for tkrplot and tcltk2
    if (WINDOWS && rarch == "/x64") makeargs <- c(makeargs, "WIN=64 TCLBIN=")

    build_objects_symbol_tables <-
        config_val_to_logical(Sys.getenv("_R_SHLIB_BUILD_OBJECTS_SYMBOL_TABLES_",
                                         "FALSE"))

    makeargs <- c(makeargs,
                  if(isTRUE(use_lto))
                      c(paste0("LTO=", shQuote("$(LTO_OPT)")),
                        paste0("LTO_FC=", shQuote("$(LTO_FC_OPT)")))
                  else if(isFALSE(use_lto)) c("LTO=", "LTO_FC=")
                  )
    ## if(config_val_to_logical(Sys.getenv("_R_CXX_USE_NO_REMAP_", "TRUE")))
    ##      makeargs <- c(makeargs, "CXX_DEFS=-DR_NO_REMAP")
##    if(config_val_to_logical(Sys.getenv("_R_USE_STRICT_R_HEADERS_", "FALSE")))
##         makeargs <- c(makeargs, "XDEFS=-DSTRICT_R_HEADERS=1")

    cmd <- paste(MAKE, p1(paste("-f", shQuote(makefiles))), p1(makeargs),
                 p1(makeobjs))
    if (dry_run) {
        cat("make cmd is\n  ", cmd, "\n\nmake would use\n", sep = "")
        system(paste(cmd, "-n"))
        res <- 0
    } else {
        lines <- system(paste(MAKE, p1(paste("-f", shQuote(makefiles))),
                              "compilers"), intern = TRUE)
        if (with_c) {
            cc <- lines[grep("^CC =", lines)]
            cc <- sub("CC = ", "", cc)
            ## We do not strip flags in configure so leave them here
            ## cc <- sub(" -.*", "", cc)
            ## As this might be more than one word we use system not system2.
            cc_ver <- try(system(paste(cc, "--version"),
                                 intern = TRUE), silent = TRUE)
            if(!inherits(cc_ver, "try-error"))
                message("using C compiler: ", sQuote(cc_ver[1L]))
        }
        if (with_f77 || with_f9x) {
            fc <- lines[grep("^FC =", lines)]
            fc <- sub("FC = ", "", fc)
            ## fc <- sub(" -.*", "", fc)
            fc_ver <- try(system(paste(fc, "--version"),
                                 intern = TRUE), silent = TRUE)
            if(!inherits(fc_ver, "try-error"))
                message("using Fortran compiler: ", sQuote(fc_ver[1L]))
        }
        if (with_cxx) {
            cxx <- lines[grep("^CXX =", lines)]
            cxx <- sub("CXX = ", "", cxx)
            ## cxx <- sub(" -.*", "", cxx)
            if(nzchar(cxx)) {
                cxx_ver <- try(system(paste(cxx, "--version"),
                                 intern = TRUE), silent = TRUE)
                if(!inherits(cxx_ver, "try-error")) {
                    message("using C++ compiler: ", sQuote(cxx_ver[1L]))
                    if(!is.null(use_cxxstd))
                        message("using C++", use_cxxstd)
                }
            }
        }
        if (Sys.info()["sysname"] == "Darwin" &&
            (with_c|| with_f77 || with_f9x || with_cxx)) {
            ## report the SDK in use: this changed at Xcode/CLT 26
            sdk <- try(system2("xcrun", "--show-sdk-version", TRUE, TRUE), silent = TRUE)
            if(!inherits(sdk, "try-error")) {
                sdk <- if (length(attr(sdk, "status"))) NA_character_
                       else paste0("MacOSX", sdk, ".sdk")
                message("using SDK: ", sQuote(sdk))
            }
        }
        if (preclean) system(paste(cmd, "shlib-clean"))
        res <- system(cmd)
        if((res == 0L) && build_objects_symbol_tables) {
            ## Should only do this if the previous one went ok.
            system(paste(cmd, "symbols.rds"))
        }
        if (clean) system(paste(cmd, "shlib-clean"))
    }
    res # probably a multiple of 256
}


## called for base packages from src/Makefile[.win] and from
## .install_packages in this file.  Really *help* indices.
.writePkgIndices <-
    function(dir, outDir, OS = .Platform$OS.type, html = TRUE)
{
    ## Write the help indices for an installed package: the plain-text
    ## 'AnIndex' table and 'aliases.rds' under <outDir>/help, and the
    ## '00Index.html' page under <outDir>/html.

    ## Sort order for topics: '-package' topics first, then
    ## case-insensitively alphabetical (case-sensitive tie-break).
    re <- function(x)
    {
        ## sort order for topics, a little tricky
        ## FALSE sorts before TRUE
        xx <- rep.int(TRUE, length(x))
        xx[grep("-package", x, fixed = TRUE)] <- FALSE
        order(xx, toupper(x), x)
    }

    ## Emit the top of 00Index.html: page header plus links to the
    ## DESCRIPTION file, vignettes, demos and NEWS (when present).
    html_header <- function(pkg, title, version, encoding, conn)
    {
        cat(paste(HTMLheader(title, Rhome="../../..",
                             up="../../../doc/html/packages.html",
                             css = "R.css"),
                  collapse = "\n"),
           '<h2>Documentation for package &lsquo;', pkg, '&rsquo; version ',
            version, '</h2>\n\n', sep = "", file = conn)

        cat('<ul><li><a href="../DESCRIPTION" type="text/plain',
            ## These days we should really always have UTF-8 ...
            if(!is.na(encoding) && (encoding == "UTF-8"))
                "; charset=utf-8",
            '">DESCRIPTION file</a>.</li>\n',
            sep = "", file=conn)
        if (file.exists(file.path(outDir, "doc")))
            cat('<li><a href="../doc/index.html">User guides, package vignettes and other documentation.</a></li>\n',
                file=conn)
        if (file.exists(file.path(outDir, "demo")))
            cat('<li><a href="../demo">Code demos</a>.  Use <a href="../../utils/help/demo">demo()</a> to run them.</li>\n',
                sep = "", file=conn)
        ## Link the first NEWS file found, in this preference order.
        for(nfile in c("NEWS", "NEWS.Rd", "NEWS.md")) {
            if(file.exists(file.path(outDir, nfile))) {
                cat('<li><a href="../', nfile, '">Package NEWS</a>.</li>\n',
                    sep = "", file=conn)
                break
            }
        }

        cat('</ul>\n\n<h2>Help Pages</h2>\n\n\n',
            sep ="", file = conn)
    }

    ## Bucket topics by first letter for the alphabetical index:
    ## '-package' topics map to " " (sorts first); anything outside
    ## A..Z maps to "misc".
    firstLetterCategory <- function(x)
    {
        x[endsWith(x, "-package")] <- " "
        x <- toupper(substr(x, 1, 1))
        x[x > "Z"] <- "misc"
        x[x < "A" & x != " "] <- "misc"
        x
    }

    ## This may well already have been done:
    Rd <- if (file.exists(f <- file.path(outDir, "Meta", "Rd.rds")))
        readRDS(f)
    else {
        ## Keep this in sync with .install_package_Rd_indices().
        ## Rd objects should already have been installed.
        db <- tryCatch(Rd_db(basename(outDir), lib.loc = dirname(outDir)),
                       error = function(e) NULL)
        ## If not, we build the Rd db from the sources:
        if (is.null(db)) db <- Rd_db(dir = dir)
        Rd <- Rd_contents(db)
        saveRDS(Rd, file.path(outDir, "Meta", "Rd.rds"))
        Rd
    }

    ## Build one row per alias: the file documenting it, its title, and
    ## whether the page carries the 'internal' keyword.
    topics <- Rd$Aliases
    M <- if (!length(topics)) {
        list2DF(list(Topic = character(),
                     File = character(),
                     Title = character(),
                     Internal = character()))
    } else {
        lens <- lengths(topics)
        files <- sub("\\.[Rr]d$", "", Rd$File)
        internal <- (vapply(Rd$Keywords,
                            function(x) match("internal", x, 0L),
                            0L) > 0L)
        list2DF(list(Topic = unlist(topics),
                     File = rep.int(files, lens),
                     Title = rep.int(Rd$Title, lens),
                     Internal = rep.int(internal, lens)))
    }
    ## FIXME duplicated aliases warning
    ## Write the tab-separated AnIndex (topic -> file) and the
    ## aliases.rds lookup used by the help system.
    outman <- file.path(outDir, "help")
    dir.create(outman, showWarnings = FALSE)
    MM <- M[re(M[, 1L]), 1:2]
    utils::write.table(MM, file.path(outman, "AnIndex"),
                       quote = FALSE, row.names = FALSE, col.names = FALSE,
                       sep = "\t")
    a <- structure(MM[, 2L], names=MM[, 1L])
    saveRDS(a, file.path(outman, "aliases.rds"))

    ## have HTML index even if no help pages
    outman <- file.path(outDir, "html")
    dir.create(outman, showWarnings = FALSE)
    outcon <- file(file.path(outman, "00Index.html"), "wt")
    on.exit(close(outcon))
    ## we know we have a valid file by now.
    desc <- read.dcf(file.path(outDir, "DESCRIPTION"))[1L, ]
    ## re-encode if necessary
    if(!is.na(enc <- desc["Encoding"])) {
        ## should be valid in UTF-8, might be invalid in declared encoding
        desc <- iconv(desc, enc, "UTF-8", sub = "byte")
    }
    ## drop internal entries
    M <- M[!M[, 4L], ]
    ## For a few base packages, thin out S3 method aliases so the index
    ## mostly lists generics; two passes to catch multi-part suffixes.
    if (desc["Package"] %in% c("base", "graphics", "stats", "utils")) {
        for(pass in 1:2) {
            ## we skip method aliases
            gen <- gsub("\\.data\\.frame", ".data_frame", M$Topic)
            gen <- sub("\\.model\\.matrix$", ".modelmatrix", gen)
            gen <- sub("^(all|as|is|file|Sys|row|na|model)\\.", "\\1_", gen)
            gen <- sub("^(.*)\\.test", "\\1_test", gen)
            gen <- sub("([-[:alnum:]]+)\\.[^.]+$", "\\1", gen)
            last <- nrow(M)
            nongen <- gen %in% c("ar", "bw", "contr", "dyn", "lm", "qr", "ts", "which", ".Call", ".External", ".Library", ".First", ".Last")
            nc <- nchar(gen)
            asg <- (nc > 3) & endsWith(gen, "<-")
            ## Skip an alias when it reduces to the same generic in the
            ## same file as its predecessor (and is not a known
            ## false positive), or when it is an assignment form.
            skip <- (gen == c("", gen[-last])) & (M$File == c("", M$File[-last])) & !nongen
            skip <- skip | asg
            ##N <- cbind(M$Topic, gen, c("", gen[-last]), skip)
            M <- M[!skip, ]
        }
    }

    # Collapse method links into unique (generic, file) pairs
    M$Topic <- sub("^([^,]*),.*-method$", "\\1-method", M$Topic)
    M <- M[!duplicated(M[, c("Topic", "File")]),]
    M <- M[re(M[, 1L]), ]

    ## encode some entries.
    ## Escape HTML metacharacters; with 'backtick', also normalize
    ## Rd-style dashes in titles.
    htmlize <- function(x, backtick)
    {
        x <- gsub("&", "&amp;", x, fixed = TRUE)
        x <- gsub("<", "&lt;",  x, fixed = TRUE)
        x <- gsub(">", "&gt;",  x, fixed = TRUE)
        if (backtick) {
            x <- gsub("---", "-", x, fixed = TRUE)
            x <- gsub("--",  "-", x, fixed = TRUE)
            ## these have been changed in the Rd parser
            #x <- gsub("``", "&ldquo;", x, fixed = TRUE)
            #x <- gsub("''", "&rdquo;", x, fixed = TRUE)
            #x <- gsub("\\`([^']+)'", "&lsquo;\\1&rsquo;", x)
            #x <- gsub("`", "'", x, fixed = TRUE)
        }
        x
    }
    M$HTopic <- htmlize(M$Topic, FALSE)
    M$ Title <- htmlize(M$Title, TRUE)

    ## No need to handle encodings: everything is in UTF-8

    html_header(desc["Package"], htmlize(desc["Title"], TRUE),
                desc["Version"], desc["Encoding"], outcon)

    ## With more than 100 entries, group the table by first letter and
    ## add a navigation bar of anchors at the top.
    use_alpha <- (nrow(M) > 100)
    if (use_alpha) {
        first <- firstLetterCategory(M$Topic)
        nm <- sort(names(table(first)))
        m <- match(" ", nm, 0L) # -package
        if (m) nm <- c(" ", nm[-m])
        m <- match("misc", nm, 0L) # force last in all locales.
        if (m) nm <- c(nm[-m], "misc")
        writeLines(c('<p style="text-align: center;">',
                     paste0("<a href=\"#", nm, "\">", nm, "</a>"),
                     "</p>\n"), outcon)
        for (f in nm) {
            MM <- M[first == f, ]
            if (f != " ")
                cat("\n<h2><a id=\"", f, "\">-- ", f, " --</a></h2>\n\n",
                    sep = "", file = outcon)
            writeLines(c('<table style="width: 100%;">',
                         paste0('<tr><td style="width: 25%;"><a href="', MM[, 2L], '.html">',
                                MM$HTopic, '</a></td>\n<td>', MM[, 3L],'</td></tr>'),
                         "</table>"), outcon)
       }
    } else if (nrow(M)) {
        writeLines(c('<table style="width: 100%;">',
                     paste0('<tr><td style="width: 25%;"><a href="', M[, 2L], '.html">',
                            M$HTopic, '</a></td>\n<td>', M[, 3L],'</td></tr>'),
                     "</table>"), outcon)
    } else { # no rows
         writeLines("There are no help pages in this package", outcon)
    }
    writeLines('</div></body></html>', outcon)
    file.copy(file.path(R.home("doc"), "html", "R.css"), outman)
    invisible(NULL)
}

### * .convertRdfiles

## possible types are "html", "latex", "example"
## outenc is used as the default output encoding for latex conversion
.convertRdfiles <-
    function(dir, outDir, types = "html", silent = FALSE, outenc = "UTF-8")
{
    ## Convert the package's Rd objects into the requested output
    ## 'types' ("html", "latex", "example") under 'outDir', skipping
    ## outputs that are newer than their Rd source and removing outputs
    ## whose Rd source has gone.

    ## Progress output: print the base file name once (padded into a
    ## fixed-width column), then each converted type.
    showtype <- function(type) {
    	if (!shown) {
            nc <- nchar(bf)
            if (nc < 38L)
                cat("    ", bf, rep.int(" ", 40L - nc), sep = "")
            else
                cat("    ", bf, "\n", rep.int(" ", 44L), sep = "")
            shown <<- TRUE
        }
        ## 'example' is always last, so 5+space
        cat(type, rep.int(" ", max(0L, 6L - nchar(type))), sep = "")
    }

    ## Output subdirectory and file extension for each conversion type.
    dirname <- c("html", "latex", "R-ex")
    ext     <- c(".html", ".tex", ".R")
    names(dirname) <- names(ext) <- c("html", "latex", "example")
    mandir <- file.path(dir, "man")
    if (!dir.exists(mandir)) return()
    desc <- readRDS(file.path(outDir, "Meta", "package.rds"))$DESCRIPTION
    pkg <- desc["Package"]
    ver <- desc["Version"]

    for(type in types)
        dir.create(file.path(outDir, dirname[type]), showWarnings = FALSE)

    cat("  converting help for package ", sQuote(pkg), "\n", sep = "")

    ## FIXME: add this lib to lib.loc?
    if ("html" %in% types) {
        ## may be slow, so add a message
        if (!silent) message("    finding HTML links ...", appendLF = FALSE, domain = NA)
        Links <- findHTMLlinks(outDir, level = 0:1)
        if (!silent) message(" done")
        Links2 <- character()
    }

    ## Rd objects may already have been installed.
    db <- tryCatch(Rd_db(basename(outDir), lib.loc = dirname(outDir)),
                   error = function(e) NULL)
    ## If not, we build the Rd db from the sources:
    if (is.null(db)) db <- Rd_db(dir = dir)
    if (!length(db)) return()

    ## Warning handler: accumulate Rd warnings (reported after each
    ## file's progress line) and muffle them.
    .whandler <-  function(e) {
        .messages <<- c(.messages,
                        paste("Rd warning:", conditionMessage(e)))
        tryInvokeRestart("muffleWarning")
    }
    ## Error handler: remove the partial output file, then stop.
    .ehandler <- function(e) {
        message("", domain = NA) # force newline
        unlink(ff)
        stop(conditionMessage(e), domain = NA, call. = FALSE)
    }
    ## Run one conversion with the handlers above installed.
    .convert <- function(expr)
        withCallingHandlers(tryCatch(expr, error = .ehandler),
                            warning = .whandler)

    files <- names(db) # not full file names
    for(nf in files) {
        .messages <- character()
        Rd <- db[[nf]]
        attr(Rd, "source") <- NULL
	bf <- sub("\\.[Rr]d$", "", basename(nf)) # e.g. nf = "unix/Signals.Rd"
	f <- attr(Rd, "Rdfile")# full file name

        shown <- FALSE

        ## For each type, only (re-)convert when the output file is
        ## missing or older than the Rd source.
        if ("html" %in% types) {
            type <- "html"
            ff <- file.path(outDir, dirname[type],
                            paste0(bf, ext[type]))
            if (!file_test("-f", ff) || file_test("-nt", f, ff)) {
                showtype(type)
                ## assume prepare_Rd was run when dumping the .rds
                ## so use defines = NULL for speed
                .convert(Rd2HTML(Rd, ff, package = c(pkg, ver),
                                 defines = NULL,
                                 Links = Links, Links2 = Links2))
            }
        }
        if ("latex" %in% types) {
            type <- "latex"
            ff <- file.path(outDir, dirname[type],
                            paste0(bf, ext[type]))
            if (!file_test("-f", ff) || file_test("-nt", f, ff)) {
                showtype(type)
                .convert(Rd2latex(Rd, ff, defines = NULL,
                                  outputEncoding = outenc,
                                  writeEncoding = (outenc != "UTF-8")))
            }
        }
        if ("example" %in% types) {
            type <- "example"
            ff <- file.path(outDir, dirname[type],
                            paste0(bf, ext[type]))
            if (!file_test("-f", ff) || file_test("-nt", f, ff)) {
                ## Rd2ex may produce no file (no examples section), so
                ## only report the type when a file actually appeared.
                .convert(Rd2ex(Rd, ff, defines = NULL))
                if (file_test("-f", ff)) showtype(type)
            }
        }
        if (shown) {
            cat("\n")
            if (length(.messages)) writeLines(unique(.messages))
        }
    }

    ## Now check for files to remove.
    ## These start with a letter.
    bfs <- sub("\\.[Rr]d$", "", basename(files)) # those to keep
    if ("html" %in% types) {
        type <- "html"
        have <- list.files(file.path(outDir, dirname[type]))
        have2 <- sub(".html", "", basename(have), fixed=TRUE)
        drop <- have[have2 %notin% c(bfs, "00Index", "R.css")]
        unlink(file.path(outDir, dirname[type], drop))
    }
    if ("latex" %in% types) {
        type <- "latex"
        have <- list.files(file.path(outDir, dirname[type]))
        have2 <- sub(".tex", "", basename(have), fixed=TRUE)
        drop <- have[have2 %notin% bfs]
        unlink(file.path(outDir, dirname[type], drop))
    }
    if ("example" %in% types) {
        type <- "example"
        have <- list.files(file.path(outDir, dirname[type]))
        have2 <- sub(".R", "", basename(have), fixed=TRUE)
        drop <- have[have2 %notin% bfs]
        unlink(file.path(outDir, dirname[type], drop))
    }

}

### * .makeDllRes

.makeDllRes <-
function(name="", version = "0.0")
{
    ## Write (to stdout) a Windows resource-file template for a package
    ## DLL, embedding the package name and version.  The 'name' and
    ## 'version' arguments are only defaults: when a DESCRIPTION file is
    ## found in the parent (or grandparent) directory, its Package and
    ## Version fields take precedence.
    if (file.exists(f <- "../DESCRIPTION") ||
        file.exists(f <- "../../DESCRIPTION")) {
        ## read.dcf() returns a character matrix: take the first record
        ## as a *named* vector.  (Using [[1L]] here would extract a
        ## single unnamed cell, so the "Package"/"Version" lookups
        ## below would always be NA and the defaults silently used.)
        desc <- read.dcf(f)[1L, ]
        if (!is.na(f <- desc["Package"])) name <- f
        if (!is.na(f <- desc["Version"])) version <- f
    }
    writeLines(c('#include <windows.h>',
                 '#include "Rversion.h"',
                 '',
                 'VS_VERSION_INFO VERSIONINFO',
                 'FILEVERSION R_FILEVERSION',
                 'PRODUCTVERSION 3,0,0,0',
                 'FILEFLAGSMASK 0x3L',
                 'FILEOS VOS__WINDOWS32',
                 'FILETYPE VFT_APP',
                 'BEGIN',
                 '    BLOCK "StringFileInfo"',
                 '    BEGIN',
                 '        BLOCK "040904E4"',
                 '        BEGIN'))
    cat("            VALUE \"FileDescription\", \"DLL for R package `", name,"'\\0\"\n",
        "            VALUE \"FileVersion\", \"", version, "\\0\"\n", sep = "")
    writeLines(c(
                 '            VALUE "Compiled under R Version", R_MAJOR "." R_MINOR " (" R_YEAR "-" R_MONTH "-" R_DAY ")\\0"',
                 '            VALUE "Project info", "https://www.r-project.org\\0"',
                 '        END',
                 '    END',
                 '    BLOCK "VarFileInfo"',
                 '    BEGIN',
                 '        VALUE "Translation", 0x409, 1252',
                 '    END',
                 'END'))
}

### * makevars_user

## Locate the user's Makevars file, returning its path or character()
## when none applies.  A set R_MAKEVARS_USER environment variable takes
## absolute precedence: if the file it names does not exist, no
## fallback is consulted and character() is returned.
makevars_user <-
function()
{
    ## Return the first existing candidate (tilde-expanded), if any.
    first_existing <- function(candidates) {
        for(cand in candidates) {
            cand <- path.expand(cand)
            if(file.exists(cand)) return(cand)
        }
        character()
    }

    env <- Sys.getenv("R_MAKEVARS_USER", NA_character_)
    if(!is.na(env))
        return(if(file.exists(env)) env else character())

    if(.Platform$OS.type == "windows") {
        ## On 64-bit Windows, arch-specific files are preferred.
        candidates <- if(Sys.getenv("R_ARCH") == "/x64")
            c("~/.R/Makevars.ucrt", "~/.R/Makevars.win64",
              "~/.R/Makevars.win", "~/.R/Makevars")
        else
            c("~/.R/Makevars.win", "~/.R/Makevars")
        first_existing(candidates)
    }
    else
        first_existing(c(paste0("~/.R/Makevars-",
                                Sys.getenv("R_PLATFORM")),
                         "~/.R/Makevars"))
}

## Undo installation-time patches recorded in the "install_time_patches"
## directory (Windows only), reverse-applying them in the opposite order
## to their application, then remove the directory.  A no-op elsewhere.
revert_install_time_patches <- function()
{
    on_windows <- .Platform$OS.type == "windows"
    if (on_windows && dir.exists("install_time_patches")) {
        ## Sorted decreasing: revert the most recently applied first.
        for(p in sort(list.files("install_time_patches"),
                      decreasing = TRUE)) {
            fname <- paste0("install_time_patches/", p)
            status <- system2("patch",
                              args = c("-p2", "--binary", "--force", "--reverse"),
                              stdin = fname)
            if (status != 0)
                message("WARNING: failed to revert patch ", p, "\n")
            else
                message("Reverted installation-time patch ", p,
                        " in package installation\n")
        }
        unlink("install_time_patches", recursive = TRUE)
    }
}

### * makevars_site

## Locate the site-wide Makevars file, returning its path or character()
## when it does not exist.  R_MAKEVARS_SITE, when set, overrides the
## default location <R_HOME>/etc<R_ARCH>/Makevars.site.
makevars_site <-
function()
{
    f <- Sys.getenv("R_MAKEVARS_SITE", NA_character_)
    if(is.na(f))
        f <- file.path(paste0(R.home("etc"), Sys.getenv("R_ARCH")),
                       "Makevars.site")
    if(file.exists(f)) f else character()
}

## Known C++ standards, ordered newest first; used when searching the
## Makeconf files for a usable CXXnn compiler definition.
cxx_standards <- c("26", "23", "20", "17", "14", "11", "98")

### Local variables: ***
### mode: outline-minor ***
### outline-regexp: "### [*]+" ***
### End: ***
#  File src/library/tools/R/license.R
#  Part of the R package, https://www.R-project.org
#
#  Copyright (C) 1995-2019 The R Core Team
#
#  This program is free software; you can redistribute it and/or modify
#  it under the terms of the GNU General Public License as published by
#  the Free Software Foundation; either version 2 of the License, or
#  (at your option) any later version.
#
#  This program is distributed in the hope that it will be useful,
#  but WITHOUT ANY WARRANTY; without even the implied warranty of
#  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
#  GNU General Public License for more details.
#
#  A copy of the GNU General Public License is available at
#  https://www.R-project.org/Licenses/

## <NOTE>
## We want *standardized* license specs so that we can compute on them.
## In particular, we want to know whether licenses are recognizable as
## FOSS (https://en.wikipedia.org/wiki/Free_and_open-source_software)
## licenses.
##
## A license spec is standardized ("canonical") if it is an alternative
## of component specs which are one of the following:
##
## A. "Unlimited"
## B. "file LICENSE" or "file LICENCE"
## C. A specification based on the R license db
##    * A standard short specification (SSS field)
##    * The name or abbreviation of an unversioned license
##    * The name or abbreviation of a versioned license, optionally
##      followed by a version spec
##    * The name of a versioned license followed by the version
##    * The abbreviation of a versioned license combined with '-',
##   optionally followed by an extension spec as in B (in principle,
##   only if the base license is extensible).
##
## A license spec is standardizable if we know to transform it to
## standardized form.
##
## Note that the R license db also contains non-FOSS licenses, and hence
## information (FOSS field) on the FOSS status of the licenses.
## Ideally, a license taken as FOSS would be approved as free by the FSF
## and as open by the OSI: we also take licenses as FOSS when approved
## by the FSF (and not rejected by the OSI).
##
## See
##   https://www.gnu.org/licenses/license-list.html
##   https://opensource.org/licenses/alphabetical
## for the FSF and OSI license lists, and also
##   https://www.fsf.org/licensing/licenses
##   https://en.wikipedia.org/wiki/List_of_FSF_approved_software_licences
##   https://en.wikipedia.org/wiki/List_of_OSI_approved_software_licences
## for more information.
## </NOTE>

## Anchor regular expression(s) at both the start and the end of the
## target string; empty input gives character().
re_anchor <-
function(s)
{
    if(!length(s)) return(character())
    paste0("^", s, "$")
}

## Wrap regular expression(s) in a capturing group; empty input gives
## character().
re_group <-
function(s)
{
    if(!length(s)) return(character())
    paste0("(", s, ")")
}

## Combine regular expressions into an alternation, optionally wrapped
## in a capturing group; empty input gives character().
re_or <-
function(s, group = TRUE)
{
    if(!length(s)) return(character())
    alternation <- paste(s, collapse = "|")
    if(group) paste0("(", alternation, ")") else alternation
}

## Build the R license db as a data frame, merging the shipped
## share/licenses/license.db with any extra DCF files given in 'paths'
## or via the R_LICENSE_DB_PATHS environment variable.
.make_R_license_db <-
function(paths = NULL)
{
    if(is.null(paths))
        paths <- unlist(strsplit(Sys.getenv("R_LICENSE_DB_PATHS"),
                                 .Platform$path.sep, fixed = TRUE))
    paths <- c(paths,
               file.path(R.home("share"), "licenses", "license.db"))
    ldb <- Reduce(function(u, v) merge(u, v, all = TRUE),
                  lapply(unique(normalizePath(paths)), read.dcf))
    ## Merging matrices gives a data frame.
    ldb <- as.matrix(ldb)
    ldb[is.na(ldb)] <- ""
    ## (Could also keep NAs and filter on is.finite() in subsequent
    ## computations.)
    ## FOSS == "yes" implies Restricts_use = "no":
    ldb[ldb[, "FOSS"] == "yes", "Restricts_use"] <- "no"
    ldb <- data.frame(ldb, stringsAsFactors = FALSE)
    ldb$Labels <- R_license_db_labels(ldb)
    ## Keep only the first entry for each distinct label.
    ldb[!duplicated(ldb$Labels), ]
}

## Compute display labels for the license db rows: the SSS field when
## present, otherwise the abbreviation (or full name), with " version x"
## appended for versioned licenses.
R_license_db_labels <-
function(ldb)
{
    if(is.null(ldb)) return(NULL)
    labels <- ldb$SSS
    ## Rows without a standard short spec fall back to abbreviation or,
    ## failing that, the full license name.
    no_sss <- which(!nzchar(labels))
    abbrev <- ldb$Abbrev[no_sss]
    labels[no_sss] <- ifelse(nzchar(abbrev), abbrev, ldb$Name[no_sss])
    ## Append the version for versioned licenses among those rows.
    version <- ldb$Version[no_sss]
    versioned <- nzchar(version)
    at <- no_sss[versioned]
    labels[at] <- sprintf("%s version %s", labels[at], version[versioned])
    labels
}

## Accessor for the cached license db: called with no argument it
## returns the cache; called with 'new' it replaces the cache.
R_license_db <- local({
    cache <- NULL
    function(new) {
        if(missing(new)) return(cache)
        cache <<- new
    }
})

## Populate the license db cache once, when this file is evaluated.
R_license_db(.make_R_license_db())

.make_R_license_db_vars <-
function()
{
    ## Build license regexps and tables according to the specs.
    ## Returns a list of regular expressions (re_*) recognizing
    ## standardized license components, plus lookup tables (tab_*)
    ## mapping matched names to row positions in the license db.

    ldb <- R_license_db()

    ## Standard short specification (SSS field) from the R license db.
    pos <- which(nzchar(ldb$SSS))
    names(pos) <- ldb$SSS[pos]
    tab_sss <- pos

    has_version <- nzchar(ldb$Version)
    has_abbrev <- nzchar(ldb$Abbrev)

    ## Name or abbreviation of an unversioned license from the R license
    ## db.
    pos <- which(!has_version)
    names(pos) <- ldb$Name[pos]
    tab_unversioned <- pos
    pos <- which(has_abbrev & !has_version)
    tab_unversioned[ldb$Abbrev[pos]] <- pos

    ## Versioned licenses from the R license db.
    ## Style A: Name or abbreviation of a versioned license, optionally
    ##   followed by a version spec
    ## Style B: Name of a versioned license followed by the version.
    ## Style C: Abbreviation of a versioned license combined with '-'.
    pos <- which(has_version)
    names(pos) <- ldb$Name[pos]
    ## Several versions can share a name: keep all candidates per name.
    tab_versioned_style_A <- split(pos, names(pos))
    tab_versioned_style_B <- pos
    names(tab_versioned_style_B) <-
        paste(names(pos), ldb$Version[pos])
    pos <- which(has_version & has_abbrev)
    tab_versioned_style_A <-
        c(tab_versioned_style_A, split(pos, ldb$Abbrev[pos]))
    tab_versioned_style_C <- pos
    names(tab_versioned_style_C) <-
        sprintf("%s-%s",
                ldb$Abbrev[pos],
                ldb$Version[pos])

    ## A version spec looks like "(>= 2)" or "(>= 2, < 3)".
    operators <- c("<", "<=", ">", ">=", "==", "!=")
    re_numeric_version <- .standard_regexps()$valid_numeric_version
    re_single_version_spec <-
        paste0("[[:space:]]*",
               re_or(operators),
               "[[:space:]]*",
               re_group(re_numeric_version),
               "[[:space:]]*")
    re_version_spec <-
        paste0("\\(",
               paste0("(", re_single_version_spec, ",)*"),
               re_single_version_spec,
               "\\)")

    re_sss <- re_or(names(tab_sss))
    re_unversioned <- re_or(names(tab_unversioned))
    re_versioned_style_A <-
        paste0(re_or(names(tab_versioned_style_A)),
               "[[:space:]]*",
               paste0("(", re_version_spec, ")*"))
    ## Let's be nice ...
    re_versioned_style_B <-
        re_or(paste0(ldb$Name[has_version],
                     "[[:space:]]+([Vv]ersion[[:space:]]+)?",
                     ldb$Version[has_version]))
    re_versioned_style_C <- re_or(names(tab_versioned_style_C))

    ## Any single license spec recognized from the db.
    re_license_in_db <-
        re_or(c(re_sss,
                re_unversioned,
                re_versioned_style_A,
                re_versioned_style_B,
                re_versioned_style_C))

    re_license_file <- "file LICEN[CS]E"
    re_license_extension <-
        sprintf("[[:space:]]*\\+[[:space:]]*%s", re_license_file)

    ## <NOTE>
    ## Many standard licenses actually do not allow extensions.
    ## Ideally, we would only allow the extension markup for extensible
    ## standard licenses, as identified via an Extensible: TRUE field in
    ## the license db.  But version ranges make this tricky: e.g.,
    ##   GPL (>= 2) + file LICENSE
    ## is not right as GPL-2 does not allow extensions ...
    ## Hence, for now allow the extension markup with all standard
    ## licenses.
    ## </NOTE>

    ## A full component: db license (optionally extended), a license
    ## file, or "Unlimited" -- anchored at both ends.
    re_component <-
        re_anchor(re_or(c(sprintf("%s(%s)?",
                                  re_license_in_db,
                                  re_license_extension),
                          re_license_file,
                          "Unlimited")))
    list(re_component = re_component,
         re_license_file = re_license_file,
         re_license_extension = re_license_extension,
         re_single_version_spec = re_single_version_spec,
         re_sss = re_sss,
         re_unversioned = re_unversioned,
         re_versioned_style_A = re_versioned_style_A,
         re_versioned_style_B = re_versioned_style_B,
         re_versioned_style_C = re_versioned_style_C,
         tab_sss = tab_sss,
         tab_unversioned = tab_unversioned,
         tab_versioned_style_A = tab_versioned_style_A,
         tab_versioned_style_B = tab_versioned_style_B,
         tab_versioned_style_C = tab_versioned_style_C)
}

## Accessor for the cached derived license regexps/tables: called with
## no argument it returns the cache; called with 'new' it replaces it.
R_license_db_vars <- local({
    cache <- NULL
    function(new) {
        if(missing(new)) return(cache)
        cache <<- new
    }
})


## Populate the derived license regexp/table cache once, when this file
## is evaluated.
R_license_db_vars(.make_R_license_db_vars())

## Rebuild both license caches (the db itself, then the regexps/tables
## derived from it), e.g. after R_LICENSE_DB_PATHS changed or when extra
## license db files are supplied via 'paths'.
R_license_db_refresh_cache <-
function(paths = NULL)
{
    R_license_db(.make_R_license_db(paths))
    R_license_db_vars(.make_R_license_db_vars())
}

## Standardizable license specs:

## License specifications found on CRAN/BioC/Omegahat and manually
## classified as standardizable software licenses (even though not
## standardized/canonical), provided as a list of license specs named by
## the respective standardizations.
## With ongoing standardization this should gradually be eliminated.
## Last updated: 2009-02-19.

## Nasty issues.
## * There really is no GPL version 2.0.
##   Unfortunately, the FSF uses 2.0 in URLs or links
##   (https://www.gnu.org/licenses/old-licenses/gpl-2.0.html)
##   The text clearly says "Version 2, June 1991".
## * There really is no LGPL version 2.0.
##   Unfortunately, the FSF uses 2.0 in URLs or links
##   (https://www.gnu.org/licenses/old-licenses/).
##   The text clearly says "Version 2, June 1991".
## * CeCILL is a bit of a mess: the current version is referred to as
##   "version 2" (http://www.cecill.info/licences.en.html) but
##    internally uses "Version 2.0 dated 2006-09-05"
##    (http://www.cecill.info/licences/Licence_CeCILL_V2-en.txt).

## Named list mapping each standardized license spec to the character
## vector of non-canonical variant specs (as found in the wild) which
## should be standardized to it.
.standardizable_license_specs <-
list("Artistic-2.0" =
     c("The Artistic License, Version 2.0",
       "Artistic 2.0",
       "Artistic-2.0, see https://opensource.org/licenses/artistic-license-2.0.php"
       ),

     "BSL" =
     c("Boost Software License",
       "Boost Software License 1.0",
       "BSL 1.0"
       ),

     "CeCILL-2" =
     c("CeCILL-2.0"
       ),

     "GPL" =
     c("GNU Public License",
       "Gnu GPL",
       "GNU GPL",
       "GPL (https://www.gnu.org/copyleft/gpl.html)"
       ),

     "GPL-2" =
     c(## <NOTE>
       ## There is no GPL 2.0, see above.
       "GNU General Public License 2.0.",
       "GPL 2.0",
       "GPL version 2.0",
       "GPL2.0",
       ## </NOTE>
       "GPL Version 2",
       "GNU GPL Version 2",
       "GNU GPL version 2",
       "GNU GPL version 2.",
       "GPL (version 2)",
       "GPL 2",
       "GPL 2.",
       "GPL version 2",
       "GPL version 2 (June, 1991)",
       "GPL version 2.",
       "GPL2",
       ## BioC:
       "GPL V2",
       "GPL, version 2"
       ),

     "GPL-3" =
     c("GPL Version 3",
       "GPL version 3",
       "GNU General Public Licence (GPLv3)",
       "GPL 3",
       "GPL v3"
       ),

     "GPL (>= 2)" =
     c(## <NOTE>
       ## There is no GPL 2.0, see above.
       "GNU GPL v2.0 or greater",
       "GPL 2.0 or higher",
       "GPL 2.0 or newer",
       "GPL version 2.0 or later",
       "GPL version 2.0 or newer",
       ## </NOTE>
       "GNU GPL (version 2 or later)",
       "GNU GPL (version 2 or later); see the file COPYING for details",
       "GNU GPL version 2 or newer",
       "GNU General Public License version 2 or newer",
       "GPL version 2 or later",
       "GPL ( version 2 or later)",
       "GPL (Version 2 or above)",
       "GPL (Version 2 or later)",
       "GPL (version 2 or higher)",
       "GPL (version 2 or later)",
       "GPL (version 2 or later, see the included file GPL)",
       "GPL (version 2 or newer)",
       "GPL 2 or later",
       "GPL 2 or newer",
       "GPL version 2 or any later version",
       "GPL Version 2 or later",
       "GPL Version 2 or later.",
       "GPL Version 2 or newer",
       "GPL Version 2 or newer.",
       "GPL version 2 (June, 1991) or later",
       "GPL version 2 (or newer)",
       "GPL version 2 or later.",
       "GPL version 2 or newer",
       "GPL version 2 or newer (https://www.gnu.org/copyleft/gpl.html)",
       "GPL version 2 or newer (see README).",
       "GPL version 2 or newer.",
       "GPL version 2 or newer. https://www.gnu.org/copyleft/gpl.html",
       "GPL version 2, or, at your option, any newer version.",
       "GPL Version 2 (or later)",
       "GPL version 2 (or later)",
       "GPL version 2 or higher",
       "GPL2 or later",
       "GPL>=2",
       "GNU General Public License (version 2 or later)"
       ),

     "GPL (>= 3)" =
     c("GPL (version 3 or later)",
       "GPL >=3"
       ),

     "GPL | LGPL" =
     c("GPL or LGPL by your choice"
       ),

     "GPL | BSD" =
     c("GPL, BSD"
       ),

     "GPL-2 | file LICENSE" =
     c("use under GPL2, or see file LICENCE"
       ),

     "LGPL" =
     c("LGPL (see <https://opensource.org/licenses/lgpl-license.php>).",
       "GNU LGPL (same as wxWidgets)."
       ),

     "LGPL-2" =
     c("LGPL2",
       "LGPL2.0"
       ),

     "LGPL-2.1" =
     c("LGPL version 2.1"
       ),

     "LGPL-3" =
     c("LGPL-v3"
       ),

     "LGPL (>= 2.0)" =
     c(## <NOTE>
       ## There is no LGPL-2.0, see above.
       "LGPL >= 2.0",
       ## </NOTE>
       "LGPL Version 2 or later.",
       "LGPL version 2 or newer",
       "LGPL (version 2 or later)",
       "LGPL version 2 or later"
       ),

     "LGPL (>= 2.1)" =
     c("LGPL version 2.1 or later"
       ),

     "LGPL (>= 3.0)" =
     c("LGPL >=3"
       ),

     "X11" =
     c("X11 (http://www.x.org/Downloads_terms.html)"
       ),

     "Unlimited" =
     c("Unlimited use and distribution."
       )
)

## Two-column lookup table built from .standardizable_license_specs:
## ispecs are the variant (input) specs, ospecs the corresponding
## standardized (output) specs.
.standardizable_license_specs_db <-
local({
    specs <- .standardizable_license_specs
    data.frame(ispecs = unlist(specs),
               ospecs = rep.int(names(specs), lengths(specs)),
               stringsAsFactors = FALSE)
})

## Analyze a single license specification.
## Splits the spec into its "|" alternatives, checks each against the
## license db component regexp, standardizes standardizable variants,
## and (for standardizable specs) determines the db expansions,
## license-file pointers/extensions, FOSS status and use restrictions.
## Returns a named list; see .make_results() below for the elements
## and their defaults.
analyze_license <-
function(x)
{
    ## Results skeleton with the defaults for all result fields.
    .make_results <- function(is_empty = FALSE,
                              is_canonical = FALSE,
                              bad_components = character(),
                              is_standardizable = FALSE,
                              is_verified = FALSE,
                              standardization = NA_character_,
                              components = NULL,
                              expansions = NULL,
                              extensions = NULL,
                              pointers = NULL,
                              is_FOSS = NA,
                              restricts_use = NA)
        list(is_empty = is_empty,
             is_canonical = is_canonical,
             bad_components = bad_components,
             is_standardizable = is_standardizable,
             is_verified = is_verified,
             standardization = standardization,
             components = components,
             expansions = expansions,
             extensions = extensions,
             pointers = pointers,
             is_FOSS = is_FOSS,
             restricts_use = restricts_use)


    x <- trimws(x)
    if(is.na(x) || (x == "")) {
        ## Not really a lot to check ...
        ## (Note that non-standardizable license specs are dropped by
        ## writePACKAGES() and friends.)
        return(.make_results(is_empty = TRUE))
    }

    pointers <- NULL
    extensions <- NULL
    expansions <- NULL
    is_verified <- FALSE
    is_FOSS <- NA
    restricts_use <- NA

    ## Try splitting into the individual components.
    ## (Normalize whitespace around '+' first, then split on the '|'
    ## alternative separator.)
    components <-
        trimws(unlist(strsplit(gsub("[[:space:]]*\\+[[:space:]]*",
                                    " + ", x),
                               "|", fixed = TRUE)))

    ## Now analyze the individual components.
    ok <- grepl(R_license_db_vars()$re_component, components)
    bad_components <- components[!ok]
    is_canonical <- all(ok)

    ## Is the license specification standardizable?
    standardizable <-
        components %in% .standardizable_license_specs_db$ispecs
    is_standardizable <- (is_canonical || all(standardizable))

    standardization <- if(is_standardizable) {
        ## Standardize the ones which are standardizable but not yet
        ## standardized.
        ind <- !ok & standardizable
        if(any(ind))
            components[ind] <-
                .standardize_license_components(components[ind])
        ## Canonicalize the standardized ones a bit more (as we are
        ## rather generous about using whitespace).
        ind <- ok & grepl("\\(", components)
        if(any(ind)) {
            s <- sub("[[:space:]]*\\([[:space:]]*", " \\(",
                     components[ind])
            s <- sub("[[:space:]]*\\)", "\\)", s)
            s <- gsub("[[:space:]]*,[[:space:]]*", ", ", s)
            ## Really re_or(operators) ...
            s <- gsub("[[:space:]]+(<=?|>=?|==|!=)", " \\1", s)
            components[ind] <-
                gsub(sprintf("[[:space:]]*(%s)",
                             .standard_regexps()$valid_numeric_version),
                     " \\1", s)
        }
        paste(components, collapse = " | ")
    } else NA_character_

    ## Analyze components provided that we know we can standardize.
    if(is_standardizable) {
        ## TRUE iff x is non-NULL with all elements non-NA and equal
        ## to v ("yes" by default).
        verifiable <- function(x, v = "yes")
            !is.null(x) && all(!is.na(x) & (x == v))
        ## (More generally we could test for positive length of x: but
        ## a length test is needed because all(NULL) |=> TRUE.)

        expansions <- lapply(components,
                             expand_license_spec_component_from_db)

        ## The license is FOSS if there is one component which is
        ## "Unlimited" or has a positive number of expansions all of
        ## which are FOSS.
        ## If all components have a positive number of expansions where
        ## at least one is not FOSS, the license is not FOSS.
        ## Otherwise we do not know.
        is_FOSS <- if(any(components == "Unlimited")) {
            TRUE
        } else if(any(vapply(expansions,
                             function(e) verifiable(e$FOSS),
                             NA))) {
            TRUE
        } else if(all(vapply(expansions,
                             function(e) any(e$FOSS == "no"),
                             NA))) {
            FALSE
        } else
            NA

        ## The license is verified (as FOSS) if it was verified as FOSS.
        is_verified <- !is.na(is_FOSS) && is_FOSS

        ## The license does not restrict use if it is verified as FOSS,
        ## or if there is one component with a positive number of
        ## expansions all of which do not restrict use.
        ## If all components have a positive number of expansions where
        ## at least one of which restricts use, the license restricts
        ## use.
        ## Otherwise, we do not know.
        restricts_use <- if(is_verified) {
            FALSE
        } else if(any(vapply(expansions,
                             function(e)
                             (length(e) &&
                              all(e$Restricts_use == "no")),
                             NA))) {
            FALSE
        } else if(all(vapply(expansions,
                             function(e)
                                 any(e$Restricts_use == "yes"),
                             NA))) {
            TRUE
        } else
            NA

        ## Components referring to a license file ("file LICEN[CS]E",
        ## possibly as an extension "L + file LICENSE").
        re <- R_license_db_vars()$re_license_file
        pos <- grep(sprintf("%s$", re), components)
        if(length(pos)) {
            elements <- components[pos]
            ## Components with license file pointers.
            pointers <- sub(".*file ", "", elements)
            ## Components with license extensions.
            ind <- grepl("+", elements, fixed = TRUE)
            if(any(ind))
                extensions <-
                    data.frame(components = elements[ind],
                               extensible =
                               vapply(expansions[pos[ind]],
                                      function(e)
                                          verifiable(e$Extensible),
                                      NA),
                               stringsAsFactors = FALSE)
        }

        ## Replace expansions by their labels from the license db.
        ## (As these are unique, we can always easily get the full
        ## expansions back.)
        expansions <- lapply(expansions, `[[`, "Labels")
        ## Components which are "Unlimited" or "file LICEN[CS]E" have
        ## empty expansions:
        ind <- grepl(sprintf("^(Unlimited|%s)$", re), components)
        if(any(ind)) expansions[ind] <- as.list(components[ind])
        ## Components with license extensions have this dropped in the
        ## expansion.
        m <- regexpr(sprintf("\\+ *%s$", re), components)
        ind <- (m > -1L)
        expansions[ind] <-
            Map(paste, expansions[ind], regmatches(components, m))
    }

    if(any(startsWith(components, "Part of R"))) { # base package
        is_verified <- is_FOSS <- TRUE
        restricts_use <- FALSE
    }

    .make_results(is_canonical = is_canonical,
                  bad_components = bad_components,
                  is_standardizable = is_standardizable,
                  standardization = standardization,
                  is_verified = is_verified,
                  components = components,
                  expansions = expansions,
                  extensions = extensions,
                  pointers = pointers,
                  is_FOSS = is_FOSS,
                  restricts_use = restricts_use)
}

.standardize_license_components <-
function(x)
{
    ## Map each standardizable (variant) spec in x to its standardized
    ## form via the lookup table.
    db <- .standardizable_license_specs_db
    db$ospecs[match(x, db$ispecs)]
}

## Vectorized variant of analyze_license(): analyze the license specs
## in x and return a data frame of results with one row per element of
## x.  If a package db with License_is_FOSS and/or
## License_restricts_use columns is given, non-NA entries in these
## override the computed is_FOSS / is_verified / restricts_use values.
analyze_licenses <-
function(x, db = NULL)
{
    x <- as.character(x)
    if(!length(x)) return(NULL)
    ## As analyzing licenses is costly, only analyze the unique specs.
    v <- unique(x)
    out <- as.data.frame(do.call(rbind, lapply(v, analyze_license)),
                         stringsAsFactors = FALSE)
    ## rbind() of lists gives list columns; flatten the scalar-valued
    ## ones back to atomic vectors.
    pos <- match(c("is_empty", "is_canonical", "is_standardizable",
                   "is_verified", "standardization", "is_FOSS",
                   "restricts_use"),
                 names(out))
    out[pos] <- lapply(out[pos], unlist)
    ## And re-match specs to the unique specs.
    out <- out[match(x, v), ]
    rownames(out) <- NULL
    if(!is.null(db)) {
        ## db should be a package db (data frame or character matrix)
        ## with rows corresponding to the elements of x.
        cnms <- colnames(db)
        if(!is.na(pos <- match("License_is_FOSS", cnms))) {
            lif <- db[, pos]
            pos <- which(!is.na(lif))
            out$is_FOSS[pos] <- out$is_verified[pos] <-
                (lif[pos] == "yes")
            ## is_FOSS implies !restricts_use:
            pos <- pos[lif[pos] == "yes"]
            out$restricts_use[pos] <- FALSE
        }
        if(!is.na(pos <- match("License_restricts_use", cnms))) {
            lru <- db[, pos]
            pos <- which(!is.na(lru))
            out$restricts_use[pos] <- (lru[pos] == "yes")
            ## restricts_use implies !is_FOSS:
            pos <- pos[lru[pos] == "yes"]
            out$is_FOSS[pos] <- out$is_verified[pos] <- FALSE
        }
    }
    out
}

## Build a license db (Package, Version, License, License_is_FOSS,
## License_restricts_use and Maintainer) for the packages in a
## repository directory.  If 'dir' is missing and the CRAN repos
## option points to a local file:// mirror, its src/contrib is used;
## otherwise a missing 'dir' will error downstream -- TODO confirm
## intended.
build_license_db <-
function(dir, unpacked = FALSE)
{
    CRAN <- getOption("repos")["CRAN"]
    if(missing(dir) && substr(CRAN, 1L, 7L) == "file://")
        dir <- file.path(substring(CRAN, 8L), "src", "contrib")

    fields <- c("License", "License_is_FOSS", "License_restricts_use",
                "Maintainer")
    db <- .build_repository_package_db(dir, fields, unpacked = unpacked)
    ## Actually, for Omegahat this is not a good idea as this retains
    ## old versions in the "main" src/contrib directory.  But let's not
    ## worry about this for now ...

    db <- do.call(rbind, db)

    ## Retain what is needed ...
    data.frame(db[ , c("Package", "Version", fields)],
               stringsAsFactors = FALSE)
}

analyze_licenses_in_license_db <-
function(db)
{
    ## Augment a license db with the results of analyzing its License
    ## column.  The License_is_FOSS and License_restricts_use columns
    ## are kept for now, so that the is_FOSS and restricts_use values
    ## obtained from these can still be identified.
    cbind(db, analyze_licenses(db$License, db))
}

analyze_licenses_in_repository <-
function(dir, unpacked = FALSE, full = TRUE)
{
    ## Build the license db for a repository and analyze it.  With
    ## full = FALSE, only the highest available version of each
    ## package is kept (such an option might be useful for
    ## build_license_db() itself).
    db <- build_license_db(dir, unpacked)
    if(!full)
        db <- .remove_stale_dups(db)
    analyze_licenses_in_license_db(db)
}

summarize_license_db <-
function(db)
{
    ## Group the packages in a license db by license spec and print a
    ## description list mapping each spec to the packages using it.
    ## Returns the summary data frame invisibly.
    ids <- db$Package
    ## Disambiguate duplicated package names by appending the version.
    if(any(duplicated(ids)))
        ids <- sprintf("%s_%s", ids, db$Version)
    groups <- split(ids, db$License)
    out <- data.frame(Licenses = names(groups),
                      stringsAsFactors = FALSE)
    ## Assigning afterwards gets the list column into the data frame
    ## without needing I().
    out$Packages <- groups
    descriptions <- vapply(out$Packages,
                           function(p) paste(unique(p), collapse = " "),
                           "")
    cat(formatDL(out$Licenses, descriptions, style = "list"),
        sep = "\n\n")
    invisible(out)
}

expand_license_spec_component_from_db <-
function(x)
{
    ## Determine the license entries from the db matching a license
    ## spec component: returns the matching rows of the license db, or
    ## NULL for components with no db expansion ("Unlimited" and
    ## pointers to license files).

    ldb <- R_license_db()
    ldb_vars <- R_license_db_vars()

    ## Does 'version' satisfy every "op version" term (e.g. ">= 2.1")
    ## in 'constraints'?
    .numeric_version_meets_constraints_p <-
    function(version, constraints)
    {
        version <- as.numeric_version(version)
        for(term in constraints) {
            re <- ldb_vars$re_single_version_spec
            op     <- sub(re, "\\1", term)
            target <- sub(re, "\\2", term)
            if(!do.call(op, list(version, target)))
                return(FALSE)
        }
        TRUE
    }

    ## "Unlimited" and license file pointers have no db expansion.
    ## (Bug fix: grepl() takes the pattern first, so the previous
    ## grepl(x, ldb_vars$re_license_file) had the arguments swapped
    ## and never recognized license file components.)
    if(x == "Unlimited" ||
       grepl(ldb_vars$re_license_file, x))
        return(NULL)

    ## Drop possible license extension.
    x <- sub(ldb_vars$re_license_extension, "", x)

    if(grepl(re_anchor(ldb_vars$re_sss), x)) {
        ## Standard short specification: direct table lookup.
        pos <- ldb_vars$tab_sss[x]
        ldb[pos, ]
    }
    else if(grepl(re_anchor(ldb_vars$re_unversioned), x)) {
        pos <- ldb_vars$tab_unversioned[x]
        ldb[pos, ]
    }
    else if(grepl(re <-
                  re_anchor(ldb_vars$re_versioned_style_A),
                  x)) {
        ## Extract name/abbrev and version spec.
        v <- sub(re, "\\2", x)
        x <- sub(re, "\\1", x)
        ## First, find the matching entries matching the name/abbrev.
        pos <- ldb_vars$tab_versioned_style_A[[x]]
        entries <- ldb[pos, ]
        ## Now determine the entries satisfying the version spec.
        v <- sub("[[:space:]]*\\((.*)\\)[[:space:]]*", "\\1", v)
        if(nzchar(v)) {
            constraints <-
                unlist(strsplit(v, "[[:space:]]*,[[:space:]]*"))
            entries <-
                entries[vapply(entries$Version,
                               .numeric_version_meets_constraints_p,
                               constraints,
                               FUN.VALUE = NA), ]
        }
        entries
    }
    else if(grepl(re_anchor(ldb_vars$re_versioned_style_B),
                  x)) {
        ## Normalize an optional "[Vv]ersion" before the number, then
        ## look up the normalized form.
        re <- sprintf("[[:space:]]+([Vv]ersion[[:space:]]+)?(%s)",
                      .standard_regexps()$valid_numeric_version)
        x <- sub(re, " \\2", x)
        pos <- ldb_vars$tab_versioned_style_B[x]
        ldb[pos, ]
    }
    else if(grepl(re_anchor(ldb_vars$re_versioned_style_C),
                  x)) {
        pos <- ldb_vars$tab_versioned_style_C[x]
        ldb[pos, ]
    }

}

.license_component_is_for_stub_and_ok <-
function(com, dir)
{
    ## Check a "<stub> + file LICEN[CS]E" license component against the
    ## pointed-to file in dir.  Returns 0L if the file is a valid
    ## completion of the stub template, or a non-zero code indicating
    ## the problem otherwise.
    parts <-
        unlist(strsplit(com, "[[:space:]]*\\+[[:space:]]*file *"))
    ## Should really allow getting this from R_license_db_vars().

    template_fields <-
        c(rep.int(list(c("YEAR", "COPYRIGHT HOLDER")),
                  4L),
          rep.int(list(c("YEAR", "COPYRIGHT HOLDER", "ORGANIZATION")),
                  2L))
    names(template_fields) <-
        c("MIT License", "MIT",
          "BSD 2-clause License", "BSD_2_clause",
          "BSD 3-clause License", "BSD_3_clause")

    wanted <- template_fields[[parts[1L]]]
    ## 1L: not a known stub license.
    if(is.null(wanted)) return(1L)

    found <- tryCatch(read.dcf(file.path(dir, parts[2L])),
                      error = identity)
    ## 2L: license file missing or unreadable.
    if(inherits(found, "error"))
        return(2L)
    ## 3L: wrong set of fields in the license file.
    if(!identical(sort(colnames(found)),
                  sort(wanted)))
        return(3L)
    ## 4L: some field is missing or empty.
    if(!all(!is.na(found) & nzchar(found)))
        return(4L)

    0L
}

## Read a machine-readable debian/copyright file, see
## <https://www.debian.org/doc/packaging-manuals/copyright-format/1.0/>.
## Returns NULL if the file cannot be read or its Format field does
## not look like the copyright-format spec; otherwise a list with the
## header paragraph, the Files paragraphs and the stand-alone License
## paragraphs.  With keep = TRUE, fields missing from the dcf are
## filled with NA columns.
read_debian_copyright_file <-
function(file, keep = TRUE)
{
    ## See
    ## <https://www.debian.org/doc/packaging-manuals/copyright-format/1.0/>.

    ## Select columns 'vars' for rows 'i' of matrix x; with keep = TRUE
    ## variables not present become NA columns so all requested names
    ## are always returned.
    select <- function(x, i, vars, keep = TRUE) {
        pos <- match(vars, colnames(x), nomatch = 0L)
        if(keep && any(!pos)) {
            x <- cbind(x, NA_character_)
            pos[!pos] <- ncol(x)
        }
        y <- x[i, pos, drop = FALSE]
        colnames(y) <- vars
        y
    }

    fields_in_header_para <-
        c("Format", "Upstream-Name", "Upstream-Contact", "Source",
          "Disclaimer", "Comment", "License", "Copyright")
    fields_in_files_para <-
        c("Files", "Copyright", "License", "Comment")
    fields_in_license_para <-
        c("License", "Comment")

    x <- tryCatch(read.dcf(file, keep.white = TRUE),
                  error = identity)
    if(inherits(x, "error") || !length(x)) return()

    ## The header paragraph is always the first one.
    header <- drop(select(x, 1L, fields_in_header_para, keep))
    if(is.na(fmt <- header["Format"]) ||
       !grepl("copyright-format", fmt))
        return()

    ## NOTE(review): assumes a "Files" column exists, i.e. at least one
    ## Files paragraph in the file -- confirm for degenerate inputs.
    pos <- which(!is.na(x[, "Files"]))

    ## Remaining paragraphs (neither header nor Files) are taken as
    ## stand-alone License paragraphs.
    list(header = header,
         files =
             select(x, pos, fields_in_files_para, keep),
         licenses =
             select(x, - c(1L, pos), fields_in_license_para, keep))
}
#  File src/library/tools/R/logging.R
#  Part of the R package, https://www.R-project.org
#
#  Copyright (C) 1995-2015 The R Core Team
#
#  This program is free software; you can redistribute it and/or modify
#  it under the terms of the GNU General Public License as published by
#  the Free Software Foundation; either version 2 of the License, or
#  (at your option) any later version.
#
#  This program is distributed in the hope that it will be useful,
#  but WITHOUT ANY WARRANTY; without even the implied warranty of
#  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
#  GNU General Public License for more details.
#
#  A copy of the GNU General Public License is available at
#  https://www.R-project.org/Licenses/

### emulation of Perl Logfile.pm

newLog <-
function(filename = "")
{
    ## Create a new check log: an environment holding the output
    ## connection (0L when logging to the console only), the log file
    ## name, the "stars" line prefix, and error/warning/note counters.
    Log <- new.env(parent = emptyenv())
    Log$con <- if(nzchar(filename)) file(filename, "wt") else 0L
    Log$filename <- filename
    Log$stars <- "*"
    Log$errors <- 0L
    Log$warnings <- 0L
    Log$notes <- 0L
    Log
}

closeLog <-
function(Log)
{
    ## Close the log file connection; connection numbers <= 2 are
    ## stdin/stdout/stderr (or the console sentinel 0L) and must not
    ## be closed.
    if(Log$con > 2L)
        close(Log$con)
}

printLog <-
function(Log, ...)
{
    ## Write the arguments (cat-style, no separator) to the console
    ## and, when a log file connection is open, to the log as well.
    ## Text in 'single quotes' is re-quoted via sQuote().
    requote <- function(x) gsub("'([^']*)'", sQuote("\\1"), x)
    pieces <- lapply(list(...), requote)
    do.call(cat, c(pieces, sep = ""))
    if (Log$con > 0L)
        do.call(cat, c(pieces, sep = "", file = Log$con))
}

printLog0 <-
function(Log, ...)
{
    ## Like printLog(), but emits the arguments verbatim (no quote
    ## substitution).
    args <- list(...)
    do.call(cat, c(args, sep = ""))
    if (Log$con > 0L)
        do.call(cat, c(args, sep = "", file = Log$con))
}

## unused
## setStars <- function(Log, stars) {Log$stars <- stars; Log}

checkingLog <-
function(Log, ...)
{
    ## Start a "* checking <...> ..." line (no trailing newline; the
    ## result is added later via resultLog() and friends).
    printLog(Log, Log$stars, " checking ", ..., " ...")
}

creatingLog <-
function(Log, text)
{
    ## Start a "* creating <text> ..." line (no trailing newline).
    printLog(Log, Log$stars, " creating ", text, " ...")
}

messageLog <-
function(Log, ...)
{
    ## Write a complete "* <...>" log line, newline-terminated.
    printLog(Log, Log$stars, " ", ..., "\n")
}

resultLog <-
function(Log, text)
{
    ## Finish a checking/creating line with its result (e.g. "OK").
    printLog(Log, " ", text, "\n")
}

errorLog <-
function(Log, ...)
{
    ## Record an ERROR result, optionally followed by explanatory
    ## text, and bump the error counter.
    resultLog(Log, "ERROR")
    msg <- paste0(...)
    if (length(msg) && nzchar(msg))
        printLog(Log, ..., "\n")
    Log$errors <- Log$errors + 1L
}

## <NOTE>
## Perhaps the arguments to errorLog(), warningLog() and noteLog()
## should be synchronized?
## </NOTE>

warningLog <-
function(Log, text = "")
{
    ## Record a WARNING result, optionally followed by explanatory
    ## text, and bump the warning counter.
    resultLog(Log, "WARNING")
    if(nzchar(text))
        printLog(Log, text, "\n")
    Log$warnings <- Log$warnings + 1L
}

noteLog <-
function(Log, text = "")
{
    ## Record a NOTE result, optionally followed by explanatory text,
    ## and bump the note counter.
    resultLog(Log, "NOTE")
    if(nzchar(text))
        printLog(Log, text, "\n")
    Log$notes <- Log$notes + 1L
}

infoLog <-
function(Log, text = "")
{
    ## Record an INFO result with optional explanatory text; no
    ## counter is bumped for INFO.
    resultLog(Log, "INFO")
    if(nzchar(text))
        printLog(Log, text, "\n")
}

summaryLog <-
function(Log)
{
    ## Finish the log: print "DONE" and a status line summarizing the
    ## error/warning/note counts ("Status: OK" if there were none).
    messageLog(Log, "DONE")
    message("")
    tallies <- c(ERROR = Log$errors,
                 WARNING = Log$warnings,
                 NOTE = Log$notes)
    tallies <- tallies[tallies > 0L]
    if(length(tallies)) {
        ## Pluralize each non-zero category, e.g. "2 WARNINGs".
        parts <- sprintf("%d %s%s",
                         tallies,
                         names(tallies),
                         ifelse(tallies > 1L, "s", ""))
        printLog(Log,
                 sprintf("Status: %s\n", paste(parts, collapse = ", ")))
        message(sprintf("See\n  %s\nfor details.", sQuote(Log$filename)))
    }
    else
        printLog(Log, "Status: OK\n")
    message("")
}
#  File src/library/tools/R/makeLazyLoad.R
#  Part of the R package, https://www.R-project.org
#
#  Copyright (C) 1995-2022 The R Core Team
#
#  This program is free software; you can redistribute it and/or modify
#  it under the terms of the GNU General Public License as published by
#  the Free Software Foundation; either version 2 of the License, or
#  (at your option) any later version.
#
#  This program is distributed in the hope that it will be useful,
#  but WITHOUT ANY WARRANTY; without even the implied warranty of
#  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
#  GNU General Public License for more details.
#
#  A copy of the GNU General Public License is available at
#  https://www.R-project.org/Licenses/

## Convert the R code of an installed package into a lazy-load
## database <pkgpath>/R/<package>.rdx/.rdb.  The package's namespace
## must not already be loaded; it is loaded here with partial = TRUE.
## set.install.dir, if given, is passed on to makeLazyLoadDB().
code2LazyLoadDB <-
    function(package, lib.loc = NULL,
             keep.source = getOption("keep.source.pkgs"),
             keep.parse.data = getOption("keep.parse.data.pkgs"),
             compress = TRUE, set.install.dir = NULL)
{
    pkgpath <- find.package(package, lib.loc, quiet = TRUE)
    if(!length(pkgpath))
        stop(packageNotFoundError(package, lib.loc, sys.call()))
    dbbase <- file.path(pkgpath, "R", package)
    if (packageHasNamespace(package, dirname(pkgpath))) {
        ## Refuse to proceed if the namespace is already loaded, as we
        ## need a fresh (partial) load below.
        if (! is.null(.getNamespace(as.name(package))))
            stop("namespace must not be already loaded")
        ns <- suppressPackageStartupMessages(loadNamespace(
                  package = package, lib.loc = lib.loc,
                  keep.source = keep.source, keep.parse.data = keep.parse.data,
                  partial = TRUE))
        makeLazyLoadDB(ns, dbbase, compress = compress,
                       set.install.dir = set.install.dir)
    }
    else
        stop("all packages should have a NAMESPACE")
}

sysdata2LazyLoadDB <- function(srcFile, destDir, compress = TRUE)
{
    ## Load the objects saved in srcFile into a fresh environment and
    ## write them out as a lazy-load database "sysdata" in destDir.
    env <- new.env(hash = TRUE)
    load(srcFile, env)
    makeLazyLoadDB(env, file.path(destDir, "sysdata"), compress = compress)
}

## List the data sets of a package, given either an installed package
## ('package'/'lib.loc') or a package directory ('dir').  Returns a
## named list mapping each data set name to the name(s) of the objects
## it provides, or NULL if the package has no data directory.  Uses
## data/Rdata.rds or (if use_datalist) data/datalist when available,
## otherwise loads each data file to find out.
list_data_in_pkg <-
function(package, lib.loc = NULL, dir, use_datalist = TRUE)
{
    if(!missing(package)) { # installed package
        dir <- find.package(package, lib.loc, quiet = TRUE)
        if(!length(dir))
            stop(packageNotFoundError(package, lib.loc, sys.call()))
    } else { # the dir case (source or installed pkgpath)
        if(missing(dir))
            stop("you must specify 'package' or 'dir'")
        if(!dir.exists(dir))
            stop(gettextf("directory '%s' does not exist", dir), domain = NA)
        dir <- file_path_as_absolute(dir)
        package <- character(0L)
        lib.loc <- NULL
    }
    if(dir.exists(dataDir <- file.path(dir, "data"))) {
        if(file.exists(sv <- file.path(dataDir, "Rdata.rds"))) {
            ## Lazy-loaded data: the index already has what we need.
            ans <- readRDS(sv)
        } else if(file.exists(sv <- file.path(dataDir, "datalist"))
                  && use_datalist
                  && !file.info(sv)$isdir) { # package cp4p had a directory
            ## BioC mess this file up, of course!
            ## Each datalist line is "name" or "name: obj1 obj2 ...".
            ans <- strsplit(readLines(sv, warn = FALSE), ":")
            nms <- lapply(ans, function(x) x[1L])
            ans <- lapply(ans, function(x)
                          if(length(x) == 1L) x[1L] else
                          strsplit(x[2L], " +")[[1L]][-1L])
            names(ans) <- nms
        } else {
            if (!length(package)) { # the dir case
                ## data(package=character(0L)) will look in getwd()/data
                owd <- setwd(dir)
                on.exit(setwd(owd))
            }
            files <- list_files_with_type(dataDir, "data")
            ## omit compression extensions
            files <- unique(basename(file_path_sans_ext(files, TRUE)))
            ans <- vector("list", length(files))
            dataEnv <- new.env(hash=TRUE)
            names(ans) <- files
            for(f in files) {
                ## This occasionally fails on uninstalled sources,
                ## hence the tryCatch().  And e.g. CHNOSZ gave
                ## messages and cricketr gave warnings.
                tryCatch(suppressMessages(suppressWarnings(utils::data(list = f, package = package, lib.loc = lib.loc, envir = dataEnv))), error = identity)
                ans[[f]] <- ls(envir = dataEnv, all.names = TRUE)
                rm(list = ans[[f]], envir = dataEnv)
            }
        }
        ans
    } else NULL
}

## Convert a package's data directory to a lazy-load database
## (data/Rdata.rdx/.rdb plus the data set index data/Rdata.rds),
## removing the original data files afterwards.
data2LazyLoadDB <- function(package, lib.loc = NULL, compress = TRUE)
{
    ## NOTE(review): the warn level is changed globally and not
    ## restored on exit -- presumably acceptable for the install-time,
    ## separate-process use; confirm.
    options(warn=1)
    pkgpath <- find.package(package, lib.loc, quiet = TRUE)
    if(!length(pkgpath))
        stop(packageNotFoundError(package, lib.loc, sys.call()))
    dataDir <- file.path(pkgpath, "data")
    ## set the encoding for text files to be read, if specified
    enc <- .read_description(file.path(pkgpath, "DESCRIPTION"))["Encoding"]
    if(!is.na(enc)) {
        op <- options(encoding=enc)
        on.exit(options(encoding=op[[1L]]))
    }
    if(dir.exists(dataDir)) {
        if(file.exists(file.path(dataDir, "Rdata.rds")) &&
	    file.exists(file.path(dataDir, paste0(package, ".rdx"))) &&
	    file.exists(file.path(dataDir, paste0(package, ".rdb"))) ){
            warning("package seems to be using lazy loading for data already")
        }
	else {
            dataEnv <- new.env(hash = TRUE)
            tmpEnv <- new.env()
            f0 <- files <- list_files_with_type(dataDir, "data")
            ## omit compression extensions
            files <- unique(basename(file_path_sans_ext(files, TRUE)))
            dlist <- vector("list", length(files))
            names(dlist) <- files
            loaded <- character(0L)
            for(f in files) {
                ## Load into dataEnv (accumulates everything for the
                ## db) and separately into tmpEnv to record which
                ## objects each data call creates.
                utils::data(list = f, package = package, lib.loc = lib.loc,
                        envir = dataEnv, overwrite = TRUE)
                utils::data(list = f, package = package, lib.loc = lib.loc,
                        envir = tmpEnv, overwrite = TRUE)
                tmp <- ls(envir = tmpEnv, all.names = TRUE)
                rm(list = tmp, envir = tmpEnv)
                dlist[[f]] <- tmp
                loaded <- c(loaded, tmp)
            }
            dup <- duplicated(loaded)
            if(any(dup))
                warning(sprintf(ngettext(sum(dup),
                                         "object %s is created by more than one data call",
                                         "objects %s are created by more than one data call"),
                                paste(sQuote(loaded[dup]), collapse=", ")),
                        call. = FALSE, domain = NA)

            if(length(loaded)) {
                ## Write the db and its index, then remove the original
                ## data files.
                dbbase <- file.path(dataDir, "Rdata")
                makeLazyLoadDB(dataEnv, dbbase, compress = compress)
                saveRDS(dlist, file.path(dataDir, "Rdata.rds"),
                         compress = compress)
                unlink(f0)
            }
        }
    }
}

makeLazyLoadDB <- function(from, filebase, compress = TRUE, ascii = FALSE,
                           variables, set.install.dir = NULL)
{
    ## pre-empt any problems with interpretation of 'ascii'
    ascii <- as.logical(ascii)
    if (is.na(ascii)) stop("'ascii' must be TRUE or FALSE", domain = NA)
    ascii <- as.integer(ascii)

    envlist <- function(e)
        .Internal(getVarsFromFrame(ls(e, all.names = TRUE), e, FALSE))

    ## This can be inefficient if there are many environments,
    ## e.g. from source references (PR18236), but has to be used in
    ## initial bootstrapping since hash tables in the utils package
    ## are not yet available.
    envtable <- function() {
        idx <- 0
        envs <- NULL
        enames <- character(0L)
        find <- function(v, keys, vals) {
            for (i in seq_along(keys))
                if (identical(v, keys[[i]]))
                    return(vals[i])
	    NULL
	}
        getname <- function(e) find(e, envs, enames)
        insert <- function(e) {
            idx <<- idx + 1
            name <- paste0("env::", idx)
            envs <<- c(e, envs)
            enames <<- c(name, enames)
            name
        }
        list(insert = insert, getname = getname)
    }
    ## Use a hash table once utils is fully available.
    if (file.exists(system.file("R", "utils.rdx", package = "utils")) &&
        is.environment(tryCatch(loadNamespace("utils"), error=identity)))
        envtable <- function() {
            idx <- 0
            h <- utils::hashtab()
            getname <- function(e) utils::gethash(h, e)
            insert <- function(e) {
                idx <<- idx + 1
                name <- paste0("env::", idx)
                utils::sethash(h, e, name)
                name
            }
            list(insert = insert, getname = getname)
        }

    lazyLoadDBinsertValue <- function(value, file, ascii, compress, hook)
        .Internal(lazyLoadDBinsertValue(value, file, ascii, compress, hook))

    lazyLoadDBinsertListElement <- function(x, i, file, ascii, compress, hook)
        .Internal(lazyLoadDBinsertValue(x[[i]], file, ascii, compress, hook))

    lazyLoadDBinsertVariable <- function(n, e, file, ascii, compress, hook) {
        x <- .Internal(getVarsFromFrame(n, e, FALSE))
        .Internal(lazyLoadDBinsertValue(x[[1L]], file, ascii, compress, hook))
    }

    mapfile <- paste0(filebase, ".rdx")
    datafile <- paste0(filebase, ".rdb")
    close(file(datafile, "wb")) # truncate to zero
    table <- envtable()
    varenv <- new.env(hash = TRUE)
    envenv <- new.env(hash = TRUE)

    # bindings of names from "lazy" will be serialized independently so that
    # they can be loaded lazily, after the other bindings have already been
    # eagerly loaded

    lazyenvhook <- function(e, bindings, lazy) {
        bnames <- names(bindings)
        lnames <- intersect(bnames, lazy)
        if (length(lnames)) {
            enames <- setdiff(bnames, lazy)
            edata <- list(bindings = bindings[enames],
                          enclos = parent.env(e),
                          attributes = attributes(e),
                          isS4 = isS4(e),
                          locked = environmentIsLocked(e))
            ekey <- lazyLoadDBinsertValue(edata, datafile, ascii,
                          compress, envhook)
            lkeys <- lapply(lnames, function(varname) {
                lazyLoadDBinsertValue(bindings[[varname]], datafile,
                                      ascii, compress, envhook)
            })
            names(lkeys) <- lnames
            list(eagerKey = ekey, lazyKeys = lkeys)
        }
    }

    envhook <- function(e) {
        if (is.environment(e)) {
            name <- table$getname(e)
            if (is.null(name)) {
                name <- table$insert(e)
                bindings <- envlist(e)
                key <- NULL

                if (!is.null(set.install.dir)) {
                    if (inherits(e, "srcfilecopy") &&
                            "filename" %in% names(bindings))
                        bindings[["filename"]] <- set.install.dir

                    if (identical(e, nsinfo) && "path" %in% names(bindings))
                        bindings[["path"]] <- set.install.dir
                }

                if (inherits(e, "srcfile"))
                    key <- lazyenvhook(e, bindings, c("lines", "parseData"))

                if (is.null(key)) {
                    data <- list(bindings = bindings,
                                 enclos = parent.env(e),
                                 attributes = attributes(e),
                                 isS4 = isS4(e),
                                 locked = environmentIsLocked(e))
                    key <- lazyLoadDBinsertValue(data, datafile, ascii,
                                                 compress, envhook)
                }
                assign(name, key, envir = envenv)
            }
            name
        }
    }

    if (is.null(from) || is.environment(from)) {
        if (! missing(variables))
            vars <- variables
        else vars <- ls(from, all.names = TRUE)
    }
    else if (is.list(from)) {
        vars <- names(from)
        if (length(vars) != length(from) || any(!nzchar(vars)))
            stop("source list must have names for all elements")
    }
    else stop("source must be an environment or a list")

    if (!is.null(set.install.dir) && is.environment(from)
            && ".__NAMESPACE__." %in% vars) {
        x <- .Internal(getVarsFromFrame(".__NAMESPACE__.", from, FALSE))
        nsinfo <- x[[1L]]
    } else
        nsinfo <- NULL

    for (i in seq_along(vars)) {
        key <- if (is.null(from) || is.environment(from))
            lazyLoadDBinsertVariable(vars[i], from, datafile,
                                     ascii, compress,  envhook)
        else lazyLoadDBinsertListElement(from, i, datafile, ascii,
                                         compress, envhook)
        assign(vars[i], key, envir = varenv)
    }

    vals <- lapply(vars, get, envir = varenv, inherits = FALSE)
    names(vals) <- vars

    rvars <- ls(envenv, all.names = TRUE)
    rvals <- lapply(rvars, get, envir = envenv, inherits = FALSE)
    names(rvals) <- rvars

    val <- list(variables = vals, references = rvals,
                compressed = compress)
    saveRDS(val, mapfile)
}

makeLazyLoading <-
    function(package, lib.loc = NULL, compress = TRUE,
             keep.source = getOption("keep.source.pkgs"),
             keep.parse.data = getOption("keep.parse.data.pkgs"),
             set.install.dir = NULL)
{
    ## Convert an installed package to lazy loading: dump its R code
    ## into a lazy-load database via code2LazyLoadDB() and replace the
    ## code file by the standard loader stub.
    if(!is.logical(compress) && compress %notin% c(2,3))
	stop(gettextf("invalid value for '%s' : %s", "compress",
		      "should be FALSE, TRUE, 2 or 3"), domain = NA)
    ## Report warnings as they occur (but do not downgrade warn = 2).
    if(!getOption("warn")) options(warn = 1L)

    if (package == "base")
        stop("this cannot be used for package 'base'")

    findpack <- function(package, lib.loc) {
        ## Locate the installed package, with a classed error when absent.
        path <- find.package(package, lib.loc, quiet = TRUE)
        if(!length(path))
            stop(packageNotFoundError(package, lib.loc, sys.call()))
        path
    }

    pkgpath <- findpack(package, lib.loc)
    codeFile <- file.path(pkgpath, "R", package)
    loaderFile <- file.path(R.home("share"), "R", "nspackloader.R")

    if (!file.exists(codeFile)) {
        warning("package contains no R code")
        return(invisible())
    }
    ## A code file whose size equals the loader stub's is assumed to
    ## already be the stub.
    if (file.size(codeFile) == file.size(loaderFile)) {
        warning("package seems to be using lazy loading already")
    } else {
        code2LazyLoadDB(package, lib.loc = lib.loc,
                        keep.source = keep.source,
                        keep.parse.data = keep.parse.data,
                        compress = compress,
                        set.install.dir = set.install.dir)
        file.copy(loaderFile, codeFile, TRUE)
    }

    invisible()
}
#  File src/library/tools/R/md5.R
#  Part of the R package, https://www.R-project.org
#
#  Copyright (C) 1995-2024 The R Core Team
#
#  This program is free software; you can redistribute it and/or modify
#  it under the terms of the GNU General Public License as published by
#  the Free Software Foundation; either version 2 of the License, or
#  (at your option) any later version.
#
#  This program is distributed in the hope that it will be useful,
#  but WITHOUT ANY WARRANTY; without even the implied warranty of
#  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
#  GNU General Public License for more details.
#
#  A copy of the GNU General Public License is available at
#  https://www.R-project.org/Licenses/

md5sum <- function(files, bytes) {
    ## Compute MD5 digests, either of the given files or of a single
    ## raw vector.  Exactly one of 'files' and 'bytes' may be given.
    if (!missing(files) && !missing(bytes))
        stop("'files' and 'bytes' are mutually exclusive")
    if (missing(bytes)) {
        ## File mode: expand '~' and return a vector named by the
        ## (expanded) file paths.
        files <- path.expand(files)
        structure(.Call(C_Rmd5, files), names=files)
    } else {
        if (!is.raw(bytes)) stop("'bytes' must be a raw vector")
        .Call(C_Rmd5, bytes)
    }
}

.installMD5sums <- function(pkgDir, outDir = pkgDir)
{
    ## Compute MD5 checksums of all files under 'pkgDir' and write
    ## them to an 'MD5' file in 'outDir', one "<md5> *<path>" line per
    ## file ('*' marks binary mode, as understood by md5sum -c).
    dot <- getwd()
    if (is.null(dot))
        stop("current working directory cannot be ascertained")
    setwd(pkgDir)
    ## Restore the working directory even if md5sum() signals an error
    ## (re-running setwd(dot) on the normal path is harmless).
    on.exit(setwd(dot))
    x <- md5sum(dir(".", recursive=TRUE))
    setwd(dot)
    ## A pre-existing MD5 file must not checksum itself.
    x <- x[names(x) != "MD5"]
    cat(paste(x, names(x), sep=" *"), sep="\n",
        file=file.path(outDir, "MD5"))
}

checkMD5sums <- function(package, dir)
{
    ## Verify the 'MD5' file of an installed package against the
    ## actual checksums of the installed files.  Returns TRUE if all
    ## listed files exist and match, FALSE otherwise, and NA when the
    ## package directory or its MD5 file cannot be found.  Problems
    ## are reported on stdout via cat().
    if(missing(dir)) dir <- find.package(package, quiet = TRUE)
    if(length(dir) != 1L) return(NA)
    md5file <- file.path(dir, "MD5")
    if(!file.exists(md5file)) return(NA)
    inlines <- readLines(md5file)
    ## Each line is "<md5sum> [ *]<filepath>": split on the first space.
    xx <- sub("^([0-9a-fA-F]*)(.*)", "\\1", inlines)
    nmxx <- names(xx) <- sub("^[0-9a-fA-F]* [ |*](.*)", "\\1", inlines)
    dot <- getwd()
    if (is.null(dot))
        stop("current working directory cannot be ascertained")
    setwd(dir)
    ## Restore the working directory even if md5sum() signals an error
    ## (re-running setwd(dot) on the normal path is harmless).
    on.exit(setwd(dot))
    x <- md5sum(dir(dir, recursive = TRUE))
    setwd(dot)
    ## The MD5 file does not (and cannot) list its own checksum.
    x <- x[names(x) != "MD5"]
    nmx <- names(x)
    res <- TRUE
    ## Files listed in MD5 but absent from the installation.
    not.here <- (nmxx %notin% nmx)
    if(any(not.here)) {
        res <- FALSE
        if (sum(not.here) > 1L)
            cat("files", paste(sQuote(nmxx[not.here]), collapse = ", "),
                "are missing\n", sep = " ")
        else
            cat("file", sQuote(nmxx[not.here]), "is missing\n", sep = " ")
    }
    ## Compare checksums only for files which are present.
    nmxx <- nmxx[!not.here]
    diff <- xx[nmxx] != x[nmxx]
    if(any(diff)) {
        res <- FALSE
        files <- nmxx[diff]
        if(length(files) > 1L)
            cat("files", paste(sQuote(files), collapse = ", "),
                "have the wrong MD5 checksums\n", sep = " ")
        else cat("file", sQuote(files), "has the wrong MD5 checksum\n")
    }
    res
}
#  File src/library/tools/R/news.R
#  Part of the R package, https://www.R-project.org
#
#  Copyright (C) 1995-2025 The R Core Team
#
#  This program is free software; you can redistribute it and/or modify
#  it under the terms of the GNU General Public License as published by
#  the Free Software Foundation; either version 2 of the License, or
#  (at your option) any later version.
#
#  This program is distributed in the hope that it will be useful,
#  but WITHOUT ANY WARRANTY; without even the implied warranty of
#  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
#  GNU General Public License for more details.
#
#  A copy of the GNU General Public License is available at
#  https://www.R-project.org/Licenses/


## .build_news_db_from_R_NEWS <-
## function()
## {
##     db <- readNEWS(chop = "keepAll")
##     ## This currently is a list of x.y lists of x.y.z lists of
##     ## categories list of entries.
##     flatten <- function(e)
##         cbind(rep.int(names(e), lengths(e)),
##               unlist(lapply(e,
##                             function(s) {
##                                 ## Also remove leading white space and
##                                 ## trailing blank lines.
##                                 lapply(s,
##                                        function(e)
##                                            sub("[[:space:]]*$", "",
##                                                paste(sub("^ ", "", e),
##                                                      collapse = "\n")))
##                             }),
##                             use.names = FALSE))
##     db <- lapply(Reduce(c, db), flatten)
##     db <- do.call(rbind, Map(cbind, names(db), db))
##     ## Squeeze in an empty date column.
##     .make_news_db(cbind(db[, 1L], NA_character_, db[, -1L]),
##                   logical(nrow(db)))
## }

.build_news_db <-
function(package, lib.loc = NULL, format = NULL, reader = NULL)
{
    ## Build the news db for an installed package, looking for the
    ## installed news file in the order
    ##   NEWS.Rd  (new style, from inst/NEWS.Rd)
    ##   NEWS.md
    ##   NEWS     (old-style plain text; ChangeLog files are ignored)
    ## and returning invisibly when none is present.
    ## <NOTE>
    ## We had planned to eventually add support for DESCRIPTION
    ##   News/File News/Format News/Reader News/Reader@R
    ## entries.  But now that there are NEWS.Rd and NEWS.md, there
    ## seems little point in providing format/reader support, so the
    ## 'format' and 'reader' arguments are not yet used.
    ## </NOTE>
    dir <- system.file(package = package, lib.loc = lib.loc)
    ## Or maybe use find.package()?

    news_path <- function(base) {
        ## Path to an installed news file of the given name, or NULL.
        p <- file.path(dir, base)
        if(file_test("-f", p)) p else NULL
    }

    p <- news_path("NEWS.Rd")
    if(!is.null(p))
        return(.build_news_db_from_package_NEWS_Rd(p))

    p <- news_path("NEWS.md")
    if(!is.null(p))
        return(.build_news_db_from_package_NEWS_md(p))

    p <- news_path("NEWS")
    if(is.null(p))
        ## Return NULL for now, no message that there is no NEWS or
        ## ChangeLog file.
        return(invisible())

    if(!is.null(format))
        .NotYetUsed("format", FALSE)
    if(!is.null(reader))
        .NotYetUsed("reader", FALSE)

    .news_reader_default(p)
}

.news_reader_default <-
function(file)
{
    ## Default reader for old-style plain text NEWS files.
    ## Heuristically splits the file into per-version chunks (each
    ## optionally carrying a date and possibly subdivided into
    ## categories) and returns a news db.  Chunks which cannot be
    ## parsed are returned whole, flagged via the "bad" attribute.

    .collapse <- function(s) paste(s, collapse = "\n")

    lines <- readLines(file, warn = FALSE)

    ## Re-encode if necessary, using the Encoding declared in the
    ## package DESCRIPTION file.
    if(any(ind <- is.na(nchar(lines, allowNA = TRUE)))) {
        dir <- dirname(file)
        ## BUG FIX: for an inst/NEWS file the DESCRIPTION is one level
        ## further up; the original re-assigned dirname(file) (a no-op).
        if(basename(dir) == "inst")
            dir <- dirname(dir)
        ## This should now contain the DESCRIPTION file.
        encoding <-
            if(file.exists(dfile <- file.path(dir, "DESCRIPTION")))
                .read_description(dfile)["Encoding"]
            else
                NA
        if(!is.na(encoding))
            lines[ind] <- iconv(lines[ind], encoding, "")
        ## Last resort: escape any bytes which still do not convert.
        if(anyNA(nchar(lines[ind], allowNA = TRUE)))
            lines[ind] <- iconv(lines[ind], "", "", sub = "byte")
    }

    ## Save what we read in case we cannot figure out the news, in which
    ## case we simply return one entry with the whole text.
    olines <- lines
    ## Get rid of underlines and friends.
    lines <-
        lines[!grepl("^[[:space:]]*[[:punct:]]*[[:space:]]*$", lines)]

    ## Determine lines containing version numbers, without being too
    ## liberal.
    re_valid_package_name <- .standard_regexps()$valid_package_name
    re_v <- sprintf("^([[:space:]]*(%s)|(%s))(%s).*$",
                    paste0("CHANGES? *(IN|FOR).*VERSION *",
                           "|",
                           "CHANGES? *(IN|FOR|TO) *"),
                    sprintf(paste(## TeachingDemos pomp ouch
                                  "NEW IN .*",
                                  ## HyperbolicDist nls2 proto
                                  "VERSION:? *",
                                  "%s +",
                                  ## E.g., lattice:
                                  ##   Changes in lattice 0.17
                                  "CHANGES IN %s +",
                                  ## sv*
                                  "== Changes in %s +",
                                  ## tcltk2
                                  "== Version +",
                                  ## R2WinBUGS
                                  "update *",
                                  "v *",
                                  "",
                                  sep = "|"),
                            re_valid_package_name,
                            re_valid_package_name,
                            re_valid_package_name),
                    .standard_regexps()$valid_package_version
                    )
    ## Some people use
    ##   $PACKAGE version $VERSION
    ## Let us try handling this later, or ask people to write their own
    ## readers.
    ind <- grepl(re_v, lines, ignore.case = TRUE)

    ## No version lines found at all: return the whole text as a
    ## single entry with NA version/date/category.
    if(!any(ind))
        return(.make_news_db(cbind(NA_character_,
                                   NA_character_,
                                   NA_character_,
                                   .collapse(olines))))
    ## Could add an empty list of bad chunks (as none were found).

    ## Everything before the first version line is a header which will
    ## be dropped.
    if(!ind[1L]) {
        pos <- seq_len(which.max(ind) - 1L)
        lines <- lines[-pos]
        ind <- ind[-pos]
    }

    ## Try catching date entries at the end of version lines as well.
    re_d <- sprintf("^.*(%s)[[:punct:][:space:]]*$",
                    "[[:digit:]]{4}-[[:digit:]]{2}-[[:digit:]]{2}")
    ## Could try to allow for non ISO date specs ...

    ## Version lines determine the chunks, which after the version line
    ## should either start with a line tag (category) or an itemize
    ## "bullet".
    chunks <- split(lines, cumsum(ind))

    do_chunk <- function(chunk, header = NA_character_) {
        ## Process a single chunk.

        ## If there is no category header, the first line is the version
        ## line, after which the next non blank line should start with a
        ## line tag (category) or an itemize "bullet".
        if(!is.na(header))
            date <- NA_character_
        else {
            txt <- chunk[1L]
            header <- sub(re_v, "\\6", txt, ignore.case = TRUE)
            date <- if(grepl(re_d, txt, perl = TRUE))
                sub(re_d, "\\1", txt, perl = TRUE)
            else
                NA_character_
        }

        lines <- chunk[-1L]
        s <- .collapse(lines)
        if(grepl("^[[:space:]]*([o*+-])", s)) {
            ## Itemized entries: split on the detected bullet char.
            sep <- sub("^[[:space:]]*([o*+-]).*$", "\\1", s)
            ire <- sprintf("^[[:space:]]*([%s])[[:space:]]+", sep)
            ind <- grepl(ire, lines)
            list(entries =
                 sapply(split(lines, cumsum(ind)),
                        function(s)
                        sub(ire, "", .collapse(sub("^\t?", "", s)))
                        ),
                 header = header,
                 chunk = chunk,
                 date = date)
        } else {
            ## Categories should be non-empty starting in column 1.
            re_c <- "^([[:alpha:]].*)[[:space:]]*$"
            ind <- grepl(re_c, lines)
            ## If we detect neither bullet items nor categories, the
            ## chunk is in a different format than we can recognize.
            ## Return no entries, and have the finisher give the whole
            ## chunk and push it onto the bad chunk list.
            if(!any(ind)) {
                list(entries = character(),
                     header = header,
                     chunk = chunk,
                     date = date)
            } else {
                ## Recurse into the per-category sub-chunks, passing
                ## the category name as the header.
                pos <- cumsum(ind) > 0
                list(entries =
                     Map(do_chunk,
                         split(lines[pos], cumsum(ind)[pos]),
                         sub("[[:punct:]]*$", "",
                             sub(re_c, "\\1", lines[ind]))),
                     header = header,
                     chunk = chunk,
                     date = date)
            }
        }
    }

    out <- lapply(chunks, do_chunk)
    ## Now assemble pieces.
    reporter <- function(x) {
        warning(gettextf("Cannot process chunk/lines:\n%s",
                         .collapse(paste0("  ", x))),
                domain = NA, call. = FALSE)
        NULL
    }
    finisher <- function(x) {
        ## Flatten one processed chunk into rows of
        ##   version date category entry bad
        entries <- x$entries
        version <- x$header
        date <- x$date
        if(is.list(entries)) {
            ## Categorized chunk: one set of rows per category.
            do.call(rbind,
                    lapply(entries,
                           function(x) {
                               entries <- x$entries
                               bad <- if(!length(entries)) {
                                   reporter(x$chunk)
                                   entries <-
                                       sub("^[[:space:]]*", "",
                                           .collapse(x$chunk[-1L]))
                                   TRUE
                               }
                               else FALSE
                               cbind(version, date, x$header, entries,
                                     bad)
                           }))
        }
        else {
            bad <- if(!length(entries)) {
                reporter(x$chunk)
                entries <-
                    sub("^[[:space:]]*", "",
                        .collapse(x$chunk[-1L]))
                TRUE
            }
            else FALSE
            cbind(version, date, NA_character_, entries, bad)
        }
    }

    out <- do.call(rbind, lapply(out, finisher))

    ## Try to remove a common 'exdent' from the entries.
    entries <- out[, 4L]
    exdent <-
        unlist(lapply(gregexpr("\n *", entries), attr, "match.length"))
    exdent <- exdent[exdent > 1L]
    if(length(exdent)) {
        out[, 4L] <-
            gsub(sprintf("\n%s", strrep(" ", min(exdent) - 1L)),
                 "\n", entries)
    }

    ## Column 5 is the per-entry "bad" flag.
    .make_news_db(out[, -5L, drop = FALSE], as.logical(out[, 5L]))
}

.make_news_db <-
function(x, bad = NULL, classes = NULL)
{
    ## Construct a "news_db" object (a data frame) from a character
    ## matrix 'x' whose first four columns give version, date,
    ## category and entry text.  'bad' (optional) is attached as the
    ## "bad" attribute; any 'classes' are prepended to the class
    ## vector.
    db <- data.frame(x, row.names = NULL, stringsAsFactors = FALSE)
    ## Only rename the leading four columns: 'x' may carry extra
    ## columns (e.g. HTML) whose names must be kept, so we cannot set
    ## dimnames wholesale.
    names(db)[seq_len(4L)] <- c("Version", "Date", "Category", "Text")
    if(!is.null(bad)) attr(db, "bad") <- bad
    class(db) <- unique(c(classes, "news_db", "data.frame"))
    db
}

## Transform NEWS.Rd

## Layout options used when rendering NEWS.Rd to plain text (also the
## basis for the options used by .extract_news_from_Rd below).
Rd2txt_NEWS_in_Rd_options <-
    list(sectionIndent = 0L, sectionExtra = 2L,
         minIndent = 4L, code_quote = FALSE,
         underline_titles = FALSE)

Rd2txt_NEWS_in_Rd <-
function(f, out = "", outputEncoding = "UTF-8") {
    ## Render a NEWS.Rd file (or a pre-parsed .rds version of it) to
    ## plain text, using the NEWS-specific layout options.
    if (endsWith(f, ".rds"))
        f <- readRDS(f)
    Rd2txt(f, out,
           stages = c("install", "render"),
           outputEncoding = outputEncoding,
           options = Rd2txt_NEWS_in_Rd_options,
           macros = file.path(R.home("share"), "Rd", "macros", "system.Rd"))
}

Rd2HTML_NEWS_in_Rd <-
function(f, out, ...) {
    ## Render a NEWS.Rd file (or a pre-parsed .rds version of it) to
    ## HTML, forwarding any extra arguments to Rd2HTML().
    if (endsWith(f, ".rds"))
        f <- readRDS(f)
    Rd2HTML(f, out,
            stages = c("install", "render"),
            macros = file.path(R.home("share"), "Rd", "macros", "system.Rd"),
            ...)
}

Rd2pdf_NEWS_in_Rd <-
function(f, pdf_file)
{
    ## Render a NEWS.Rd file (or a pre-parsed .rds version of it) to
    ## PDF: convert to LaTeX, wrap in a book-class document, run the
    ## texi2pdf emulation, and copy the result to 'pdf_file'.
    if (endsWith(f, ".rds")) f <- readRDS(f)
    f2 <- tempfile()
    ## See the comments in ?texi2dvi about spaces in paths
    f3 <- if(grepl(" ", Sys.getenv("TMPDIR")))
        file.path("/tmp", "NEWS.tex")
    else
        file.path(tempdir(), "NEWS.tex")
    out <- file(f3, "w")
    ## Convert the Rd content to LaTeX (into 'f2'); the encoding is
    ## declared in the preamble below, hence writeEncoding = FALSE.
    Rd2latex(f, f2,
             stages = c("install", "render"),
             outputEncoding = "UTF-8", writeEncoding = FALSE,
             macros = file.path(R.home("share"), "Rd", "macros", "system.Rd"))
    ## Preamble and title page, then the converted body.
    cat("\\documentclass[", Sys.getenv("R_PAPERSIZE"), "paper]{book}\n",
        "\\usepackage[hyper]{Rd}\n",
        "\\usepackage[utf8]{inputenc}\n",
        "\\usepackage{graphicx}\n",
        "\\setkeys{Gin}{width=0.7\\textwidth}\n",
        .file_path_to_LaTeX_graphicspath(file.path(R.home("doc"), "html")),
        "\n",
        "\\hypersetup{pdfpagemode=None,pdfstartview=FitH}\n",
        "\\begin{document}\n",
        "\\chapter*{}\\sloppy\n",
        "\\begin{center}\n\\huge\n",
        "NEWS for ", R.version$version.string, "\n",
        "\\end{center}\n",
        sep = "", file = out)
    writeLines(readLines(f2), out)
    writeLines("\\end{document}", out)
    close(out)
    ## Run LaTeX in the directory holding NEWS.tex, restoring the
    ## working directory on exit (or error).
    od <- setwd(dirname(f3))
    on.exit(setwd(od))
    ## avoid broken texi2pdf scripts: this is simple LaTeX
    ## and emulation suffices
    texi2pdf("NEWS.tex", quiet = TRUE, texi2dvi = "emulation",
             ## ensure _this_ R's Rd.sty is found first:
             texinputs = file.path(R.home("share"), "texmf", "tex", "latex"))
    ## Restore the working directory now and cancel the handler, so
    ## the file.copy below runs from the original directory.
    setwd(od); on.exit()
    invisible(file.copy(file.path(dirname(f3), "NEWS.pdf"),
                        pdf_file, overwrite = TRUE))
}

## Transform old-style plain text NEWS file to Rd.

news2Rd <-
function(file, out = stdout(), codify = FALSE)
{
    ## Convert an old-style plain text NEWS file to NEWS.Rd format,
    ## writing the result to 'out' (a file name or connection).  When
    ## 'codify' is TRUE, quoted words and function() calls in the news
    ## texts are additionally wrapped in \code{}.

    ## For add-on packages, the given NEWS file should be in the root
    ## package source directory or its 'inst' subdirectory, so that we
    ## can use the DESCRIPTION metadata to obtain the package name and
    ## encoding.

    file <- file_path_as_absolute(file)

    if(file_test("-d", file)) {
        ## 'file' is really a package source directory: locate the
        ## DESCRIPTION file and the NEWS file (inst/NEWS preferred).
        dir <- file
        dfile <- file.path(dir, "DESCRIPTION")
        if(!file_test("-f", dfile))
            stop("DESCRIPTION file not found")
        file <- file.path(dir, "inst", "NEWS")
        if(!file_test("-f", file)) {
            file <- file.path(dir, "NEWS")
            if(!file_test("-f", file))
                stop("NEWS file not found")
        }
    } else {
        ## 'file' is the NEWS file itself: find DESCRIPTION next to
        ## it, or one level up when the NEWS file is inside 'inst'.
        dir <- dirname(file)
        dfile <- file.path(dir, "DESCRIPTION")
        if(!file_test("-f", dfile)) {
            if((basename(dir) != "inst") ||
               !file_test("-f",
                          dfile <- file.path(dirname(dir),
                                             "DESCRIPTION")))
                stop("DESCRIPTION file not found")
        }
    }

    ## No longer support taking NEWS files without corresponding
    ## DESCRIPTION file as being from R itself (PR #16556).

    meta <- .read_description(dfile)

    ## Writer (useBytes: texts are already in the target encoding) and
    ## the regexp matching "strings" and function() calls to codify.
    wto <- function(x) writeLines(x, con = out, useBytes = TRUE)
    cre <- "(\\W|^)(\"[[:alnum:]_.]*\"|[[:alnum:]_.:]+\\(\\))(\\W|$)"

    ## Ensure 'out' is an open text-mode connection, closing on exit
    ## whatever we opened ourselves.
    if(is.character(out)) {
        out <- file(out, "wt")
        on.exit(close(out))
    }
    if(!isOpen(out, "wt")) {
        open(out, "wt")
        on.exit(close(out))
    }

    ## had   if(format == "R") {
    ## and this was   } else { format == "default" :
    {
        news <- .news_reader_default(file)
        bad <- attr(news, "bad")
        ## A zero-length "bad" attribute means no version chunks were
        ## recognized at all.
        if(!length(bad))
            stop("No news found in given file using package default format.")
        if(any(bad)) {
            bad <- news$Text[bad]
            stop("Could not extract news from the following text chunks:\n",
                 paste(sprintf("\nChunk %s:\n%s",
                               format(seq_along(bad)), bad),
                       collapse = "\n"))
        }

        encoding <- meta["Encoding"]
        package <- meta["Package"]

        texts <- toRd(news$Text)
        if(codify)
            texts <- gsub(cre, "\\1\\\\code{\\2}\\3", texts)
        ## Note that .news_reader_default re-encodes ...
        if(!is.na(encoding))
            texts <- iconv(texts, to = encoding, sub = "byte", mark = FALSE)
        news$Text <- texts

        wto(c("\\name{NEWS}",
              sprintf("\\title{News for Package '%s'}", package)))
        if(!is.na(encoding))
            wto(sprintf("\\encoding{%s}", encoding))

        ## Similar to print.news_db(): one \section per version.
        vchunks <- split(news, news$Version)
        ## Re-order according to decreasing version.
        vchunks <- vchunks[order(as.numeric_version(names(vchunks)),
                                 decreasing = TRUE)]
        ## Append the (first) date of a version to its section title.
        dates <- vapply(vchunks, function(v) v$Date[1L], "")
        if(any(ind <- !is.na(dates)))
            names(vchunks)[ind] <-
                sprintf("%s (%s)", names(vchunks)[ind], dates[ind])
        vheaders <- sprintf("\\section{Changes in %s version %s}{",
                            package, names(vchunks))
        for(i in seq_along(vchunks)) {
            wto(vheaders[i])
            vchunk <- vchunks[[i]]
            ## Use \subsection{}s only when every entry of the version
            ## has a non-empty category.
            if(all(!is.na(category <- vchunk$Category)
                   & nzchar(category))) {
                ## need to preserve order of headings.
                cchunks <-
                    split(vchunk,
                          factor(category, levels = unique(category)))
                cheaders <- sprintf("  \\subsection{%s}{",
                                    names(cchunks))
                for(j in seq_along(cchunks)) {
                    wto(c(cheaders[j],
                          "    \\itemize{",
                          paste("      \\item",
                                gsub("\n", "\n        ",
                                     cchunks[[j]]$Text, fixed=TRUE)),
                          "    }",
                          "  }"))
                }
            } else {
                wto(c("  \\itemize{",
                      paste("    \\item",
                            gsub("\n", "\n      ", vchunk$Text, fixed=TRUE)),
                      "  }"))
            }
            wto("}")
        }
    }
}

.build_news_db_from_R_NEWS_Rd <-
function(file = NULL, Rfile = "NEWS.rds")
{
    ## Build a news db from R's own NEWS.Rd, either from the
    ## pre-parsed doc/NEWS.rds (the default) or by parsing 'file'.
    if(is.null(file)) {
        x <- readRDS(file.path(R.home("doc"), Rfile))
    } else {
        ## Expand \Sexpr et al now because this does not happen when
        ## using fragments.
        macros <- initialRdMacros()
        x <- prepare_Rd(parse_Rd(file, macros = macros),
                        stages = "install")
    }

    db <- .extract_news_from_Rd(x)
    ## Drop umbrella sections which do not correspond to versions.
    skip <- c("CHANGES in previous versions", "LATER NEWS", "OLDER NEWS")
    keep <- !(db[, 1L] %in% skip)
    db <- db[keep, , drop = FALSE]

    ## Squeeze in an empty date column.
    .make_news_db(cbind(sub("^CHANGES IN (R )?(VERSION )?", "", db[, 1L]),
                        NA_character_,
                        db[, 2L],
                        Text = sub("\n*$", "", db[, 3L]),
                        HTML = db[, 4L]),
                  NULL,
                  "news_db_from_Rd")
}

.build_news_db_from_package_NEWS_Rd <-
function(file)
{
    ## Build a news db from an installed package's NEWS.Rd file.
    x <- prepare_Rd(parse_Rd(file, macros = initialRdMacros(),
                             encoding = "UTF-8"),
                    stages = "install")

    db <- .extract_news_from_Rd(x)

    ## Post-process the section titles to extract versions and dates:
    ## titles typically look like
    ##   "Changes in <package> version <x.y.z> (YYYY-MM-DD[, note])".
    re_v <- sprintf(".*version[[:space:]]+(%s).*$",
                    .standard_regexps()$valid_package_version)
    reDt <- "[[:digit:]]{4}-[[:digit:]]{2}-[[:digit:]]{2}"
    rEnd <- "[[:punct:][:space:]]*$"
    ## Title ending in an ISO date ...
    re_d1 <- sprintf(paste0("^.*(%s)", rEnd), reDt)
    ## ... or ending with '(YYYY-MM-DD, <note>)'
    re_d2 <- sprintf(paste0("^.*\\((%s)[[:punct:]] .*\\)", rEnd), reDt)

    titles <- db[, 1L]
    has_version <- grepl(re_v, titles, ignore.case = TRUE)
    if(!all(has_version))
        warning(gettextf("Cannot extract version info from the following section titles:\n%s",
                         paste0("  ", unique(titles[!has_version]),
                                collapse = "\n")),
                domain = NA, call. = FALSE)

    .make_news_db(cbind(ifelse(has_version,
                               sub(re_v, "\\1", titles,
                                   ignore.case = TRUE),
                               NA_character_),
                        ifelse(grepl(re_d1, titles, perl = TRUE),
                               sub(re_d1, "\\1", titles, perl = TRUE),
                               ifelse(grepl(re_d2, titles, perl = TRUE),
                                      sub(re_d2, "\\1", titles,
                                          perl = TRUE),
                                      NA_character_)),
                        db[, 2L],
                        Text = sub("\n*$", "", db[, 3L]),
                        HTML = db[, 4L]),
                  NULL,
                  "news_db_from_Rd")
}

.extract_news_from_Rd <-
function(x)
{
    ## Extract the news entries from a parsed NEWS.Rd object 'x'.
    ## Returns a character matrix with one row per news item: the
    ## \section title, the \subsection title (or NA), and the item
    ## rendered both as plain text ("Text") and as HTML ("HTML").

    ## Title (first argument) of each \section/\subsection element.
    get_section_names <- function(x)
        sapply(x, function(e) .Rd_get_text(e[[1L]]))

    get_item_texts <- function(x) {
        ## Render the \item entries of the (single) \itemize list in
        ## chunk 'x' to a Text/HTML matrix, one row per item.

        ## Currently, chunks should consist of a single \itemize list
        ## containing the news items.  Notify if there is more than one
        ## such list, and stop if there is none.

        pos <- which(RdTags(x) == "\\itemize")
        if(!length(pos)) {
            stop(gettextf("Malformed NEWS.Rd file:\nChunk starting\n  %s\ncontains no \\itemize.",
                          substr(sub("^[[:space:]]*", "",
                                     .Rd_deparse(x)),
                                 1L, 60L)),
                 domain = NA)
        } else if(length(pos) > 1L) {
            warning(gettextf("Malformed NEWS.Rd file:\nChunk starting\n  %s\ncontains more than one \\itemize.\nUsing the first one.",
                             substr(sub("^[[:space:]]*", "",
                                        .Rd_deparse(x)),
                                    1L, 60L)),
                    domain = NA, call. = FALSE)
            pos <- pos[1L]
        }

        x <- x[[pos]]

        ## An anonymous file() connection used as scratch output for
        ## both renderers, closed when this helper returns.
        out <- file()
        on.exit(close(out))

        Rd2txt_options <- Rd2txt_NEWS_in_Rd_options
        Rd2txt_options$width <- 72L

        ## Extract and process \item chunks: material before the first
        ## \item (group "0") is dropped.
        y <- split(x, cumsum(RdTags(x) == "\\item"))
        y <- y[names(y) != "0"]
        if(!length(y)) {
            warning(gettextf("Malformed NEWS.Rd file:\nChunk starting\n  %s\ncontains no \\item.",
                             substr(sub("^[[:space:]]*", "",
                                        .Rd_deparse(x)),
                                    1L, 60L)),
                    domain = NA, call. = FALSE)
            return(matrix(character(), 0L, 2L,
                          dimnames = list(NULL, c("Text", "HTML"))))
        }
        do.call(rbind,
                lapply(y,
                       function(e) {
                           ## Drop \item.
                           e <- e[-1L]
                           ## Convert to text.
                           Rd2txt(e, fragment = TRUE, out = out,
                                  options = Rd2txt_options)
                           one <- paste(readLines(out, warn = FALSE),
                                        collapse = "\n")
                           ## Need warn = FALSE to avoid warning about
                           ## incomplete final line for e.g. 'cluster'.
                           ## Convert to HTML.
                           Rd2HTML(e, fragment = TRUE, out = out)
                           two <- paste(readLines(out, warn = FALSE),
                                        collapse = "\n")
                           cbind(Text = one, HTML = two)
                       }))
    }

    ## cbind() which recycles the (scalar) title column to match the
    ## number of item rows, including zero rows.
    cbind_safely <- function(u, v)
        cbind(rep_len(u, NROW(v)), v)

    ## One Text/HTML matrix per \section, prefixed with the section
    ## title and (when present) the \subsection titles.
    x <- x[RdTags(x) == "\\section"]
    y <- Map(cbind_safely,
             get_section_names(x),
             lapply(x,
                    function(e) {
                        z <- e[[2L]]
                        ind <- RdTags(z) == "\\subsection"
                        if(any(ind)) {
                            z <- z[ind]
                            do.call(rbind,
                                    Map(cbind_safely,
                                        get_section_names(z),
                                        lapply(z,
                                               function(e)
                                                   get_item_texts(e[[2L]]))))
                        } else {
                            cbind_safely(NA_character_,
                                         get_item_texts(z))
                        }
                    }))
    y <- do.call(rbind, y)
    ## Sanitize HTML: close a dangling <p> when the item starts with
    ## one and the number of <p>/</p> tags is odd.
    s <- trimws(y[, "HTML"])
    i <- which(startsWith(s, "<p>") & !endsWith(s, "</p>"))
    if(length(i)) {
        z <- s[i]
        j <- which((lengths(gregexpr("</?p>", z)) %% 2L) > 0L)
        if(length(j))
            s[i[j]] <- paste0(z[j], "</p>")
    }
    y[, "HTML"] <- s

    y

}

.build_news_db_from_package_NEWS_md <-
function(f)
{
    ## Build a package news db from a NEWS.md file: parse the markdown
    ## via commonmark, split the document into version chunks at the
    ## headings from which a version number can be extracted, and these
    ## into category chunks, recording plain text and HTML renderings
    ## of the news entries.
    md <- readLines(f, encoding = "UTF-8", warn = FALSE)
    ## Maybe complain?
    if(!length(md)) return()

    ## Handle YAML header: blank out everything up to and including the
    ## closing '---' delimiter (or the whole file if there is none).
    if(md[1L] == "---") {
        pos <- 1L # stay valid when '---' is the only line
        ## Using length.out avoids seq.int(2L, 1L) counting *down* and
        ## indexing past the end for a one-line file.
        for(pos in seq.int(2L, length.out = length(md) - 1L))
            if(md[pos] == "---") break
        md[seq_len(pos)] <- ""
    }

    doc <- commonmark::markdown_xml(md,
                                    extensions = TRUE,
                                    sourcepos = TRUE)
    doc <- xml2::xml_ns_strip(xml2::read_xml(doc))

    nodes <- xml2::xml_children(doc)    # Need xml2::xml_root()?

    ## Inline for efficiency.
    .markdown_text <- commonmark::markdown_text
    .markdown_html <- commonmark::markdown_html
    .xml_attr <- xml2::xml_attr
    .xml_name <- xml2::xml_name
    .xml_text <- xml2::xml_text

    ## Re-render the source lines covered by a sourcepos span as both
    ## plain text (width 72) and HTML.
    get_text_and_HTML <- function(sp) {
        ## Sourcepos sp already split into l1 c2 l2 c2, for legibility:
        l1 <- sp[1L]; c1 <- sp[2L]; l2 <- sp[3L]; c2 <- sp[4L]
        txt <- if(l1 < l2) {
                   c(substring(md[l1], c1),
                     md[seq.int(from = l1 + 1L,
                                length.out = l2 - l1 - 1L)],
                     substring(md[l2], 1L, c2))
               } else
                   substring(md[l1], c1, c2)
        c(.markdown_text(txt, width = 72L),
          .markdown_html(txt))
    }

    ## Process one version chunk: rows of (version, category, text, html).
    do_vchunk <- function(nodes) {
        ## Get version and date from heading.
        version <- .xml_text(nodes[[1L]])
        nodes <- nodes[-1L]
        if(!length(nodes))
            return(rbind(c(version, "", "", "")))
        ## Unlike news in Rd where we (currently) insist on all news to
        ## be given as items in itemize lists, for md we only split news
        ## in version chunks according to category.  If the chunks has
        ## headings, we take those with the same level as the first one
        ## to start category chunks, and everything before the first
        ## such heading as a chunk with an empty category (empty instead
        ## of missing to make querying more convenient).  If there are
        ## no headings, we have a single version chunk with no (empty)
        ## category.
        ind <- .xml_name(nodes) == "heading"
        pos <- which(ind)
        if(length(pos)) {
            lev <- .xml_attr(nodes[pos], "level")
            ind[pos] <- (lev == lev[1L])
            if((pos[1L]) > 1L) {
                ini <- seq_len(pos[1L] - 1L)
                out <- list(do_cchunk(nodes[ini], FALSE))
                nodes <- nodes[-ini]
                ind <- ind[-ini]
            } else
                out <- list()
            out <- c(out,
                     lapply(split(nodes, cumsum(ind)),
                            do_cchunk, TRUE))
            cbind(version, do.call(rbind, out))
        } else {
            rbind(c(version,
                    do_cchunk(nodes, FALSE)))
        }

    }

    ## Process one category chunk: c(category, text, html).
    do_cchunk <- function(nodes, heading) {
        ## See above: if the category chunk has a heading, we extract
        ## the category from it.  Otherwise, the category is empty.
        if(heading) {
            category <- .xml_text(nodes[[1L]])
            nodes <- nodes[-1L]
        } else {
            category <- ""
        }

        if(!length(nodes))
            return(c(category, "", ""))

        ## Compute text and HTML by converting everything from the start
        ## of the first sourcepos to the end of the last sourcepos.
        sp <- c(.xml_attr(nodes[[1L]], "sourcepos"),
                .xml_attr(nodes[[length(nodes)]], "sourcepos"))
        ## (If there is one node, nodes[c(1L, length(nodes))] would give
        ## that node only once.  Could also special case ...)
        sp <- as.integer(unlist(strsplit(sp, "[:-]"))[c(1L, 2L, 7L, 8L)])

        c(category, get_text_and_HTML(sp))
    }

    ind <- .xml_name(nodes) == "heading"
    pos <- which(ind)
    if(!length(pos)) return()

    ## Skip leading headings until we find one from which we can extract
    ## a version number.  Then drop everything ahead of this, and take
    ## all headings with the same level to start version chunks.

    re_v <- sprintf("(^|.*[[:space:]]+)[vV]?(%s).*$",
                    .standard_regexps()$valid_package_version)
    while(length(pos) &&
          !grepl(re_v, .xml_text(nodes[[pos[1L]]])))
        pos <- pos[-1L]
    if(!length(pos)) return()

    lev <- .xml_attr(nodes[pos], "level")
    ind[pos] <- (lev == lev[1L])
    if(pos[1L] > 1L) {
        ini <- seq_len(pos[1L] - 1L)
        nodes <- nodes[-ini]
        ind <- ind[-ini]
    }
    vchunks <- split(nodes, cumsum(ind))
    db <- do.call(rbind, lapply(vchunks, do_vchunk))

    ## Very similar to .build_news_db_from_package_NEWS_Rd() ...

    ## Post-process section names to extract versions and dates.
    reDt <- "[[:digit:]]{4}-[[:digit:]]{2}-[[:digit:]]{2}"
    rEnd <- "[[:punct:][:space:]]*$"
    re_d1 <- sprintf(paste0("^.*(%s)", rEnd), reDt)
    ## or ending with '(YYYY-MM-DD, <note>)'
    re_d2 <- sprintf(paste0("^.*\\((%s)[[:punct:]] .*\\)", rEnd), reDt)
    nms <- db[, 1L]
    ind <- grepl(re_v, nms, ignore.case = TRUE)
    if(!all(ind))
        warning(gettextf("Cannot extract version info from the following section titles:\n%s",
                         paste0("  ", unique(nms[!ind]), collapse = "\n")),
                domain = NA, call. = FALSE)

    .make_news_db(cbind(ifelse(ind,
                               sub(re_v, "\\2", nms, ignore.case = TRUE),
                               NA_character_),
                        ifelse(grepl(re_d1, nms, perl = TRUE),
                               sub(re_d1, "\\1", nms, perl = TRUE),
                               ifelse(grepl(re_d2, nms, perl = TRUE),
                                      sub(re_d2, "\\1", nms, perl = TRUE),
                                      NA_character_)),
                        db[, 2L],
                        Text = sub("\n*$", "", db[, 3L]),
                        HTML = db[, 4L]),
                  NULL,
                  "news_db_from_md")
}

format.news_db_from_md <-
function(x, ...)
{
    ## Format a "news_db_from_md" object: one character vector per
    ## version, a centred header line followed by the non-empty
    ## category and text entries, newest version first.
    flatten_vchunk <- function(vchunk) {
        entries <- unlist(Map(c, vchunk$Category, vchunk$Text),
                          use.names = FALSE)
        entries[nzchar(entries)]
    }

    vchunks <- split(x, x$Version)
    ## Re-order according to decreasing version.
    ord <- order(numeric_version(names(vchunks), strict = FALSE),
                 decreasing = TRUE)
    vchunks <- vchunks[ord]
    if(!length(vchunks))
        return(character())

    dates <- vapply(vchunks, function(v) v$Date[1L], "")
    headers <- sprintf("Changes in version %s%s",
                       names(vchunks),
                       ifelse(is.na(dates), "",
                              sprintf(" (%s)", dates)))
    headers <- format(headers, justify = "centre", width = 72L)

    Map(c, headers, lapply(vchunks, flatten_vchunk),
        USE.NAMES = FALSE)
}

.news_db_has_no_bad_entries <-
function(x)
{
    ## TRUE unless the db carries a "bad" attribute which has one
    ## entry per row and flags at least one of them.
    bad <- attr(x, "bad")
    if(is.null(bad))
        return(TRUE)
    (length(bad) == NROW(x)) && !any(bad)
}
#  File src/library/tools/R/orcidtools.R
#  Part of the R package, https://www.R-project.org
#
#  Copyright (C) 2024 The R Core Team
#
#  This program is free software; you can redistribute it and/or modify
#  it under the terms of the GNU General Public License as published by
#  the Free Software Foundation; either version 2 of the License, or
#  (at your option) any later version.
#
#  This program is distributed in the hope that it will be useful,
#  but WITHOUT ANY WARRANTY; without even the implied warranty of
#  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
#  GNU General Public License for more details.
#
#  A copy of the GNU General Public License is available at
#  https://www.R-project.org/Licenses/

### ** .ORCID_iD_regexp

## Regexp for a bare ORCID iD: four dash-separated groups of four
## characters, all digits except that the final checksum character
## may also be 'X'.
.ORCID_iD_regexp <-
    paste(c(rep.int("[0-9]{4}", 3L), "[0-9]{3}[X0-9]"), collapse = "-")

### ** .ORCID_iD_variants_regexp

## Accepted input variants of an ORCID iD: the bare id, optionally
## preceded by "orcid.org/" with or without an "http(s)://" scheme,
## and optionally wrapped in angle brackets.  Capture group 3 gives
## the bare id.  The dot in "orcid.org" is escaped so it only matches
## a literal dot (previously it matched any character, so strings
## like "orcidXorg/<id>" would incorrectly validate).
.ORCID_iD_variants_regexp <-
    sprintf("^<?((https?://|)orcid\\.org/)?(%s)>?$", .ORCID_iD_regexp)

### ** .ORCID_iD_canonicalize

## Reduce ORCID iD variants (URL forms, angle brackets) to the bare
## 19-character id (capture group 3 of the variants regexp).  Inputs
## not matching the variants regexp are returned unchanged (sub()
## semantics).
.ORCID_iD_canonicalize <- function(x)
    sub(.ORCID_iD_variants_regexp, "\\3", x)

### ** .ORCID_iD_is_valid

.ORCID_iD_is_valid <- function(x) {
    ## Validate ORCID iDs (in any accepted variant form): check the
    ## overall pattern and then the MOD 11-2 checksum of the digits.
    check_one <- function(id) {
        if(!grepl(.ORCID_iD_variants_regexp, id))
            return(FALSE)
        id <- .ORCID_iD_canonicalize(id)
        ## Checksum test, see
        ## <https://support.orcid.org/hc/en-us/articles/360006897674-Structure-of-the-ORCID-Identifier>
        digits <- strsplit(gsub("-", "", id, fixed = TRUE), "")[[1L]]
        total <- sum(as.numeric(digits[-16L]) * 2 ^ (15L : 1L))
        result <- (12 - total %% 11) %% 11
        expected <- if(result == 10) "X" else as.character(result)
        expected == digits[16L]
    }
    vapply(x, check_one, NA)
}

### ** .ORCID_iD_is_alive

.ORCID_iD_is_alive <- function(x) {
    ## Check whether the given ORCID iDs resolve on orcid.org: issue a
    ## headers-only request per id and report TRUE where the HTTP
    ## status is 200.
    ## See
    ## <https://info.orcid.org/documentation/api-tutorials/api-tutorial-read-data-on-a-record/#h-a-note-on-non-existent-orcids>
    ## Assume all given ids are canonical.
    urls <- sprintf("https://orcid.org/%s", x)
    hdrs <- list("Accept" = "application/xml")
    ## nobody = TRUE: fetch response headers only, not the body.
    resp <- .curl_multi_run_worker(urls, nobody = TRUE, hdrs = hdrs)
    vapply(resp, .curl_response_status_code, 0L) == 200L
}

### ** .ORCID_iD_from_person

.ORCID_iD_from_person <- function(x)
    ## Extract the ORCID entry of each person's comment field, giving
    ## NA_character_ where there is no comment.
    vapply(unclass(x),
           function(p) {
               id <- p$comment["ORCID"]
               if(is.null(id)) NA_character_ else id
           },
           "")

### ** .ORCID_iD_db_from_package_sources

.ORCID_iD_db_from_package_sources <-
function(dir, add = FALSE)
{
    ## Collect the ORCID iDs found in a source package's DESCRIPTION
    ## and inst/CITATION person entries, as a data frame with columns
    ## ID and Parent (the file the id came from, prefixed with the
    ## package directory name if 'add' is TRUE).
    from_desc <- .ORCID_iD_from_person(.persons_from_metadata(dir))
    from_desc <- from_desc[!is.na(from_desc)]
    from_cit <- .ORCID_iD_from_person(.persons_from_citation(dir))
    from_cit <- from_cit[!is.na(from_cit)]
    parents <- c(rep_len("DESCRIPTION", length(from_desc)),
                 rep_len("inst/CITATION", length(from_cit)))
    ## c(character(), ...) keeps ID a character vector when both
    ## sources are empty.
    db <- data.frame(ID = c(character(), from_desc, from_cit),
                     Parent = parents)
    if(add)
        db$Parent <- file.path(basename(dir), db$Parent)
    db
}

### Local variables: ***
### mode: outline-minor ***
### outline-regexp: "### [*]+" ***
### End: ***
#  File src/library/tools/R/packages.R
#  Part of the R package, https://www.R-project.org
#
#  Copyright (C) 1995-2022 The R Core Team
#
#  This program is free software; you can redistribute it and/or modify
#  it under the terms of the GNU General Public License as published by
#  the Free Software Foundation; either version 2 of the License, or
#  (at your option) any later version.
#
#  This program is distributed in the hope that it will be useful,
#  but WITHOUT ANY WARRANTY; without even the implied warranty of
#  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
#  GNU General Public License for more details.
#
#  A copy of the GNU General Public License is available at
#  https://www.R-project.org/Licenses/

write_PACKAGES <-
function(dir = ".", fields = NULL,
         type = c("source", "mac.binary", "win.binary"),
         verbose = FALSE, unpacked = FALSE, subdirs = FALSE,
         latestOnly = TRUE, addFiles = FALSE, rds_compress = "xz",
         validate = FALSE)
{
    ## Generate the PACKAGES, PACKAGES.gz and PACKAGES.rds indices for
    ## a repository directory, optionally also scanning the given (or
    ## all) subdirectories.  Returns the number of packages written,
    ## invisibly.
    if(missing(type) && .Platform$OS.type == "windows")
        type <- "win.binary"
    type <- match.arg(type)

    ## Paths to scan, relative to dir: "" is the top level; subdirs may
    ## be TRUE (recurse) or a character vector of subdirectories.
    paths <- ""
    if(is.logical(subdirs) && subdirs) {
        owd <- setwd(dir)
        found <- list.dirs(".")
        setwd(owd)
        ## drop "." and strip leading "./"
        paths <- c("", sub("^[.]/", "", found[found != "."]))
    } else if(is.character(subdirs))
        paths <- c("", subdirs)

    ## Older versions created only plain text and gzipped DCF files with
    ## the (non-missing and non-empty) package db entries, and hence did
    ## so one path at a time.  We now also serialize the db directly,
    ## and hence first build the whole db, and then create the files in
    ## case some packages were found.
    addPaths <- !identical(paths, "")
    pieces <- lapply(paths,
                     function(path) {
                         where <- if(nzchar(path)) file.path(dir, path)
                                  else dir
                         entries <-
                             .build_repository_package_db(where, fields,
                                                          type, verbose,
                                                          unpacked,
                                                          validate)
                         .process_repository_package_db_to_matrix(entries,
                                                                  path,
                                                                  addFiles,
                                                                  addPaths,
                                                                  latestOnly)
                     })
    pieces <- pieces[vapply(pieces, NROW, 0L) > 0L]
    db <- if(length(pieces)) do.call(rbind, pieces) else NULL

    invisible(.write_repository_package_db(db, dir, rds_compress))
}

.write_repository_package_db <-
function(db, dir, rds_compress)
{
    ## Write the repository package db (a matrix) as PACKAGES,
    ## PACKAGES.gz and PACKAGES.rds in dir; no files are touched when
    ## the db is empty.  Returns the number of rows, invisibly.
    np <- NROW(db)
    if(np > 0L) {
        ## To save space, empty entries are not written to the DCF, so
        ## that read.dcf() on these will have the entries as missing.
        ## Hence, change empty to missing in the db.
        db[!is.na(db) & (db == "")] <- NA_character_
        for(nm in c("PACKAGES", "PACKAGES.gz")) {
            con <- if(endsWith(nm, ".gz"))
                       gzfile(file.path(dir, nm), "wt")
                   else
                       file(file.path(dir, nm), "wt")
            write.dcf(db, con)
            close(con)
        }
        rownames(db) <- db[, "Package"]
        saveRDS(db, file.path(dir, "PACKAGES.rds"),
                compress = rds_compress)
    }

    invisible(np)
}

.process_repository_package_db_to_matrix <-
function(desc, path, addFiles, addPaths, latestOnly)
{
    ## Turn a list of per-file DESCRIPTION field vectors into a
    ## character matrix, optionally adding File/Path columns, dropping
    ## stale duplicates and standardizing the License column.
    desc <- Filter(length, desc)
    if(!length(desc))
        return(desc)

    files <- names(desc)
    fields <- names(desc[[1L]])
    mat <- matrix(unlist(desc), ncol = length(fields), byrow = TRUE,
                  dimnames = list(NULL, fields))
    if(addFiles)
        mat <- cbind(mat, File = files)
    if(addPaths)
        mat <- cbind(mat, Path = path)
    if(latestOnly)
        mat <- .remove_stale_dups(mat)

    ## Standardize licenses, or replace by NA where not standardizable.
    info <- analyze_licenses(mat[, "License"])
    mat[, "License"] <-
        ifelse(info$is_standardizable, info$standardization, NA)
    mat
}

## factored out so it can be used in multiple
## places without threat of divergence
## factored out so it can be used in multiple
## places without threat of divergence
##
## File-name pattern for package files of the given type; with
## ext.only = TRUE, only the extension part of the pattern (the
## leading "_.*" stripped).
.get_pkg_file_pattern <- function(type = c("source", "mac.binary", "win.binary"),
                                  ext.only = FALSE)
{
    type <- match.arg(type)
    ## FIXME: might the source pattern be more general?
    ## was .tar.gz prior to 2.10.0
    pattern <- switch(type,
                      "source" = "_.*\\.tar\\.[^_]*$",
                      "mac.binary" = "_.*\\.tgz$",
                      "win.binary" = "_.*\\.zip$")
    if(ext.only)
        pattern <- gsub("_.*", "", pattern, fixed = TRUE)
    pattern
}
## this is OK provided all the 'fields' are ASCII -- so be careful
## what you add.
## this is OK provided all the 'fields' are ASCII -- so be careful
## what you add.
.build_repository_package_db <-
function(dir, fields = NULL,
         type = c("source", "mac.binary", "win.binary"),
         verbose = getOption("verbose"),
         unpacked = FALSE, validate = FALSE)
{
    ## Build the package db for one repository directory: a named list
    ## of DESCRIPTION field vectors, one per package file found.
    if(unpacked)
        ## Unpacked source directories are handled separately.
        return(.build_repository_package_db_from_source_dirs(dir,
                                                             fields,
                                                             verbose,
                                                             validate))

    files <- list.files(dir, pattern = .get_pkg_file_pattern(type),
                        full.names = TRUE)
    if(!length(files))
        return(list())

    type <- match.arg(type)
    .process_package_files_for_repository_db(files, type, fields,
                                             verbose, validate)
}

.process_package_files_for_repository_db <-
function(files, type, fields, verbose, validate = FALSE)
{
    ## Read the DESCRIPTION metadata out of the given package files
    ## (zip archives for win.binary, tarballs otherwise), returning a
    ## list named by file with one field vector per successfully read
    ## package (NULL where reading failed).
    files <- normalizePath(files, mustWork=TRUE) # files comes from list.files, mustWork ok
    ## Add the standard set of fields required to build a repository's
    ## PACKAGES file:
    fields <- unique(c(.get_standard_repository_db_fields(type), fields))
    ## files was without path at this point in original code,
    ## use filetbs instead to compute pkg names and set db names
    filetbs <- basename(files)
    ## Package name is everything before the first underscore.
    packages <- vapply(strsplit(filetbs, "_", fixed = TRUE), `[`, "", 1L)
    db <- vector(length(files), mode = "list")
    names(db) <- filetbs #files was not full paths before
    ## Many (roughly length(files)) warnings are *expected*, hence
    ## suppressed.
    op <- options(warn = -1)
    on.exit(options(op))
    if(verbose) message("Processing packages:")
    if(type == "win.binary") {
        for(i in seq_along(files)) {
            if(verbose) message(paste0("  ", files[i]))
            con <- unz(files[i], file.path(packages[i], "DESCRIPTION"))
            temp <- tryCatch(read.dcf(con, fields = fields)[1L, ],
                             error = identity)
            ## Close the connection in both the success and error case.
            close(con)
            if(!inherits(temp, "error"))
                db[[i]] <- temp
        }
    } else {
        cwd <- getwd()
        if (is.null(cwd))
            stop("current working directory cannot be ascertained")
        td <- tempfile("PACKAGES")
        if(!dir.create(td)) stop("unable to create ", td)
        ## Restore the working directory *before* the temporary one is
        ## unlinked, also when exiting via an error mid-loop (handlers
        ## run in registration order).
        on.exit(setwd(cwd), add = TRUE)
        on.exit(unlink(td, recursive = TRUE), add = TRUE)
        setwd(td)
        for(i in seq_along(files)) {
            if(verbose) message(paste0("  ", files[i]))
            p <- file.path(packages[i], "DESCRIPTION")
            ## temp <- try(system(paste("tar zxf", files[i], p)))
            temp <- try(utils::untar(files[i], files = p))
            if(!inherits(temp, "try-error")) {
                temp <- tryCatch(read.dcf(p, fields = fields)[1L, ],
                                 error = identity)
                if(!inherits(temp, "error")) {
                    if(validate) {
                        ## .check_package_description() by default goes via
                        ## .read_description() which re-encodes and insists on a
                        ## single entry unlike the above read.dcf() call.
                        ok <- .check_package_description(db = temp[!is.na(temp)])
                        ## FIXME: no format.check_package_description yet.
                        if(any(as.integer(lengths(ok)) > 0L)) {
                            message(paste(gettextf("Invalid DESCRIPTION file for package %s",
                                                   sQuote(basename(dirname(p)))),
                                          paste(format(ok), collapse = "\n\n"),
                                          sep = "\n\n"),
                                    domain = NA)
                            next
                        }
                    }
                    if("NeedsCompilation" %in% fields &&
                       is.na(temp["NeedsCompilation"])) {
                        ## Infer from the presence of a src/ directory.
                        l <- utils::untar(files[i], list = TRUE)
                        temp["NeedsCompilation"] <-
                            if(any(l == file.path(packages[i], "src/"))) "yes" else "no"
                    }
                    temp["MD5sum"] <- md5sum(files[i])
                    db[[i]] <- temp
                } else {
                    message(gettextf("reading DESCRIPTION for package %s failed with message:\n  %s",
                                     sQuote(basename(dirname(p))),
                                     conditionMessage(temp)),
                            domain = NA)
                }
            }
            ## Remove the extracted package directory before the next file.
            unlink(packages[i], recursive = TRUE)
        }
        setwd(cwd)
    }
    if(verbose) message("done")

    db
}

.build_repository_package_db_from_source_dirs <-
function(dir, fields = NULL, verbose = getOption("verbose"),
         validate = FALSE)
{
    ## Build the package db from unpacked source directories below dir:
    ## one DESCRIPTION field vector per directory containing a
    ## DESCRIPTION file (NULL where reading or validation failed).
    dir <- file_path_as_absolute(dir)
    fields <- unique(c(.get_standard_repository_db_fields(), fields))
    paths <- list.files(dir, full.names = TRUE)
    keep <- dir.exists(paths) &
        file_test("-f", file.path(paths, "DESCRIPTION"))
    paths <- paths[keep]
    db <- vector("list", length(paths))
    if(verbose) message("Processing packages:")
    for(i in seq_along(paths)) {
        pkgdir <- paths[i]
        if(verbose) message(paste0("  ", basename(pkgdir)))
        meta <- tryCatch(read.dcf(file.path(pkgdir, "DESCRIPTION"),
                                  fields = fields)[1L, ],
                         error = identity)
        if(inherits(meta, "error")) {
            warning(gettextf("reading DESCRIPTION for package %s failed with message:\n  %s",
                             sQuote(basename(pkgdir)),
                             conditionMessage(meta)),
                    domain = NA)
            next
        }
        if(validate) {
            ## .check_package_description() by default goes via
            ## .read_description() which re-encodes and insists on a
            ## single entry unlike the above read.dcf() call.
            ok <- .check_package_description(db = meta[!is.na(meta)])
            ## FIXME: no format.check_package_description yet.
            if(any(as.integer(lengths(ok)) > 0L)) {
                warning(paste(gettextf("Invalid DESCRIPTION file for package %s",
                                       sQuote(basename(pkgdir))),
                              paste(format(ok), collapse = "\n\n"),
                              sep = "\n\n"),
                        domain = NA,
                        call. = FALSE)
                next
            }
        }
        if(is.na(meta["NeedsCompilation"])) {
            meta["NeedsCompilation"] <-
                if(dir.exists(file.path(pkgdir, "src"))) "yes" else "no"
        }
        ## Cannot compute MD5 sum of the source tar.gz when working
        ## on the unpacked sources ...
        db[[i]] <- meta
    }
    if(verbose) message("done")
    names(db) <- basename(paths)
    db
}

dependsOnPkgs <-
function(pkgs, dependencies = "strong",
         recursive = TRUE, lib.loc = NULL,
         installed = utils::installed.packages(lib.loc, fields = "Enhances"))
{
    ## Which installed packages depend (directly, or recursively when
    ## 'recursive') on any of 'pkgs', via the given dependency types?
    dependencies <- .expand_dependency_type_spec(dependencies)

    av <- installed[, dependencies, drop = FALSE]
    rn <- as.character(installed[, "Package"])
    ## Rows of av whose dependency fields mention any of 'targets'.
    depends_on <- function(targets)
        apply(av, 1L, function(entry)
            any(targets %in% utils:::.clean_up_dependencies(entry)))

    uses <- rn[depends_on(pkgs)]
    if(recursive) {
        p <- pkgs
        repeat {
            p <- unique(c(p, uses))
            uses <- unique(c(p, rn[depends_on(p)]))
            ## Fixpoint: stop once no new packages were added.
            if(length(uses) <= length(p)) break
        }
    }
    setdiff(uses, pkgs)
}

.remove_stale_dups <-
function(ap)
{
    ## Given a matrix from available.packages, return a copy
    ## with no duplicate packages, being sure to keep the packages
    ## with highest version number.
    ## (Also works for data frame package repository dbs.)
    pkgs <- ap[, "Package"]
    dups <- pkgs[duplicated(pkgs)]
    if (length(dups) > 100) {
        ## Some packages may be in multiple repositories in the same
        ## version.  Handle those specially for performance reasons.
        ap <- ap[!duplicated(ap[, c("Package", "Version")]), ,
                 drop = FALSE]
        pkgs <- ap[, "Package"]
        dups <- pkgs[duplicated(pkgs)]
    }
    ## For each duplicated package, the row indices NOT carrying the
    ## (first occurrence of the) highest version.
    stale <- unlist(lapply(unique(dups), function(dp) {
        rows <- which(pkgs == dp)
        vers <- package_version(ap[rows, "Version"])
        rows[-which.max(vers == max(vers))] # they might all be max
    }))
    ## Possible to have only one package in a repository
    if(length(stale)) ap[-stale, , drop = FALSE] else ap
}

package_dependencies <-
function(packages = NULL, db = NULL, which = "strong",
         recursive = FALSE, reverse = FALSE,
         verbose = getOption("verbose"))
{
    ## Compute dependencies of the given packages according to the
    ## repository db (by default utils::available.packages()): forward
    ## or reverse, direct or recursive.  'which' (and a character
    ## 'recursive') may be shorthand specs such as "strong"/"most"/
    ## "all".  Returns a named list with one character vector of
    ## package names per requested package.
    packages1 <- unique(packages)

    if(is.null(db)) db <- utils::available.packages()

    ## Expand shorthand dependency type specs to field names.  A
    ## character 'recursive' requests different dependency types for
    ## the recursion step; if it expands to the same types as 'which',
    ## plain recursion suffices.
    fields <- which <- .expand_dependency_type_spec(which)
    if(is.character(recursive)) {
        recursive <- .expand_dependency_type_spec(recursive)
        if(identical(which, recursive))
            recursive <- TRUE
        else
            fields <- unique(c(fields, recursive))
    }

    ind <- if(!is.character(recursive) && !recursive && !reverse &&
              !is.null(packages)) {
               ## For forward non-recursive depends, we can simplify
               ## matters by subscripting the db right away---modulo
               ## boundary cases.
               match(packages1, db[, "Package"], nomatch = 0L)
           } else !duplicated(db[, "Package"])

    db <- as.data.frame(db[ind, c("Package", fields), drop = FALSE])

    ## Avoid recomputing package dependency names in recursive
    ## invocations.
    for(f in fields) {
        if(!is.list(d <- db[[f]]))
            db[[f]] <- lapply(d, .extract_dependency_package_names)
    }

    if(is.character(recursive)) {
        ## Mixed-type recursion: one direct step with 'which', then a
        ## recursive step with the 'recursive' types, merged per package.
        ## Direct dependencies:
        d_d <- Recall(packages, db, which, FALSE,
                      reverse, verbose)
        ## Recursive dependencies of all these:
        d_r <- Recall(unique(unlist(d_d)), db, recursive, TRUE,
                      reverse, verbose)
        ## Now glue together:
        return(lapply(d_d,
                      function(p) {
                          sort(unique(c(p, unlist(d_r[p],
                                                  use.names = FALSE))))
                      }))
    }

    ## Per-package union of the dependency fields of interest.
    depends <-
        do.call(Map,
                c(list("c"),
                  db[which],
                  list(USE.NAMES = FALSE)))

    depends <- lapply(depends, unique)

    if(!recursive && !reverse) {
        names(depends) <- db$Package
        if(!is.null(packages)) {
            depends <- depends[match(packages, names(depends))]
            names(depends) <- packages
        }
        return(depends)
    }

    all_packages <- sort(unique(c(db$Package, unlist(depends))))

    if(!recursive) {
        ## Need to invert.
        depends <-
            split(rep.int(db$Package, lengths(depends)),
                  factor(unlist(depends), levels = all_packages))
        if(!is.null(packages)) {
            depends <- depends[match(packages, names(depends))]
            names(depends) <- packages
        }
        return(depends)
    }

    ## Recursive dependencies.
    ## We need to compute the transitive closure of the dependency
    ## relation, but e.g. Warshall's algorithm (O(n^3)) is
    ## computationally infeasible.
    ## Hence, in principle, we do the following.
    ## Take the current list of pairs (i,j) in the relation.
    ## Iterate over all j and whenever i R j and j R k add (i,k).
    ## Repeat this until no new pairs get added.
    ## To do this in R, we use a 2-column matrix of (i,j) rows.
    ## We then create two lists which for all j contain the i and k
    ## with i R j and j R k, respectively, and combine these.
    ## This works reasonably well, but of course more efficient
    ## implementations should be possible.
    matchP <- match(rep.int(db$Package, lengths(depends)),
                    all_packages)
    matchD <- match(unlist(depends), all_packages)
    ## tab[[j]]: for each package index j, the related indices
    ## (dependents when 'reverse', dependencies otherwise).
    tab <- if(reverse)
        split(matchP,
              factor(matchD, levels = seq_along(all_packages)))
    else
        split(matchD,
              factor(matchP, levels = seq_along(all_packages)))
    if(is.null(packages)) {
        if(reverse) {
            packages1 <- all_packages
            p_L <- seq_along(all_packages)
        } else {
            packages1 <- db$Package
            p_L <- match(packages1, all_packages)
        }
    } else {
        p_L <- match(packages1, all_packages, nomatch = 0L)
        if(any(ind <- (p_L == 0L))) {
            ## Drop requested packages not known to the db.
            p_L <- p_L[!ind]
        }
    }
    p_R <- tab[p_L]
    ## 'pos': the current relation as a 2-column matrix of (from, to)
    ## index pairs into all_packages.
    pos <- cbind(rep.int(p_L, lengths(p_R)), unlist(p_R))
    ctr <- 0L

    ## posunique() speeds up computing "unique(pos)" in the following loop.
    ## When the number of packages is small enough, we can easily represent
    ## an edge from i to j by a single integer number. Finding duplicates in
    ## a vector of integers is much faster than in rows of a matrix.
    shift <- as.integer(2^15)  ## allows to fit two numbers to an integer
    if (length(pos) && max(pos) < shift)
        posunique <- function(p)
            p[!duplicated(p[,1L]*shift + p[,2L]), , drop = FALSE]
    else
        posunique <- function(p) unique(p)

    ## Fixpoint iteration: extend the relation by composing it with
    ## 'tab' until no new pairs appear.
    repeat {
        if(verbose) cat("Cycle:", (ctr <- ctr + 1L))
        p_L <- split(pos[, 1L], pos[, 2L])
        new <- do.call(rbind,
                       Map(function(i, k)
                           cbind(rep.int(i, length(k)),
                                 rep(k, each = length(i))),
                           p_L, tab[as.integer(names(p_L))]))

        ## could be just posunique(rbind(pos, new)), but computing this
        ## iteratively is faster
        npos <- posunique(rbind(pos, posunique(new)))
        nnew <- nrow(npos) - nrow(pos)
        if(verbose) cat(" NNew:", nnew, "\n")
        if(!nnew) break
        pos <- npos
    }
    depends <-
        split(all_packages[pos[, 2L]],
              factor(all_packages[pos[, 1L]], levels = packages1))
    if(!is.null(packages)) {
        depends <- depends[match(packages, names(depends))]
        names(depends) <- packages
    }
    depends
}

.expand_dependency_type_spec <-
function(x)
{
    ## Expand the shorthands "strong"/"most"/"all" to the
    ## corresponding dependency field names; anything else is returned
    ## as given.
    strong <- c("Depends", "Imports", "LinkingTo")
    if(identical(x, "strong"))
        strong
    else if(identical(x, "most"))
        c(strong, "Suggests")
    else if(identical(x, "all"))
        c(strong, "Suggests", "Enhances")
    else
        x
    ## (Could also intersect x with the possible types.)
}

## .extract_dependency_package_names <-
## function(x)
## {
##     ## Assume a character *string*.
##     if(is.na(x)) return(character())
##     x <- strsplit(x, ",", fixed = TRUE)[[1L]]
##     ## FIXME: The following is much faster on Linux but apparently not
##     ## on Windows:
##     ## x <- sub("(?s)[[:space:]]*([[:alnum:].]+).*", "\\1", x, perl = TRUE)
##     x <- sub("[[:space:]]*([[:alnum:].]+).*", "\\1", x)
##     x[nzchar(x) & (x != "R")]
## }

## Extract package names from a dependency field value (a character
## string such as the contents of a Depends/Imports field), dropping
## version requirements and the "R" entry.  Implemented in C for speed;
## see the commented-out R-level version above for the semantics.
.extract_dependency_package_names <-
function(x)
    .Call(C_package_dependencies_scan, x)
#  File src/library/tools/R/parseLatex.R
#  Part of the R package, https://www.R-project.org
#
#  Copyright (C) 1995-2023 The R Core Team
#  Copyright (C) 2025 Duncan Murdoch
#
#  This program is free software; you can redistribute it and/or modify
#  it under the terms of the GNU General Public License as published by
#  the Free Software Foundation; either version 2 of the License, or
#  (at your option) any later version.
#
#  This program is distributed in the hope that it will be useful,
#  but WITHOUT ANY WARRANTY; without even the implied warranty of
#  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
#  GNU General Public License for more details.
#
#  A copy of the GNU General Public License is available at
#  https://www.R-project.org/Licenses/

parseLatex <- function(text, filename = "text",
                       verbose = FALSE,
                       verbatim = c("verbatim", "verbatim*",
                                    "Sinput", "Soutput"),
                       verb = "\\Sexpr",
                       defcmd = c("\\newcommand", "\\renewcommand",
                                  "\\providecommand", "\\def", "\\let"),
                       defenv = c("\\newenvironment",
                                  "\\renewenvironment"))
{
    ## Parse LaTeX source lines into a "LaTeX" object.
    ## The internal parser always needs a srcfile of some sort, so build
    ## one from the input lines before collapsing them to one string.
    srcfile <- srcfilecopy(filename, text)
    input <- paste(text, collapse = "\n")

    ## Special keyword macros, tagged by type:
    ## 1 = verbatim-like macro, 2 = command definition,
    ## 3 = environment definition.
    kinds <- list(as.character(verb), as.character(defcmd),
                  as.character(defenv))
    keywords <- unlist(kinds)
    keywordtype <- rep.int(1:3, lengths(kinds))

    .External2(C_parseLatex, input, srcfile, verbose,
               as.character(verbatim), keywords, keywordtype)
}


# This converts a latex object into a single element character vector,
# reconstructing the source form of each parsed element.  BLOCK braces
# can optionally be dropped (used when building table indices).
deparseLatex <- function(x, dropBraces = FALSE)
{
    ## Characters which appear as single-character escape macros ("\\&" etc.)
    specials <- c("\\", "#", "$", "%", "&", "~", "_", "^", "{", "}")
    result <- character()
    lastTag <- "TEXT"
    ## TRUE while the preceding MACRO may still be consuming arguments;
    ## used to decide whether to keep braces and restore eaten spaces.
    expectArg <- FALSE
    for (i in seq_along(x)) {
        a <- x[[i]]
        tag <- attr(a, "latex_tag")
        if (is.null(tag)) tag <- "NULL"
        ## Dispatch on the element's tag; list-valued elements are
        ## deparsed recursively via Recall().
        result <- c(result,
        switch(tag,
        VERB = ,
        MACRO = ,
        COMMENT = a,
        TEXT = c(if (lastTag == "MACRO" && expectArg && grepl("^[[:alpha:]]", a))
                     ## restore space that the parser has eaten ('\item text')
                     " ",
                 a),
        BLOCK = if (dropBraces && !expectArg)
                    Recall(a)
                else
                    c("{", Recall(a), "}"),
        ENVIRONMENT = c(
        	"\\begin{", a[[1L]], "}",
        	Recall(a[[2L]]),
        	"\\end{", a[[1L]], "}"),
        MATH = c("$", Recall(a), "$"), # \( and \) parse as MACRO
        DISPLAYMATH = c("$$", Recall(a), "$$"),
        DEFINITION = Recall(a),
        NULL = stop("Internal error, no tag", domain = NA)
        ))
        lastTag <- tag
        ## Single-character escape macros (and \( \)) take no arguments.
        expectArg <-
            if (tag == "MACRO")
                a %notin% paste0("\\", c(specials, "(", ")"))
            else
                expectArg &&
                    tag %in% c("BLOCK", "COMMENT") # \cmd{}{}, \cmd%
                    ## currently ignoring \cmd  {}, \cmd[]{}, \cmd*{}
    }
    paste(result, collapse="")
}

print.LaTeX <- function(x, ...)
{
    ## Print a "LaTeX" object by deparsing it back to source form,
    ## followed by a newline; return the object invisibly, as print
    ## methods conventionally do.
    txt <- deparseLatex(x)
    cat(txt, "\n", sep = "")
    invisible(x)
}

latex_tag <- function(x, tag)
{
    ## Attach the "latex_tag" attribute used throughout the LaTeX
    ## handling code.  NULL is returned unchanged (attributes cannot
    ## be set on NULL).
    if (is.null(x))
        return(x)
    structure(x, latex_tag = tag)
}

# This makes substitutions within a latex object to replace latex characters
# with UTF8 characters where possible.
#
# Walks the top-level elements of 'x'; for each MACRO whose argument
# count is known (i.e. has an entry in latexArgCount), it collects the
# macro's arguments from the following elements, looks the combination
# up in the nested latexTable, and on a match replaces the macro with
# the substitution text and removes the consumed elements.  BLOCK
# elements are converted recursively.
latexToUtf8 <- function(x)
{
    i <- 0L
    whitespace <- c(' ', '\t', '\n')
    while (i < length(x)) {
    	i <- i + 1L
        a <- x[[i]]
        tag <- attr(a, "latex_tag")
        if (tag == "MACRO") {
            numargs <- latexArgCount[a]
            if (!is.na(numargs)) { # Do we know this macro?
		## Collect 'numargs' arguments from the elements after x[[i]].
		## TEXT elements are consumed one character at a time;
		## j tracks the last element (partially) consumed.
		args <- vector("list", numargs)
		j <- i
		getNext <- TRUE
		k <- 1L
		while (k <= numargs) {
		    if (getNext) {
			j <- j + 1L
			if (j > length(x)) {
			    warning("argument for ", c(a), " not found", domain = NA)
			    nextobj <- latex_tag("", "TEXT")
			    nexttag <- "TEXT"
			    nextchars <- ""
			} else {
			    nextobj <- x[[j]]
			    nexttag <- attr(nextobj, "latex_tag")
			    if (nexttag == "TEXT")
				nextchars <- strsplit(nextobj, "")[[1L]]
			}
			getNext <- FALSE
		    }
		    switch(nexttag,
			TEXT = {
			    ## one character per argument; skip whitespace
			    args[[k]] <- latex_tag(nextchars[1L], "TEXT")
			    nextchars <- nextchars[-1L]
			    if (!length(nextchars)) getNext <- TRUE
			    if (args[[k]] %in% whitespace) next
			    k <- k+1L
			},
			COMMENT = getNext <- TRUE, # strip comments
			MACRO = { # Something like \'\i; assume 2nd macro has no args
			    args[[k]] <- nextobj
			    k <- k+1L
			},
			BLOCK =,
			ENVIRONMENT =,
			MATH =,
			DISPLAYMATH = {
			    args[[k]] <- latexToUtf8(nextobj)
			    k <- k+1L
			    getNext <- TRUE
			},
			NULL = stop("Internal error:  NULL tag", domain = NA))
		}
		## Build the index path (macro, then one entry per arg)
		## into the nested latexTable.
		index <- a
		for (i1 in seq_along(args)) {
		    if (is.null(latexTable[[index]])) break
		    nextobj1 <- args[[i1]]
		    nexttag1 <- attr(nextobj1, "latex_tag")
		    index <- c(index, switch(nexttag1,
			    MACRO =,
			    TEXT = nextobj1,
			    BLOCK = if (length(nextobj1))
					deparseLatex(nextobj1, dropBraces=TRUE)
				    else " ")) # index for empty arg {}
		}
		subst <- latex_tag(latexTable[[index]], "TEXT")

		if (!is.null(subst) && !is.list(subst)) { # We've made a substitution, which will always
		  	       	       # be a new latex object, possibly containing UTF8
		    x[[i]] <- subst

		    if (numargs) {
		    	if (nexttag == "TEXT" && length(nextchars)) {
		    	    # We've partially used up the next one, so rebuild it
			    nextobj[1L] <- paste(nextchars, collapse="")
			    x[[j]] <- nextobj
			    j <- j-1L
			}
			while (j > i) {
			    # Remove the used up args
			    x[[j]] <- NULL
			    j <- j-1L
			}
	            }
		} else
		    ## no substitution: skip past the elements we examined
		    i <- j
	    }
	} else if (tag == "BLOCK")
	    x[[i]] <- latexToUtf8(a)
    }
    x
}

## Build the nested lookup table used by latexToUtf8() from 'utf8table',
## a character vector whose entries are LaTeX source of the form "{...}"
## and whose indices are Unicode code points.  As a side effect, records
## each macro's argument count in latexArgCount via <<- (so this is
## intended to run where latexArgCount is in scope, at build time).
makeLatexTable <- function(utf8table)
{
    ## Parse every entry of the form {...}; names record the code point.
    all <- list()
    for (i in seq_along(utf8table)) {
    	if (grepl("^[{].*[}]$", c <- utf8table[i]))
    	    all[[as.character(i)]] <- parseLatex(c)[[1L]]
    }
    table <- list()
    for (i in names(all)) {
    	codepoint <- as.numeric(i)
    	macro <- all[[i]][[1L]]
    	args <- all[[i]][-1L]
    	index <- macro
    	getNext <- TRUE
    	## Flatten the macro's arguments into an index path: one
    	## single-character (or macro) entry per argument, mirroring
    	## the argument consumption in latexToUtf8().
    	repeat {
	    if (getNext) {
	    	if (!length(args)) break
		nextobj <- args[[1L]]
		args <- args[-1L]
		nexttag <- attr(nextobj, "latex_tag")
		if (nexttag == "TEXT")
		    nextchars <- strsplit(nextobj, "")[[1L]]
		else
		    nextchars <- character()
		getNext <- FALSE
	    }
    	    if (nexttag == "TEXT") {
    	    	if (length(nextchars)) {
    	    	    arg <- nextchars[1L]
    	    	    nextchars <- nextchars[-1L]
    	    	} else {
    	    	    getNext <- TRUE
    	    	    next
    	    	}
    	    } else if (nexttag == "BLOCK") {
    	    	if (!length(nextobj)) {
    	    	    arg <- " "  # index for empty arg {}
    	    	    getNext <- TRUE
    	    	} else {
    	    	    arg <- nextobj[[1L]]
    	    	    nextobj <- nextobj[-1L]
    	    	    argtag <- attr(arg, "latex_tag")
    	    	    if (argtag != "TEXT")
    	    	    	stop("internal error", domain = NA)
    	    	}
    	    } else if (nexttag == "MACRO") {
    	    	arg <- nextobj[1L]
    	    	getNext <- TRUE
    	    }
    	    index <- c(index, arg)
    	}
    	## Record the entry (and the macro's arg count), then for \a
    	## accent macros also record the bare accent form.
    	repeat{  # Need to record \a macros twice
	    oldArgCount <- latexArgCount[macro]
	    argCount <- length(index) - 1L
	    if (is.na(oldArgCount))
		latexArgCount[macro] <<- argCount
	    else if (oldArgCount != length(index) - 1L)
    	    stop("Inconsistent arg count for ", macro, domain = NA)

	    ## Create intermediate lists along the index path; the leaf
	    ## holds the UTF-8 character for this code point.
	    for (i in seq_along(index)) {
		if (is.null(entry <- table[[index[1L:i]]])) {
		    if (i < length(index))
			table[[index[1L:i]]] <- list()
		    else
			table[[index]] <- intToUtf8(codepoint)
		} else if (!is.list(entry))
		    warning("entry for ", codepoint, "=", index[1L:i], " already defined to be", entry, domain = NA)
	    }
	    if (index[1L] != "\\a") break
	    index <- index[-1L]
	    index[1L] <- macro <- sub("^", "\\\\", index[1L])
	}
    }
    ## Manual additions not covered by utf8table:
    table[["\\textemdash"]] <- "\u2014"
    latexArgCount[["\\textemdash"]] <<- 0

    ## Variants of latin A/a with ring above.
    table[["\\AA"]] <- "\u00c5"
    latexArgCount[["\\AA"]] <<- 0
    table[["\\aa"]] <- "\u00e5"
    latexArgCount[["\\aa"]] <<- 0

    ## Variants of accented i: LaTeX no longer needs dotless \i for accents
    for (accent in c("`", "'", "^", '"')) {
        table[[c(paste0("\\", accent), "i")]] <- table[[c(paste0("\\", accent), "\\i")]]
        if (accent %in% c("'", "`"))
            table[[c("\\a", accent, "i")]] <- table[[c("\\a", accent, "\\i")]]
    }

    ## Also handle (some) LaTeX specials:
    table[["\\&"]] <- "&"
    latexArgCount[["\\&"]] <<- 0    
    table[["\\~"]][[" "]] <- "~"
    table[["\\%"]] <- "%"
    latexArgCount[["\\%"]] <<- 0    
    
    table
}
#  File src/library/tools/R/parseRd.R
#  Part of the R package, https://www.R-project.org
#
#  Copyright (C) 1995-2022 The R Core Team
#
#  This program is free software; you can redistribute it and/or modify
#  it under the terms of the GNU General Public License as published by
#  the Free Software Foundation; either version 2 of the License, or
#  (at your option) any later version.
#
#  This program is distributed in the hope that it will be useful,
#  but WITHOUT ANY WARRANTY; without even the implied warranty of
#  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
#  GNU General Public License for more details.
#
#  A copy of the GNU General Public License is available at
#  https://www.R-project.org/Licenses/

## Parse an Rd file (path, "" for stdin, or a connection) into an "Rd"
## object.  Handles the declared \encoding{} (converting input to UTF-8),
## optional Rd macro files, and an optional 'permissive' mode in which
## unknown macros are converted to text rather than giving warnings.
parse_Rd <- function(file, srcfile = NULL, encoding = "unknown",
                     verbose = FALSE, fragment = FALSE,
                     warningCalls = TRUE,
                     macros = file.path(R.home("share"), "Rd", "macros", "system.Rd"),
		     permissive = FALSE)
{
    if(is.character(file)) {
        file0 <- file
        if(file == "") {
            file <- stdin()
        } else {
            ## keep.source is FALSE in batch use
            ## encoding issues here, for now use file encoding
            if (missing(srcfile)) ## && isTRUE(getOption("keep.source")))
                srcfile <- srcfile(file)
        }
    } else file0 <- "<connection>"
    lines <- readLines(file, warn = FALSE)
    if(is.character(macros))
    	macros <- initialRdMacros(macros = macros)
    ## remove old-style marking for data, keep line nos
    lines[lines == "\\non_function{}"] <- ""
    ## Extract the encoding if marked in the file:
    ## do this in two steps to minimize warnings in MBCS locales
    ## Note this is required to be on a line by itself,
    ## but some people have preceding whitespace
    enc <- grep("\\encoding{", lines, fixed = TRUE, useBytes=TRUE, value=TRUE)
    enc <- grep("^[[:space:]]*\\\\encoding\\{([^}]*)\\}.*", enc, value = TRUE)
    if(length(enc)) {
        if(length(enc) > 1L)
            warning(file0, ": multiple \\encoding lines, using the first",
                    domain = NA, call. = warningCalls)
        ## keep first one
        enc <- enc[1L]
        enc <- sub("^[[:space:]]*\\\\encoding\\{([^}]*)\\}.*", "\\1", enc)
        if(verbose) message("found encoding ", sQuote(enc), domain = NA)
        encoding <- if(enc %in% c("UTF-8", "utf-8", "utf8")) "UTF-8" else enc
    }
    if (length(encoding) != 1L || encoding == "unknown") encoding <- ""

    ## the internal function must get some sort of srcfile
    if (!inherits(srcfile, "srcfile"))
    	srcfile <- srcfile(file0)
    basename <- basename(srcfile$filename)
    srcfile$encoding <- encoding
    srcfile$Enc <- "UTF-8"

    ## Validate/convert the input so the parser always sees UTF-8.
    if (encoding == "ASCII") {
        if (anyNA(iconv(lines, "", "ASCII")))
            stop(file0, ": non-ASCII input and no declared encoding",
                 domain = NA, call. = warningCalls)
    } else {
        validate <- config_val_to_logical(Sys.getenv("_R_CHECK_VALIDATE_UTF8_",
                                                     "FALSE"))
        if (encoding %in% c("unknown", "")  && l10n_info()[["UTF-8"]])
            encoding <- "UTF-8"
        if (encoding %in% c("UTF-8", "UTF8")) { ## package qgraph used UTF8
            valid <- validUTF8(lines)
            if (any(!valid))
                if (validate)
                    stop(sprintf("invalid UTF-8 in file %s", sQuote(file0)),
                         domain = NA, call. = FALSE)
                else
                    warning(sprintf("invalid UTF-8 in file %s", sQuote(file0)),
                            domain = NA, call. = FALSE)
        }
	if (encoding != "UTF-8")
    	    lines <- iconv(lines, encoding, "UTF-8", sub = "byte")
        ## Strip UTF-8 BOM if necessary.
        bytes <- charToRaw(lines[1L])
        if(identical(as.integer(bytes[1L : 3L]),
                     c(0xefL, 0xbbL, 0xbfL)))
            lines[1L] <- rawToChar(bytes[-(1L : 3L)])
    }

    ## Feed the (now UTF-8) lines to the parser via a text connection.
    tcon <- file()
    writeLines(lines, tcon, useBytes = TRUE)
    on.exit(close(tcon))

    warndups <- config_val_to_logical(Sys.getenv("_R_WARN_DUPLICATE_RD_MACROS_", "FALSE"))

    result <- if(permissive)
                  ## FIXME:  this should test for a special class of warning rather than testing the
                  ##         message, but those are currently not easily generated from C code.
                  withCallingHandlers(.External2(C_parseRd, tcon, srcfile, "UTF-8",
                                                 verbose, basename, fragment,
                                                 warningCalls, macros, warndups),
		       warning = function(w)
			    if (grepl("unknown macro", conditionMessage(w)))
				tryInvokeRestart("muffleWarning"))
              else
                  .External2(C_parseRd, tcon, srcfile, "UTF-8",
                             verbose, basename, fragment, warningCalls,
                             macros, warndups)
    result <- expandDynamicFlags(result)
    if (permissive)
	permissify(result)
    else
        result
}

print.Rd <- function(x, deparse = FALSE, ...)
{
    ## Display a parsed Rd object in its source form; invisibly return
    ## the object, as print methods conventionally do.
    txt <- as.character.Rd(x, deparse = deparse)
    cat(txt, sep = "")
    invisible(x)
}

## Convert a parsed Rd object back to its source form, returned as a
## character vector of fragments (callers typically cat(..., sep = "")).
## With deparse = TRUE, text is re-escaped via deparseRdElement() so the
## result can be re-parsed.
as.character.Rd <- function(x, deparse = FALSE, ...)
{
    ZEROARG <- c("\\cr", "\\dots", "\\ldots", "\\R", "\\tab") # Only these cause trouble when {} is added
    MULTIARG <- c("\\section", "\\subsection", "\\item", "\\enc",
                  "\\method", "\\S3method", "\\S4method", "\\tabular",
                  "\\if", "\\href", "\\ifelse")
    USERMACROS <- c("USERMACRO", "\\newcommand", "\\renewcommand")
    EQN <- c("\\deqn", "\\eqn", "\\figure")
    modes <- c(RLIKE = 1L, LATEXLIKE = 2L, VERBATIM = 3L, INOPTION = 4L, COMMENTMODE = 5L, UNKNOWNMODE = 6L)
    tags  <- c(RCODE = 1L, TEXT      = 2L, VERB     = 3L,                COMMENT     = 5L, UNKNOWN     = 6L)
    ## state tracked across elements while deparsing (used by C code)
    state <- c(braceDepth = 0L, inRString = 0L)
    needBraces <- FALSE  # if next character is alphabetic, separate by braces.
    inEqn <- 0L

    ## Recursive worker: emit the fragments for element 'x'.
    ## 'quoteBraces' says whether unbalanced braces should be escaped.
    pr <- function(x, quoteBraces) {
        tag <- attr(x, "Rd_tag")
        if (is.null(tag) || tag == "LIST") tag <- ""
    	if (is.list(x)) {
    	    savestate <- state
    	    state <<- c(0L, 0L)
    	    needBraces <<- FALSE
    	    if (tag == "Rd") { # a whole file
    	        result <- character()
    	    	for (i in seq_along(x))
                    result <- c(result, pr(x[[i]], quoteBraces))
    	    } else if (startsWith(tag, "#")) {  # #ifdef / #ifndef blocks
    	    	if (deparse) {
    	    	    dep <- deparseRdElement(x[[1L]][[1L]],
                                            c(state, modes["LATEXLIKE"],
                                              inEqn,
                                              as.integer(quoteBraces)))
    	    	    result <- c(tag, dep[[1L]])
    	    	} else
    	    	    result <- c(tag, x[[1L]][[1L]])
    	    	for (i in seq_along(x[[2L]]))
                    result <- c(result, pr(x[[2L]][[i]], quoteBraces))
    	    	result <- c(result, "#endif\n")
    	    } else if (tag %in% ZEROARG) {
    	    	result <- tag
    	    	needBraces <<- TRUE
    	    } else if (tag %in% MULTIARG) {
    	    	result <- tag
    	    	for (i in seq_along(x))
                    result <- c(result, pr(x[[i]], quoteBraces))
    	    } else if (tag %in% EQN) {
    	    	result <- tag
    	    	inEqn <<- 1L
    	    	result <- c(result, pr(x[[1L]], quoteBraces))
    	    	inEqn <<- 0L
    	    	if (length(x) > 1L)
    	    	    result <- c(result, pr(x[[2L]], quoteBraces))
    	    } else {
    	    	## default: macro (or bare block) with optional [option]
    	    	## followed by a single braced argument
    	    	result <- tag
    	    	if (!is.null(option <- attr(x, "Rd_option")))
    	    	    result <- c(result, "[", pr(option, quoteBraces), "]")
    	    	result <- c(result, "{")
    	    	for (i in seq_along(x))
                    result <- c(result, pr(x[[i]], quoteBraces))
    	    	result <- c(result, "}")
    	    }
    	    if (state[1L])  # If braces didn't match within the list, try again, quoting them
    	    	result <- pr(x, TRUE)
    	    state <<- savestate
    	} else if (tag %in% USERMACROS) {
    	    	result <- c()
    	} else {
    	    if (deparse) {
    		dep <- deparseRdElement(as.character(x), c(state, tags[tag], inEqn, as.integer(quoteBraces)))
    	    	result <- dep[[1L]]
    	    	state <<- dep[[2L]][1L:2L]
    	    } else {
	        if (inherits(x, "Rd"))
		    class(x) <- setdiff(class(x), "Rd") # Avoid infinite recursion from misuse (PR#16448)
    	    	result <- as.character(x)
	    }
    	    if (needBraces) {
    	    	if (grepl("^[[:alpha:]]", result)) result <- c("{}", result)
    	    	needBraces <<- FALSE
    	    }
        }
    	result
    }
    if (is.null(attr(x, "Rd_tag"))) attr(x, "Rd_tag") <- "Rd"
    pr(x, quoteBraces = FALSE)
}

## Deparse a single Rd element given the current parse 'state'
## (brace depth, in-R-string flag, mode, eqn flag, quoteBraces);
## implemented in C.  As used in as.character.Rd() above, the result is
## a list whose first element is the deparsed text and whose second
## carries the updated state.
deparseRdElement <- function(element, state)
    .Call(C_deparseRd, element, state)

# Convert unknown tags into text displaying the tag with braces if necessary
# This allows unknown LaTeX macros to be embedded in the text, and to be just passed
# through.
permissify <- function(Rd)
{
    tags <- RdTags(Rd)
    ## Preserve the object's attributes across the list surgery below.
    oldclass <- class(Rd)
    oldsrcref <- utils::getSrcref(Rd)
    oldtag <- attr(Rd, "Rd_tag")
    i <- 0
    while (i < length(tags)) {
        i <- i+1
   	if (tags[i] == "UNKNOWN") {
   	    ## Re-tag the unknown macro as plain text, then turn any
   	    ## following LIST argument into literal "{" ... "}" text.
   	    Rd[[i]] <- tagged(Rd[[i]], "TEXT", utils::getSrcref(Rd[[i]]))
            while (i < length(tags)) {
		if (tags[i+1] == "LIST") {
		    Rd <- c(Rd[seq_len(i)],
                            list(tagged("{", "TEXT", utils::getSrcref(Rd[[i+1]]))),
                            permissify(Rd[[i+1]]),
                            list(tagged("}", "TEXT", utils::getSrcref(Rd[[i+1]]))),
			    Rd[seq_along(Rd)[-seq_len(i+1)]])
		    tags <- RdTags(Rd)
		    i <- i+3
		} else if (tags[i+1] == "TEXT" && grepl("^ *$", Rd[[i+1]]))
		    i <- i + 1  # skip whitespace between macro and argument
		else
		    break
            }
        } else if (is.recursive(Rd[[i]]))
            Rd[[i]] <- permissify(Rd[[i]])
    }
    class(Rd) <- oldclass
    attr(Rd, "srcref") <- oldsrcref
    attr(Rd, "Rd_tag") <- oldtag
    Rd
}
#  File src/library/tools/R/pkg2HTML.R
#
#  Copyright (C) 2023-2024 The R Core Team
#  Part of the R package, https://www.R-project.org
#
#  This program is free software; you can redistribute it and/or modify
#  it under the terms of the GNU General Public License as published by
#  the Free Software Foundation; either version 2 of the License, or
#  (at your option) any later version.
#
#  This program is distributed in the hope that it will be useful,
#  but WITHOUT ANY WARRANTY; without even the implied warranty of
#  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
#  GNU General Public License for more details.
#
#  A copy of the GNU General Public License is available at
#  https://www.R-project.org/Licenses/


## Experimental interface to convert package source directory into
## single-page manual

## FIXME: Rd2HTML(standalone=FALSE) needs to include mathjaxr
## detection results in the info attribute.

## This cannot be done per Rd file, but we can switch to mathjaxr if
## any Rd file in a package uses mathjaxr

## Convert a package's Rd files to HTML fragments (one per Rd file).
## Returns a list of list(outlines = <HTML lines>, info = <Rd2HTML info
## attribute>), with the path of the package's DESCRIPTION file attached
## as the "descfile" attribute.
.convert_package_rdfiles <- function(package, dir = NULL, lib.loc = NULL, ...,
                                     stages = c("build", "later", "install", "render"),
                                     xLinks = character(0))
{
    ## if 'package' is an installed package (simplest) just use
    ## Rd_db(package) to get parsed Rd files. Otherwise, if 'package'
    ## is a .tar.gz file, assume that it's a source package (so unpack
    ## and call Rd_db on those). If 'package' is missing but 'dir' is
    ## not, interpret as source package (no need to unpack)

    ## A tarball path/URL looks like .../<pkg>_<version>.tar.gz
    isPkgTarball <- function(x) {
        length(x) == 1L && 
            endsWith(x, "tar.gz") &&
            length(strsplit(basename(x), "_", fixed = TRUE)[[1]]) == 2L
    }
    isURL <- function(x) {
        length(x) == 1L && 
            (startsWith(x, "http://") || startsWith(x, "https://"))
    }
    db <- 
        if (!missing(package) && isTRUE(isPkgTarball(package)))
        {
            ## If URL, download first
            if (isURL(package)) {
                destdir <- tempfile("dir")
                if (!dir.create(destdir))
                    stop(gettextf("unable to create temporary directory %s",
                                  sQuote(destdir)))
                utils::download.file(package, destfile = file.path(destdir, basename(package)))
                package <- file.path(destdir, basename(package))
            }
            ## Unpack first.
            ## Copied from src/library/utils/R/unix/mac.install.R::unpackPkg
            tmpDir <- tempfile("pkg")
            if (!dir.create(tmpDir))
                stop(gettextf("unable to create temporary directory %s",
                              sQuote(tmpDir)))
            utils::untar(package, exdir = tmpDir)
            pkgdir <- list.dirs(tmpDir, recursive = FALSE)
            if (length(pkgdir) != 1)
                stop(gettextf("expected one package directory, found %d.",
                              length(pkgdir)))
            Rd_db(dir = pkgdir, stages = stages)
        }
        else {
            ## FIXME: needs cleanup
            ## Installed package (package given) or source dir (dir given);
            ## note 'pkgdir' is set here as a side effect and used below.
            pkgdir <- if (is.null(dir)) find.package(package, lib.loc) else dir
            if (is.null(dir)) Rd_db(package, , lib.loc, stages = stages)
            else Rd_db(, dir, lib.loc, stages = stages)
        }

    ## create links database for help links. Level 0 links are
    ## obtained directly from the db, which is useful for non-installed packages.
    Links0 <- .build_links_index(Rd_contents(db), basename(pkgdir))
    Links <- c(Links0, findHTMLlinks(pkgdir, level = 1))
    Links2 <- xLinks
    
    ## Render one parsed Rd file, capturing the HTML lines and keeping
    ## the "info" attribute (name/title/mathjaxr detection etc.).
    rd2lines <- function(Rd, ...) {
        ## Rd2HTML() returns output location, which is not useful
        ## here, but also attributes that are.
        outlines <-
            utils::capture.output(
                       h <- Rd2HTML(Rd, out = "",
                                    package = pkgdir, # to extract pkgname/version info
                                    Links = Links, Links2 = Links2,
                                    ...)
                   )
        list(outlines = outlines, info = attr(h, "info"))
    }
    structure(lapply(db, rd2lines, standalone = FALSE, ...),
              descfile = file.path(pkgdir, "DESCRIPTION"))

}



## Render a whole package's documentation as a single HTML page:
## header + (optionally) DESCRIPTION + table of contents + one section
## per Rd file + footer.  Invisibly returns the output location.
pkg2HTML <- function(package, dir = NULL, lib.loc = NULL,
                     outputEncoding = "UTF-8",
                     stylesheet = file.path(R.home("doc"), "html", "R-nav.css"),
                     hooks = list(pkg_href = function(pkg) sprintf("%s.html", pkg)),
                     texmath = getOption("help.htmlmath"),
                     prism = TRUE,
                     out = NULL,
                     toc_entry = c("title", "name"),
                     ...,
                     Rhtml = FALSE,
                     mathjax_config = file.path(R.home("doc"), "html", "mathjax-config.js"),
                     include_description = TRUE)
{
    toc_entry <- match.arg(toc_entry)
    hcontent <- .convert_package_rdfiles(package = package, dir = dir, lib.loc = lib.loc,
                                         outputEncoding = outputEncoding,
                                         Rhtml = Rhtml, hooks = hooks,
                                         texmath = "katex", prism = prism, ...)
    descfile <- attr(hcontent, "descfile")
    descmeta <- .read_description(descfile)
    pkgname <- descmeta["Package"]
    ## Default output file name comes from the pkg_href hook.
    if (is.null(out)) {
        out <- if (is.null(hooks$pkg_href)) ""
               else hooks$pkg_href(pkgname)
    }
    
    ## Sort by name, as in PDF manual (check exact code). The
    ## '<pkg>-package.Rd' summary page, if present, should come first.
    hcontent <- hcontent[order(vapply(hcontent,
                                      function(h) h$info$name,
                                      ""))]
    if (length(hcontent) > 1 &&
        length(wsumm <- which(vapply(hcontent,
                                     function(h) isTRUE(h$info$pkgsummary),
                                     FALSE))) > 0L) {
        hcontent <- c(hcontent[wsumm], hcontent[-wsumm])
    }
    rdnames <- vapply(hcontent, function(h) h$info$name, "")
    rdtitles <- vapply(hcontent, function(h) h$info$title[[1L]], "")
    ## rdtitles <- vapply(hcontent, function(h) h$info$htmltitle[[1L]], "") # FIXME: has extra <p>
    ## Switch to mathjax if any Rd file in the package uses mathjaxr.
    use_mathjax <- any(vapply(hcontent, function(h) h$info$mathjaxr, FALSE))
    if (missing(texmath) || is.null(texmath))
        texmath <- if (use_mathjax) "mathjax" else "katex"

    toclines <- sprintf("<li><a href='#%s'>%s</a></li>",
                        name2id(rdnames),
                        switch(toc_entry, title = rdtitles, name = rdnames))

    language <- descmeta["Language"]
    if(is.na(language))
        language <- "en"
    else if(grepl(",", language))
        language <- NA_character_
    ## If DESCRIPTION specifies several languages, we currently cannot
    ## tell which one will be used for the package Rd files.  We could
    ## guess to use the first language given, for now simply take the
    ## language as unknown.
    
    ## Now to make a file with header + DESCRIPTION + TOC + content + footer

    hfcomps <- # should we be able to specify static URLs here?
        HTMLcomponents(title = paste0("Help for package ", pkgname), logo = FALSE,
                       up = NULL, top = NULL,
                       css = stylesheet,
                       outputEncoding = outputEncoding,
                       dynamic = FALSE, prism = prism,
                       doTexMath = TRUE,
                       texmath = if (use_mathjax) "mathjax" else texmath,
                       MATHJAX_CONFIG_STATIC = mathjax_config,
                       language = language)

    writeHTML <- function(..., sep = "\n", append = TRUE)
        cat(..., file = out, sep = sep, append = append)

    ## cat(hfcomps$header, fill = TRUE) # debug
    writeHTML(hfcomps$header, sep = "", append = FALSE)
    ## writeHTML(sprintf("<header class='top'><h1>Package {%s}</h1><hr></header>",
    ##                   pkgname))
    writeHTML('<nav class="package" aria-label="Topic Navigation">',
              '<div class="dropdown-menu">',
              sprintf('<h1>Package {%s}</h1>', pkgname),
              '<h2>Contents</h2>',
              '<ul class="menu">',
              toclines,
              '</ul>',
              '</div>',
              '<hr>',
              '</nav>',
              '<main>')

    if (include_description) writeHTML(.DESCRIPTION_to_HTML(descfile))
    lapply(hcontent, function(h) writeHTML("<hr>", h$outlines))
    writeHTML('</main>')
    writeHTML(hfcomps$footer, sep = "")
    invisible(out)
}


#  File src/library/tools/R/QC.R
#  Part of the R package, https://www.R-project.org
#
#  Copyright (C) 1995-2025 The R Core Team
#
#  This program is free software; you can redistribute it and/or modify
#  it under the terms of the GNU General Public License as published by
#  the Free Software Foundation; either version 2 of the License, or
#  (at your option) any later version.
#
#  This program is distributed in the hope that it will be useful,
#  but WITHOUT ANY WARRANTY; without even the implied warranty of
#  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
#  GNU General Public License for more details.
#
#  A copy of the GNU General Public License is available at
#  https://www.R-project.org/Licenses/

## R CMD check uses
## .find_charset
## .check_namespace
## .check_package_depends
## .check_demo_index
## .check_vignette_index
## .check_package_subdirs
## .check_citation
## .check_package_ASCII_code
## .check_package_code_syntax
## .check_packages_used
## .check_package_code_shlib
## .check_package_code_startup_functions
## .check_package_code_assign_to_globalenv
## .check_package_code_attach
## .check_package_code_data_into_globalenv
## .check_package_code_class_is_string
## .check_code_usage_in_package
## .check_bogus_return
## .check_dotInternal
## .check_package_parseRd
## .check_Rd_xrefs
## undoc
## codoc
## codocData
## codocClasses
## checkDocFiles
## checkDocStyle
## checkFF
## checkS3methods
## checkReplaceFuns
## .check_package_datasets
## .check_package_compact_datasets
## .check_package_compact_sysdata
## .check_make_vars
## .createExdotR (testing.R)
## .runPackageTestsR (testing.R)
## .get_LaTeX_errors_from_log_file
## .check_package_CRAN_incoming
## checkRdContents

## R CMD build uses .check_package_subdirs

## NB: 'tools' cannot use NAMESPACE imports from utils, as it exists first

## "The language elements" : all are .Primitive *and* print as .Primitive("...")
langElts <- c("(", "{", ":", "~",
              "<-", "<<-", "=",
              "[", "[[", "[[<-", "[<-", "@", "@<-", "$", "$<-",
              "&&", "||",
              "break", "for", "function", "if", "next", "repeat", "return", "while")

## Code "existing conceptually" in base,
## typically function names of default methods for .Primitive s:
conceptual_base_code <- c("c.default")

##' a "default" print method (see NAMESPACE):
.print.via.format <- function(x, ...) {
    ## Print by formatting the object and writing one line per element;
    ## return the object invisibly, as print methods conventionally do.
    formatted <- format(x, ...)
    writeLines(formatted)
    invisible(x)
}

## utility for whether Rd sources are available.
.haveRds <- function(dir)
{
    ## TRUE if 'dir' has Rd documentation: either a source package (or
    ## pre-2.10.0 installed package) with a "man" directory, or an
    ## installed package with a "help/paths.rds" index.
    man_dir <- file.path(dir, "man")
    paths_file <- file.path(dir, "help", "paths.rds")
    dir.exists(man_dir) || file.exists(paths_file)
}

### * undoc/F/out

## Find "undocumented things" in a package: code objects, data sets,
## and -- when methods dispatch is on -- S4 classes and methods with
## no matching Rd alias.  Exactly one of 'package' (an installed
## package) or 'dir' (a package source directory) must be supplied.
## Returns a list of class "undoc", one component per category (see
## format.undoc below for how it is displayed).
undoc <-
function(package, dir, lib.loc = NULL)
{
    ## Argument handling.
    ## <NOTE>
    ## Earlier versions used to give an error if there were no Rd
    ## objects.  This is not right: if there is code or data but no
    ## documentation, everything is undocumented ...
    ## </NOTE>
    if(!missing(package)) {
        ## Installed package: take the aliases from the installed Rd
        ## index and the objects from the (attached) package env.
        if(length(package) != 1L)
            stop("argument 'package' must be of length 1")
        dirdir <- dirname(dir <- find.package(package, lib.loc))
        ## Using package installed in @code{dir} ...
        is_base <- package == "base"

        all_doc_topics <- Rd_aliases(package, lib.loc = dirdir)

        ## Load package into code_env.
        if(!is_base)
            .load_package_quietly(package, dirdir)
        code_env <- .package_env(package)

        code_objs <- ls(envir = code_env, all.names = TRUE)
        pkgname <- package
    }
    else {
        ## Source package: source the assignments into a scratch env
        ## and take the aliases from the Rd sources.
        if(missing(dir))
            stop("you must specify 'package' or 'dir'")
        pkgname <- basename(dir)
        dirdir  <- dirname(dir)
        ## Using sources from directory @code{dir} ...
        if(!dir.exists(dir))
            stop(gettextf("directory '%s' does not exist", dir),
                 domain = NA)
        else
            dir <- file_path_as_absolute(dir)
        is_base <- pkgname == "base"

        all_doc_topics <- Rd_aliases(dir = dir)

        code_env <- new.env(hash = TRUE)
        code_dir <- file.path(dir, "R")
        if(dir.exists(code_dir)) {
            dfile <- file.path(dir, "DESCRIPTION")
            meta <- if(file_test("-f", dfile))
                .read_description(dfile)
            else
                character()
            .source_assignments_in_code_dir(code_dir, code_env, meta)
            ## sysdata.rda objects also count as code objects
            sys_data_file <- file.path(code_dir, "sysdata.rda")
            if(file_test("-f", sys_data_file))
                load(sys_data_file, code_env)
        }

        code_objs <- ls(envir = code_env, all.names = TRUE)

        if(file.exists(file.path(dir, "NAMESPACE")) &&
           ## Code in NAMESPACE could e.g. check the version of one of
           ## its Imports.
           !inherits(tryCatch(nsInfo <-
                                  parseNamespaceFile(pkgname,
                                                     dirdir),
                              error = identity),
                     "error")) {
            ## Look only at exported objects (and not declared S3
            ## methods).
            OK <- intersect(code_objs, nsInfo$exports)
            for(p in nsInfo$exportPatterns)
                OK <- c(OK, grep(p, code_objs, value = TRUE))
            code_objs <- unique(OK)
        }
    }

    ## Find the data sets to work on.
    data_dir <- file.path(dir, "data")
    data_objs <- if(dir.exists(data_dir))
        unlist(.try_quietly(list_data_in_pkg(dir = dir)),
               use.names = FALSE)
    else
        character()

    ## There was a time when packages contained code or data (or both).
    ## But not anymore ...
    if(!missing(package) && !length(code_objs) && !length(data_objs)
       && getOption("verbose"))
        message("neither code nor data objects found")

    if(!is_base) {
        ## Code objects in add-on packages with names starting with a
        ## dot are considered 'internal' (not user-level) by
        ## convention.
        if(!config_val_to_logical(Sys.getenv("_R_CHECK_UNDOC_USE_ALL_NAMES_",
                                             "FALSE")))
            code_objs <- grep("^[^.].*", code_objs, value = TRUE)
        else {
            ## keep dot names, but drop .Depends and S4 metadata
            code_objs <- code_objs %w/o% c(".Depends")
            code_objs <- code_objs[!(startsWith(code_objs, ".__C__") |
                                     startsWith(code_objs, ".__T__"))]
        }
        ## Note that this also allows us to get rid of S4 meta objects
        ## (with names starting with '.__C__' or '.__M__'; well, as long
        ## as there are none in base).

        ## Implicit generic functions exist to turn method dispatch on
        ## in this package, but their definition and documentation belongs
        ## to the package in their package slot, so eliminate any
        ## foreign generic functions from code_objs
        if(.isMethodsDispatchOn()) {
            is <- methods::is           # speed
            code_objs <-
                Filter(function(f) {
                    fdef <- code_env[[f]] # faster than get()
                    ## Running methods::is() on data sets can trigger
                    ## loading additional packages for which startup
                    ## messages et al need suppressing ...
                    if(suppressMessages(is(fdef, "genericFunction")))
                        fdef@package == pkgname
                    else
                        TRUE
                },
                code_objs)
        }

        ## Allow group generics to be undocumented other than in base.
        ## In particular, those from methods partially duplicate base
        ## and are documented in base's groupGenerics.Rd.
        code_objs <- setdiff(code_objs,
                             c("Arith", "Compare", "Complex", "Logic",
                               "Math", "Math2", "Ops", "Summary", "matrixOps"))
    }

    ## Anything not among the documented topics is undocumented.
    undoc_things <-
        list("code objects" =
             unique(setdiff(code_objs, all_doc_topics)),
             "data sets" =
             unique(setdiff(data_objs, all_doc_topics)))

    if(.isMethodsDispatchOn()) {
        ## Undocumented S4 classes?
        S4_classes <- methods::getClasses(code_env)
        ## <NOTE>
        ## There is no point in worrying about exportClasses directives
        ## in a NAMESPACE file when working on a package source dir, as
        ## we only source the assignments, and hence do not get any
        ## S4 classes or methods.
        ## </NOTE>
        ## The bad ones:
        S4_classes <-
            S4_classes[vapply(S4_classes, utils:::topicName, " ",
                              type = "class", USE.NAMES = FALSE)
                       %notin% all_doc_topics]
        undoc_things <-
            c(undoc_things, list("S4 classes" = unique(S4_classes)))
    }

    if(.isMethodsDispatchOn()) {
        ## Undocumented S4 methods?
        ## <NOTE>
        ## There is no point in worrying about exportMethods directives
        ## in a NAMESPACE file when working on a package source dir, as
        ## we only source the assignments, and hence do not get any
        ## S4 classes or methods.
        ## </NOTE>
        ## Build "GENERIC,SIGLIST" keys for each method of a generic.
        .make_S4_method_siglist <- function(g) {
            mlist <- .get_S4_methods_list(g, code_env)
            sigs <- .make_siglist(mlist) #  s/#/,/g
            if(length(sigs))
                paste0(g, ",", sigs)
            else
                character()
        }
        S4_methods <- lapply(.get_S4_generics(code_env),
                             .make_S4_method_siglist)
        S4_methods <- as.character(unlist(S4_methods, use.names = FALSE))

        ## The bad ones:
        S4_methods <-
            S4_methods[vapply(S4_methods, utils:::topicName, " ",
                               type="method", USE.NAMES = FALSE)
                       %notin% all_doc_topics]
        undoc_things <-
            c(undoc_things,
              list("S4 methods" =
                   unique(sub("([^,]*),(.*)",
                              "generic '\\1' and siglist '\\2'",
                              S4_methods))))
    }
    if(is_base) {
        ## We use .ArgsEnv and .GenericArgsEnv in checkS3methods() and
        ## codoc(), so we check here that the set of primitives has not
        ## been changed.
        ff <- as.list(baseenv(), all.names = TRUE)
        prims <- names(ff)[vapply(ff, is.primitive, NA)]
        prototypes <- sort(c(names(.ArgsEnv), names(.GenericArgsEnv)))
        ## prototyped names which are no longer primitive:
        extras <- setdiff(prototypes, prims)
        if(length(extras))
            undoc_things <- c(undoc_things, list(prim_extra = extras))
        ## primitives with neither a prototype nor language-element status:
        miss <- setdiff(prims, c(langElts, prototypes))
        if(length(miss))
            undoc_things <- c(undoc_things, list(primitives = miss))
    }

    class(undoc_things) <- "undoc"
    undoc_things
}

## format() method for "undoc" results: one header line per non-empty
## category followed by a pretty-printed listing of its entries.
format.undoc <-
function(x, ...)
{
    ## Header text for a category tag, falling back to a generic
    ## "Undocumented <tag>:" message for unknown tags.
    header_for <- function(tag)
        switch(tag,
               "code objects" = gettext("Undocumented code objects:"),
               "data sets"    = gettext("Undocumented data sets:"),
               "S4 classes"   = gettext("Undocumented S4 classes:"),
               "S4 methods"   = gettext("Undocumented S4 methods:"),
               prim_extra     = gettext("Prototyped non-primitives:"),
               gettextf("Undocumented %s:", tag))

    fmt1 <- function(i) {
        tag <- names(x)[i]
        ## We avoid markup for indicating S4 methods, hence need to
        ## special-case output for these ...
        body <- if(tag == "S4 methods")
            strwrap(x[[i]], indent = 2L, exdent = 4L)
        else
            .pretty_format(x[[i]])
        c(header_for(tag), body)
    }

    as.character(unlist(lapply(which(lengths(x) > 0L), fmt1)))
}

### * codoc

##
## Does call 'e' look like a data-set usage entry, i.e. data(foo) or
## data(foo, package = "bar") and similar?  That is: a call with one
## to three arguments whose callee is 'data' and whose first argument
## is not '...'.  (Condition order matters: the length guard must run
## before the arguments are inspected.)
is_data_for_dataset <- function(e)
{
    (length(e) >= 2L) &&
        (e[[1L]] == quote(data)) &&
        (e[[2L]] != quote(...)) &&
        (length(e) <= 4L)
}

## Check code/documentation consistency: compare the formals of the
## functions in the code against the argument lists parsed from the
## \usage sections of the package's Rd files.  Exactly one of
## 'package' (installed) or 'dir' (source directory) must be given.
## 'use.values' controls whether default values are compared as well
## (FALSE: names only, TRUE: all defaults, NULL: only defaults shown
## in the docs).  Returns a list of class "codoc" of the per-Rd-file
## mismatches, with summary information attached as attributes.
codoc <-
function(package, dir, lib.loc = NULL,
         use.values = NULL, verbose = getOption("verbose"))
{
    has_namespace <- FALSE

    ## Argument handling.
    if(!missing(package)) {
        ## Installed package: objects come from the package env and
        ## its namespace, S3 methods from the registration table.
        if(length(package) != 1L)
            stop("argument 'package' must be of length 1")
        dir <- find.package(package, lib.loc)
        ## Using package installed in @code{dir} ...
        code_dir <- file.path(dir, "R")
        if(!dir.exists(code_dir))
            stop(gettextf("directory '%s' does not contain R code", dir),
                 domain = NA)
        if(!.haveRds(dir))
            stop(gettextf("directory '%s' does not contain Rd objects", dir),
                 domain = NA)
        is_base <- basename(dir) == "base"

        dirdir <- dirname(dir)

        ## Load package into code_env.
        if(!is_base)
            .load_package_quietly(package, dirdir)
        code_env <- .package_env(package)

        objects_in_code <- sort(names(code_env))

        if(is_base) {
            objects_in_code_or_namespace <- objects_in_code
        } else {
            has_namespace <- TRUE
            ns_env <- asNamespace(package)
            S3Table <- ns_env[[".__S3MethodsTable__."]]
            functions_in_S3Table <- ls(S3Table, all.names = TRUE)
            objects_in_ns <-
                setdiff(sort(names(ns_env)),
                        c(".__NAMESPACE__.", ".__S3MethodsTable__."))
            ns_S3_methods_db <- getNamespaceInfo(package, "S3methods")
            ## registered S3 methods as "generic.class" names
            ns_S3_methods <- if(is.null(ns_S3_methods_db))
                                 character()
                             else
                                 paste(ns_S3_methods_db[, 1L],
                                       ns_S3_methods_db[, 2L],
                                       sep = ".")
            objects_in_code_or_namespace <-
                unique(c(objects_in_code, objects_in_ns, ns_S3_methods))
            objects_in_ns <- setdiff(objects_in_ns, objects_in_code)
        }

        package_name <- package
    }
    else {
        ## Source package: source the assignments into a scratch env.
        if(missing(dir))
            stop("you must specify 'package' or 'dir'")
        ## Using sources from directory @code{dir} ...
        if(!dir.exists(dir))
            stop(gettextf("directory '%s' does not exist", dir), domain = NA)
        ## else
        package_name <- basename(dir) # early, before resolving sym.links etc in next line:
        dirdir <- dirname(dir)        # early, ...
        dir <- file_path_as_absolute(dir)
        code_dir <- file.path(dir, "R")
        if(!dir.exists(code_dir))
            stop(gettextf("directory '%s' does not contain R code", dir),
                 domain = NA)
        if(!.haveRds(dir))
            stop(gettextf("directory '%s' does not contain Rd objects", dir),
                 domain = NA)
        is_base <- package_name == "base"

        code_env <- new.env(hash = TRUE)
        dfile <- file.path(dir, "DESCRIPTION")
        meta <- if(file_test("-f", dfile)) .read_description(dfile) else character()
        .source_assignments_in_code_dir(code_dir, code_env, meta)
        sys_data_file <- file.path(code_dir, "sysdata.rda")
        if(file_test("-f", sys_data_file)) load(sys_data_file, code_env)

        objects_in_code <- sort(names(code_env))
        objects_in_code_or_namespace <- objects_in_code

        ## Does the package have a NAMESPACE file?
        ## Also, do not attempt to find S3 methods.
        if(file.exists(file.path(dir, "NAMESPACE")) &&
           ## Code in NAMESPACE could e.g. check the version of one of
           ## its Imports.
           !inherits(tryCatch(nsInfo <-
                                  parseNamespaceFile(package_name,
                                                     dirdir),
                              error = identity),
                     "error")) {
            has_namespace <- TRUE
            objects_in_ns <- objects_in_code
            functions_in_S3Table <- character()
            ns_env <- code_env

            ## Look only at exported objects.
            OK <- intersect(objects_in_code, nsInfo$exports)
            for(p in nsInfo$exportPatterns)
                OK <- c(OK, grep(p, objects_in_code, value = TRUE))
            objects_in_code <- unique(OK)
        }
    }

    ## Find the data sets to work on.
    data_dir <- file.path(dir, "data")
    if(dir.exists(data_dir)) {
        data_sets_in_code_variables <-
            .try_quietly(list_data_in_pkg(dir = dir))
        data_sets_in_code <- names(data_sets_in_code_variables)
    } else
        data_sets_in_code <- data_sets_in_code_variables <- character()

    ## Find the function objects to work on.
    functions_in_code <-
        Filter(function(f) {
                   ## This is expensive
                   f <- get(f, envir = code_env)
                   typeof(f) == "closure"
               },
               objects_in_code)
    ## Sourcing all R code files in the package is a problem for base,
    ## where this misses the .Primitive functions.  Hence, when checking
    ## base for objects shown in \usage but missing from the code, we
    ## get the primitive functions from the version of R we are using.
    ## Maybe one day we will have R code for the primitives as well ...
    ## As from R 2.5.0 we do for most generics.
    if(is_base) {
        objects_in_base <-
            sort(names(baseenv()))
        objects_in_code <-
            c(objects_in_code,
              conceptual_base_code,
              Filter(.is_primitive_in_base, objects_in_base),
              c(".First.lib", ".Last.lib", ".Random.seed",
                ".onLoad", ".onAttach", ".onDetach", ".onUnload"))
        objects_in_code_or_namespace <- objects_in_code
        ## add prototypes for the primitive S3 generics / non-generics
        known_env <- .make_S3_primitive_generic_env(code_env, fixup=TRUE)
        extras <- ls(known_env, all.names = TRUE)
        functions_in_code <- c(functions_in_code, extras)
        code_env <- known_env
        known_env <- .make_S3_primitive_nongeneric_env(code_env)
        extras <- ls(known_env, all.names = TRUE)
        functions_in_code <- c(functions_in_code, extras)
        code_env <- known_env
    }

    ## Build a list with the formals of the functions in the code
    ## indexed by the names of the functions.
    function_args_in_code <-
        lapply(functions_in_code,
               function(f) formals(get(f, envir = code_env))) # get is expensive
    names(function_args_in_code) <- functions_in_code
    if(has_namespace) {
        ## also gather formals from the namespace and the registered
        ## S3 methods; the package-env version wins on duplicates
        functions_in_ns <-
            Filter(function(f) {
                       f <- get(f, envir = ns_env) # get is expensive
                       is.function(f) && (length(formals(f)) > 0L)
                   },
                   objects_in_ns)
        function_args_in_ns <-
            lapply(functions_in_ns,
                   function(f) formals(get(f, envir = ns_env)))
        names(function_args_in_ns) <- functions_in_ns

        function_args_in_S3Table <-
            lapply(functions_in_S3Table, function(f) formals(S3Table[[f]]))
        names(function_args_in_S3Table) <- functions_in_S3Table

        tmp <- c(function_args_in_code, function_args_in_S3Table,
                 function_args_in_ns)
        keep <- !duplicated(names(tmp))
        function_args_in_code <- tmp[keep]
        functions_in_code <- names(function_args_in_code)
    }
    if(.isMethodsDispatchOn()) {
        ## <NOTE>
        ## There is no point in worrying about exportMethods directives
        ## in a NAMESPACE file when working on a package source dir, as
        ## we only source the assignments, and hence do not get any
        ## S4 classes or methods.
        ## </NOTE>
        ## <NOTE>
        ## In principle, we can get codoc checking for S4 methods
        ## documented explicitly using the \S4method{GENERIC}{SIGLIST}
        ## markup by adding the corresponding "pseudo functions" using
        ## the Rd markup as their name.  However note that the formals
        ## recorded in the methods db only pertain to the signature, not
        ## to the ones of the function actually registered ... hence we
        ## use methods::unRematchDefinition() which knows how to extract
        ## the formals in the method definition from the
        ##   function(ARGLIST) {
        ##     .local <- function(FORMALS) BODY
        ##     .local(ARGLIST)
        ##   }
        ## redefinitions obtained by methods::rematchDefinition().
        ## </NOTE>
        check_S4_methods <-
            !isFALSE(as.logical(Sys.getenv("_R_CHECK_CODOC_S4_METHODS_")))
        if(check_S4_methods) {
            unRematchDef <- methods::unRematchDefinition
            get_formals_from_method_definition <- function(m)
                formals(unRematchDef(m))
            ## append pseudo-functions named \S4method{GENERIC}{SIGLIST}
            ## (via <<- into the enclosing frame)
            lapply(.get_S4_generics(code_env),
                   function(f) {
                       mlist <- .get_S4_methods_list(f, code_env)
                       sigs <- .make_siglist(mlist)
                       if(!length(sigs)) return()
                       nm <- sprintf("\\S4method{%s}{%s}", f, sigs)
                       args <- lapply(mlist,
                                      get_formals_from_method_definition)
                       names(args) <- nm
                       functions_in_code <<-
                           c(functions_in_code, nm)
                       function_args_in_code <<-
                           c(function_args_in_code, args)
                   })
        }
    }

    check_codoc <- function(fName, ffd) {
        ## Compare the formals of the function in the code named 'fName'
        ## and formals 'ffd' obtained from the documentation.
        ffc <- function_args_in_code[[fName]]
        ident <- if(isFALSE(use.values)) {
                     ## compare argument names only
                     ffc <- names(ffc)
                     ffd <- names(ffd)
                     identical(ffc, ffd)
                 } else {
                     identical(names(ffc), names(ffd)) &&
                         {
                             vffc <- as.character(ffc) # values
                             vffd <- as.character(ffd) # values
                             if(!isTRUE(use.values)) {
                                 ## only compare defaults given in the docs
                                 ind <- nzchar(vffd)
                                 vffc <- vffc[ind]
                                 vffd <- vffd[ind]
                             }
                             identical(vffc, vffd)
                         }
                 }
        if(!ident)
            list(list(name = fName, code = ffc, docs = ffd))
    } #{check_codoc}

    ## Collect the Rd objects to compare against.
    db <- if(!missing(package))
        Rd_db(package, lib.loc = dirdir)
    else
        Rd_db(dir = dir)

    ## <FIXME>
    ## How exactly do we recognize docs for defunct/deprecated?
    db_names <- .Rd_get_names_from_Rd_db(db)
    ## pkg-defunct.Rd is not expected to list arguments
    ind <- db_names %in% paste0(package_name, "-defunct")
    db <- db[!ind]
    ## </FIXME>

    db_usages <- lapply(db, .Rd_get_section, "usage")
    ## FIXME: all db_usages entries are full of "srcref" which are never used
    db_usages <- lapply(db_usages, .parse_usage_as_much_as_possible)
    ind <- vapply(db_usages,
                  function(x) !is.null(attr(x, "bad_lines")), NA, USE.NAMES=FALSE)
    bad_lines <- lapply(db_usages[ind], attr, "bad_lines")

    ## Accumulators for the main loop over Rd objects below.
    bad_doc_objects <- list()
    functions_in_usages <- character()
    variables_in_usages <- character()
    data_sets_in_usages <- character()
    functions_in_usages_not_in_code <- list()
    data_sets_in_usages_not_in_code <- list()
    variables_in_usages_not_in_code <- list()
    objects_as_in <- c(objects_in_code_or_namespace
                     , names(compatibilityEnv()) # objects in other platforms
                     , if(missing(package) && str_parse_logic(meta["LazyData"], FALSE))
                           unlist(data_sets_in_code_variables, use.names = FALSE)
                     , if(is_base)
                           c("NA", "NULL", "Inf", "NaN", "TRUE", "FALSE", ".Autoloaded")
                       )

    ## Walk the parsed \usage entries of each Rd object.
    for(nm in names(db)) {
        exprs <- db_usages[[nm]]
        if(!length(exprs)) next

        ## Get variable names and data set usages first, mostly for
        ## curiosity.
        ind <- vapply(exprs, is.name, NA, USE.NAMES=FALSE)
        if(any(ind)) {
            variables <- vapply(exprs[ind], deparse, "")
            variables_in_usages <- c(variables_in_usages, variables)
            variables <- setdiff(variables, objects_as_in)
            if(length(variables))
                variables_in_usages_not_in_code[[nm]] <- variables
            exprs <- exprs[!ind]
        }

        exprs <- exprs[vapply(exprs, is.call, NA, USE.NAMES=FALSE)]

        ind <- vapply(exprs, is_data_for_dataset, NA, USE.NAMES=FALSE)
        if(any(ind)) {
            data_sets <- vapply(exprs[ind],
                                function(e) as.character(e[[2L]]),
                                "")
            data_sets_in_usages <- c(data_sets_in_usages, data_sets)
            data_sets <- setdiff(data_sets, data_sets_in_code)
            if(length(data_sets))
                data_sets_in_usages_not_in_code[[nm]] <- data_sets
            exprs <- exprs[!ind]
        }
        ## Split out replacement function usages.
        ind <- vapply(exprs, .is_call_from_replacement_function_usage, NA, USE.NAMES=FALSE)
        replace_exprs <- exprs[ind]
        exprs <- exprs[!ind]
        ## Ordinary functions.
        functions <- vapply(exprs, function(e) as.character(e[[1L]]), "")
        ## Catch assignments: checkDocFiles() will report these, so drop
        ## them here.
        ## And also unary/binary operators
        ind <- (functions %notin% c("<-", "=", "+", "-"))
        exprs <- exprs[ind]
        functions <- functions[ind]
        functions <- .transform_S3_method_markup(as.character(functions))
        ind <- functions %in% functions_in_code
        bad_functions <-
            mapply(functions[ind],
                   exprs[ind],
                   FUN = function(x, y)
                   check_codoc(x, as.pairlist(as.alist.call(y[-1L]))),
                   SIMPLIFY = FALSE)
        ## Replacement functions.
        if(length(replace_exprs)) {
            ## usage 'f(x) <- value' checks against the formals of 'f<-'
            replace_funs <-
                paste0(vapply(replace_exprs,
                              function(e) as.character(e[[2L]][[1L]]),
                              ""),
                       "<-")
            replace_funs <- .transform_S3_method_markup(replace_funs)
            functions <- c(functions, replace_funs)
            ind <- (replace_funs %in% functions_in_code)
            if(any(ind)) {
                bad_replace_funs <-
                    mapply(replace_funs[ind],
                           replace_exprs[ind],
                           FUN = function(x, y)
                           check_codoc(x,
                                      as.pairlist(c(as.alist.call(y[[2L]][-1L]),
                                                    as.alist.symbol(y[[3L]])))),
                           SIMPLIFY = FALSE)
                bad_functions <-
                    c(bad_functions, bad_replace_funs)
            }
        }

        bad_functions <- do.call(c, bad_functions)
        if(length(bad_functions))
            bad_doc_objects[[nm]] <- bad_functions

        ## Determine functions with a \usage entry in the documentation
        ## but 'missing from the code'.  If a package has a namespace, we
        ## really need to look at all objects in the namespace (hence
        ## 'objects_as_in' contains 'objects_in_code_or_namespace'),
        ## as one can access the internal
        ## symbols via ':::' and hence package developers might want to
        ## provide function usages for some of the internal functions.
        ## <FIXME>
        ## We may still have \S4method{}{} entries in functions, which
        ## cannot have a corresponding object in the code.  Hence, we
        ## remove these function entries, but should really do better,
        ## by comparing the explicit \usage entries for S4 methods to
        ## what is actually in the code.  We most likely also should do
        ## something similar for S3 methods.
        ind <- grepl(.S4_method_markup_regexp, functions)
        if(any(ind))
            functions <- functions[!ind]
        ## </FIXME>
        bad_functions <- setdiff(functions, objects_as_in)
        if(length(bad_functions))
            functions_in_usages_not_in_code[[nm]] <- bad_functions

        functions_in_usages <- c(functions_in_usages, functions)
    }

    ## Determine (function) objects in the code without a \usage entry.
    ## Of course, these could still be 'documented' via \alias.
    ## </NOTE>
    ## Older versions only printed this information without returning it
    ## (in case 'verbose' was true).  We now add this as an attribute to
    ## the bad_doc_objects returned.
    ## </NOTE>
    objects_in_code_not_in_usages <-
        setdiff(objects_in_code,
                c(functions_in_usages, variables_in_usages))
    functions_in_code_not_in_usages <-
        intersect(functions_in_code, objects_in_code_not_in_usages)
    ## (Note that 'functions_in_code' does not necessarily contain all
    ## (exported) functions in the package.)

    ## Determine functions which have no usage but really should have.
    ## If there is no namespace (including base), we have no idea.
    ## If there is one, everything "exported" (in the package env)
    ## should also have a \usage, apart from
    ## * Defunct functions
    ## * S4 generics.  Note that as per R-exts,
    ##     exporting methods on a generic in the namespace will also
    ##     export the generic, and exporting a generic in the namespace
    ##     will also export its methods.
    ##   so it seems there is really no way to figure out whether an
    ##   exported S4 generic should have a \usage entry or not ...
    functions_missing_from_usages <-
        if(!has_namespace && !is_base)
            character()
        else {
            functions <- functions_in_code_not_in_usages
            if(is_base)
                functions <-
                    setdiff(functions,
                            c(sprintf("%s.%s",
                                      .S3_methods_table[, 1L],
                                      .S3_methods_table[, 2L]),
                              c(".First.sys", ".OptRequireMethods",
                                "+", "-")))
            else {
                ## package-specific exceptions without \usage entries
                pname <- basename(dir)
                if(pname == "utils")
                    functions <- functions %w/o% "?"
                else if(pname == "grDevices")
                    functions <- functions %w/o% "x11"
            }
            if(.isMethodsDispatchOn()) {
                ## Drop the functions which have S4 methods.
                functions <-
                    setdiff(functions, names(.get_S4_generics(code_env)))
            }
            ## Drop the defunct functions.
            predicate <-
                .predicate_for_calls_with_names(".Defunct", "base")
            is_defunct <- function(f) {
                f <- get(f, envir = code_env) # get is expensive
                if(!is.function(f)) return(FALSE)
                predicate(.get_top_call_in_fun(f))
            }
            functions[!vapply(functions, is_defunct, NA, USE.NAMES=FALSE)]
        }
    objects_missing_from_usages <-
        if(!has_namespace) character() else {
            c(functions_missing_from_usages,
              setdiff(objects_in_code_not_in_usages,
                      c(functions_in_code, data_sets_in_code)))
                                       }

    ## Attach the summary information and return.
    attr(bad_doc_objects, "objects_in_code_not_in_usages") <-
        objects_in_code_not_in_usages
    attr(bad_doc_objects, "functions_in_code_not_in_usages") <-
        functions_in_code_not_in_usages
    attr(bad_doc_objects, "functions_in_usages_not_in_code") <-
        functions_in_usages_not_in_code
    attr(bad_doc_objects, "function_args_in_code") <-
        function_args_in_code
    attr(bad_doc_objects, "data_sets_in_usages_not_in_code") <-
        data_sets_in_usages_not_in_code
    if(config_val_to_logical(Sys.getenv("_R_CHECK_CODOC_VARIABLES_IN_USAGES_",
                                        "FALSE"))) {
        attr(bad_doc_objects, "variables_in_usages_not_in_code") <-
            variables_in_usages_not_in_code
    }
    attr(bad_doc_objects, "objects_missing_from_usages") <-
        objects_missing_from_usages
    attr(bad_doc_objects, "functions_missing_from_usages") <-
        functions_missing_from_usages
    attr(bad_doc_objects, "has_namespace") <- has_namespace
    attr(bad_doc_objects, "bad_lines") <- bad_lines
    class(bad_doc_objects) <- "codoc"
    bad_doc_objects
}

print.codoc <-
function(x, ...)
{
    functions_in_usages_not_in_code <-
        attr(x, "functions_in_usages_not_in_code")
    if(length(functions_in_usages_not_in_code)) {
        for(fname in names(functions_in_usages_not_in_code)) {
            writeLines(gettextf("Functions or methods with usage in Rd file '%s' but not in code:",
                                fname))
            .pretty_print(sQuote(unique(functions_in_usages_not_in_code[[fname]])))
            writeLines("")
        }
    }

    data_sets_in_usages_not_in_code <-
        attr(x, "data_sets_in_usages_not_in_code")
    if(length(data_sets_in_usages_not_in_code)) {
        for(fname in names(data_sets_in_usages_not_in_code)) {
            writeLines(gettextf("Data with usage in Rd file '%s' but not in code:",
                                fname))
            .pretty_print(sQuote(unique(data_sets_in_usages_not_in_code[[fname]])))
            writeLines("")
        }
    }

    variables_in_usages_not_in_code <-
        attr(x, "variables_in_usages_not_in_code")
    if(length(variables_in_usages_not_in_code)) {
        for(fname in names(variables_in_usages_not_in_code)) {
            writeLines(gettextf("Variables with usage in Rd file '%s' but not in code:",
                                fname))
            .pretty_print(sQuote(unique(variables_in_usages_not_in_code[[fname]])))
            writeLines("")
        }
    }

    ## In general, functions in the code which only have an \alias but
    ## no \usage entry are not necessarily a problem---they might be
    ## mentioned in other parts of the Rd object documenting them, or be
    ## 'internal'.  However, if a package has a namespace, then all
    ## *exported* functions should have \usage entries (apart from
    ## defunct functions and S4 generics, see the above comments for
    ## functions_missing_from_usages).  Currently, this information is
    ## returned in the codoc object but not shown.  Eventually, we might
    ## add something like
    ##     functions_missing_from_usages <-
    ##         attr(x, "functions_missing_from_usages")
    ##     if(length(functions_missing_from_usages)) {
    ##         writeLines("Exported functions without usage information:")
    ##         .pretty_print(functions_in_code_not_in_usages)
    ##         writeLines("")
    ##     }
    ## similar to the above.

    if(!length(x))
        return(invisible(x))

    has_only_names <- is.character(x[[1L]][[1L]][["code"]])

    format_args <- function(s) {
        if(!length(s))
            "function()"
        else if(has_only_names)
            paste0("function(", paste(s, collapse = ", "), ")")
        else {
            s <- paste(deparse(s), collapse = "")
            s <- gsub(" = ([,\\)])", "\\1", s)
            s <- gsub("<unescaped bksl>", "\\", s, fixed = TRUE)
            s <- gsub("^pairlist", "function", s)
            gsub("^as.pairlist\\(alist\\((.*)\\)\\)$", "function(\\1)", s)
        }
    }

    summarize_mismatches_in_names <- function(nfc, nfd) {
        if(length(nms <- setdiff(nfc, nfd)))
            writeLines(c(gettext("  Argument names in code not in docs:"),
                         strwrap(paste(nms, collapse = " "),
                                 indent = 4L, exdent = 4L)))
        if(length(nms <- setdiff(nfd, nfc)))
            writeLines(c(gettext("  Argument names in docs not in code:"),
                         strwrap(paste(nms, collapse = " "),
                                 indent = 4L, exdent = 4L)))
        len <- min(length(nfc), length(nfd))
        if(len) {
            len <- seq_len(len)
            nfc <- nfc[len]
            nfd <- nfd[len]
            ind <- which(nfc != nfd)
            len <- length(ind)
            if(len) {
                if(len > 3L) {
                    writeLines(gettext("  Mismatches in argument names (first 3):"))
                    ind <- ind[1L:3L]
                } else {
                    writeLines(gettext("  Mismatches in argument names:"))
                }
                for(i in ind) {
                    writeLines(sprintf("    Position: %d Code: %s Docs: %s",
                                       i, nfc[i], nfd[i]))
                }
            }
        }
    }

    summarize_mismatches_in_values <- function(ffc, ffd) {
        ## Be nice, and match arguments by names first.
        nms <- intersect(names(ffc), names(ffd))
        vffc <- ffc[nms]
        vffd <- ffd[nms]
        ind <- which(as.character(vffc) != as.character(vffd))
        len <- length(ind)
        if(len) {
            if(len > 3L) {
                writeLines(gettext("  Mismatches in argument default values (first 3):"))
                ind <- ind[1L:3L]
            } else {
                writeLines(gettext("  Mismatches in argument default values:"))
            }
            for(i in ind) {
                multiline <- FALSE
                cv <- deparse(vffc[[i]])
                if(length(cv) > 1L) {
                    cv <- paste(cv, collapse = "\n      ")
                    multiline <- TRUE
                }
                dv <- deparse(vffd[[i]])
                if(length(dv) > 1L) {
                    dv <- paste(dv, collapse = "\n      ")
                    multiline <- TRUE
                }
                dv <- gsub("<unescaped bksl>", "\\", dv, fixed = TRUE)
                sep <- if(multiline) "\n    " else " "
                writeLines(sprintf("    Name: '%s'%sCode: %s%sDocs: %s",
                                   nms[i], sep, cv, sep, dv))
            }
        }
    }

    summarize_mismatches <- function(ffc, ffd) {
        if(has_only_names)
            summarize_mismatches_in_names(ffc, ffd)
        else {
            summarize_mismatches_in_names(names(ffc), names(ffd))
            summarize_mismatches_in_values(ffc, ffd)
        }
    }

    for(fname in names(x)) {
        writeLines(gettextf("Codoc mismatches from Rd file '%s':",
                            fname))
        xfname <- x[[fname]]
        for(i in seq_along(xfname)) {
            ffc <- xfname[[i]][["code"]]
            ffd <- xfname[[i]][["docs"]]
            writeLines(c(xfname[[i]][["name"]],
                         strwrap(gettextf("Code: %s", format_args(ffc)),
                                 indent = 2L, exdent = 17L),
                         strwrap(gettextf("Docs: %s", format_args(ffd)),
                                 indent = 2L, exdent = 17L)))
            summarize_mismatches(ffc, ffd)
        }
        writeLines("")
    }

    invisible(x)
}

### * codocClasses

codocClasses <-
function(package, lib.loc = NULL)
{
    ## Compare the 'structure' of S4 classes in an installed package
    ## between code and documentation.
    ## Currently, only compares the slot names.
    ##
    ## package: name of a single installed package.
    ## lib.loc: library trees searched, passed to find.package().
    ##
    ## Returns an object of class "codocClasses": a (possibly empty)
    ## named list with one element per Rd file showing a slot-name
    ## mismatch, each holding the class name and the slot names found
    ## in the code, inherited from superclasses, and in the docs.
    ## Attribute "S4_classes_checked" names all classes actually
    ## compared; attribute "stats" records the filtering-step counts.

    ## <NOTE>
    ## This is patterned after the current codoc().
    ## It would be useful to return the whole information on class slot
    ## names found in the code and matching documentation (rather than
    ## just the ones with mismatches).
    ## Currently, we only return the names of all classes checked.
    ## </NOTE>

    bad_Rd_objects <- structure(list(), class = "codocClasses")

    ## Argument handling.
    if(length(package) != 1L)
        stop("argument 'package' must be of length 1")
    dir <- find.package(package, lib.loc)
    if(!dir.exists(file.path(dir, "R")))
        stop(gettextf("directory '%s' does not contain R code", dir),
             domain = NA)
    if(!.haveRds(dir))
        stop(gettextf("directory '%s' does not contain Rd objects", dir),
             domain = NA)
    is_base <- basename(dir) == "base"

    ## Load package into code_env.
    if(!is_base)
        .load_package_quietly(package, dirname(dir))
    code_env <- .package_env(package)

    ## Without S4 method dispatch there are no S4 classes to check.
    if(!.isMethodsDispatchOn())
        return(bad_Rd_objects)

    S4_classes <- methods::getClasses(code_env)
    if(!length(S4_classes)) return(bad_Rd_objects)

    sApply <- function(X, FUN, ...) ## fast special case of sapply():
        ## always an unnamed, non-recursively unlisted result
        unlist(lapply(X = X, FUN = FUN, ...), recursive=FALSE, use.names=FALSE)
    ## Build Rd data base.
    db <- Rd_db(package, lib.loc = dirname(dir))

    ## Need some heuristics now.  When does an Rd object document just
    ## one S4 class so that we can compare (at least) the slot names?
    ## Try the following:
    ## 1) \docType{} identical to "class";
    ## 2) either exactly one \alias{} or only one ending in "-class"
    ## 3) a non-empty user-defined section 'Slots'.

    ## As going through the db to extract sections can take some time,
    ## we do the vectorized metadata computations first, and try to
    ## subscript whenever possible.

    ## Step 1: keep only Rd objects with \docType{class}.
    idx <- vapply(lapply(db, .Rd_get_doc_type), identical, NA, "class",
                  USE.NAMES=FALSE)
    if(!any(idx)) return(bad_Rd_objects)
    db <- db[idx]
    ## Diagnostic counts: total S4 classes and db size after each
    ## filtering step.
    stats <- c(n.S4classes = length(S4_classes), n.db = length(db))

    ## Step 2: keep Rd objects with exactly one alias, or exactly one
    ## alias ending in "-class".
    aliases <- lapply(db, .Rd_get_metadata, "alias")
    named_class <- lapply(aliases, endsWith, suffix="-class")
    nClass <- sApply(named_class, sum)
    oneAlias <- lengths(aliases, use.names=FALSE) == 1L
    idx <- oneAlias | nClass == 1L
    if(!any(idx)) return(bad_Rd_objects)
    db <- db[idx]
    stats["n.cl"] <- length(db)

    ## keep only the foo-class alias in case there was more than one:
    multi <- idx & !oneAlias
    aliases[multi] <-
        mapply(`[`, aliases[multi], named_class[multi],
               SIMPLIFY = FALSE, USE.NAMES = FALSE)
    aliases <- unlist(aliases[idx], use.names = FALSE)

    ## Step 3: keep Rd objects with a non-empty user-defined section
    ## 'Slots' (subscript aliases/Rd_slots in lockstep with db).
    Rd_slots <- lapply(db, .Rd_get_section, "Slots", FALSE)
    idx <- lengths(Rd_slots) > 0L
    if(!any(idx)) return(bad_Rd_objects)
    db <- db[idx]; aliases <- aliases[idx]; Rd_slots <- Rd_slots[idx]
    stats["n.final"] <- length(db)

    db_names <- names(db)

    ## Extract the documented slot names from a 'Slots' section.
    .get_slot_names <- function(x) {
        ## Get \describe (inside user-defined section 'Slots'):
        ## Should this allow for several \describe blocks?
        x <- .Rd_get_section(x, "describe")
        ## Get the \item tags inside \describe.
        txt <- .Rd_get_item_tags(x)
        if(!length(txt)) return(character())
        ## Normalize \dots / \ldots markup to a literal "...".
        txt <- gsub("\\\\l?dots", "...", txt)
        ## And now strip enclosing '\code{...}:'
        txt <- gsub("\\\\code\\{([^}]*)\\}:?", "\\1", as.character(txt))
        ## One \item may document several comma-separated slots.
        txt <- unlist(strsplit(txt, ", *"))
        trimws(txt)
    }

    ## Slot names inherited from superclasses: these need not be
    ## (re-)documented for the class itself.
    .inheritedSlotNames <- function(ext) {
        supcl <- methods::.selectSuperClasses(ext)
        unique(unlist(lapply(lapply(supcl, methods::getClassDef),
                             methods::slotNames),
                      use.names=FALSE))
    }

    ## Match code classes to Rd files via the canonical "<class>-class"
    ## topic name.
    S4topics <- vapply(S4_classes, utils:::topicName, " ",
                       type="class", USE.NAMES=FALSE)
    S4_checked <- S4_classes[has.a <- S4topics %in% aliases]
    idx <- match(S4topics[has.a], aliases)
    for(icl in seq_along(S4_checked)) {
        cl <- S4_checked[icl]
        cld <- methods::getClass(cl, where = code_env)
        ii <- idx[icl]
        ## Add sanity checking later ...
        scld <- methods::slotNames(cld)
        codeSlots <- if(!is.null(scld)) sort(scld) else character()
        docSlots  <- sort(.get_slot_names(Rd_slots[[ii]]))
        superSlots <- .inheritedSlotNames(cld@contains)
        if(length(superSlots)) ## allow '\dots' in docSlots
            docSlots <-
                docSlots[docSlots %notin% c("...", "\\dots")]
        ## was if(!identical(slots_in_code, slots_in_docs)) {
        ## Report when the docs mention slots unknown to the code, or
        ## when a non-inherited code slot is undocumented.
        if(!all(docSlots %in% codeSlots) ||
           !all(setdiff(codeSlots, superSlots) %in% docSlots) ) {
            bad_Rd_objects[[db_names[ii]]] <-
                list(name = cl,
                     code = codeSlots,
                     inherited = superSlots,
                     docs = docSlots)
        }
    }

    attr(bad_Rd_objects, "S4_classes_checked") <- S4_checked
    attr(bad_Rd_objects, "stats") <- stats
    bad_Rd_objects
} ## end{ codocClasses }

format.codocClasses <-
function(x, ...)
{
    ## Format method for the "codocClasses" objects returned by
    ## codocClasses(): one block of lines per Rd file with a slot
    ## mismatch, listing the slots found in the code, the inherited
    ## slots, and the documented slots.

    ## Render one Rd file's mismatch record as a character vector.
    format_entry <- function(rd_name) {
        entry <- x[[rd_name]]
        ## Format one component ("code"/"inherited"/"docs") as a
        ## wrapped "Label: slot1 slot2 ..." line; NULL (dropped by c())
        ## when the component is empty.
        format_part <- function(part) {
            slots <- entry[[part]]
            if(!length(slots))
                return(NULL)
            ## Capitalize the first word character of the component
            ## name to get the display label.
            label <- gettextf(sub("\\b(\\w)", "\\U\\1", part, perl = TRUE))
            strwrap(sprintf("%s: %s", label,
                            paste(slots, collapse = " ")),
                    indent = 2L, exdent = 8L)
        }
        c(gettextf("S4 class codoc mismatches from Rd file '%s':",
                   rd_name),
          gettextf("Slots for class '%s'", entry[["name"]]),
          format_part("code"),
          format_part("inherited"),
          format_part("docs"),
          "")
    }

    as.character(unlist(lapply(names(x), format_entry)))
}

### * codocData

codocData <-
function(package, lib.loc = NULL)
{
    ## Compare the 'structure' of 'data' objects (variables or data
    ## sets) in an installed package between code and documentation.
    ## Currently, only compares the variable names of data frames found.
    ##
    ## package: name of a single installed package.
    ## lib.loc: library trees searched, passed to find.package().
    ##
    ## Returns an object of class "codocData": a (possibly empty) named
    ## list with one element per Rd file whose documented data frame
    ## variable names disagree with the code, each holding the data
    ## frame's name and the sorted variable names from code and docs.
    ## Attribute "data_frames_checked" names all data frames compared.

    ## <NOTE>
    ## This is patterned after the current codoc().
    ## It would be useful to return the whole information on data frame
    ## variable names found in the code and matching documentation
    ## (rather than just the ones with mismatches).
    ## Currently, we only return the names of all data frames checked.
    ## </NOTE>

    bad_Rd_objects <- structure(list(), class = "codocData")

    ## Argument handling.
    if(length(package) != 1L)
        stop("argument 'package' must be of length 1")

    dir <- find.package(package, lib.loc)

    ## Build Rd data base.
    db <- Rd_db(package, lib.loc = dirname(dir))

    is_base <- basename(dir) == "base"

    ## Load package into code_env.
    if(!is_base)
        .load_package_quietly(package, dirname(dir))
    code_env <- .package_env(package)
    ns_env <- asNamespace(package)

    ## Could check here whether the package has any variables or data
    ## sets (and return if not).

    ## Need some heuristics now.  When does an Rd object document a
    ## data.frame (could add support for other classes later) variable
    ## or data set so that we can compare (at least) the names of the
    ## variables in the data frame?  Try the following:
    ## * just one \alias{};
    ## * if documentation was generated via prompt, there is a \format
    ##   section starting with 'A data frame with' (but many existing Rd
    ##   files instead have 'This data frame contains' and containing
    ##   one or more \describe sections inside.

    ## As going through the db to extract sections can take some time,
    ## we do the vectorized metadata computations first, and try to
    ## subscript whenever possible.
    aliases <- lapply(db, .Rd_get_metadata, "alias")
    idx <- lengths(aliases) == 1L
    if(!any(idx)) return(bad_Rd_objects)
    db <- db[idx]
    aliases <- aliases[idx]
    ## (A previous no-op statement 'names(db) <- names(db)' here was
    ## dropped.)

    ## Extract variable names from the texts of the \item tags inside a
    ## \describe block, handling \samp{} and \code{} markup as well as
    ## comma-separated lists of names.
    .get_var_names_from_item_tags <- function(s, nice = TRUE) {
        if(!length(s)) return(character())

        nms <- character()
        ## Handle trailing colons and leading/trailing white space.
        s <- sub("^[[:space:]]*", "", sub("([[:space:]]*:)?[[:space:]]*$", "", s))
        ## Handle \samp entries: need to match until the first unescaped
        ## rbrace.
        re <- "\\\\samp\\{(([^\\}]|[\\].)*)\\}([[:space:]]*,[[:space:]]*)?"
        m <- gregexpr(re, s)
        if(any(unlist(m) > -1)) {
            nms <- sub(re, "\\1", unlist(regmatches(s, m)))
            ## Unescape Rd escapes.
            nms <- gsub("\\\\([{}%])", "\\1", nms)
            regmatches(s, m) <- ""
        }
        ## Handle \code entries, assuming that they can be taken literally
        ## (no escaping or quoting to obtain valid R syntax).
        re <- "\\\\code\\{([^}]*)\\}([[:space:]]*,[[:space:]]*)?"
        m <- gregexpr(re, s)
        add <- regmatches(s, m)
        lens <- lengths(add)
        add <- sub(re, "\\1", unlist(add))
        ## The old code base simply dropped the \code markup via
        ##   gsub("\\\\code\\{(.*)\\}:?", "\\1", s)
        ## unescaped underscores and stripped whitespace.
        ## Let us be nice about such whitespace inside a single \code (by
        ## default), as this should always render ok in the manual, but not
        ## about escaped underscores e.g.,
        ##   ElemStatLearn/man/marketing.Rd: Dual\_Income
        ## and comma-separated lists inside
        ## \code, e.g.,
        ##   prefmod/man/trdel.Rd: \code{V1,V2,V3,V4,V5,V6,V7,V8,V9,V10}
        ## as these will not render correctly.
        if(nice) {
            ind <- rep.int(lens == 1L, lens)
            add[ind] <- trimws(add[ind])
        }
        nms <- c(nms, add)
        regmatches(s, m) <- ""
        ## Handle rest.
        nms <- c(nms, unlist(strsplit(s, "[[:space:]]*,[[:space:]]*")))
        nms
    }

    ## Get the documented variable names from an Rd object which
    ## apparently documents a data frame; character() otherwise.
    .get_data_frame_var_names <- function(x) {
        ## Make sure that there is exactly one format section:
        ## using .Rd_get_section() would get the first one.
        x <- x[RdTags(x) == "\\format"]
        if(length(x) != 1L) return(character())
        ## Drop comments.
        ## <FIXME>
        ## Remove calling .Rd_drop_comments() eventually.
        x <- .Rd_drop_comments(x[[1L]])
        ## </FIXME>
        ## What did the format section start with?
        if(!grepl("^[[:space:]]*(A|This) data frame",
                  .Rd_deparse(x, tag = FALSE)))
            return(character())
        ## Get \describe inside \format.
        ## Should this allow for several \describe blocks?
        x <- .Rd_get_section(x, "describe")
        ## Get the \item tags inside \describe.
        x <- .Rd_get_item_tags(x)
        ## And extract the variable names from these.
        .get_var_names_from_item_tags(x)
    }

    Rd_var_names <- lapply(db, .get_data_frame_var_names)

    ## Keep only the Rd objects apparently documenting a data frame.
    ## (This guard formerly tested !length(idx), which cannot be TRUE
    ## for a non-empty db since idx has one element per Rd file; use
    ## any(), consistently with the alias filtering above.)
    idx <- (lengths(Rd_var_names) > 0L)
    if(!any(idx)) return(bad_Rd_objects)
    aliases <- unlist(aliases[idx])
    Rd_var_names <- Rd_var_names[idx]

    db_names <- names(db)[idx]

    data_env <- new.env(hash = TRUE)
    data_dir <- file.path(dir, "data")
    ## with lazy data we have data() but don't need to use it.
    has_data <- dir.exists(data_dir) &&
        !file_test("-f", file.path(data_dir, "Rdata.rdb"))
    data_exts <- .make_file_exts("data")

    ## Now go through the aliases.
    data_frames_checked <- character()
    for(i in seq_along(aliases)) {
        ## Store the documented variable names.
        var_names_in_docs <- sort(Rd_var_names[[i]])
        ## Try finding the variable or data set given by the alias:
        ## first in the package environment, then in the namespace,
        ## finally (for non-lazy data) via utils::data().
        al <- aliases[i]
        if(!is.null(A <- get0(al, envir = code_env, mode = "list", inherits = FALSE)))
            al <- A
        else if(!is_base &&
                !is.null(A <- get0(al, envir = ns_env, mode = "list", inherits = FALSE)))
            al <- A
        else if(has_data) {
            ## Should be a data set.
            ## NOTE(review): this condition only tests that data_dir is
            ## non-empty (the length of the %in% result), not that any
            ## file actually matches the alias; presumably any(...) was
            ## intended -- kept as is for compatibility.
            if(!length(dir(data_dir)
                       %in% paste(al, data_exts, sep = "."))) {
                next                    # What the hell did we pick up?
            }
            ## Try loading the data set into data_env.
            utils::data(list = al, envir = data_env)
            if(!is.null(A <- get0(al, envir = data_env, mode = "list", inherits = FALSE)))
                al <- A

            ## And clean up data_env.
            rm(list = ls(envir = data_env, all.names = TRUE),
               envir = data_env)
        }
        ## Only data frames can be compared by variable names.
        if(!is.data.frame(al)) next
        ## Now we should be ready:
        data_frames_checked <- c(data_frames_checked, aliases[i])
        var_names_in_code <- sort(names(al))
        if(!identical(var_names_in_code, var_names_in_docs))
            bad_Rd_objects[[db_names[i]]] <-
                list(name = aliases[i],
                     code = var_names_in_code,
                     docs = var_names_in_docs)
    }

    attr(bad_Rd_objects, "data_frames_checked") <-
        as.character(data_frames_checked)
    bad_Rd_objects
}

format.codocData <-
function(x, ...)
{
    ## Format method for the "codocData" objects returned by
    ## codocData(): one block of lines per Rd file with a variable-name
    ## mismatch, listing the variables found in the code and the docs.

    ## Render the mismatch record for one Rd file.
    format_entry <- function(rd_name) {
        entry <- x[[rd_name]]
        ## Wrap "Code: ..."/"Docs: ..." lines with the variable names
        ## joined by single spaces.
        wrap_vars <- function(fmt, vars)
            strwrap(gettextf(fmt, paste(vars, collapse = " ")),
                    indent = 2L, exdent = 8L)
        ## FIXME singular or plural?
        c(gettextf("Data codoc mismatches from Rd file '%s':", rd_name),
          gettextf("Variables in data frame '%s'", entry[["name"]]),
          wrap_vars("Code: %s", entry[["code"]]),
          wrap_vars("Docs: %s", entry[["docs"]]),
          "")
    }

    as.character(unlist(lapply(names(x), format_entry)))
}

### * checkDocFiles

checkDocFiles <-
function(package, dir, lib.loc = NULL, chkInternal = NULL)
{
    ## Check a package's Rd files for consistency between the \usage
    ## and \arguments sections and the \alias entries.  Per Rd file,
    ## reports: arguments in \usage without an \arguments entry
    ## ("missing"), duplicated \arguments entries, \arguments entries
    ## not in \usage ("overdoc"), objects in \usage without an \alias
    ## ("unaliased"), and assignments occurring in \usage.
    ##
    ## package: name of an installed package (takes precedence).
    ## dir: path to a package source directory, used when 'package' is
    ##     missing.
    ## lib.loc: library trees searched, passed to find.package().
    ## chkInternal: TRUE or FALSE to force checking/skipping Rd files
    ##     with keyword 'internal'; any other value (default NULL)
    ##     checks them "specially" (arguments in \usage but not in
    ##     \arguments are not reported for such files).
    ##
    ## Returns an object of class "checkDocFiles": a named list of
    ## per-file problem records, with attributes "bad_lines" (the
    ## unparsable \usage lines per file) and "all_special" (TRUE iff
    ## every recorded problem came from a specially-treated internal
    ## file and there were no bad \usage lines).

    ## Argument handling.
    if(!missing(package)) {
        if(length(package) != 1L)
            stop("argument 'package' must be of length 1")
        dir <- find.package(package, lib.loc)
        ## Using package installed in @code{dir} ...
    }
    else {
        if(missing(dir))
            stop("you must specify 'package' or 'dir'")
        ## Using sources from directory @code{dir} ...
        if(!dir.exists(dir))
            stop(gettextf("directory '%s' does not exist", dir),
                 domain = NA)
        else
            dir <- file_path_as_absolute(dir)
    }

    check_internal_specially <- FALSE
    ## Do
    ##   if(!isTRUE(chkInternal) && !isFALSE(chkInternal))
    ## more efficiently.
    if(is.null(chkInternal) ||
       !is.logical(chkInternal) ||
       (length(chkInternal) != 1L) ||
       is.na(chkInternal))
        chkInternal <- check_internal_specially <- TRUE

    db <- if(!missing(package))
              Rd_db(package, lib.loc = dirname(dir))
          else
              Rd_db(dir = dir)

    db_aliases  <- lapply(db, .Rd_get_metadata, "alias")
    db_keywords <- lapply(db, .Rd_get_metadata, "keyword")

    db_usages <- lapply(db, .Rd_get_section, "usage")
    ## We traditionally also use the usage "texts" for some sanity
    ## checking ...
    ## <FIXME>
    ## Remove calling .Rd_drop_comments() eventually.
    db_usage_texts <-
        lapply(db_usages,
               function(e) .Rd_deparse(.Rd_drop_comments(e)))
    ## </FIXME>
    db_usages <- lapply(db_usages, .parse_usage_as_much_as_possible)
    ## Collect the \usage lines which could not be parsed, per file.
    ind <- vapply(db_usages,
                  function(x) !is.null(attr(x, "bad_lines")),
                  NA)
    bad_lines <- lapply(db_usages[ind], attr, "bad_lines")

    db_argument_names <- lapply(db, .Rd_get_argument_names)

    bad_doc_objects <- list()
    ## Becomes FALSE as soon as a problem in a non-special file is
    ## recorded; starts FALSE when there are bad \usage lines.
    all_special <- (length(bad_lines) == 0L)

    for(nm in names(db)) {
        ## <FIXME>
        ## There really should be no \arguments or \value without a
        ## \usage, so it should be "safe" to skip checking \arguments in
        ## case of no \usage.
        ## However, currently the above is not ensured.
        ## Ideally, checkRd() should complain ...
        exprs <- db_usages[[nm]]
        if(!length(exprs)) next
        ## </FIXME>

        ## If !chkInternal, exclude internal Rd objects from further
        ## computations.  Otherwise, maybe treat them specially, and
        ## ignore arguments in \usage but not in \arguments.
        internal <- "internal" %in% db_keywords[[nm]]
        if(internal && !chkInternal) next
        special <- (internal && check_internal_specially)

        aliases <- db_aliases[[nm]]
        arg_names_in_arg_list <- db_argument_names[[nm]]

        ## Determine function names ('functions') and corresponding
        ## arguments ('arg_names_in_usage') in the \usage.  Note how we
        ## try to deal with data set documentation.
        ind <- vapply(exprs,
                      function(e)
                          is.call(e) && !is_data_for_dataset(e),
                      NA, USE.NAMES=FALSE)
        exprs <- exprs[ind]
        ## Split out replacement function usages.
        ind <- vapply(exprs, .is_call_from_replacement_function_usage,
                      NA, USE.NAMES=FALSE)
        replace_exprs <- exprs[ind]
        exprs <- exprs[!ind]
        ## Ordinary functions.
        functions <-
            vapply(exprs, function(e) as.character(e[[1L]]), "")
        ## Catch assignments.
        ind <- functions %in% c("<-", "=")
        assignments <- exprs[ind]
        if(any(ind)) {
            exprs     <- exprs    [!ind]
            functions <- functions[!ind]
        }
        ## (Note that as.character(sapply(exprs, `[[`, 1L)) does not do
        ## what we want due to backquotifying.)
        arg_names_in_usage <-
            unlist(lapply(exprs,
                          function(e) .arg_names_from_call(e[-1L])))
        ## Replacement functions.
        if(length(replace_exprs)) {
            ## A replacement usage has the form f(x, ...) <- value;
            ## record "f<-" and take argument names from both the LHS
            ## call and the RHS value.
            replace_funs <-
                paste0(vapply(replace_exprs,
                              function(e) as.character(e[[2L]][[1L]]), ""),
                       "<-")
            functions <- c(functions, replace_funs)
            arg_names_in_usage <-
                c(arg_names_in_usage,
                  unlist(lapply(replace_exprs,
                                function(e)
                                c(.arg_names_from_call(e[[2L]][-1L]),
                                  .arg_names_from_call(e[[3L]])))))
        }
        ## And finally transform the S3 \method{}{} markup into the
        ## usual function names ...
        ## <NOTE>
        ## If we were really picky, we would worry about possible
        ## namespace renaming.
        functions <- .transform_S3_method_markup(functions)
        ## </NOTE>
        ## Also transform the markup for S4 replacement methods.
        functions <- .transform_S4_method_markup(functions)

        ## Now analyze what we found.
        ## For "special" (internal) files, undocumented \usage
        ## arguments are deliberately not reported.
        arg_names_in_usage_missing_in_arg_list <- if(special) NULL else
            setdiff(arg_names_in_usage, arg_names_in_arg_list)
        arg_names_in_arg_list_missing_in_usage <-
            setdiff(arg_names_in_arg_list, arg_names_in_usage)
        if(length(arg_names_in_arg_list_missing_in_usage)) {
            usage_text <- db_usage_texts[[nm]]
            bad_args <- character()
            ## In the case of 'over-documented' arguments, try to be
            ## defensive and reduce to arguments which either are not
            ## syntactically valid names or do not match the \usage text
            ## (modulo word boundaries).
            bad <- !grepl("^[[:alnum:]._]+$",
                          arg_names_in_arg_list_missing_in_usage)
            if(any(bad)) {
                bad_args <- arg_names_in_arg_list_missing_in_usage[bad]
                arg_names_in_arg_list_missing_in_usage <-
                    arg_names_in_arg_list_missing_in_usage[!bad]
            }
            bad <- vapply(arg_names_in_arg_list_missing_in_usage,
                          function(x)
                              !grepl(paste0("(^|\\W)",
                                            reQuote(x),
                                            "($|\\W)"),
                                     gsub("\\\\dots", "...",
                                          usage_text)),
                          NA)
            arg_names_in_arg_list_missing_in_usage <-
                c(bad_args,
                  arg_names_in_arg_list_missing_in_usage[as.logical(bad)])
            ## Note that the fact that we can parse the raw \usage does
            ## not imply that over-documented arguments are a problem:
            ## this works for Rd files documenting e.g. shell utilities
            ## but fails for files with special syntax (Extract.Rd).
        }

        ## Also test whether the objects we found from the \usage all
        ## have aliases, provided not all aliases end in '-deprecated'.
        ## This exception allows packages to keep the original help page of a
        ## deprecated function at help("<fun>-deprecated") (see ?deprecated)
        ## and alias <fun> to help("<pkg>-deprecated").
        functions_not_in_aliases <-
            if(!all(endsWith(aliases, "-deprecated"))) {
                ## Argh.  There are good reasons for keeping \S4method{}{}
                ## as is, but of course this is not what the aliases use ...
                ## <FIXME>
                ## Should maybe use utils:::topicName(), but in any case, we
                ## should have functions for converting between the two
                ## forms, see also the code for undoc().
                aliases <- sub("([^,]+),(.+)-method$",
                               "\\\\S4method{\\1}{\\2}",
                               aliases)
                ## </FIXME>
                aliases <- gsub("\\%", "%", aliases, fixed=TRUE)
                setdiff(functions, aliases)
            }
            else character()

        ## Record the file if any kind of problem was found.
        if((length(arg_names_in_usage_missing_in_arg_list))
           || anyDuplicated(arg_names_in_arg_list)
           || (length(arg_names_in_arg_list_missing_in_usage))
           || (length(functions_not_in_aliases))
           || (length(assignments))) {
            bad_doc_objects[[nm]] <-
                list(missing = arg_names_in_usage_missing_in_arg_list,
                     duplicated =
                     arg_names_in_arg_list[duplicated(arg_names_in_arg_list)],
                     overdoc = arg_names_in_arg_list_missing_in_usage,
                     unaliased = functions_not_in_aliases,
                     assignments = assignments)
            if(!special)
                all_special <- FALSE
        }
    } # for(..)

    structure(bad_doc_objects, class = "checkDocFiles",
              "bad_lines" = bad_lines,
              "all_special" = all_special)
}

format.checkDocFiles <-
function(x, ...)
{
    ## Format method for the "checkDocFiles" objects returned by
    ## checkDocFiles(): one block of lines per Rd file with problems,
    ## followed by any unparsable \usage lines.

    ## Format the issues recorded for one Rd file.
    format_entry <- function(nm) {
        entry <- x[[nm]]
        out <- character()
        if(length(missing_args <- entry[["missing"]]))
            out <- c(out,
                     gettextf("Undocumented arguments in Rd file '%s'",
                              nm),
                     .pretty_format(unique(missing_args)))
        if(length(dup_args <- entry[["duplicated"]]))
            out <- c(out,
                     gettextf("Duplicated \\argument entries in Rd file '%s':",
                              nm),
                     .pretty_format(dup_args))
        if(length(overdoc_args <- entry[["overdoc"]]))
            out <- c(out,
                     gettextf("Documented arguments not in \\usage in Rd file '%s':",
                              nm),
                     .pretty_format(unique(overdoc_args)))
        if(length(unaliased <- entry[["unaliased"]]))
            out <- c(out,
                     gettextf("Objects in \\usage without \\alias in Rd file '%s':",
                              nm),
                     .pretty_format(unique(unaliased)))
        if(length(assignments <- entry[["assignments"]]))
            out <- c(out,
                     gettextf("Assignments in \\usage in Rd file '%s':",
                              nm),
                     sprintf("  %s", unlist(lapply(assignments, format))))
        c(out, "")
    }

    y <- as.character(unlist(lapply(names(x), format_entry)))

    ## Append any unparsable \usage lines, per file.
    if(length(bad_lines <- attr(x, "bad_lines"))) {
        format_bad <- function(nm)
            c(gettextf("Bad \\usage lines found in Rd file '%s':",
                       nm),
              paste0("  ", bad_lines[[nm]]))
        y <- c(y, unlist(lapply(names(bad_lines), format_bad)), "")
    }

    ## <NOTE>
    ## Terrible hack, see comments on
    ##    __R_CHECK_DOC_FILES_NOTE_IF_ALL_SPECIAL__
    ## in check.R
    if(length(y) &&
       !length(bad_lines) &&
       (Sys.getenv("__R_CHECK_DOC_FILES_NOTE_IF_ALL_SPECIAL__",
                   "FALSE") == "TRUE") &&
       isTRUE(attr(x, "all_special")))
        y <- c(y, "All issues in internal Rd files checked specially.")
    ## </NOTE>

    y
}

### * checkDocStyle

checkDocStyle <-
function(package, dir, lib.loc = NULL)
{
    has_namespace <- FALSE

    ## Argument handling.
    if(!missing(package)) {
        if(length(package) != 1L)
            stop("argument 'package' must be of length 1")
        dir <- find.package(package, lib.loc)
        ## Using package installed in 'dir' ...
        dfile <- file.path(dir, "DESCRIPTION")
        meta <- if(file_test("-f", dfile))
            .read_description(dfile)
        else
            character()
        code_dir <- file.path(dir, "R")
        if(!dir.exists(code_dir))
            stop(gettextf("directory '%s' does not contain R code",
                          dir),
                 domain = NA)
        if(!.haveRds(dir))
            stop(gettextf("directory '%s' does not contain Rd objects", dir),
                 domain = NA)
        package_name <- package
        is_base <- package_name == "base"

        ## Load package into code_env.
        if(!is_base)
            .load_namespace_quietly(package, dirname(dir))
        code_env <- asNamespace(package)

        objects_in_code <- sort(names(code_env))

        if(!is_base) {
            has_namespace <- TRUE
            ## Determine names of declared S3 methods and associated S3
            ## generics.
            ns_S3_methods_db <- getNamespaceInfo(package, "S3methods")
            ns_S3_generics <- as.character(ns_S3_methods_db[, 1L])
            ns_S3_methods <- ns_S3_methods_db[, 3L]
            if(!is.character(ns_S3_methods)) {
                ## As of 2018-07, direct calls to registerS3method()
                ## could have registered a function object (not name).
                ind <- vapply(ns_S3_methods, is.character, NA)
                ns_S3_methods[!ind] <- ""
                ns_S3_methods <- as.character(ns_S3_methods)
            }
        }
    }
    else {
        if(missing(dir))
            stop("you must specify 'package' or 'dir'")
        package_name <- basename(dir) # early, before resolving sym.links
        ## Using sources from directory @code{dir} ...
        if(!dir.exists(dir))
            stop(gettextf("directory '%s' does not exist", dir),
                 domain = NA)
        else
            dir <- file_path_as_absolute(dir)
        code_dir <- file.path(dir, "R")
        if(!dir.exists(code_dir))
            stop(gettextf("directory '%s' does not contain R code",
                          dir),
                 domain = NA)
        if(!.haveRds(dir))
            stop(gettextf("directory '%s' does not contain Rd objects", dir),
                 domain = NA)
        is_base <- package_name == "base"

        code_env <- new.env(hash = TRUE)
        dfile <- file.path(dir, "DESCRIPTION")
        meta <- if(file_test("-f", dfile)) .read_description(dfile) else character()
        .source_assignments_in_code_dir(code_dir, code_env, meta)
        sys_data_file <- file.path(code_dir, "sysdata.rda")
        if(file_test("-f", sys_data_file)) load(sys_data_file, code_env)

        objects_in_code <- sort(names(code_env))

        if(file.exists(file.path(dir, "NAMESPACE")) &&
           !inherits(tryCatch(nsInfo <-
                                  parseNamespaceFile(package_name,
                                                     dirname(dir)),
                              error = identity),
                     "error")) {
            has_namespace <- TRUE
            ## Determine exported objects.
            OK <- intersect(objects_in_code, nsInfo$exports)
            for(p in nsInfo$exportPatterns)
                OK <- c(OK, grep(p, objects_in_code, value = TRUE))
            objects_in_code <- unique(OK)
            ## Determine names of declared S3 methods and associated S3
            ## generics.
            ns_S3_methods_db <- .get_namespace_S3_methods_db(nsInfo)
            ns_S3_generics <- ns_S3_methods_db[, 1L]
            ns_S3_methods  <- ns_S3_methods_db[, 3L]
        }

    }

    ## Find the function objects in the given package.
    functions_in_code <-
        Filter(function(f) is.function(get(f, envir = code_env)),  # get is expensive
               objects_in_code)

    ## Find all S3 generics "as seen from the package".
    all_S3_generics <- .get_S3_generics_in_base()
    if(!is_base) {
        all_S3_generics <-
            unique(c(Filter(function(f)
                                .is_S3_generic(f, envir = code_env),
                            functions_in_code),
                     if(!missing(package))
                         .get_S3_generics_in_env(parent.env(code_env)),
                     all_S3_generics))
    }
    ## Make the group S3 generics "visible" from code_env.
    code_env <- .make_S3_group_generic_env(parent = code_env)

    ## Find all methods in the given package for the generic functions
    ## determined above.  Store as a list indexed by the names of the
    ## generic functions.
    ## Change in 3.0.0: we only look for methods named generic.class,
    ## not those registered by a 3-arg S3method().
    methods_stop_list <- nonS3methods(package_name)
    methods_in_package <-
        Map(function(g) {
                ## This shouldn't happen any more ...
                if(!exists(g, envir = code_env)) return(character())
                ## <FIXME>
                ## We should really determine the name g dispatches for,
                ## see a current version of methods() [2003-07-07].
                ## (Care is needed for internal generics and group
                ## generics.)
                name <- paste0(g, ".")
                methods <-
                    functions_in_code[startsWith(functions_in_code, name)]
                ## </FIXME>
                methods <- setdiff(methods, methods_stop_list)
                if(has_namespace) {
                    ## Find registered methods for generic g.
                    methods2 <- ns_S3_methods[ns_S3_generics == g]
                    ## but for these purposes check name.
                    OK <- startsWith(methods2, name)
                    methods <- c(methods, methods2[OK])
                }
                methods
            },
            all_S3_generics)
    all_methods_in_package <- unlist(methods_in_package)
    ## There are situations where S3 methods might be documented as
    ## functions (i.e., with their full name), if they do something
    ## useful also for arguments not inheriting from the class they
    ## provide a method for.
    ## But then they should be exported under another name, and
    ## registered as an S3 method.
    ## Prior to 2.14.0 we used to allow this in the case the
    ## package has a namespace and the method is exported (even though
    ## we strongly prefer using FOO(as.BAR(x)) to FOO.BAR(x) for such
    ## cases).
    ## But this caused discontinuities with adding namespaces.
    ## Historical exception
    if(package_name == "cluster")
        all_methods_in_package <-
            setdiff(all_methods_in_package, functions_in_code)

    db <- if(!missing(package))
        Rd_db(package, lib.loc = dirname(dir))
    else
        Rd_db(dir = dir)

    ## Ignore pkg-deprecated.Rd and pkg-defunct.Rd.
    ## <FIXME>
    ## How exactly do we recognize docs for defunct/deprecated?
    db_names <- .Rd_get_names_from_Rd_db(db)
    ind <- db_names %in% paste(package_name, c("deprecated", "defunct"),
                               sep = "-")
    db <- db[!ind]
    ## </FIXME>

    db_usages <-
        lapply(db,
               function(Rd) {
                   Rd <- .Rd_get_section(Rd, "usage")
                   .parse_usage_as_much_as_possible(Rd)
               })
    ind <- vapply(db_usages,
                  function(x) !is.null(attr(x, "bad_lines")),
                  NA)
    bad_lines <- lapply(db_usages[ind], attr, "bad_lines")

    bad_doc_objects <- list()

    for(nm in names(db)) {

        ## Determine function names in the \usage.
        exprs <- db_usages[[nm]]
        exprs <- exprs[lengths(exprs) > 1L]
        ## Ordinary functions.
        functions <-
            vapply(exprs, function(e) as.character(e[[1L]]), "")
        ## (Note that as.character(sapply(exprs, `[[`, 1L)) does not do
        ## what we want due to backquotifying.)
        ## Replacement functions.
        ind <- vapply(exprs, .is_call_from_replacement_function_usage, NA)
        if(any(ind)) {
            replace_funs <-
                paste0(vapply(exprs[ind],
                              function(e) as.character(e[[2L]][[1L]]),
                              ""),
                       "<-")
            functions <- c(functions, replace_funs)
        }

        methods_with_full_name <-
            intersect(functions, all_methods_in_package)

        functions <- .transform_S3_method_markup(functions)

        methods_with_generic <-
            Map(function(g)
                    intersect(functions, methods_in_package[[g]]),
                intersect(functions, all_S3_generics))

        if((length(methods_with_generic)) ||
           (length(methods_with_full_name)))
            bad_doc_objects[[nm]] <-
                list(withGeneric  = methods_with_generic,
                     withFullName = methods_with_full_name)

    }

    attr(bad_doc_objects, "bad_lines") <- bad_lines
    class(bad_doc_objects) <- "checkDocStyle"
    bad_doc_objects
}

format.checkDocStyle <-
function(x, ...)
{
    ## Format a "checkDocStyle" object for printing.
    ##
    ## <NOTE>
    ## With \method{GENERIC}{CLASS} now being transformed to show both
    ## GENERIC and CLASS info, documenting S3 methods on the same page
    ## as their generic is not necessarily a problem any more (one can
    ## refer to the generic or the methods in the documentation, in
    ## particular for the primary argument).  Hence, even though the
    ## "withGeneric" information is still recorded, it is no longer
    ## printed by default.  It can still be accessed via
    ##   lapply(checkDocStyle("foo"), `[[`, "withGeneric")
    ## (but of course it does not print that nicely anymore).
    ## </NOTE>

    ## Format the entry for one Rd file: report only the S3 methods
    ## documented under their full name.
    fmt_one <- function(doc_name) {
        full_names <- x[[doc_name]][["withFullName"]]
        if(!length(full_names))
            return(character())
        c(gettextf("S3 methods shown with full name in Rd file '%s':",
                   doc_name),
          .pretty_format(full_names),
          "")
    }

    as.character(unlist(lapply(names(x), fmt_one)))
}


### * checkFF

## Check foreign function calls (.C, .Fortran, .Call, .External and the
## .Call.graphics/.External.graphics variants) in R code.  Exactly one of
## 'package' (an installed package), 'dir' (a package source directory)
## or 'file' (a single file of R code) must be supplied.  With
## registration = TRUE and an installed package, symbols used in the
## calls are additionally checked against the DLL's registered native
## routines.  Returns an object of class "checkFF": the calls without a
## 'PACKAGE' argument, with further problems recorded in attributes
## ("empty", "wrong_pkg"/"bad_pkg", "other_problem"/"other_desc",
## "undeclared" and, when check_DUP, "dup_false").
checkFF <-
function(package, dir, file, lib.loc = NULL,
         registration = FALSE, check_DUP = FALSE,
         verbose = getOption("verbose"))
{
    ## Unless checking "as CRAN" (_R_CHECK_FF_AS_CRAN_ set), wrapping a
    ## symbol in dontCheck() suppresses checking of that symbol.
    allow_suppress <- !nzchar(Sys.getenv("_R_CHECK_FF_AS_CRAN_"))
    suppressCheck <- function(e)
        allow_suppress &&
            length(e) == 2L && is.call(e) && is.symbol(e[[1L]]) &&
                as.character(e[[1L]]) == "dontCheck"

    has_namespace <- FALSE
    is_installed_msg <- is_installed <- FALSE
    ## Argument handling.
    if(!missing(package)) {
        if(length(package) != 1L)
            stop("argument 'package' must be of length 1")
        dir <- find.package(package, lib.loc)
        dfile <- file.path(dir, "DESCRIPTION")
        db <- .read_description(dfile)
        pkg <- pkgDLL <- basename(dir)
        ## Using package installed in @code{dir} ...
        code_dir <- file.path(dir, "R")
        if(!dir.exists(code_dir))
            stop(gettextf("directory '%s' does not contain R code",
                          dir),
                 domain = NA)
        have_registration <- FALSE
        if(basename(dir) != "base") {
            .load_namespace_quietly(package, dirname(dir))
            code_env <- asNamespace(package)
            ## DLLs loaded via useDynLib() are recorded in the namespace.
            if(!is.null(DLLs <- get0("DLLs", envir = code_env$.__NAMESPACE__.))) {
                ## fake installs have this, of class DLLInfoList
                if(length(DLLs)) has_namespace <- TRUE
                if(length(DLLs) && inherits(DLLs[[1L]], "DLLInfo")) {
                    pkgDLL <- unclass(DLLs[[1L]])$name # different for data.table
                    if(registration) {
                        ## 'reg' is consulted later in check_registration().
                        reg <- getDLLRegisteredRoutines(DLLs[[1L]])
                        have_registration <- sum(lengths(reg)) > 0L
                    }
                }
            }
        } else {
            has_namespace <- have_registration <- TRUE
            code_env <-.package_env(package)
        }
        is_installed <- TRUE
    }
    else if(!missing(dir)) {
        have_registration <- FALSE
        ## Using sources from directory @code{dir} ...
        if(!dir.exists(dir))
            stop(gettextf("directory '%s' does not exist", dir),
                 domain = NA)
        else
            dir <- file_path_as_absolute(dir)
        pkg <- pkgDLL <- basename(dir)
        dfile <- file.path(dir, "DESCRIPTION")
        enc <- NA; db <- NULL
        if(file.exists(dfile)) {
            db <- .read_description(dfile)
            enc <- db["Encoding"]
        }
        if(pkg == "base") has_namespace <- TRUE
        if(file.exists(file.path(dir, "NAMESPACE")) &&
           !inherits(tryCatch(nm <- parseNamespaceFile(basename(dir),
                                                       dirname(dir)),
                              error = identity),
                     "error")) {
            has_namespace <- length(nm$dynlibs) > 0L
        }
        code_dir <- file.path(dir, "R")
        if(!dir.exists(code_dir))
            stop(gettextf("directory '%s' does not contain R code",
                          dir),
                 domain = NA)
        ## Concatenate all code files into one temporary file which is
        ## parsed below.
        file <- tempfile()
        on.exit(unlink(file))
        if(!file.create(file)) stop("unable to create ", file, domain = NA)
        if(!all(.file_append_ensuring_LFs(file,
                                          list_files_with_type(code_dir,
                                                               "code"))))
            stop("unable to write code files", domain = NA)
    }
    else if(!missing(file)) {
        pkg <- enc <- NA
    } else
        stop("you must specify 'package', 'dir' or 'file'")

    if(missing(package) && !file_test("-f", file))
        stop(gettextf("file '%s' does not exist", file),
             domain = NA)

    ## Should there really be a 'verbose' argument?
    ## It may be useful to extract all foreign function calls but then
    ## we would want the calls back ...
    ## What we currently do is the following: if 'verbose' is true, we
    ## show all foreign function calls in abbreviated form with the line
    ## ending in either 'OK' or 'MISSING', and we return the list of
    ## 'bad' FF calls (i.e., where the 'PACKAGE' argument is missing)
    ## *invisibly* (so that output is not duplicated).
    ## Otherwise, if not verbose, we return the list of bad FF calls.

    ## Accumulators, updated via <<- from the closures below.
    bad_exprs <- empty_exprs <- wrong_pkg <- other_problem <- list()
    other_desc <- character()
    bad_pkg <- character()
    dup_false <- list()
    FF_funs <- FF_fun_names <- c(".C", ".Fortran", ".Call", ".External",
                                 ".Call.graphics", ".External.graphics")
    ## As pointed out by DTL, packages could use non-base FF calls for
    ## which missing 'PACKAGE' arguments are not necessarily a problem.
    if(!missing(package)) {
        is_FF_fun_from_base <-
            vapply(FF_funs,
                   function(f) {
                       e <- .find_owner_env(f, code_env)
                       (identical(e, baseenv())
                        || identical(e, .BaseNamespaceEnv))
                   },
                   NA)
        FF_funs <- FF_funs[is_FF_fun_from_base]
    }
    ## Also, need to handle base::.Call() etc ...
    FF_funs <- c(FF_funs, sprintf("base::%s", FF_fun_names))

    ## Check one FF call 'e' against the routines registered in the
    ## package DLL ('reg', set up above) and the namespace; 'fr' holds
    ## names assigned in the local frame of the function being scanned.
    ## Records problems in other_problem/other_desc, wrong_pkg/bad_pkg
    ## via <<- and returns a short status string.
    check_registration <- function(e, fr) {
        sym <- e[[2L]]
        name <- deparse(sym, nlines = 1L)
        if (name == "...")
            return ("SYMBOL OK") # we cannot check this, e.g. RProtoBuf

        if (is.character(sym)) {
            if (!have_registration) return ("SYMBOL OK")
            FF_fun <- as.character(e[[1L]])
            sym <- reg[[FF_fun]][[sym]]
            if(is.null(sym)) return ("SYMBOL OK")
        }

        if (!is_installed) {
            if (!is_installed_msg) {
                other_problem <<- c(other_problem, e)
                other_desc <<- c(other_desc, "foreign function registration not tested, as package was not installed")
                is_installed_msg <<- TRUE
            }
            return("OTHER") # registration checks need the package to be installed
        }
        if (is.symbol(sym)) { # it might be something like pkg::sym (that's a call)
            if (!exists(name, code_env, inherits = FALSE)) {
                if (allow_suppress &&
                    name %in% utils::suppressForeignCheck(, package))
                    return ("SYMBOL OK") # skip false positives
                if (have_registration) {
                    if (name %in% fr) {
                        other_problem <<- c(other_problem, e)
                        other_desc <<-
                            c(other_desc,
                              sprintf("symbol %s in the local frame",
                                      sQuote(name)))
                    } else {
                        other_problem <<- c(other_problem, e)
                        other_desc <<-
                            c(other_desc,
                              sprintf("symbol %s not in namespace",
                                      sQuote(name)))
                    }
                }
                return("OTHER")
            }
        } else if (suppressCheck(sym))
            return("SKIPPED")

        ## Resolve the symbol in the package namespace to inspect the
        ## object it refers to.
        sym <- tryCatch(eval(sym, code_env), error = function(e) e)
        if (inherits(sym, "error")) {
            if (have_registration || !allow_suppress)  {
                other_problem <<- c(other_problem, e)
                other_desc <<-
                    c(other_desc, sprintf("Evaluating %s during check gives error\n%s",
                                          sQuote(name), sQuote(sym$message)))
            }
            return("OTHER")
        }

        FF_fun <- as.character(e[[1L]])
        ## lmom's sym evaluate to character, so try to look up.
        ## FIXME: maybe check this is not PACKAGE = "another package"
        if (is.character(sym)) {
            if (!have_registration) return ("SYMBOL OK")
            sym <- reg[[FF_fun]][[sym]]
            if(is.null(sym)) return ("SYMBOL OK")
        }

        ## These are allowed and used by SU's packages so skip for now
        if (inherits(sym, "RegisteredNativeSymbol")
            || inherits(sym, "NativeSymbol"))
            return ("SYMBOL OK")

        if (!inherits(sym, "NativeSymbolInfo")) {
            other_problem <<- c(other_problem, e)
            ## other_desc <<- c(other_desc, sprintf("\"%s\" is not of class \"%s\"", name, "NativeSymbolInfo"))
            other_desc <<- c(other_desc, sprintf("%s is of class \"%s\"",
                                                 sQuote(name), class(sym)))
            return("OTHER")
        }
        ## This might be symbol from another (base?) package.
        ## Allow for Rcpp modules
        parg <- unclass(sym$dll)$name
        if(length(parg) == 1L && parg %notin% c("Rcpp", pkgDLL)) {
            wrong_pkg <<- c(wrong_pkg, e)
            bad_pkg <<- c(bad_pkg, parg)
        }
        ## Compare the number of arguments in the call against the
        ## registered parameter count (a negative count means "any").
        numparms <- sym$numParameters
        if (length(numparms) && numparms >= 0) {
            ## We have to be careful if ... is in the call.
            if (any(as.character(e) == "...")) {
                other_problem <<- c(other_problem, e)
                other_desc <<-
                    c(other_desc,
                      sprintf("call includes ..., expected %d %s",
                              numparms,
                              if(numparms > 1L) "parameters" else "parameter"))
            } else {
                callparms <- length(e) - 2L
                if ("PACKAGE" %in% names(e)) callparms <- callparms - 1L
                if (FF_fun %in% c(".C", ".Fortran"))
                    callparms <- callparms - length(intersect(names(e), c("NAOK", "DUP", "ENCODING")))
                if (!is.null(numparms) && numparms >= 0L && numparms != callparms) {
                    other_problem <<- c(other_problem, e)
                    other_desc <<-
                        c(other_desc,
                          sprintf("call to %s with %d %s, expected %d",
                                  sQuote(name), callparms,
                                  if(callparms > 1L) "parameters" else "parameter",
                                  numparms))
                    return("OTHER")
                }
            }
        }
        ## Routines registered for .Call/.External must be invoked with
        ## the matching interface.
        if (inherits(sym, "CallRoutine") &&
            (FF_fun %notin% c(".Call", ".Call.graphics"))) {
            other_problem <<- c(other_problem, e)
            other_desc <<- c(other_desc, sprintf("%s registered as %s, but called with %s", sQuote(name), ".Call", FF_fun))
            return("OTHER")
        }
        if (inherits(sym, "ExternalRoutine") && !(FF_fun %in% c(".External", ".External.graphics"))) {
            other_problem <<- c(other_problem, e)
            other_desc <<- c(other_desc, sprintf("%s registered as %s, but called with %s", sQuote(name), ".External", FF_fun))
            return("OTHER")
        }

        "SYMBOL OK"
    }

    ## Recursively walk expression 'e' looking for FF calls, classifying
    ## each one and recording problems in the accumulators via <<-.
    ## Assignments are noted in 'fr' (used by check_registration above).
    find_bad_exprs <- function(e) {
        if(is.call(e) || is.expression(e)) {
            ## <NOTE>
            ## This picks up all calls, e.g. a$b, and they may convert
            ## to a vector.  The function is the first element in all
            ## the calls we are interested in.
            ## BDR 2002-11-28
            ## </NOTE>
            if(deparse(e[[1L]])[1L] %in% FF_funs) {
                if(registration) check_registration(e, fr)
                dup <- e[["DUP"]]
                if(!is.null(dup) && !isTRUE(dup))
                    dup_false <<- c(dup_false, e)
                this <- ""
                this <- parg <- e[["PACKAGE"]]
                if (!is.na(pkg) && is.character(parg) &&
                    nzchar(parg) && parg != pkgDLL) {
                    wrong_pkg <<- c(wrong_pkg, e)
                    bad_pkg <<- c(bad_pkg, this)
                }
                ## Classify the 'PACKAGE' argument: OK / EMPTY / symbol /
                ## missing (the last only "bad" without a namespace DLL).
                parg <- if(!is.null(parg) && any(nzchar(parg))) "OK"
                else if(identical(parg, "")) {
                    empty_exprs <<- c(empty_exprs, e)
                    "EMPTY"
                } else if(!is.character(sym <- e[[2L]])) {
                    if (!registration) {
                        sym <- tryCatch(eval(sym, code_env),
                                        error = function(e) e)
                        if (inherits(sym, "NativeSymbolInfo")) {
                            ## This might be symbol from another package.
                            ## Allow for Rcpp modules
                            parg <- unclass(sym$dll)$name
                            if(length(parg) == 1L &&
                               parg %notin% c("Rcpp", pkgDLL)) {
                                wrong_pkg <<- c(wrong_pkg, e)
                                bad_pkg <<- c(bad_pkg, parg)
                            }
                        }
                    }
                    "Called with symbol"
                } else if(!has_namespace) {
                    bad_exprs <<- c(bad_exprs, e)
                    "MISSING"
                } else "MISSING but in a function in a namespace"
                if(verbose)
                    if(is.null(this))
                        cat(deparse(e[[1L]]), "(", deparse(e[[2L]]),
                            ", ... ): ", parg, "\n", sep = "")
                    else
                        cat(deparse(e[[1L]]), "(", deparse(e[[2L]]),
                            ", ..., PACKAGE = \"", this, "\"): ",
                            parg, "\n", sep = "")
            } else if (deparse(e[[1L]])[1L] %in% "<-") {
                fr <<- c(fr, as.character(e[[2L]]))
            }
            for(i in seq_along(e)) Recall(e[[i]])
        }
    }

    ## Collect the expressions to scan: for an installed package, the
    ## bodies of the functions (and S4 methods / reference class
    ## methods) in its namespace; otherwise, the parsed code file(s).
    if(!missing(package)) {
        ## Only take the body of closures defined in this package's
        ## namespace (not re-exported/imported ones).
        checkFFmy <- function(f)
            if(typeof(f) == "closure") {
                env <- environment(f)
                if(isNamespace(env)) {
                    nm <- getNamespaceName(env)
                    if (nm == package) body(f) else NULL
                } else body(f)
            } # else NULL
        exprs <- lapply(ls(envir = code_env, all.names = TRUE),
                        function(f) checkFFmy(get(f, envir = code_env))) # get is expensive
        if(.isMethodsDispatchOn()) {
            ## Also check the code in S4 methods.
            ## This may find things twice if a setMethod() with a bad FF
            ## call is from inside a function (e.g., InitMethods()).
            for(f in .get_S4_generics(code_env)) {
                mlist <- .get_S4_methods_list(f, code_env)
                exprs <- c(exprs, lapply(mlist, body))
            }
            refs <- .get_ref_classes(code_env)
            if(length(refs)) {
                exprs2 <- lapply(unlist(refs, FALSE), checkFFmy)
                exprs <- c(exprs, exprs2)
            }
        }
    } else {
        if(!is.na(enc) &&
           (Sys.getlocale("LC_CTYPE") %notin% c("C", "POSIX"))) {
            ## FIXME: what if conversion fails on e.g. UTF-8 comments
            con <- file(file, encoding=enc)
            on.exit(close(con))
        } else con <- file
        exprs <-
            tryCatch(parse(file = con, n = -1L),
                     error = function(e)
                     stop(gettextf("parse error in file '%s':\n%s",
                                   file,
                                   .massage_file_parse_error_message(conditionMessage(e))),
                          domain = NA, call. = FALSE))
    }
    ## Scan each top-level expression; 'fr' collects the names assigned
    ## within it (reset per expression).
    for(i in seq_along(exprs)) {
        fr <- character()
        find_bad_exprs(exprs[[i]])
    }
    ## Package the results: bad calls as the value, everything else as
    ## attributes (see format.checkFF for how these are reported).
    attr(bad_exprs, "wrong_pkg") <- wrong_pkg
    attr(bad_exprs, "bad_pkg") <- bad_pkg
    attr(bad_exprs, "empty") <- empty_exprs
    attr(bad_exprs, "other_problem") <- other_problem
    attr(bad_exprs, "other_desc") <- other_desc
    if(check_DUP) attr(bad_exprs, "dup_false") <- dup_false
    if (length(bad_pkg)) {              # check against dependencies.
        bases <- .get_standard_package_names()$base
        bad <- bad_pkg %w/o% bases
        if (length(bad)) {
            depends <- .get_requires_from_package_db(db, "Depends")
            imports <- .get_requires_from_package_db(db, "Imports")
            suggests <- .get_requires_from_package_db(db, "Suggests")
            enhances <- .get_requires_from_package_db(db, "Enhances")
            bad <- bad %w/o% c(depends, imports, suggests, enhances)
            attr(bad_exprs, "undeclared") <- bad
        }
    }
    class(bad_exprs) <- "checkFF"
    if(verbose)
        invisible(bad_exprs)
    else
        bad_exprs
}

format.checkFF <-
function(x, ...)
{
    ## Format the results of checkFF() for printing: 'x' holds the
    ## foreign function calls without a 'PACKAGE' argument, with the
    ## other detected problems stored in attributes (see checkFF).
    xx <- attr(x, "empty")              # calls with PACKAGE = ""
    y <- attr(x, "wrong_pkg")           # calls into another package's DLL
    z <- attr(x, "bad_pkg")             # names of those DLLs
    zz <- attr(x, "undeclared")         # DLL packages not in dependencies
    other_problem <- attr(x, "other_problem")

    ## Render a call as '  .C(sym, ...)'.
    .fmt <- function(x)
        paste0("  ", deparse(x[[1L]]), "(", deparse(x[[2L]]), ", ...)")

    res <- character()
    if (length(x)) {
        msg <- ngettext(length(x),
                        "Foreign function call without 'PACKAGE' argument:",
                        "Foreign function calls without 'PACKAGE' argument:",
                        domain = NA)
        res <- c(msg, unlist(lapply(x, .fmt)))
    }
    if (length(xx)) {
        ## Pluralize by the number of empty-'PACKAGE' calls (xx), not by
        ## the number of missing-'PACKAGE' calls (x).
        msg <- ngettext(length(xx),
                        "Foreign function call with empty 'PACKAGE' argument:",
                        "Foreign function calls with empty 'PACKAGE' argument:",
                        domain = NA)
        res <- c(res, msg, unlist(lapply(xx, .fmt)))
    }

    if (length(y)) {
        bases <- .get_standard_package_names()$base
        ## Render a call including its (wrong) 'PACKAGE' argument, if given.
        .fmt2 <- function(x, z) {
            if("PACKAGE" %in% names(x))
                paste0("  ", deparse(x[[1L]]), "(", deparse(x[[2L]]),
                       ", ..., PACKAGE = \"", z, "\")")
            else
                paste0("  ", deparse(x[[1L]]), "(", deparse(x[[2L]]), ", ...)")
        }
        ## Report calls into base packages separately from calls into
        ## other packages.
        base <- z %in% bases
        if(any(base)) {
            xx <- unlist(lapply(seq_along(y)[base],
                                function(i) .fmt2(y[[i]], z[i])))
            xx <- unique(xx)
            msg <- ngettext(length(xx),
                            "Foreign function call to a base package:",
                            "Foreign function calls to a base package:",
                            domain = NA)
            res <- c(res, msg, sort(xx))
        }
        if(any(!base)) {
            xx <- unlist(lapply(seq_along(y)[!base],
                                function(i) .fmt2(y[[i]], z[i])))
            xx <- unique(xx)
            msg <- ngettext(length(xx),
                            "Foreign function call to a different package:",
                            "Foreign function calls to a different package:",
                            domain = NA)
            res <- c(res, msg, sort(xx))
        }
    }
    if (length(zz)) {
        zz <- unique(zz)
        msg <- ngettext(length(zz),
                        "Undeclared package in foreign function calls:",
                        "Undeclared packages in foreign function calls:",
                        domain = NA)
        res <- c(res, msg, paste("  ", paste(sQuote(sort(zz)), collapse = ", ")))
    }
    if (length(other_problem)) {
        msg <- ngettext(length(other_problem),
                        "Registration problem:",
                        "Registration problems:",
                        domain = NA)
        res <- c(res, msg)
        ## Descriptions are parallel to the problem calls.
        other_desc <- attr(x, "other_desc")
        for (i in seq_along(other_problem)) {
            res <- c(res, paste0("  ", other_desc[i], ":"),
                          paste0("   ", deparse(other_problem[[i]])))
        }
    }
    z3 <- attr(x, "dup_false")          # only present if check_DUP
    if (length(z3)) {
        msg <- ngettext(length(z3),
                        "Call with DUP:",
                        "Calls with DUP:",
                        domain = NA)
        res <- c(res, msg)
        for (i in seq_along(z3)) {
            res <- c(res, paste0("   ", deparse(z3[[i]])))
        }
    }
    res
}

### * checkS3methods

checkS3methods <-
function(package, dir, lib.loc = NULL)
{
    ## Check S3 generics and methods consistency.

    ## Unfortunately, what is an S3 method is not clear.
    ## These days, S3 methods for a generic GEN are found
    ## A. via GEN.CLS lookup from the callenv to its topenv;
    ## B. the S3 registry;
    ## C. GEN.CLS lookup from the parent of the topenv to baseenv,
    ##    skipping everything on the search path between globalenv and
    ##    baseenv.
    ## Thus if "package code" calls GEN, we first look in the package
    ## namespace itself, then in the registry, and then in the package
    ## imports and .BaseNamespaceEnv (and globalenv and baseenv again).
    ##
    ## Clearly, everything registered via S3method() should be an S3
    ## method.  Interestingly, we seem to have some registrations for
    ## non-generics, such as grDevices::axis().  These are "harmless"
    ## but likely not "as intended", and hence inconsistencies are not
    ## ignored.
    ##
    ## If the package namespace has a function named GEN.CLS, it is used
    ## as an S3 method for an S3 generic named GEN (and hence "is an S3
    ## method") only if the package code actually calls GEN (see A
    ## above).  So one could argue that we should not be looking at all
    ## GEN.CLS matches with GEN a generic in the package itself, its
    ## imports or base, but restrict to only the ones where the package
    ## code calls GEN.  Doable, but not straightforward (calls could be
    ## PKG::GEN) and possibly quite time consuming.  For generics from
    ## the package itself or its imports, not restricting should not
    ## make a difference (why define or import when not calling?), but
    ## for generics from base it may: hence we filter out the mismatches
    ## for base GEN not called in the package.
    ##
    ## If a package provides an S3 generic GEN, there is no need to
    ## register GEN.CLS functions for "internal use" (see above).
    ## However, if GEN is exported then likely all GEN.CLS functions
    ## should be registered as S3 methods.  Hence, these cases are now
    ## noted.
    ##
    ## We used to report the apparent S3 methods exported but not
    ## registered (in fact, looking at GEN.CLS matches also for generics
    ## found in the former base packages and the direct strong package
    ## dependencies), controllable via the environment variable
    ## _R_CHECK_S3_METHODS_NOT_REGISTERED_.  Should we continue doing so?
    ## Such functions have not been used for S3 dispatch for several
    ## years now, so it would seem that reporting these functions is no
    ## longer necessary.

    ## Note that checkS3methods(dir = DIR) cannot easily know about
    ## imported generics.  One could try to approximate the imports
    ## based on already loaded or available namespaces imported from,
    ## but that's a lot of effort for very little benefit, so for now we
    ## simply only look for generics in the package and base namespaces
    ## in case we work on the package sources only.

    S3_methods_info <- matrix(character(), 0L, 4L)
    exports <- character()

    if(!missing(package)) {
        if(length(package) != 1L)
            stop("argument 'package' must be of length 1")
        dir <- find.package(package, lib.loc)
        ## Using package installed in @code{dir} ...
        code_dir <- file.path(dir, "R")
        if(!dir.exists(code_dir))
            stop(gettextf("directory '%s' does not contain R code",
                          dir),
                 domain = NA)
        is_base <- basename(dir) == "base"

        ## Load package into code_env.
        if(!is_base)
            .load_namespace_quietly(package, dirname(dir))

        code_env <- asNamespace(package)
        if(!is_base) {
            S3_methods_info <- getNamespaceInfo(code_env, "S3methods")
            exports <- getNamespaceExports(code_env)
        }
    }
    else {
        if(missing(dir))
            stop("you must specify 'package' or 'dir'")
        ## Using sources from directory @code{dir} ...
        if(!dir.exists(dir))
            stop(gettextf("directory '%s' does not exist", dir),
                 domain = NA)
        else
            dir <- file_path_as_absolute(dir)
        code_dir <- file.path(dir, "R")
        if(!dir.exists(code_dir))
            stop(gettextf("directory '%s' does not contain R code",
                          dir),
                 domain = NA)
        is_base <- basename(dir) == "base"

        code_env <- new.env(hash = TRUE, parent = .BaseNamespaceEnv)
        dfile <- file.path(dir, "DESCRIPTION")
        meta <- if(file_test("-f", dfile))
            .read_description(dfile)
        else
            character()
        .source_assignments_in_code_dir(code_dir, code_env, meta)
        sys_data_file <- file.path(code_dir, "sysdata.rda")
        if(file_test("-f", sys_data_file)) load(sys_data_file, code_env)

        objects_in_code <- sort(names(code_env))

        if(file.exists(file.path(dir, "NAMESPACE")) &&
           ## Code in NAMESPACE could e.g. check the version of one of
           ## its Imports.
           !inherits(tryCatch(nsInfo <-
                                  parseNamespaceFile(basename(dir),
                                                     dirname(dir)),
                              error = identity),
                     "error")) {
            ## Determine exported objects.
            exports <- intersect(objects_in_code, nsInfo$exports)
            for(p in nsInfo$exportPatterns)
                exports <- c(exports,
                             grep(p, objects_in_code, value = TRUE))
            exports <- unique(exports)
            ## Determine S3 methods info.
            S3_methods_info <- .get_namespace_S3_methods_db(nsInfo)
        }
    }

    funs_in_env <- function(env, nms = NULL) {
        if(is.null(nms)) nms <- names(env)
        Filter(function(f) is.function(env[[f]]), nms)
    }

    gens_in_env <- function(env, nms = NULL) {
        if(is.null(nms)) nms <- names(env)
        Filter(function(f) .is_S3_generic(f, env), nms)
    }

    S3_group_generics_env <-
        .make_S3_group_generic_env()
    S3_primitive_generics_env <-
        .make_S3_primitive_generic_env()

    dod <- .predicate_for_calls_with_names(c(".Defunct", ".Deprecated"),
                                           "base")

    nfg <- function(gname, env) {
        ## To correctly get the arguments of a "known" S3 generic, we
        ## must do the following.
        ## First, get the generic (using get0).
        ## If this is NULL and we have one of the group generics, use
        ## the S3_group_generics_env.
        ## If this is not NULL and we have a primitive, use
        ## S3_primitive_generics_env.
        ## Otherwise, if a closure, it could still be the case that we
        ## got the S4 group generics from methods, which for Summary
        ## and Math changes the formals.  So we need to check where we
        ## found the closure.
        gcode <- get0(gname, envir = env, mode = "function")
        if(gname %in% names(S3_group_generics_env)) {
            if(is.null(gcode) ||
               identical(gcode, get0(gname, .BaseNamespaceEnv)))
                gcode <- S3_group_generics_env[[gname]]
        } else if(is.primitive(gcode))
            gcode <- S3_primitive_generics_env[[gname]]
        if(!is.null(gcode))
            names(formals(gcode))
        else
            NULL
    }

    nff <- function(f) names(formals(f))

    one <- function(e) {
        gname <- e[[1L]]
        gargs <- nfg(gname, code_env)
        mname <- sprintf("%s.%s", e[[1L]], e[[2L]])
        mcode <- if(is.character(v <- e[[3L]]))
                     get0(v, envir = code_env)
                 else
                     v
        if(!is.null(mcode) &&
           dod(.get_top_call_in_fun(mcode)))
            mcode <- NULL
        margs <- if(!is.null(mcode)) nff(mcode) else NULL
        list(gname, gargs, mname, margs)
    }

    gen_dot_cls_matches <- function(g, nms) {
        if(length(g) != 1L)
            return(character())
        else
            nms[startsWith(nms, paste0(g, "."))]
    }

    methods_not_registered_with_exported_generic <- g.c <- character()

    functions_in_code <- funs_in_env(code_env)

    generics_in_base <- unique(c(gens_in_env(.BaseNamespaceEnv),
                                 names(S3_group_generics_env),
                                 names(S3_primitive_generics_env),
                                 .get_internal_S3_generics()))
    ## Alternatively, can do
    ##   generics_in_base <- .get_S3_generics_in_base()
    ## but we need the S3 group and primitive generics envs anyway.

    if(is_base) {
        generics <- generics_in_code <- generics_in_base
    } else {
        generics_in_code <- gens_in_env(code_env)
        generics <- c(generics_in_code,
                      ## Generics from imports
                      if(!missing(package))
                          setdiff(gens_in_env(parent.env(code_env)),
                                  functions_in_code),
                      ## Generics from base
                      setdiff(generics_in_base,
                              c(functions_in_code,
                                funs_in_env(parent.env(code_env)))))
    }

    nsm <- nonS3methods(basename(dir))
    gnm <- lapply(generics,
                  function(g) {
                      methods <-
                          gen_dot_cls_matches(g, functions_in_code)
                      if((n <- length(methods)) > 0L) {
                          gargs <- nfg(g, code_env)
                          entries <-
                              lapply(methods,
                                     function(m) {
                                         mcode <- code_env[[m]]
                                         margs <-
                                             if(dod(.get_top_call_in_fun(mcode)))
                                                 NULL
                                             else
                                                 nff(mcode)
                                         list(g, gargs, m, margs)
                                     })
                          names(entries) <- methods
                          entries
                      } else NULL
                  })
    gnm <- do.call(c, gnm)
    gnm <- gnm[setdiff(names(gnm), nsm)]

    if(!is_base) {
        g.c <- sprintf("%s.%s",
                       S3_methods_info[, 1L],
                       S3_methods_info[, 2L])
        ## Record apparent S3 methods not registered for exported
        ## generics.
        ind <- (vapply(gnm, `[[`, "", 1L) %in%
                intersect(generics_in_code, exports))
        methods_not_registered_with_exported_generic <-
            setdiff(names(gnm)[ind], c(g.c, nsm))
        ## Add additional generics and methods from the registry.
        S3_methods_info <-
            S3_methods_info[!(g.c %in% names(gnm)), ,
                            drop = FALSE]
        ## Cannot easily handle delayed registration.
        S3_methods_info <-
            S3_methods_info[is.na(as.character(S3_methods_info[, 4L])), ,
                            drop = FALSE]
        if(NROW(S3_methods_info)) {
            add <- apply(S3_methods_info, 1L, one, simplify = FALSE)
            names(add) <- vapply(add, `[[`, "", 3L)
            gnm <- c(gnm, add)
        }
    }

    check_args <- function(gName, gArgs, mName, mArgs) {
        ## Drop the ones where gArgs is NULL (presumably the language
        ## elements) or mArgs is NULL (a primitive?).
        if(is.null(gArgs) || is.null(mArgs)) return()
        ## handle round.POSIXt and packages defining methods for round()
        ## without a ... argument.
        if(gName == "round" && length(mArgs) >= 1 && mArgs[[1]] == "x") return()
        if(gName == "plot") gArgs <- gArgs[-2L] # drop "y"
        ## FIXME: not quite right, could be another plot generic ...
        ogArgs <- gArgs
        omArgs <- mArgs
        ## If m is a formula method, its first argument *may* be called
        ## formula.  (Note that any argument name mismatch throws an
        ## error in current S-PLUS versions.)
        if(endsWith(mName, ".formula")) {
            if(gArgs[1L] != "...") gArgs <- gArgs[-1L]
            if(mArgs[1L] != "...") mArgs <- mArgs[-1L]
        }
        dotsPos <- which(gArgs == "...")
        ipos <- if(length(dotsPos))
            seq_len(dotsPos[1L] - 1L)
        else
            seq_along(gArgs)
        ## careful, this could match multiply in incorrect funs.
        dotsPos <- which(mArgs == "...")
        if(length(dotsPos))
            ipos <- ipos[seq_len(dotsPos[1L] - 1L)]
        posMatchOK <- identical(gArgs[ipos], mArgs[ipos])
        argMatchOK <- all(gArgs %in% mArgs) || length(dotsPos) > 0L
        margMatchOK <- all(mArgs %in% c("...", gArgs)) || "..." %in% ogArgs
        if(posMatchOK && argMatchOK && margMatchOK)
            NULL
        else if (gName %in% c("+", "-", "*", "/", "^", "%%", "%/%", "&", "|",
                              "!", "==", "!=", "<", "<=", ">=", ">")
                 && (length(ogArgs) == length(omArgs)) )
            NULL
        else {
            l <- list(ogArgs, omArgs)
            names(l) <- c(gName, mName)
            l
        }
    }

    bad_methods <- Filter(length,
                          lapply(gnm,
                                 function(e)
                                     do.call(check_args, e)))
    if(length(bad_methods) && !is_base) {
        ## For now, split out the mismatches for GEN.CLS functions not
        ## registered as methods, split according to GEN a generic in
        ## the package or not, and the mismatches for methods registered
        ## for a non-generic.
        gen <- vapply(bad_methods, function(e) names(e)[1L], "")
        ## Not registered.
        i1 <- !(names(bad_methods) %in% g.c)
        ## Generic not generic?  One can register an S3 method for an S4
        ## generic.
        if(any(i3 <- i1 & (gen %in% generics_in_base))) {
            ## Check whether the base generics are actually called.
            ## Note that these things are hard to find out: we check for
            ## calls to names of base generics, but these could be local
            ## functions ...
            p3 <- which(i3)
            gennames <- intersect(gen, generics_in_base)
            predicate <- .predicate_for_calls_with_names(gennames,
                                                         "base")
            calls <- lapply(code_env, .find_calls, predicate,
                            recursive = TRUE)
            used <- (gen[p3] %in% unique(.call_names(unlist(calls))))
            if(!all(used)) {
                keep <- - p3[!used]
                bad_methods <- bad_methods[keep]
                gen <- gen[keep]
                i1 <- i1[keep]
            }
        }
        i2 <- !(gen %in% generics)
        if(any(i2) && .isMethodsDispatchOn()) {
            p2 <- which(i2)
            i2[p2] <- ! vapply(gen[p2],
                               function(g) {
                                   gcode <- get0(g, code_env)
                                   if(is.null(gcode))
                                       FALSE
                                   else
                                       methods::is(gcode,
                                                   "genericFunction")
                               },
                               NA)
        }
        if(any(i1) || any(i2)) {
            i3 <- (gen %in% generics_in_code)
            bad_methods_not_registered_with_generic_in_code <-
                bad_methods[i1 & i3]
            bad_methods_not_registered_with_generic_not_in_code <-
                bad_methods[i1 & !i3]
            bad_methods_registered_for_non_generic <-
                bad_methods[i2]
            bad_methods <- bad_methods[!i1 & !i2]
            if(length(bad_methods_not_registered_with_generic_in_code))
                attr(bad_methods,
                     "bad_methods_not_registered_with_generic_in_code") <-
                    bad_methods_not_registered_with_generic_in_code
            if(length(bad_methods_not_registered_with_generic_not_in_code))
                attr(bad_methods,
                     "bad_methods_not_registered_with_generic_not_in_code") <-
                    bad_methods_not_registered_with_generic_not_in_code
            if(length(bad_methods_registered_for_non_generic))
                attr(bad_methods,
                     "bad_methods_registered_for_non_generic") <-
                    bad_methods_registered_for_non_generic
        }
    }

    if(length(methods_not_registered_with_exported_generic))
        attr(bad_methods, "methods_not_registered_with_exported_generic") <-
            methods_not_registered_with_exported_generic

    class(bad_methods) <- "checkS3methods"
    bad_methods
}

format.checkS3methods <-
function(x, ...)
{
    ## Format the results of checkS3methods() for printing: one chunk
    ## per generic/method argument-list mismatch, plus (optionally)
    ## sections for the "possible issue" attributes recorded by the
    ## checker.

    ## Render a character vector of argument names as a function header.
    fmt_args <- function(a)
        paste0("function(", paste(a, collapse = ", "), ")")

    ## One mismatch entry: generic name and args, then method name and
    ## args, wrapped for display.
    fmt_one <- function(e)
        paste(c(paste0(names(e)[1L], ":"),
                strwrap(fmt_args(e[[1L]]), indent = 2L, exdent = 11L),
                paste0(names(e)[2L], ":"),
                strwrap(fmt_args(e[[2L]]), indent = 2L, exdent = 11L)),
              collapse = "\n")

    ## All entries of a mismatch list, blank-line separated; empty
    ## character vector when there is nothing to show.
    fmt_all <- function(y) {
        if(!length(y))
            return(character())
        paste(vapply(y, fmt_one, ""), collapse = "\n\n")
    }

    s <- fmt_all(x)

    ## Additional "possible issue" sections are only shown on request
    ## via the environment variable below.
    if(config_val_to_logical(Sys.getenv("_R_CHECK_S3_METHODS_SHOW_POSSIBLE_ISSUES_",
                                        "FALSE"))) {
        unregistered <-
            c(attr(x, "bad_methods_not_registered_with_generic_in_code"),
              attr(x, "bad_methods_not_registered_with_generic_not_in_code"))
        if(length(unregistered))
            s <- c(s,
                   paste0("Mismatches for apparent methods not registered:\n",
                          fmt_all(unregistered)))
        non_generic <- attr(x, "bad_methods_registered_for_non_generic")
        if(length(non_generic))
            s <- c(s,
                   paste0("Mismatches for methods registered for non-generic:\n",
                          fmt_all(non_generic)))
        unexported <- attr(x, "methods_not_registered_with_exported_generic")
        if(length(unexported))
            s <- c(s,
                   paste0("Apparent methods for exported generics not registered:\n",
                          paste(strwrap(paste(sort(unexported), collapse = " "),
                                        exdent = 2L, indent = 2L),
                                collapse = "\n")))
    }

    if(length(s))
        paste(s, collapse = "\n\n")
    else
        character()
}

### * checkReplaceFuns

## Check that (apparent) replacement functions in a package have
## 'value' as the name of their final formal argument, as required for
## the foo(x) <- value assignment form to work.  Exactly one of
## 'package' (an installed package, looked up along 'lib.loc') or
## 'dir' (a package source directory) must be supplied.  Returns an
## object of class "checkReplaceFuns": a vector of offending function
## names and, when methods dispatch is on, offending S4 replacement
## methods in \S4method{GEN}{SIG} notation.
checkReplaceFuns <-
function(package, dir, lib.loc = NULL)
{
    ## S3 methods registry of the package namespace (if any): used
    ## below both to pick up registered replacement methods and to
    ## exclude registered methods from the plain object scan.
    ns_S3_methods_db <- NULL

    ## Argument handling.
    if(!missing(package)) {
        if(length(package) != 1L)
            stop("argument 'package' must be of length 1")
        dir <- find.package(package, lib.loc)
        ## Using package installed in @code{dir} ...
        code_dir <- file.path(dir, "R")
        if(!dir.exists(code_dir))
            stop(gettextf("directory '%s' does not contain R code",
                          dir),
                 domain = NA)
        is_base <- basename(dir) == "base"

        ## Load package into code_env.
        if(!is_base)
            .load_namespace_quietly(package, dirname(dir))
        code_env <- asNamespace(package)

        ## base has no registered S3 methods of its own.
        if(!is_base)
            ns_S3_methods_db <- .getNamespaceInfo(code_env, "S3methods")
    } else { # missing(package)
        if(missing(dir))
            stop("you must specify 'package' or 'dir'")
        ## Using sources from directory @code{dir} ...
        if(!dir.exists(dir))
            stop(gettextf("directory '%s' does not exist", dir),
                 domain = NA)
        else
            dir <- file_path_as_absolute(dir)
        code_dir <- file.path(dir, "R")
        if(!dir.exists(code_dir))
            stop(gettextf("directory '%s' does not contain R code",
                          dir),
                 domain = NA)
        is_base <- basename(dir) == "base"

        ## Collect the package's top-level assignments by sourcing the
        ## R code (and sysdata, if present) into a scratch environment.
        code_env <- new.env(hash = TRUE)
        dfile <- file.path(dir, "DESCRIPTION")
        meta <- if(file_test("-f", dfile))
            .read_description(dfile)
        else
            character()
        .source_assignments_in_code_dir(code_dir, code_env, meta)
        sys_data_file <- file.path(code_dir, "sysdata.rda")
        if(file_test("-f", sys_data_file)) load(sys_data_file, code_env)

        ## Does the package have a NAMESPACE file?
        if(file.exists(file.path(dir, "NAMESPACE")) &&
           !inherits(tryCatch(nsInfo <-
                                  parseNamespaceFile(basename(dir),
                                                     dirname(dir)),
                              error = identity),
                     "error")) {
            ns_S3_methods_db <- .get_namespace_S3_methods_db(nsInfo)
        }
    }

    objects_in_code <- sort(names(code_env))
    replace_funs <- character()

    if(!is.null(ns_S3_methods_db)) {
        ns_S3_generics <- as.character(ns_S3_methods_db[, 1L])
        ns_S3_methods <- ns_S3_methods_db[, 3L]
        if(!is.character(ns_S3_methods)) {
            ## As of 2018-07, direct calls to registerS3method()
            ## could have registered a function object (not name).
            ind <- vapply(ns_S3_methods, is.character, NA)
            ns_S3_methods[!ind] <- ""
            ns_S3_methods <- as.character(ns_S3_methods)
        }
        ## S3 replacement methods from namespace registration?
        replace_funs <- ns_S3_methods[endsWith(ns_S3_generics, "<-")]
        ## Now remove the functions registered as S3 methods.
        objects_in_code <- setdiff(objects_in_code, ns_S3_methods)
    }

    ## Anything whose name contains '<-' looks like a replacement
    ## function.
    replace_funs <-
        c(replace_funs, grep("<-", objects_in_code, value = TRUE))
    ## Drop %xxx% binops.
    ## Spotted by Hugh Parsonage <hugh.parsonage@gmail.com>.
    replace_funs <-
        replace_funs[!(startsWith(replace_funs, "%") &
                       endsWith(replace_funs, "%"))]

    ## TRUE iff the function's last formal argument is named 'value'
    ## (vacuously TRUE when there are no formals at all).
    .check_last_formal_arg <- function(f) {
        arg_names <- names(formals(f))
        if(!length(arg_names))
            TRUE                        # most likely a .Primitive()
        else
            identical(arg_names[length(arg_names)], "value")
    }

    ## Find the replacement functions (which have formal arguments) with
    ## last arg not named 'value'.
    bad_replace_funs <- if(length(replace_funs)) {
        Filter(function(f) {
                   ## Always get the functions from code_env ...
                   ## Should maybe get S3 methods from the registry ...
                   f <- get(f, envir = code_env)  # get is expensive
                   is.function(f) && ! .check_last_formal_arg(f)
               },
               replace_funs)
    } else character()

    ## Also check S4 replacement methods, when the methods package is
    ## in use.
    if(.isMethodsDispatchOn()) {
        S4_generics <- .get_S4_generics(code_env)
        ## Assume that the ones with names ending in '<-' are always
        ## replacement functions.
        S4_generics <- S4_generics[endsWith(names(S4_generics), "<-")]
        bad_S4_replace_methods <-
            lapply(S4_generics,
                   function(f) {
                       mlist <- .get_S4_methods_list(f, code_env)
                       ind <- !vapply(mlist, .check_last_formal_arg, NA)
                       if(!any(ind))
                           character()
                       else {
                           ## Report offending methods in Rd \S4method
                           ## markup, one entry per bad signature.
                           sigs <- .make_siglist(mlist[ind])
                           sprintf("\\S4method{%s}{%s}", f, sigs)
                       }
                   })
        bad_replace_funs <-
            c(bad_replace_funs,
              unlist(bad_S4_replace_methods, use.names = FALSE))
    }

    class(bad_replace_funs) <- "checkReplaceFuns"
    bad_replace_funs
}

format.checkReplaceFuns <-
function(x, ...)
{
    ## Nothing to report: return an empty character vector so callers
    ## can test length() on the result.
    if(!length(x))
        return(character())
    ## Otherwise pretty-print the offending function names.
    .pretty_format(unclass(x))
}

### * checkTnF

checkTnF <-
function(package, dir, file, lib.loc = NULL)
{
    ## Scan R code for uses of the symbols 'T' and 'F' (which, unlike
    ## TRUE/FALSE, can be reassigned).  Exactly one of 'package' (an
    ## installed package), 'dir' (a package source directory) or 'file'
    ## (a single R code file) must be given.  Returns an object of
    ## class "checkTnF": a list, named by file, of the expressions in
    ## which T/F were found.

    code_files <- docs_files <- character()

    ## Argument handling: determine which files to scan.
    if(!missing(package)) {
        if(length(package) != 1L)
            stop("argument 'package' must be of length 1")
        ## Using package installed in @code{dir} ...
        dir <- find.package(package, lib.loc)
        if(file.exists(file.path(dir, "R", "all.rda")))
            warning("cannot check R code installed as image")
        main_code_file <- file.path(dir, "R", package)
        if(file.exists(main_code_file))  # could be data-only
            code_files <- main_code_file
        example_dir <- file.path(dir, "R-ex")
        if(dir.exists(example_dir))
            code_files <- c(code_files,
                            list_files_with_exts(example_dir, "R"))
    }
    else if(!missing(dir)) {
        ## Using sources from directory @code{dir} ...
        if(!dir.exists(dir))
            stop(gettextf("directory '%s' does not exist", dir),
                 domain = NA)
        dir <- file_path_as_absolute(dir)
        code_dir <- file.path(dir, "R")
        if(dir.exists(code_dir))        # could be data-only
            code_files <- list_files_with_type(code_dir, "code")
        docs_dir <- file.path(dir, "man")
        if(dir.exists(docs_dir))
            docs_files <- list_files_with_type(docs_dir, "docs")
    }
    else if(!missing(file)) {
        if(!file_test("-f", file))
            stop(gettextf("file '%s' does not exist", file),
                 domain = NA)
        code_files <- file
    }
    else
        stop("you must specify 'package', 'dir' or 'file'")

    ## Parse one file (or, when 'txt' is given, the example code
    ## extracted from the Rd file 'fname') and return the list of
    ## parent expressions containing a bare T or F symbol.
    scan_for_TnF <- function(fname, txt) {
        hits <- list()
        walk <- function(e, parent) {
            if(is.name(e)
               && (as.character(e) %in% c("T", "F"))
               && !is.null(parent)) {
                ## Wrap in list() so that pairlist parents (T/F used
                ## as defaults in function arglists) are stored as
                ## single elements.
                hits <<- c(hits, list(parent))
            }
            else if(is.recursive(e)) {
                for(i in seq_along(e))
                    walk(e[[i]], e)
            }
        }
        exprs <- if(missing(txt))
            tryCatch(parse(file = fname, n = -1L),
                     error = function(e)
                     stop(gettextf("parse error in file '%s':\n%s",
                                   fname,
                                   .massage_file_parse_error_message(conditionMessage(e))),
                          domain = NA, call. = FALSE))
        else
            tryCatch(str2expression(txt),
                     error = function(e)
                     stop(gettextf("parse error in examples from file '%s':\n%s",
                                   fname, conditionMessage(e)),
                          domain = NA, call. = FALSE))
        for(i in seq_along(exprs))
            walk(exprs[[i]], NULL)
        hits
    }

    ## Accumulate per-file results, keeping only files with hits.
    bad_exprs <- list()
    for(code_file in code_files) {
        hits <- scan_for_TnF(code_file)
        if(length(hits))
            bad_exprs <- c(bad_exprs,
                           structure(list(hits), names = code_file))
    }
    for(docs_file in docs_files) {
        ## Extract the example code from the Rd file and scan that.
        Rd <- prepare_Rd(docs_file, defines = .Platform$OS.type)
        hits <- scan_for_TnF(docs_file, .Rd_get_example_code(Rd))
        if(length(hits))
            bad_exprs <- c(bad_exprs,
                           structure(list(hits), names = docs_file))
    }

    class(bad_exprs) <- "checkTnF"
    bad_exprs
}

format.checkTnF <-
function(x, ...)
{
    ## Format the results of checkTnF() for printing: for each file, a
    ## header line, one wrapped message per offending expression, and
    ## an empty string as a separator.
    fmt_one_file <- function(fname) {
        entries <- x[[fname]]
        wrapped <-
            unlist(lapply(entries,
                          function(e)
                              strwrap(gettextf("found T/F in %s",
                                               paste(deparse(e),
                                                     collapse = "")),
                                      exdent = 4L)))
        c(gettextf("File '%s':", fname), wrapped, "")
    }

    as.character(unlist(lapply(names(x), fmt_one_file)))
}

### * .check_package_depends

## changed in 2.3.0 to refer to a source dir.

.check_package_depends <-
function(dir, force_suggests = TRUE, check_incoming = FALSE,
         ignore_vignettes = FALSE)
{
    .check_dependency_cycles <-
        function(db, available = utils::available.packages(),
                 dependencies = c("Depends", "Imports", "LinkingTo"))
        {
            ## given a package, find its recursive dependencies.
            ## We want the dependencies of the current package,
            ## not of a version on the repository.
##            pkg <- db[["Package"]]
            this <- db[dependencies]; names(this) <- dependencies
            ## FIXME: .extract_dependency_package_names
            known <- utils:::.clean_up_dependencies(this)
            info <- available[, dependencies, drop = FALSE]
            rn <- rownames(info)
            deps <- function(p) {
                if(p %notin% rn) return(character())
                ## FIXME: .extract_dependency_package_names
                utils:::.clean_up_dependencies(info[p, ])
            }
            extra <- known
            repeat {
                extra <- unlist(lapply(extra, deps))
                extra <- setdiff(extra, known)
                if(!length(extra)) break
                known <- c(known, extra)
            }
            known
        }

    if(length(dir) != 1L)
        stop("The package 'dir' argument must be of length 1")

    ## We definitely need a valid DESCRIPTION file.
    db <- .read_description(file.path(dir, "DESCRIPTION"))

    dir_name <- basename(dir)
    package_name <- db["Package"]
    if(!identical(package_name, dir_name) &&
       (!is.character(package_name) || !nzchar(package_name))) {
        message(sprintf(
            "package name '%s' seems invalid; using directory name '%s' instead",
            package_name, dir_name))
        package_name <- dir_name
    }

    bad_depends <- list()
    ## and we cannot have cycles
    ## this check needs a package db from repository(s), so
    repos <- getOption("repos")
    if(any(repos == "@CRAN@"))
        repos <- .get_standard_repository_URLs()
    if(length(repos)) {
        available <- utils::available.packages(repos = repos)
        ad <- .check_dependency_cycles(db, available)
        pkgname <- db[["Package"]]
        if(pkgname %in% ad)
            bad_depends$all_depends <- setdiff(ad, pkgname)
    } else if (check_incoming)
        bad_depends$skipped <-
            "  No repository set, so cyclic dependency check skipped"

    ldepends <-  .get_requires_with_version_from_package_db(db, "Depends")
    limports <-  .get_requires_with_version_from_package_db(db, "Imports")
    llinks <-  .get_requires_with_version_from_package_db(db, "LinkingTo")
    lsuggests <- .get_requires_with_version_from_package_db(db, "Suggests")
    ## NB: no one checks version for 'Enhances'.
    lenhances <- .get_requires_with_version_from_package_db(db, "Enhances")
    ## VignetteBuilder packages are needed to ascertain what is a vignette.
    VB <- .get_requires_from_package_db(db, "VignetteBuilder")

    depends <- vapply(ldepends, `[[`, "", 1L)
    imports <- vapply(limports, `[[`, "", 1L)
    links <- vapply(llinks, `[[`, "", 1L)
    suggests <- vapply(lsuggests, `[[`, "", 1L)

    standard_package_names <- .get_standard_package_names()

    ## Are all packages listed in Depends/Suggests/Imports/LinkingTo installed?
    lreqs <- c(ldepends, limports, llinks,
               if(force_suggests) lsuggests)
    lreqs2 <- c(if(!force_suggests) lsuggests, lenhances)
    if(length(c(lreqs, lreqs2))) {
        ## Do this directly for speed.
        installed <- character()
        installed_in <- character()
        for(lib in .libPaths()) {
            pkgs <- list.files(lib)
            pkgs <- pkgs[file.access(file.path(lib, pkgs, "DESCRIPTION"), 4) == 0]
            installed <- c(installed, pkgs)
            installed_in <- c(installed_in, rep.int(lib, length(pkgs)))
        }
        if (length(lreqs)) {
            reqs <- unique(sapply(lreqs, `[[`, 1L))
            bad <- setdiff(reqs, installed)
            if(length(bad)) {
                ## EDanalysis has a package in all of Depends, Imports, Suggests.
                bad1 <-  bad[bad %in% c(depends, imports, links)]
                if(length(bad1))
                    bad_depends$required_but_not_installed <- bad1
                bad2 <-  setdiff(bad, bad1)
                if(length(bad2))
                    bad_depends$suggested_but_not_installed <- bad2
            }
            ## now check versions
            have_ver <- vapply(lreqs, function(x) length(x) == 3L, NA)
            lreqs3 <- lreqs[have_ver]
            if(length(lreqs3)) {
                bad <- character()
                for (r in lreqs3) {
                    pkg <- r[[1L]]
                    op <- r[[2L]]
                    where <- which(installed == pkg)
                    if(!length(where)) next
                    ## want the first one
                    desc <- readRDS(file.path(installed_in[where[1L]], pkg,
                                              "Meta", "package.rds"))
                    current <- desc$DESCRIPTION["Version"]
                    target <- as.package_version(r[[3L]])
                    if(!do.call(op, list(current, target)))
                        bad <- c(bad, pkg)
                }
                if(length(bad))
                    bad_depends$required_but_obsolete <- bad
            }
        }
        if (length(lenhances) &&
            !config_val_to_logical(Sys.getenv("_R_CHECK_PACKAGE_DEPENDS_IGNORE_MISSING_ENHANCES_",
                                             "FALSE"))) {
            m <- setdiff(sapply(lenhances, `[[`, 1L), installed)
            if(length(m))
                bad_depends$enhances_but_not_installed <- m
        }
        if (!force_suggests && length(lsuggests)) {
            m <- setdiff(sapply(lsuggests, `[[`, 1L), installed)
            if(length(m))
                bad_depends$suggests_but_not_installed <- m
        }
        if (!ignore_vignettes && length(VB)) {
            ## These need both to be declared and installed
            ## If people explicitly state 'utils' they ought really to
            ## declare it, but skip for now.
            bad <- VB %w/o% c(package_name, "utils", depends, imports, suggests)
            if(length(bad))
                bad_depends$required_for_checking_but_not_declared <- bad
            bad2 <- VB %w/o% c(package_name, installed)
            bad2 <- setdiff(bad2, bad)
            if(length(bad2))
                bad_depends$required_for_checking_but_not_installed <- bad2
            if (length(VB) && !dir.exists(file.path(dir, "vignettes")))
                bad_depends$no_vignettes <- VB
        }
    }
    ## FIXME: is this still needed now we do dependency analysis?
    ## Are all vignette dependencies at least suggested or equal to
    ## the package name?

    ## This is a check for old-location vignettes.
    ## If the package itself is the VignetteBuilder,
    ## we may not have installed it yet.
    defer <- package_name %in%  db["VignetteBuilder"]
    vigns <-
        .package_vignettes_via_call_to_R(dir = dir,
                                         subdirs = file.path("inst", "doc"),
                                         check = !defer)

    if(length(vigns$msg))
        bad_depends$bad_engine <- vigns$msg
    if (!is.null(vigns) && length(vigns$docs) > 0L) {
        reqs <- unique(unlist(.build_vignette_index(vigns)$Depends))
        ## For the time being, ignore base packages missing from the
        ## DESCRIPTION dependencies even if explicitly given as vignette
        ## dependencies.
        reqs <- setdiff(reqs,
                        c(depends, imports, suggests, package_name,
                          standard_package_names$base))
        if(length(reqs))
            bad_depends$missing_vignette_depends <- reqs
    }

    ## Are all namespace dependencies listed as package dependencies?
    if(file_test("-f", file.path(dir, "NAMESPACE"))) {
        reqs <- .get_namespace_package_depends(dir)
        ## <FIXME>
        ## Not clear whether we want to require *all* namespace package
        ## dependencies listed in DESCRIPTION, or e.g. just the ones on
        ## non-base packages.  Do the latter for time being ...
        ## Actually we need to know at least about S4-using packages,
        ## since we need to reinstall if those change.
        allowed_imports <-
            setdiff(standard_package_names$base, c("methods", "stats4"))
        reqs <- setdiff(reqs, c(imports, depends, allowed_imports))
        if(length(reqs))
            bad_depends$missing_namespace_depends <- reqs
    }

    ## Check for excessive 'Depends'
    deps <- setdiff(depends, c("R", "base", "datasets", "grDevices",
                               "graphics", "methods", "utils", "stats"))
    if(length(deps) > 5L) bad_depends$many_depends <- deps

    ## and Imports
    lim <- as.integer(Sys.getenv("_R_CHECK_EXCESSIVE_IMPORTS_", "0"))
    imps <- setdiff(imports, standard_package_names$base)
    if(!is.na(lim) && lim > 0 && length(imps) > lim)
        bad_depends$many_imports <- imps

    ## check header-only packages
    if (check_incoming) {
        hdOnly <- c("BH", "RcppArmadillo", "RcppEigen")
        hd <- setdiff(intersect(hdOnly, c(depends, imports)),
                      .get_namespace_package_depends(dir, TRUE))
        if(length(hd)) bad_depends$hdOnly <- hd
    }

    ## Check RdMacros.
    RM <- setdiff(.get_requires_from_package_db(db, "RdMacros"),
                  c(imports, depends))
    if(length(RM)) bad_depends$missing_rdmacros_depends <- RM

    ## (added in 4.0.0) Check for orphaned packages.
    if (config_val_to_logical(Sys.getenv("_R_CHECK_ORPHANED_", "FALSE"))) {
        ## empty fields are list().
        strict <- setdiff(unique(c(as.character(depends),
                                   as.character(imports),
                                   as.character(links))),
                           bad_depends$required_but_not_installed)

        ## (4.1.0) This needs to be recursive, since a package
        ## strictly depends on everything required to load it.
        ## All of those should be installed, so we only look at those which are.
        ## We include LinkingTo as if a dependency links to an
        ## orphaned package, it becomes uninstallable if the linked-to
        ## package is, or if it is removed.
        dependencies <- .expand_dependency_type_spec("strong")
        ipd <- utils:::.installed_package_dependencies
        new <- strict0 <- strict
        ex <- character()
        repeat {
            need <- unname(unlist(ipd(new, dependencies)))
            new <- setdiff(need, c(strict, ex))
            if(!length(new)) break
            strict <- union(strict, new)
        }

        ## First use dependencies which are installed: strict dependencies
        ## need to be for a full check.
        ## Suggests might not even exist, so we suppress warnings.
        mt <- utils::maintainer
        strict2 <- sapply(strict, function(x) suppressWarnings(mt(x)))
        miss1 <- is.na(strict2)
        weak <- setdiff(as.character(suggests),
                        bad_depends$suggested_but_not_installed)
        weak2 <- sapply(weak, function(x) suppressWarnings(mt(x)))
        miss2 <- is.na(weak2)
        if((any(miss1) || any(miss2)) &&
           !inherits(tryCatch(db <- CRAN_package_db()[, c("Package",
                                                          "Maintainer")],
                              ## This may not be local and needs a
                              ## complete CRAN mirror.
                              error = identity),
                     "error")) {
            orphaned <- db[db$Maintainer == "ORPHANED" , 1L]
            s2 <- intersect(strict[miss1], orphaned)
            w2 <- intersect(weak[miss2], orphaned)
        } else s2 <- w2 <- character()
        strict <- c(strict[!miss1 & strict2 == "ORPHANED"], s2)
        if(length(strict)) {
            strict0 <- sort(intersect(strict, strict0))
            strict1 <- sort(setdiff(strict, strict0))
            if(length(strict0)) bad_depends$orphaned <- strict0
            if(length(strict1)) bad_depends$orphaned1 <- strict1
        }
        weak <- c(weak[!miss2 & weak2 == "ORPHANED"], w2)
        if(length(weak)) bad_depends$orphaned2 <- sort(weak)
    }

    class(bad_depends) <- "check_package_depends"
    bad_depends
}

## Format method for the "check_package_depends" objects returned by
## .check_package_depends(): each recorded problem becomes a block of
## message lines followed by "" as a blank-line separator.  Elements
## absent from 'x' yield NULL, which c() drops silently.
format.check_package_depends <-
function(x, ...)
{
    c(character(),
      if(length(x$skipped)) c(x$skipped, ""),
      if(length(x$all_depends)) {
          c("There is circular dependency in the installation order:",
            .pretty_format2("  One or more packages in", x$all_depends),
            "  depend on this package (for the versions on the repositories).",
            "")
      },
      ## The repeated pattern below picks a plural message (via
      ## .pretty_format2) when more than one package is affected, and a
      ## singular sprintf() message when exactly one is.
      if(length(bad <- x$required_but_not_installed) > 1L) {
          c(.pretty_format2("Packages required but not available:", bad), "")
      } else if(length(bad)) {
          c(sprintf("Package required but not available: %s", sQuote(bad)), "")
      },
      if(length(bad <- x$suggested_but_not_installed) > 1L) {
          c(.pretty_format2("Packages suggested but not available:", bad), "")
      } else if(length(bad)) {
          c(sprintf("Package suggested but not available: %s", sQuote(bad)), "")
      },
      if(length(bad <- x$required_but_obsolete) > 1L) {
          c(.pretty_format2("Packages required and available but unsuitable versions:",
                            bad),
            "")
      } else if(length(bad)) {
          c(sprintf("Package required and available but unsuitable version: %s", sQuote(bad)),
            "")
      },
      if(length(bad <- x$suggests_but_not_installed) > 1L) {
          c(.pretty_format2("Packages suggested but not available for checking:",
                            bad),
            "")
      } else if(length(bad)) {
          c(sprintf("Package suggested but not available for checking: %s",
                     sQuote(bad)),
            "")
      },
      if(length(bad <- x$enhances_but_not_installed) > 1L) {
          c(.pretty_format2("Packages which this enhances but not available for checking:",
                            bad),
            "")
      } else if(length(bad)) {
          c(sprintf("Package which this enhances but not available for checking: %s", sQuote(bad)),
            "")
      },
      if(length(bad <- x$required_for_checking_but_not_declared) > 1L) {
          c(.pretty_format2("VignetteBuilder packages not declared:", bad), "")
      } else if(length(bad)) {
          c(sprintf("VignetteBuilder package not declared: %s", sQuote(bad)), "")
      },
      if(length(bad <- x$required_for_checking_but_not_installed) > 1L) {
          c(.pretty_format2("VignetteBuilder packages required for checking but not installed:", bad), "")
      } else if(length(bad)) {
          c(sprintf("VignetteBuilder package required for checking but not installed: %s", sQuote(bad)), "")
      },
      if(length(bad <- x$missing_vignette_depends)) {
          c(if(length(bad) > 1L) {
                c("Vignette dependencies not required:", .pretty_format(bad))
            } else {
                sprintf("Vignette dependency not required: %s", sQuote(bad))
            },
            strwrap(gettextf("Vignette dependencies (%s entries) must be contained in the DESCRIPTION Depends/Suggests/Imports entries.",
                             "\\VignetteDepends{}")),
            "")
      },
      if(length(bad <- x$no_vignettes)) {
          c(if(length(bad) > 1L) {
                c("Vignette dependencies required without any vignettes:", .pretty_format(bad))
            } else {
                sprintf("Vignette dependency required without any vignettes: %s", sQuote(bad))
            },
            "")
      },
      if(length(bad <- x$missing_rdmacros_depends)) {
          c(if(length(bad) > 1L)
                .pretty_format2("RdMacros packages not required:", bad)
            else
                sprintf("RdMacros package not required: %s", sQuote(bad)),
            strwrap("RdMacros packages must be contained in the DESCRIPTION Imports/Depends entries."),
            "")
      },
      if(length(bad <- x$missing_namespace_depends)) {
          error_str <- "missing from DESCRIPTION Imports/Depends entries:"
          c(if(length(bad) > 1L)
                .pretty_format2(paste("Namespace dependencies", error_str), bad)
            else
                sprintf("Namespace dependency %s %s", error_str, sQuote(bad)),
          "")
      },
      if(length(y <- x$many_depends)) {
          c(.pretty_format2("Depends: includes the non-default packages:", y),
            strwrap(paste("Adding so many packages to the search path",
                          "is excessive",
                          "and importing selectively is preferable."
                          , collapse = ", ")),
            "")
      },
      ## many_imports reports only a count, not the package names.
      if(ly <- length(x$many_imports)) {
          c(sprintf("Imports includes %d non-default packages.", ly),
            strwrap(paste("Importing from so many packages",
                          "makes the package vulnerable to any of them",
                          "becoming unavailable.  Move as many as possible to",
                          "Suggests and use conditionally."
                          , collapse = ", ")),
            "")
      },
      ## bad_engine already contains ready-made message lines.
      if(length(y <- x$bad_engine)) {
          c(y, "")
      },
      if(length(bad <- x$hdOnly)) {
          c(if(length(bad) > 1L)
            c("Packages in Depends/Imports which should probably only be in LinkingTo:", .pretty_format(bad))
          else
            sprintf("Package in Depends/Imports which should probably only be in LinkingTo: %s", sQuote(bad)),
            "")
      },
      ## orphaned: direct strict dependencies; orphaned1: indirect
      ## (recursive) strict dependencies; orphaned2: suggested packages.
      if(length(bad <- x[["orphaned"]])) {
          c(if(length(bad) > 1L)
            c("Requires orphaned packages:", .pretty_format(bad))
          else
            sprintf("Requires orphaned package: %s", sQuote(bad)),
          "")
      },
      if(length(bad <- x[["orphaned1"]])) {
          c(if(length(bad) > 1L)
            c("Requires (indirectly) orphaned packages:", .pretty_format(bad))
          else
            sprintf("Requires (indirectly) orphaned package: %s", sQuote(bad)),
            "")
      },
      if(length(bad <- x[["orphaned2"]])) {
          c(if(length(bad) > 1L)
            c("Suggests orphaned packages:", .pretty_format(bad))
          else
            sprintf("Suggests orphaned package: %s", sQuote(bad)),
            "")
      }
      )
}

### * .check_package_description

## Validate the contents of a DESCRIPTION file.  'dfile' is the path to
## the file; alternatively pre-read metadata may be passed as 'db' (a
## named character vector as returned by .read_description()).  With
## strict = TRUE, additional checks are performed on VignetteBuilder,
## Title and Description.  Returns a "check_package_description"
## object: a list of check results which is empty when no problems
## were found.
.check_package_description <-
function(dfile, strict = FALSE, db = NULL)
{
    if(is.null(db)) {
        dfile <- file_path_as_absolute(dfile)
        db <- .read_description(dfile)
    }

    standard_package_names <- .get_standard_package_names()

    valid_package_name_regexp <-
        .standard_regexps()$valid_package_name
    valid_package_version_regexp <-
        .standard_regexps()$valid_package_version

    is_base_package <-
        !is.na(priority <- db["Priority"]) && priority == "base"

    out <- list()                       # For the time being ...

    ## Check encoding-related things first.

    ## All field tags must be ASCII.
    if(any(ind <- !.is_ASCII(names(db))))
        out$fields_with_non_ASCII_tags <- names(db)[ind]
    ## For all fields used by the R package management system, values
    ## must be ASCII as well (so that the RPM works in a C locale).
    ASCII_fields <- c(.get_standard_repository_db_fields(),
                      "Encoding", "License")
    ASCII_fields <- intersect(ASCII_fields, names(db))
    if(any(ind <- !.is_ASCII(db[ASCII_fields])))
        out$fields_with_non_ASCII_values <- ASCII_fields[ind]

    ## Determine encoding and re-encode if necessary and possible.
    if("Encoding" %in% names(db)) {
        encoding <- db["Encoding"]
        if(Sys.getlocale("LC_CTYPE") %notin% c("C", "POSIX"))
            db <- iconv(db, encoding, sub = "byte")
    }
    else if(!all(.is_ISO_8859(db))) {
        ## No valid Encoding metadata.
        ## Determine whether we can assume Latin1.
        out$missing_encoding <- TRUE
    }

    if(anyNA(nchar(db, "c", TRUE))) {
        ## Ouch, invalid in the current locale.
        ## (Can only happen in a MBCS locale.)
        ## Try re-encoding from Latin1.
        db <- iconv(db, "latin1")
    }

    ## Check Authors@R and expansion if needed.
    ## If Author or Maintainer is missing, try to generate them from
    ## Authors@R (expansions come back as attributes on the result);
    ## any problems found are merged into the check results.
    if(!is.na(aar <- db["Authors@R"]) &&
       (is.na(db["Author"]) || is.na(db["Maintainer"]))) {
        res <- .check_package_description_authors_at_R_field(aar)
        if(is.na(db["Author"]) &&
           !is.null(s <- attr(res, "Author")))
            db["Author"] <- s
        if(is.na(db["Maintainer"]) &&
           !is.null(s <- attr(res, "Maintainer")))
            db["Maintainer"] <- s
        mostattributes(res) <- NULL     # Keep names.
        out <- c(out, res)
    }

    val <- package_name <- db["Package"]
    if(!is.na(val)) {
        tmp <- character()
        ## We allow 'R', which is not a valid package name.
        if(!grepl(sprintf("^(R|%s)$", valid_package_name_regexp), val))
            tmp <- c(tmp, gettext("Malformed package name"))
        if(!is_base_package && val %in% standard_package_names$base) {
            tmp <- c(tmp,
                     c("Invalid package name.",
                       "This is the name of a base package."))
        }
        if(length(tmp))
            out$bad_package <- tmp
    }
    if(!is.na(val <- db["Version"])
       && !is_base_package
       && !grepl(sprintf("^%s$", valid_package_version_regexp), val))
        out$bad_version <- val
    if(!is.na(val <- db["Maintainer"])
       && !grepl(.valid_maintainer_field_regexp, val))
        out$bad_maintainer <- val

    ## Optional entries in DESCRIPTION:
    ##   Depends/Suggests/Imports/Enhances, Namespace, Priority.
    ## These must be correct if present.

    val <- db[match(c("Depends", "Suggests", "Imports", "Enhances"),
                    names(db), nomatch = 0L)]
    if(length(val)) {
        depends <- trimws(unlist(strsplit(val, ",")))
        bad_dep_entry <- bad_dep_op <- bad_dep_version <- character()
        ## Matches 'name' or 'name (op version)'; capture group 2 is
        ## the whole parenthesized part, 3 the operator, 4 the version.
        dep_regexp <-
            paste0("^[[:space:]]*",
                   paste0("(R|", valid_package_name_regexp, ")"),
                   "([[:space:]]*\\(([^) ]+)[[:space:]]+([^) ]+)\\))?",
                   "[[:space:]]*$")
        for(dep in depends) {
            if(!grepl(dep_regexp, dep)) {
                ## Entry does not match the regexp.
                bad_dep_entry <- c(bad_dep_entry, dep)
                next
            }
            if(nzchar(sub(dep_regexp, "\\2", dep))) {
                ## If not just a valid package name ...
                if(sub(dep_regexp, "\\3", dep) %notin%
                   c("<=", ">=", "<", ">", "==", "!="))
                    bad_dep_op <- c(bad_dep_op, dep)
                else if(grepl("^[[:space:]]*R", dep)) {
                    ## For R itself, svn revision specs ('r' followed
                    ## by digits) are accepted as well.
                    if(!grepl(sprintf("^(r[0-9]+|%s)$",
                                      valid_package_version_regexp),
                              sub(dep_regexp, "\\4", dep)))
                    bad_dep_version <- c(bad_dep_version, dep)
                } else if(!grepl(sprintf("^%s$",
                                         valid_package_version_regexp),
                                 sub(dep_regexp, "\\4", dep)))
                    bad_dep_version <- c(bad_dep_version, dep)
            }
        }
        if(length(c(bad_dep_entry, bad_dep_op, bad_dep_version)))
            out$bad_depends_or_suggests_or_imports <-
                list(bad_dep_entry = bad_dep_entry,
                     bad_dep_op = bad_dep_op,
                     bad_dep_version = bad_dep_version)
    }
    ## VignetteBuilder must be a comma-separated list of package names
    ## without version requirements.
    if(strict && !is.na(val <- db["VignetteBuilder"])) {
        depends <- trimws(unlist(strsplit(val, ",")))
        if(length(depends) < 1L || !all(grepl("^[[:alnum:].]*$", depends)))
            out$bad_vignettebuilder <- TRUE
    }
    if(!is.na(val <- db["Priority"])
       && !is.na(package_name)
       && (tolower(val) %in% c("base", "recommended", "defunct-base"))
       && (package_name %notin% unlist(standard_package_names)))
        out$bad_priority <- val

    ## Minimal check (so far) of Title and Description.
    ## A Title should not end in a period, unless it ends in an
    ## ellipsis or an abbreviation such as 'et al.'.
    if(strict && !is.na(val <- db["Title"])
       && endsWith(val, ".")
       && !grepl("[[:space:]][.][.][.]|et[[:space:]]al[.]", trimws(val)))
        out$bad_Title <- TRUE
    ## some people put punctuation inside quotes, some outside.
    if(strict && !is.na(val <- db["Description"])
       && !grepl("[.!?]['\")]?$", trimws(val)))
        out$bad_Description <- TRUE

    class(out) <- "check_package_description"
    out
}

## Format method for "check_package_description" objects: render each
## recorded problem as one (possibly multi-line) string, with a final
## pointer to the R-exts manual when any (non-Title/Description)
## problem was found.
format.check_package_description <-
function(x, ...)
{
    ## Collapse a block of lines into a single string; empty input
    ## yields character() so that c() drops it.
    fmt <- function(x) {
        if(length(x)) paste(x, collapse = "\n") else character()
    }

    ## <FIXME>
    ## Currently, check_meta() will give an error unless all output
    ## matches "^Malformed (Title|Description)", so for now need to
    ## avoid the pointer to R-exts in these cases.
    xx <- x; xx$bad_Title <- xx$bad_Description <- NULL
    ## </FIXME>

    c(character(),
      if(length(x$missing_encoding))
          gettext("Unknown encoding"),
      if(length(y <- x$fields_with_non_ASCII_tags))
          paste(c(gettext("Fields with non-ASCII tags:"),
                  .strwrap22(y),
                  gettext("All field tags must be ASCII.")),
                collapse = "\n"),
      if(length(y <- x$fields_with_non_ASCII_values))
          paste(c(gettext("Fields with non-ASCII values:"),
                  .strwrap22(y),
                  gettext("These fields must have ASCII values.")),
                collapse = "\n"),
      ## Authors@R problems are formatted by their own helper.
      fmt(.format_check_package_description_authors_at_R_field_results(x)),
      ## if(length(y <- x$missing_required_fields))
      ##     paste(c(gettext("Required fields missing or empty:"),
      ##             .strwrap22(y)),
      ##           collapse = "\n"),
      if(length(x$bad_package))
          paste(x$bad_package, collapse = "\n"),
      if(length(x$bad_version))
          gettext("Malformed package version."),
      if(length(x$bad_maintainer))
          gettext("Malformed maintainer field."),
      if(any(as.integer(lengths(x$bad_depends_or_suggests_or_imports)) > 0L )) {
          bad <- x$bad_depends_or_suggests_or_imports
          paste(c(gettext("Malformed Depends or Suggests or Imports or Enhances field."),
                  if(length(y <- bad$bad_dep_entry))
                      c(gettext("Offending entries:"),
                        paste0("  ", y),
                        strwrap(gettextf("Entries must be names of packages optionally followed by '<=' or '>=', white space, and a valid version number in parentheses."))),
                  if(length(y <- bad$bad_dep_op))
                      c(gettext("Entries with infeasible comparison operator:"),
                        paste0("  ", y),
                        strwrap(gettextf("Only operators '<=' and '>=' are possible."))),
                  if(length(y <- bad$bad_dep_version))
                      c(gettext("Entries with infeasible version number:"),
                        paste0("  ", y),
                        strwrap(gettextf("Version numbers must be sequences of at least two non-negative integers, separated by single '.' or '-'.")))),
                collapse = "\n")
      },
      if(isTRUE(x$bad_vignettebuilder))
          paste(c(gettext("Invalid VignetteBuilder field."),
                  strwrap(gettextf("This field must contain one or more packages (and no version requirement)."))),
                collapse = "\n"),
      if(length(x$bad_priority))
          paste(c(gettext("Invalid Priority field."),
                  strwrap(gettextf("Packages with priorities 'base' or 'recommended' or 'defunct-base' must already be known to R."))),
                collapse = "\n"),
      fmt(c(if(isTRUE(x$bad_Title))
                gettext("Malformed Title field: should not end in a period."),
            if(isTRUE(x$bad_Description))
                gettext("Malformed Description field: should contain one or more complete sentences."))),
      ## 'xx' excludes Title/Description so that those alone do not
      ## trigger the manual pointer (see FIXME above).
      if(any(as.integer(lengths(xx)) > 0L))
          paste(c(strwrap(gettext("See section 'The DESCRIPTION file' in the 'Writing R Extensions' manual."))),
                collapse = "\n"))
}

## Print method for "check_package_description" objects: write the
## formatted problem reports separated by blank lines, and return the
## object invisibly so the method pipes/chains cleanly.
print.check_package_description <-
function(x, ...)
{
    msgs <- format(x, ...)
    if(length(msgs) > 0L) {
        writeLines(paste(msgs, collapse = "\n\n"))
    }
    invisible(x)
}


### * .check_package_description2

## Additional DESCRIPTION dependency checks (beyond those performed by
## .check_package_description()): packages listed in more than one of
## Depends/Imports/Suggests/Enhances, and problems with 'LinkingTo'
## (versioned specs only usable in R >= 3.0.2, linked-to packages with
## no 'include' directory, LinkingTo without a 'src' directory).
## 'dfile' is the path to the DESCRIPTION file.
## Returns a "check_package_description2" object with components
## 'duplicates', 'bad_links', 'missing_incs' and 'have_src'.
.check_package_description2 <-
function(dfile)
{
    dfile <- file_path_as_absolute(dfile)
    db <- .read_description(dfile)
    depends <- .get_requires_from_package_db(db, "Depends")
    imports <- .get_requires_from_package_db(db, "Imports")
    suggests <- .get_requires_from_package_db(db, "Suggests")
    enhances <- .get_requires_from_package_db(db, "Enhances")
    ## Packages may occur several times in each dependency field to
    ## specify version ranges, so de-duplicate within each field and
    ## only flag duplicates *across* fields.
    ## See <https://bugs.r-project.org/show_bug.cgi?id=18735>.
    allpkgs <- c(unique(depends), unique(imports),
                 unique(suggests), unique(enhances))
    duplicates <- unique(allpkgs[duplicated(allpkgs)])
    links <- missing_incs <- character()
    llinks <-  .get_requires_with_version_from_package_db(db, "LinkingTo")
    have_src <- TRUE # dummy
    if(length(llinks)) {
        ## This is pointless unless there is compilable code
        have_src <- dir.exists(file.path(dirname(dfile), "src"))

        ## See if this is installable under 3.0.1:
        ## if so check for versioned specs
        deps <- .split_description(db, verbose = TRUE)$Rdepends2
        status <- 0L
        current <- as.numeric_version("3.0.1")
        for(dep in deps) {
            if(dep$op %notin% c("<=", ">=", "<", ">", "==", "!=")) next
            status <- if(inherits(dep$version, "numeric_version"))
                !do.call(dep$op, list(current, dep$version))
            else {
                ## An svn-revision style requirement, e.g. 'r12345':
                ## only meaningful for non-release (patched etc.) builds.
                ver <- R.version
                if (ver$status %in% c("", "Patched")) FALSE
                else !do.call(dep$op,
                              list(ver[["svn rev"]],
                                   as.numeric(sub("^r", "", dep$version))))
            }
        }
        if(!status) {
            ## Installable under 3.0.1, so versioned LinkingTo entries
            ## (those with more than just a package name) are unusable.
            llinks <- llinks[lengths(llinks) > 1L]
            if(length(llinks)) links <- sapply(llinks, `[[`, 1L)
        }
        ## and check if we can actually link to these.
        llinks <-  .get_requires_from_package_db(db, "LinkingTo")
        incs <- lapply(llinks, function(x) system.file("include", package = x))
        missing_incs <- as.vector(llinks[!nzchar(incs)])
    }
    out <- list(duplicates = duplicates,
                bad_links = links, missing_incs = missing_incs,
                have_src = have_src)
    class(out) <- "check_package_description2"
    out
}

## Format method for "check_package_description2" objects: render the
## cross-field duplicate, 'src'-less LinkingTo, versioned-LinkingTo and
## missing-include problems as message lines (NULL when none apply).
format.check_package_description2 <- function(x, ...)
{
    out <- NULL

    ## Packages listed in more than one dependency field.
    dups <- x$duplicates
    if(length(dups)) {
        header <- if(length(dups) > 1L)
            "Packages listed in more than one of Depends, Imports, Suggests, Enhances:"
        else
            "Package listed in more than one of Depends, Imports, Suggests, Enhances:"
        out <- c(out,
                 header,
                 paste(c(" ", sQuote(dups)), collapse = " "),
                 "A package should be listed in only one of these fields.")
    }

    ## LinkingTo is pointless without compilable code.
    if(!x$have_src)
        out <- c(out,
                 "'LinkingTo' field is unused: package has no 'src' directory")

    ## Versioned LinkingTo specs need R >= 3.0.2 to be honoured.
    bad_links <- x$bad_links
    if(length(bad_links)) {
        out <- c(out,
                 if(length(bad_links) > 1L)
                     c("Versioned 'LinkingTo' values for",
                       paste(c(" ", sQuote(bad_links)), collapse = " "),
                       "are only usable in R >= 3.0.2")
                 else
                     sprintf("Versioned 'LinkingTo' value for %s is only usable in R >= 3.0.2",
                             sQuote(bad_links)))
    }

    ## Linked-to packages without an 'include' directory.
    no_incs <- x$missing_incs
    if(x$have_src && length(no_incs)) {
        out <- c(out,
                 if(length(no_incs) > 1L)
                     c("'LinkingTo' for",
                       paste(c(" ", sQuote(no_incs)), collapse = " "),
                       "are unused as they have no 'include' directory")
                 else
                     sprintf("'LinkingTo' for %s is unused as it has no 'include' directory",
                             sQuote(no_incs)))
    }

    out
}

## Check an Authors@R field ('aar', the raw field string).
## The field is parsed into a person object; then the expansions used
## for the Author and Maintainer fields are validated.  'strict' adds
## progressively more checks:
##   >= 1: persons without a name or role,
##   >= 2: missing author roles, invalid/duplicated ORCID and ROR ids,
##   >= 3: non-standard MARC relator roles.
## Returns a list of problems (empty if none); on success the expanded
## Author and Maintainer texts are attached as attributes
## "Author"/"Maintainer".
.check_package_description_authors_at_R_field <-
function(aar, strict = FALSE)
{
    out <- list()
    if(is.na(aar)) return(out)
    ## <FIXME>
    ## Perhaps better to actually capture warnings?
    aar <- suppressWarnings(tryCatch(utils:::.read_authors_at_R_field(aar),
                                     error = identity))
    if(inherits(aar, "error")) {
        out$bad_authors_at_R_field <- conditionMessage(aar)
    } else {
        ## Check whether we can expand to something non-empty.
        s <- tryCatch(utils:::.format_authors_at_R_field_for_author(aar),
                      error = identity)
        if(inherits(s, "error")) {
            out$bad_authors_at_R_field_for_author <-
                conditionMessage(s)
        } else {
            if(s == "")
                out$bad_authors_at_R_field_has_no_author
            else {
                ## Expansion succeeded: record it for the caller.
                attr(out, "Author") <- s
                if(strict >= 1L) {
                    ## Persons must have a name (given or family) ...
                    has_no_name <-
                        vapply(aar,
                               function(e)
                               is.null(e$given) && is.null(e$family),
                               NA)
                    if(any(has_no_name)) {
                        out$bad_authors_at_R_field_has_persons_with_no_name <-
                            format(aar[has_no_name])
                    }
                    ## ... and at least one role.
                    has_no_role <-
                        vapply(aar,
                               function(e) is.null(e$role),
                               NA)
                    if(any(has_no_role)) {
                        out$bad_authors_at_R_field_has_persons_with_no_role <-
                            format(aar[has_no_role])
                    }
                }
                if(strict >= 2L) {
                    ## At least one named person must have the 'aut' role.
                    if(all(has_no_name |
                           vapply(aar, function(e) "aut" %notin% e$role, NA)))
                        out$bad_authors_at_R_field_has_no_author_roles <- TRUE
                    ## Validate ORCID identifiers and look for duplicates.
                    ids <- .ORCID_iD_from_person(aar)
                    pos <- which(!is.na(ids))
                    ids <- ids[pos]
                    pos <- pos[!.ORCID_iD_is_valid(ids)]
                    if(length(pos))
                        out$bad_authors_at_R_field_has_persons_with_bad_ORCID_identifiers <-
                            format(aar[pos])
                    ids <- ids[duplicated(ids)]
                    if(length(ids))
                        out$bad_authors_at_R_field_has_persons_with_dup_ORCID_identifiers <-
                            ids
                    ## Likewise for ROR identifiers.
                    ids <- .ROR_ID_from_person(aar)
                    pos <- which(!is.na(ids))
                    ids <- ids[pos]
                    pos <- pos[!.ROR_ID_is_valid(ids)]
                    if(length(pos))
                        out$bad_authors_at_R_field_has_persons_with_bad_ROR_identifiers <-
                            format(aar[pos])
                    ids <- ids[duplicated(ids)]
                    if(length(ids))
                        out$bad_authors_at_R_field_has_persons_with_dup_ROR_identifiers <-
                            ids
                }
                if(strict >= 3L) {
                    ## Flag MARC relator codes not customarily used with R.
                    non_standard_roles <-
                        lapply(aar$role, setdiff,
                               utils:::MARC_relator_db_codes_used_with_R)
                    ind <- lengths(non_standard_roles) > 0L
                    if(any(ind)) {
                        out$authors_at_R_field_has_persons_with_nonstandard_roles <-
                            sprintf("%s: %s",
                                    format(aar[ind]),
                                    vapply(non_standard_roles[ind], paste,
                                           collapse = ", ",
                                           FUN.VALUE = ""))
                    }
                }
            }
        }
        s <- tryCatch(utils:::.format_authors_at_R_field_for_maintainer(aar),
                      error = identity)
        if(inherits(s, "error")) {
            out$bad_authors_at_R_field_for_maintainer <-
                conditionMessage(s)
        } else {
            ## R-exts says
            ##   The mandatory 'Maintainer' field should give a _single_
            ##   name followed by a _valid_ (RFC 2822) email address in
            ##   angle brackets.
            ## Hence complain when Authors@R
            ## * has more than one person with a cre role
            ## * has no person with a cre role, "valid" email address
            ##   and a non-empty name.
            bad <- FALSE
            p <- Filter(function(e) "cre" %in% e$role,
                        aar)
            if(length(p) > 1L) {
                bad <- TRUE
                out$bad_authors_at_R_field_too_many_maintainers <-
                    format(p)
            }
            p <- Filter(function(e) {
                (!is.null(e$given) || !is.null(e$family)) && !is.null(e$email)
            },
                        p)
            if(!length(p)) {
                bad <- TRUE
                out$bad_authors_at_R_field_has_no_valid_maintainer <- TRUE
            }
            ## s should now be non-empty iff bad is FALSE.
            if(!bad) attr(out, "Maintainer") <- s
        }
    }
    out
}

## Format the results of .check_package_description_authors_at_R_field()
## (possibly merged into a larger check result list 'x') as a character
## vector of message lines; empty when no Authors@R problems were
## recorded.  Each problem contributes a header line, usually followed
## by the offending entries indented by two spaces.
.format_check_package_description_authors_at_R_field_results <-
function(x)
{
    c(character(),
      if(length(bad <- x[["bad_authors_at_R_field"]])) {
          c(gettext("Malformed Authors@R field:"),
            paste0("  ", bad))
      },
      if(length(bad <- x[["bad_authors_at_R_field_for_author"]])) {
          c(gettext("Cannot extract Author field from Authors@R field:"),
            paste0("  ", bad))
      },
      if(length(x[["bad_authors_at_R_field_has_no_author"]])) {
          gettext("Authors@R field gives no person with name and roles.")
      },
      if(length(bad <-
                x[["bad_authors_at_R_field_has_persons_with_no_name"]])) {
          c(gettext("Authors@R field gives persons with no name:"),
            paste0("  ", bad))
      },
      if(length(bad <-
                x[["bad_authors_at_R_field_has_persons_with_no_role"]])) {
          c(gettext("Authors@R field gives persons with no role:"),
            paste0("  ", bad))
      },
      if(length(x[["bad_authors_at_R_field_has_no_author_roles"]])) {
          gettext("Authors@R field gives no person with name and author role")
      },
      ## Non-standard roles are currently not reported.
      ## if(length(bad <-
      ##           x[["authors_at_R_field_has_persons_with_nonstandard_roles"]])) {
      ##     c(gettext("Authors@R field gives persons with non-standard roles:"),
      ##       paste0("  ", bad))
      ## },
      if(length(bad <- x[["bad_authors_at_R_field_for_maintainer"]])) {
          c(gettext("Cannot extract Maintainer field from Authors@R field:"),
            paste0("  ", bad))
      },
      if(length(bad <-
                x[["bad_authors_at_R_field_too_many_maintainers"]])) {
          c(gettext("Authors@R field gives more than one person with maintainer role:"),
            paste0("  ", bad))
      },
      if(length(x[["bad_authors_at_R_field_has_no_valid_maintainer"]])) {
          strwrap(gettext("Authors@R field gives no person with maintainer role, valid email address and non-empty name."))
      },
      if(length(bad <- x[["bad_authors_at_R_field_has_persons_with_bad_ORCID_identifiers"]])) {
          c(gettext("Authors@R field gives persons with invalid ORCID identifiers:"),
            paste0("  ", bad))
      },
      if(length(bad <- x[["bad_authors_at_R_field_has_persons_with_dup_ORCID_identifiers"]])) {
          c(gettext("Authors@R field gives persons with duplicated ORCID identifiers:"),
            paste0("  ", bad))
      },
      if(length(bad <- x[["bad_authors_at_R_field_has_persons_with_bad_ROR_identifiers"]])) {
          c(gettext("Authors@R field gives persons with invalid ROR identifiers:"),
            paste0("  ", bad))
      },
      if(length(bad <- x[["bad_authors_at_R_field_has_persons_with_dup_ROR_identifiers"]])) {
          c(gettext("Authors@R field gives persons with duplicated ROR identifiers:"),
            paste0("  ", bad))
      }
      )
}

### * .check_package_description_encoding

.check_package_description_encoding <-
function(dfile)
{
    ## Check the encoding-related aspects of a DESCRIPTION file:
    ## field tags must be ASCII, and non-ASCII field values require a
    ## portable Encoding specification.  Returns an object of class
    ## "check_package_description_encoding" (empty if all is well).
    dfile <- file_path_as_absolute(dfile)
    db <- .read_description(dfile)
    out <- list()

    tags <- names(db)

    ## All field tags must be ASCII.
    non_ascii_tags <- tags[!.is_ASCII(tags)]
    if(length(non_ascii_tags))
        out$fields_with_non_ASCII_tags <- non_ascii_tags

    if("Encoding" %in% tags) {
        ## An Encoding must be one of the portable ones.
        enc <- db[["Encoding"]]
        if(!(enc %in% c("latin1", "latin2", "UTF-8")))
            out$non_portable_encoding <- enc
    } else {
        ## Without an Encoding field, all values must be ASCII.
        non_ascii <- !.is_ASCII(db)
        if(any(non_ascii)) {
            out$missing_encoding <- TRUE
            out$fields_with_non_ASCII_values <- tags[non_ascii]
        }
    }

    class(out) <- "check_package_description_encoding"
    out
}

format.check_package_description_encoding <-
function(x, ...)
{
    ## Format the results of .check_package_description_encoding() as a
    ## character vector of diagnostic messages (empty if all is well).
    ##
    ## x ..: an object of class "check_package_description_encoding".
    c(character(),
      if(length(x$non_portable_encoding)) {
          c(gettextf("Encoding '%s' is not portable",
                     x$non_portable_encoding),
            "")
      },
      if(length(x$missing_encoding)) {
          gettext("Unknown encoding with non-ASCII data")
      },
      if(length(x$fields_with_non_ASCII_tags)) {
          c(gettext("Fields with non-ASCII tags:"),
            .pretty_format(x$fields_with_non_ASCII_tags),
            gettext("All field tags must be ASCII."),
            "")
      },
      if(length(x$fields_with_non_ASCII_values)) {
          c(gettext("Fields with non-ASCII values:"),
            .pretty_format(x$fields_with_non_ASCII_values))
      },
      ## Point to the manual whenever any problem was found.
      if(any(as.integer(lengths(x)) > 0L)) {
          ## No format directives here, so use gettext() (the previous
          ## gettextf() call passed no format arguments).
          c(strwrap(gettext("See section 'The DESCRIPTION file' in the 'Writing R Extensions' manual.")),
            "")
      })
}

### * .check_package_license

.check_package_license <-
function(dfile, dir)
{
    ## Check the License field of a DESCRIPTION file: canonical or
    ## standardizable specification, existing license file pointers,
    ## deprecated and overly restrictive components, templates lacking
    ## '+ file LICENSE', and invalid/incomplete license stub files.
    ## Returns an object of class "check_package_license" which is
    ## empty when no problems were found.
    ##
    ## dfile ..: path to the DESCRIPTION file.
    ## dir ....: the package directory (defaults to dirname(dfile)),
    ##           used to resolve 'file LICENSE' pointers.
    dfile <- file_path_as_absolute(dfile)
    db <- .read_description(dfile)

    if(missing(dir))
        dir <- dirname(dfile)

    ## Analyze the license information here.
    ## Cannot easily do this in .check_package_description(), as R CMD
    ## check's R::Utils::check_package_description() takes any output
    ## from this as indication of an error.

    out <- list()
    if(!is.na(val <- db["License"])) {
        ## If there is no License field, .check_package_description()
        ## will give an error.
        status <- analyze_license(val)
        ok <- status$is_canonical
        ## This analyzes the license specification but does not verify
        ## whether pointers exist, so let us do this here.
        if(length(pointers <- status$pointers)) {
            bad_pointers <-
                pointers[!file_test("-f", file.path(dir, pointers))]
            if(length(bad_pointers)) {
                status$bad_pointers <- bad_pointers
                ok <- FALSE
            }
        }
        ## License specifications regarded as deprecated.
        patt <- "(^Modified BSD License$|^BSD$|^CC BY.* [23][.]0)"
        if(any(ind <- grepl(patt, status$components))) {
            status$deprecated <- status$components[ind]
            ok <- FALSE
        }
        ## Components with extensions but not extensible:
        if(length(extensions <- status$extensions) &&
           any(ind <- !extensions$extensible)) {
            status$bad_extensions <- extensions$components[ind]
            ok <- FALSE
        }
        ## Components which need extensions (note that such components
        ## could use the name or abbrev from the license db):
        if(any(ind <- status$components %in%
               c("MIT License", "MIT",
                 "BSD 2-clause License", "BSD_2_clause",
                 "BSD 3-clause License", "BSD_3_clause"))) {
            status$miss_extension <- status$components[ind]
            ok <- FALSE
        }
        ## License stubs invalid or incomplete.
        if(length(extensions <- status$extensions)) {
            components <- extensions$components
            ## Fields the corresponding license template requires.
            nms <- if(any(grepl("^BSD[ _]3", components)))
                       c("YEAR", "COPYRIGHT HOLDER", "ORGANIZATION")
                   else if(any(grepl("^(MIT|BSD[ _]2)", components)))
                       c("YEAR", "COPYRIGHT HOLDER")
                   else
                       NULL
            if(!is.null(nms)
               && length(pointers <- status$pointers)
               && file_test("-f", file.path(dir, pointers[1L]))) {
                ## The stub must be valid DCF with all required
                ## template fields filled in (note that 'val' is
                ## re-used here for the DCF read result).
                val <- tryCatch(read.dcf(file.path(dir, pointers[1L]),
                                         fields = nms),
                                error = identity)
                if(inherits(val, "error")) {
                    status$license_stub_is_bad_DCF <- TRUE
                    ok <- FALSE
                } else {
                    ## Missing or empty entries, reported per record.
                    ind <- is.na(val) | !nzchar(val)
                    pos <- which(rowSums(ind) > 0)
                    if(length(pos)) {
                        status$license_stub_fields_not_complete <-
                            gettextf("Record: %d Field(s): %s",
                                     pos,
                                     vapply(pos,
                                            function(p)
                                                paste(nms[ind[p, ]],
                                                      collapse = ", "),
                                            ""))
                        ok <- FALSE
                    }
                }
            }
        }

        ## The ACM license is only tolerated for two grandfathered
        ## packages.
        if(any(ind <- status$components %in% "ACM") &&
           !(db["Package"] %in% c("akima", "tripack"))) {
            status$ACM <- status$components[ind]
            ok <- FALSE
        }
        ## Could always return the analysis results and not print them
        ## if ok, but it seems more standard to only return trouble.
        if(!ok)
            out <- c(list(license = val), status)
    }

    class(out) <- "check_package_license"
    out
}

format.check_package_license <-
function(x, ...)
{
    ## Format the results of .check_package_license() as a character
    ## vector of diagnostic messages (empty if there is nothing to
    ## report, or license checking is turned off).
    if(!length(x))
        return(character())

    ## _R_CHECK_LICENSE_ may be a logical, or "maybe"/"" (the default)
    ## meaning: only report the more serious problems.
    check <- Sys.getenv("_R_CHECK_LICENSE_")
    check <- if(check %in% c("maybe", ""))
        (!(x$is_standardizable)
         || length(x$bad_pointers)
         || length(x$bad_extensions)
         || length(x$license_stub_is_bad_DCF)
         || length(x$license_stub_fields_not_complete))
    else
        isTRUE(as.logical(check))
    if(!check)
        return(character())

    c(character(),
      if(!(x$is_canonical)) {
          c(gettext("Non-standard license specification:"),
            strwrap(x$license, indent = 2L, exdent = 2L),
            gettextf("Standardizable: %s", x$is_standardizable),
            if(x$is_standardizable) {
                c(gettext("Standardized license specification:"),
                  strwrap(x$standardization, indent = 2L, exdent = 2L))
            })
      },
      if(length(y <- x$deprecated)) {
          c(gettextf("Deprecated license: %s",
                     paste(y, collapse = " ")))
      },
      if(length(y <- x$bad_pointers)) {
          c(gettextf("Invalid license file pointers: %s",
                     paste(y, collapse = " ")))
      },
      if(length(y <- x$bad_extensions)) {
          c(gettext("License components with restrictions not permitted:"),
            paste0("  ", y))
      },
      if(length(y <- x$miss_extension)) {
          c(gettext("License components which are templates and need '+ file LICENSE':"),
            paste0("  ", y))
      },
      if(length(y <- x$license_stub_is_bad_DCF))
          gettext("License stub is invalid DCF."),
      if(length(y <- x$license_stub_fields_not_complete)) {
          ## The indented items are separate vector elements: they must
          ## not be passed to gettext() as further message arguments
          ## (as a previous version did by misplacing the parenthesis).
          c(gettext("License stub records with missing/empty fields:"),
            paste0("  ", y))
      },
      if(length(y <- x$ACM)) {
          gettext("Uses ACM license: only appropriate for pre-2013 ACM TOMS code")
      }
      )
}

### * .check_make_vars

.check_make_vars <-
function(dir, makevars = c("Makevars.in", "Makevars"))
{
    ## Check src/Makevars[.in] for non-portable PKG_*FLAGS settings and
    ## for overriding of user/site compiler variables.  Returns an
    ## object of class "check_make_vars": a list with (as applicable)
    ## elements 'paths', 'uflags', 'pflags' and 'p2flags', empty if
    ## nothing suspect was found.
    ##
    ## dir .......: the package 'src' directory.
    ## makevars ..: candidate makefiles; the first one that exists is
    ##              used for the make-based analysis.
    bad_flags <- list()
    class(bad_flags) <- "check_make_vars"

    paths <- file.path(dir, makevars)
    paths <- paths[file_test("-f", paths)]
    if(!length(paths)) return(bad_flags)
    bad_flags$paths <- file.path("src", basename(paths))
    ## Makevars could be used with --no-configure
    ## and maybe configure does not even use src/Makevars.in
    mfile <- paths[1L]
    make <- Sys.getenv("MAKE")
    if(make == "") make <- "make"
    ## Run make with the package makefile sandwiched between helper
    ## makefiles (shipped in R's share/make) which dump the resulting
    ## *FLAGS variables.
    ## needs a target to avoid targets in src/Makevars
    command <- sprintf("%s -f %s -f %s -f %s makevars_test",
                       make,
                       shQuote(file.path(R.home("share"), "make",
                                         "check_vars_ini.mk")),
                       shQuote(mfile),
                       shQuote(file.path(R.home("share"), "make",
                                         "check_vars_out.mk")))
    lines <- suppressWarnings(tryCatch(system(command, intern = TRUE,
                                              ignore.stderr = TRUE),
                                       error = identity))
    ## Silently give up if make failed or printed nothing.
    if(!length(lines) || inherits(lines, "error"))
        return(bad_flags)

    prefixes <- c("CPP", "C", "CXX", "CXX98", "CXX11", "CXX14", "CXX17",
                  "CXX20", "CXX23", "F", "FC", "OBJC", "OBJCXX")

    ## The helper makefiles initialize the user variables (CFLAGS etc.)
    ## to '-o /dev/null'; any other dumped value means the package
    ## makefile overrode a user/site setting.
    uflags_re <- sprintf("^(%s)FLAGS: *(.*)$",
                         paste(prefixes, collapse = "|"))
    pos <- grep(uflags_re, lines)
    ind <- (sub(uflags_re, "\\2", lines[pos]) != "-o /dev/null")
    if(any(ind))
        bad_flags$uflags <- lines[pos[ind]]

    ## Try to be careful ...
    pflags_re <- sprintf("^PKG_(%s)FLAGS: ",
                         paste(prefixes, collapse = "|"))
    lines <- lines[grepl(pflags_re, lines)]
    names <- sub(":.*", "", lines)
    lines <- sub(pflags_re, "", lines)
    flags <- strsplit(lines, "[[:space:]]+")
    ## Bad flags:
    ##   -O*
    ##      (BDR: for example Sun Fortran compilers used to accept -O
    ##      but not -O2, and VC++ accepts -Ox (literal x) but not -O.)
    ##   -Wall -pedantic -ansi -traditional -std* -f* -m* [GCC]
    ##   -x [Solaris]
    ##   -q [AIX]
    ##   -pipe (GNU compilers, not accepted by flang-new)
    ## It is hard to think of anything apart from -I* and -D* that is
    ## safe for general use ...
    bad_flags_regexp <-
        sprintf("^-(%s)$",
                paste(c("O.*",
                        "W", # same as -Wextra in GCC.
                        "w", # GCC, Solaris inhibit all warnings
                        "W[^l].*", # -Wl, might just be portable
                        "ansi", "pedantic", "traditional",
                        "f.*", "m.*", "std.*", # includes -fopenmp
                        "isystem", # gcc and clones
                        "x",
                        "pipe",
                        "cpp", # gfortran
                        "g",  # not portable, waste of space
                        "q"),
                      collapse = "|"))
    for(i in seq_along(lines)) {
        bad <- grep(bad_flags_regexp, flags[[i]], value = TRUE)
        ## -std=f* selects the Fortran standard, so is allowed for the
        ## Fortran flag variables.
        if (names[i] %in% c("PKG_FFLAGS", "PKG_FCFLAGS"))
            bad <- grep("^-std=f", bad, invert = TRUE, value = TRUE)
        if(length(bad))
            bad_flags$pflags <-
                c(bad_flags$pflags,
                  structure(list(bad), names = names[i]))
    }

    ## The above does not know about GNU make extensions like
    ## target.o: PKG_CXXFLAGS = -mavx
    ## so grep files directly.
    for (f in paths) {
        lines <- readLines(f, warn = FALSE)
        ## NOTE(review): '[.o]' matches a single '.' OR 'o'; possibly
        ## '[.]o' (a literal '.o' suffix) was intended -- confirm.
        pflags_re2 <- sprintf(".*[.o]: +PKG_(%s)FLAGS *=",
                              paste(prefixes, collapse = "|"))
        lines <- grep(pflags_re2, lines, value = TRUE)
        lines <- sub(pflags_re2, "", lines)
        flags <- strsplit(lines, "[[:space:]]+")
        bad <- character()
        for(i in seq_along(lines))
            bad <- c(bad, grep(bad_flags_regexp, flags[[i]], value = TRUE))
        if(length(bad))
            bad_flags$p2flags <-
                c(bad_flags$p2flags,
                  structure(list(bad), names = file.path("src", basename(f))))
    }

    bad_flags
}

format.check_make_vars <-
function(x, ...)
{
    ## Format the results of .check_make_vars() as a character vector
    ## of diagnostic messages (empty if there is nothing to report).

    ## Per-variable report of non-portable PKG_*FLAGS settings.
    .fmt <- function(x) {
        s <- Map(c,
                 gettextf("Non-portable flags in variable '%s':",
                          names(x)),
                 sprintf("  %s", lapply(x, paste, collapse = " ")))
        as.character(unlist(s))
    }

    ## Per-file report of target-specific (GNU make) settings.
    .fmt2 <- function(x) {
        s <- Map(c,
                 gettextf("Non-portable flags in file '%s':",
                          names(x)),
                 sprintf("  %s", lapply(x, paste, collapse = " ")))
        as.character(unlist(s))
    }

    c(character(),
      if(length(bad <- x$pflags)) .fmt(bad),
      if(length(bad <- x$p2flags)) .fmt2(bad),
      if(length(bad <- x$uflags)) {
          ## No format directives here, so use gettext() (the previous
          ## gettextf() call passed no format arguments).
          c(gettext("Variables overriding user/site settings:"),
            sprintf("  %s", bad))
      },
      if(length(x$paths) > 1L) {
          ## Both src/Makevars.in and src/Makevars present.
          ## (Message previously lacked the opening quote before
          ## --no-configure.)
          c(sprintf("Package has both %s and %s.",
                  sQuote("src/Makevars.in"), sQuote("src/Makevars")),
            strwrap(sprintf("Installation with '--no-configure' is unlikely to work.  If you intended %s to be used on Windows, rename it to %s, otherwise remove it.  If %s created %s, you need a %s script.",
                            sQuote("src/Makevars"),
                            sQuote("src/Makevars.win"),
                            sQuote("configure"),
                            sQuote("src/Makevars"),
                            sQuote("cleanup"))))
      })
}

### * .check_code_usage_in_package

## First, its auxiliaries
##
## - .unix_only_proto_objects
## - .windows_only_proto_objects
## - compatibilityEnv ()        -- used also in codoc()

## Stub functions (matching formals, empty bodies) for objects which
## exist only on Unix-alikes: attached via compatibilityEnv() when
## checking on Windows, so that uses of these are not reported as
## undefined globals.
.unix_only_proto_objects <- as.environment(list(
    nsl = function(hostname) {}
  , X11Font = function(font) {}
  , X11Fonts = function(...) {}
  , X11.options = function(..., reset = TRUE) {}
  , quartz = function(title, width, height, pointsize, family,
                      fontsmooth, antialias, type, file = NULL,
                      bg, canvas, dpi) {}
  , quartzFont = function(family) {}
  , quartzFonts = function(...) {}
  , quartz.options = function(..., reset = TRUE) {}
  , quartz.save = function(file, type = "png", device = dev.cur(),
                           dpi = 100, ...) {}
))

## Stub functions (matching formals, empty bodies) for objects which
## exist only on Windows: attached via compatibilityEnv() when checking
## on Unix-alikes, so that uses of these are not reported as undefined
## globals.
.windows_only_proto_objects <- as.environment(list(
    arrangeWindows = function(action = c("vertical", "horizontal",
                                         "cascade", "minimize", "restore"),
                              windows, preserve = TRUE, outer = FALSE) {}
  , askYesNoWinDialog = function(msg, ...) {}
  , bringToTop = function(which = grDevices::dev.cur(), stay = FALSE) {}
  , choose.dir = function(default = "", caption = "Select folder") {}
  , choose.files = function(default = "", caption = "Select files", multi = TRUE,
                            filters = Filters, index = nrow(Filters)) {
      Filters <- NULL }
  , Filters = NULL
  , close.winProgressBar = function(con, ...) {}
  , DLL.version = function(path) {}
  , .fixupGFortranStderr = function() {}
  , .fixupGFortranStdout = function() {}
  , getClipboardFormats = function(numeric = FALSE) {}
  , getIdentification = function() {}
  , getWindowsHandle = function(which = "Console") {}
  , getWindowsHandles = function(which = "R", pattern = "", minimized = FALSE) {}
  , getWindowTitle = function() {}
  , getWinProgressBar = function(pb) {}
  , .install.winbinary = function(pkgs, lib, repos = getOption("repos"),
                                  contriburl = utils::contrib.url(repos),
                                  method, available = NULL, destdir = NULL,
                                  dependencies = FALSE, libs_only = FALSE, ...) {}
  , loadRconsole = function(file = choose.files(file.path(
                                Sys.getenv("R_USER"), "Rconsole"))) {}
  , msgWindow = function(type = c("minimize", "restore", "maximize", "hide",
                                  "recordOn", "recordOff"),
                         which = dev.cur()) {}
  , readClipboard = function(format = 1, raw = FALSE) {}
  , readRegistry = function(key,
                            hive = c("HLM", "HCR", "HCU", "HU", "HCC", "HPD"),
                            maxdepth = 1,
                            view = c("default", "32-bit", "64-bit")) {}
  ## Exists on all platforms though with differing formals :
  ## , savePlot = function(filename = "Rplot",
  ##                       type = c("wmf", "emf", "png", "jpeg", "jpg",
  ##                                "bmp", "ps", "eps", "pdf"),
  ##                       device = grDevices::dev.cur(), restoreConsole = TRUE) {}
  , setStatusBar = function(text) {}
  , setWindowTitle = function(suffix, title = paste(utils::getIdentification(),
                                                    suffix)) {}
  , setWinProgressBar = function(pb, value, title=NULL, label=NULL) {}
  , shell = function(cmd, shell, flag = "/c", intern = FALSE,
                     wait = TRUE, translate = FALSE, mustWork = FALSE, ...) {}
  , shell.exec = function(file) {}
  , shortPathName = function(path) {}
  , Sys.junction = function(from, to) {}
  , win.graph = function(width = 7, height = 7, pointsize = 12,
                         restoreConsole = FALSE) {}
  , win.metafile = function(filename = "", width = 7, height = 7,
                            pointsize = 12, family = "",
                            restoreConsole = TRUE) {}
  , win.print = function(width = 7, height = 7, pointsize = 12,
                         printer = "", family = "", antialias = "default",
                         restoreConsole = TRUE) {}
  , win.version = function() {}
  , windows = function(width, height, pointsize,
                       record, rescale, xpinch, ypinch,
                       bg, canvas, gamma, xpos, ypos,
                       buffered, title, restoreConsole, clickToConfirm,
                       fillOddEven, family = "", antialias) {}
  , windowsFont = function(font) {}
  , windowsFonts = function(...) {}
  , windows.options = function(..., reset = TRUE) {}
  , winDialog = function(type = "ok", message) {}
  , winDialogString = function(message, default) {}
  , winMenuAdd = function(menuname) {}
  , winMenuAddItem = function(menuname, itemname, action) {}
  , winMenuDel = function(menuname) {}
  , winMenuDelItem = function(menuname, itemname) {}
  , winMenuNames = function() {}
  , winMenuItems = function(menuname) {}
  , winProgressBar = function(title = "R progress bar", label = "",
                              min = 0, max = 1, initial = 0, width = 300) {}
  , writeClipboard = function(str, format = 1L) {}
  , zip.unpack = function(zipname, dest) {}
))

compatibilityEnv <- function() {
    ## Return the environment holding the stubs for the *other* OS
    ## type's objects, so that cross-platform code analysis does not
    ## flag them as undefined.  Used here and in codoc().
    ## (this formulation allows more than two OS.type s)
    os <- .Platform$OS.type
    if(os == "windows")
        .unix_only_proto_objects
    else if(os == "unix")
        .windows_only_proto_objects
    else
        ## in such a future case, possibly the "union" of these
        ## environments:
        stop(gettextf("invalid 'OS.type' \"%s\".  Should not happen",
                      os), domain = NA)
}

.check_code_usage_in_package <-
function(package, lib.loc = NULL)
{
    ## Run the codetools usage checks (undefined globals, partial
    ## argument matches, unused locals, ...) over the functions and S4
    ## methods of an installed package, returning the collected
    ## diagnostics as a character vector of class
    ## "check_code_usage_in_package".
    ##
    ## package ..: name of an installed package.
    ## lib.loc ..: library trees to search, or NULL for the defaults.
    is_base <- package == "base"

    ## By default analyze the namespace directly without attaching the
    ## package to the search path.
    check_without_loading <-
        config_val_to_logical(Sys.getenv("_R_CHECK_CODE_USAGE_VIA_NAMESPACES_",
                                         "TRUE"))

    if(!is_base) {
        if(!check_without_loading) {
            .load_package_quietly(package, lib.loc)
            .eval_with_capture({
                ## avoid warnings about code in other packages the package
                ## uses
                desc <- readRDS(file.path(find.package(package, NULL),
                                          "Meta", "package.rds"))
                pkgs1 <- sapply(desc$Suggests, `[[`, "name")
                pkgs2 <- sapply(desc$Enhances, `[[`, "name")
                for(pkg in unique(c(pkgs1, pkgs2)))
                    ## tcltk warns if no DISPLAY variable
                    ##, errors if not compiled in
                    suppressMessages(
                        tryCatch(require(pkg, character.only = TRUE,
                                         quietly = TRUE),
                                 error  = function(.) NULL,
                                 warning= function(.) NULL))
            }, type = "output")
        }
        if(is.null(.GlobalEnv$.Random.seed)) # create .Random.seed if necessary
            stats::runif(1)
        ## Attach stubs for the other OS type's functions so that they
        ## are not reported as undefined (detached again on exit).
        attach(compatibilityEnv(), name="compat", pos = length(search()),
               warn.conflicts = FALSE)
        on.exit(detach("compat"))
    }

    ## A simple function for catching the output from the codetools
    ## analysis using the checkUsage report mechanism.
    out <- character()
    foo <- function(x) out <<- c(out, x)
    ## (Simpler than using a variant of capture.output().)
    ## Of course, it would be nice to return a suitably structured
    ## result, but we can always do this by suitably splitting the
    ## messages on the double colons ...

    ## Not only check function definitions, but also S4 methods
    ## [a version of this should be part of codetools eventually] :
    checkMethodUsageEnv <- function(env, ...) {
        for(g in .get_S4_generics(env))
            for(m in .get_S4_methods_list(g, env)) {
                fun <- methods::unRematchDefinition(methods::getDataPart(m))
                signature <- paste(m@generic,
                                   paste(m@target, collapse = "-"),
                                   sep = ",")
                codetools::checkUsage(fun, signature, ...)
            }
    }
    checkMethodUsagePackage <- function (pack, ...) {
        pname <- paste0("package:", pack)
        if (pname %notin% search())
            stop("package must be loaded", domain = NA)
        checkMethodUsageEnv(if (isNamespaceLoaded(pack))
                            getNamespace(pack) else as.environment(pname), ...)
    }

    ## Allow specifying a codetools "profile" for checking via the
    ## environment variable _R_CHECK_CODETOOLS_PROFILE_, used as e.g.
    ##   _R_CHECK_CODETOOLS_PROFILE_="suppressLocalUnused=FALSE"
    ## (where the values get converted to logicals "the usual way").
    args <- list(skipWith = TRUE,
                 suppressPartialMatchArgs = FALSE,
                 suppressLocalUnused = TRUE)
    opts <- unlist(strsplit(Sys.getenv("_R_CHECK_CODETOOLS_PROFILE_"),
                            "[[:space:]]*,[[:space:]]*"))
    if(length(opts)) {
        args[sub("[[:space:]]*=.*", "", opts)] <-
            lapply(sub(".*=[[:space:]]*", "", opts),
                   config_val_to_logical)
    }
    if(check_without_loading)
        env <- suppressWarnings(suppressMessages(getNamespace(package)))
    ## look for globalVariables declaration in package
    ## (This loads the namespace if not already loaded.)
    .glbs <- suppressMessages(utils::globalVariables(, package))
    if(length(.glbs)) {
        ## Cannot use globalVariables() for base
        ## (and potentially tools and utils)
        dflt <- c(if(package == "base") "last.dump",
                  ".Generic", ".Method", ".Class")
        args$suppressUndefined <- c(dflt, .glbs)
    }

    ## Run the checks, routing all reports through foo().
    if(check_without_loading) {
        args <- c(list(env, report = foo), args)
        suppressMessages(do.call(codetools::checkUsageEnv, args))
        suppressMessages(do.call(checkMethodUsageEnv, args))
    } else {
        args <- c(list(package, report = foo), args)
        suppressMessages(do.call(codetools::checkUsagePackage, args))
        suppressMessages(do.call(checkMethodUsagePackage, args))
    }

    out <- unique(out)
    class(out) <- "check_code_usage_in_package"
    out
}

format.check_code_usage_in_package <-
function(x, ...)
{
    ## Format the diagnostics collected by
    ## .check_code_usage_in_package(), moving partial argument match
    ## reports to the front and appending a summary of the undefined
    ## globals mentioned in the messages.
    if(!length(x))
        return(character())

    ## There seems no easy way to gather usage diagnostics by type, so
    ## rearrange to some extent when formatting: partial argument
    ## matches first.
    partial <- grepl(": partial argument match of", x, fixed = TRUE)
    if(any(partial))
        x <- c(x[partial], x[!partial])

    ## Pull the names of undefined globals out of the messages.
    msgs <- .canonicalize_quotes(x)
    extract <- function(re) {
        m <- regexec(re, msgs)
        vapply(Filter(length, regmatches(msgs, m)), `[`, "", 2L)
    }
    globals <- sort(unique(c(
        extract("no visible global function definition for '(.*)'"),
        extract("no visible binding for global variable '(.*)'"))))

    c(strwrap(x, indent = 0L, exdent = 2L),
      if(length(globals)) {
          c("Undefined global functions or variables:",
            strwrap(paste(globals, collapse = " "),
                    indent = 2L, exdent = 2L))
      })
}

### * .check_Rd_xrefs

.check_Rd_xrefs <-
function(package, dir, lib.loc = NULL)
{
    ## Check the \link cross-references in the Rd files of a package
    ## (either an installed 'package' or a source directory 'dir')
    ## against the aliases available in base/recommended packages, the
    ## packages it depends on, and the package itself.  Returns an
    ## object of class "check_Rd_xrefs" listing bad links, (optionally)
    ## suspect anchors, and unknown/undeclared/unavailable anchor
    ## packages.

    ## Build a db with all possible link targets (aliases) in the base
    ## and recommended packages.
    base <- unlist(.get_standard_package_names()[c("base", "recommended")],
                   use.names = FALSE)
    ## May not have recommended packages
    base <- base[dir.exists(file.path(.Library, base))]
    aliases <- lapply(base, Rd_aliases, lib.loc = NULL)
    ## (Don't use lib.loc = .Library, as recommended packages may have
    ## been installed to a different place.)

    ## Now find the aliases in packages it depends on
    if(!missing(package)) {
        pfile <- system.file("Meta", "package.rds", package = package,
                             lib.loc = lib.loc)
        pkgInfo <- readRDS(pfile)
    } else {
        pkgInfo <- .split_description(.get_package_metadata(dir))
    }
    pkgname <- pkgInfo$DESCRIPTION[["Package"]]
    ## only 'Depends' are guaranteed to be on the search path, but
    ## 'Imports' have to be installed and hence help there will be found
    deps <- c(names(pkgInfo$Depends), names(pkgInfo$Imports))
    pkgs <- setdiff(unique(deps), base)
    try_Rd_aliases <- function(...) tryCatch(Rd_aliases(...), error = identity)
    aliases <- c(aliases, lapply(pkgs, try_Rd_aliases, lib.loc = lib.loc))
    ## Drop dependencies whose aliases could not be obtained.
    aliases[vapply(aliases, inherits, "error", FUN.VALUE = NA)] <- NULL

    ## Add the aliases from the package itself, and build a db with all
    ## (if any) \link xrefs in the package Rd objects.
    if(!missing(package)) {
        aliases1 <- Rd_aliases(package, lib.loc = lib.loc)
        if(!length(aliases1))
            return(structure(list(), class = "check_Rd_xrefs"))
        aliases <- c(aliases, list(aliases1))
        db <- .build_Rd_xref_db(package, lib.loc = lib.loc)
    } else {
        aliases1 <- Rd_aliases(dir = dir)
        if(!length(aliases1))
            return(structure(list(), class = "check_Rd_xrefs"))
        aliases <- c(aliases, list(aliases1))
        db <- .build_Rd_xref_db(dir = dir)
    }

    ## Flatten the xref db into one big matrix.
    db <- cbind(do.call(rbind, db),
                File = rep.int(names(db), vapply(db, NROW, 0L)))
    if(nrow(db) == 0L)
        return(structure(list(), class = "check_Rd_xrefs"))

    ## fixup \link[=dest] form
    anchor <- db[, 2L]
    have_equals <- startsWith(anchor, "=")
    if(any(have_equals))
        db[have_equals, 1:2] <- cbind(sub("^=", "", anchor[have_equals]), "")

    ## 'report' is the link as shown in diagnostics: with the anchor
    ## spelled out for anchored links.
    db <- cbind(db, bad = FALSE, report = db[, 1L])
    have_anchor <- nzchar(anchor <- db[, 2L])
    db[have_anchor, "report"] <-
        paste0("[", db[have_anchor, 2L], "]{", db[have_anchor, 1L], "}")

    ## Check the targets from the non-anchored xrefs.
    db[!have_anchor, "bad"] <- db[!have_anchor, 1L] %notin% unlist(aliases)

    ## and then check the anchored ones if we can.
    ## Anchors are of the form 'pkg' or 'pkg:file'.
    have_colon <- grepl(":", anchor, fixed = TRUE)
    unknown <- undeclared <- unavailable <- character()
    thispkg <- anchor
    thisfile <- db[, 1L]
    thispkg [have_colon] <- sub("([^:]*):(.*)", "\\1", anchor[have_colon])
    thisfile[have_colon] <- sub("([^:]*):(.*)", "\\2", anchor[have_colon])

    ## Optionally consult the CRAN aliases db for anchor packages which
    ## are not installed locally (lazily read below).
    use_aliases_from_CRAN <-
        config_val_to_logical(Sys.getenv("_R_CHECK_XREFS_USE_ALIASES_FROM_CRAN_",
                                         "FALSE"))
    if(use_aliases_from_CRAN) {
        aliases_db <- NULL
        tried_aliases_db <- FALSE
    }

    anchors <- unique(thispkg[have_anchor])

    ## added in 4.1.0: are anchors declared?
    check_anchors <-
        config_val_to_logical(Sys.getenv("_R_CHECK_XREFS_PKGS_ARE_DECLARED_",
                                         "FALSE"))
    if(check_anchors) {
        deps2 <- c(names(pkgInfo$Depends), names(pkgInfo$Imports),
                   names(pkgInfo$Suggests))
        ## people link to the package itself, although never needed.
        undeclared <- setdiff(anchors, c(unique(deps2), pkgname, base))
        if(length(undeclared)) {
            ## Now dig out Enhances
            DESC <- pkgInfo$DESCRIPTION
            if("Enhances" %in% names(DESC)) {
                enh <- names(.split_dependencies(DESC[["Enhances"]]))
                undeclared <- setdiff(undeclared, enh)
            }
        }
    }

    ## "Suspect" anchors name an Rd *file* rather than an alias, which
    ## works but is fragile: only reported if explicitly requested.
    mind_suspects <-
        config_val_to_logical(Sys.getenv("_R_CHECK_XREFS_MIND_SUSPECT_ANCHORS_",
                                         "FALSE"))
    if(mind_suspects) {
        db <- cbind(db, suspect = FALSE)
    }

    for(pkg in anchors) {
        ## we can't do this on the current uninstalled package!
        if (missing(package) && pkg == pkgname) next
        this <- have_anchor & (thispkg %in% pkg)
        top <- system.file(package = pkg, lib.loc = lib.loc)
        if(nzchar(top)) {
            ## Anchor package is installed: check against its aliases,
            ## falling back to its Rd file names for "suspect" links.
            aliases1 <- if(pkg %in% names(aliases))
                            aliases[[pkg]]
                        else
                            Rd_aliases(pkg, lib.loc = lib.loc)
            good <- thisfile[this] %in% aliases1
            suspect <- if(any(!good)) {
                           RdDB <- file.path(top, "help", "paths.rds")
                           nm <- sub("\\.[Rr]d", "", basename(readRDS(RdDB)))
                           !good & (thisfile[this] %in% nm)
                       } else FALSE
            db[this, "bad"] <- !good & !suspect
            if(mind_suspects)
                db[this, "suspect"] <- suspect

        } else if(use_aliases_from_CRAN) {
            if(is.null(aliases_db)) {
                if (tried_aliases_db) {
                    unknown <- c(unknown, pkg)
                    next
                }
                tried_aliases_db <- TRUE
                ## Not yet read in.
                ## This can fail if e.g. CRAN is updating DB
                aliases_db <- tryCatch(CRAN_aliases_db(),
                                       error = function(e) NULL)
                if (is.null(aliases_db)) {
                    unknown <- c(unknown, pkg)
                    next
                }
            }
            aliases <- aliases_db[[pkg]]
            if(is.null(aliases)) {
                unknown <- c(unknown, pkg)
                next
            }
            ## message(sprintf("Using aliases db for package %s", pkg))
            aliases1 <- unique(as.character(unlist(aliases,
                                                   use.names = FALSE)))
            good <- thisfile[this] %in% aliases1
            suspect <- if(any(!good)) {
                           nm <- sub("\\.[Rr]d", "",
                                     basename(names(aliases)))
                           !good & (thisfile[this] %in% nm)
                       } else FALSE
            db[this, "bad"] <- !good & !suspect
            if(mind_suspects)
                db[this, "suspect"] <- suspect
        }
        else
            unknown <- c(unknown, pkg)
    }

    ## Split the uncheckable anchor packages into those which at least
    ## exist in the repositories ('unavailable' locally) and those
    ## which are truly 'unknown'.
    unknown <- unique(unknown)
    if(length(unknown)) {
        ## respect _R_CHECK_XREFS_REPOSITORIES_ for this use
        repos <- .get_standard_repository_URLs(ForXrefs = TRUE)
        ## Also allow for additionally specified repositories.
        aurls <- pkgInfo[["DESCRIPTION"]]["Additional_repositories"]
        if(!is.na(aurls)) {
            repos <- c(repos, .read_additional_repositories_field(aurls))
        }
        known <-
            try(suppressWarnings(utils::available.packages(utils::contrib.url(repos, "source"),
               filters = c("R_version", "duplicates"))[, "Package"]))
        miss <- if(inherits(known, "try-error")) TRUE
        else unknown %in% c(known, c("GLMMGibbs", "survnnet", "yags"))
        ## from CRANextras
        unavailable <- unknown[miss]
        unknown <- unknown[!miss]
    }

    ## The bad ones: (db is a character matrix, hence the comparison
    ## against "TRUE")
    bad <- db[, "bad"] == "TRUE"

    out <- list(bad = split(db[bad, "report"], db[bad, "File"]))
    if(mind_suspects && any(ind <- db[, "suspect"] == "TRUE")) {
        out <- c(out, list(suspect = split(db[ind, "report"],
                                           db[ind, "File"])))
    }
    out <- c(out, Filter(length,
                         list(unknown = unknown,
                              undeclared = undeclared,
                              unavailable = unavailable)))
    structure(out, class = "check_Rd_xrefs")
}

format.check_Rd_xrefs <-
function(x, ...)
{
    ## Format results of the Rd cross-reference checks.  'x' carries
    ## per-file lists of bad ('bad') and suspect ('suspect') links, and
    ## character vectors 'undeclared', 'unavailable' and 'unknown' of
    ## package names.  Returns a character vector of report lines
    ## (empty when there is nothing to report).
    xb <- x$bad
    xs <- x$suspect
    ## Misplaced parenthesis fixed: this was any(lengths(x)) > 0L, which
    ## compared the logical result of any() against 0L; the length
    ## comparison belongs inside any().  (Net result was the same, but
    ## only via implicit integer -> logical coercion.)
    if(any(lengths(x) > 0L)) {
        .fmtb <- function(i) {
            c(gettextf("Missing link(s) in Rd file '%s':",
                       names(xb)[i]),
              ## NB, link might be empty, and was in mvbutils
              .pretty_format(unique(xb[[i]])),
              "")
        }
        .fmts <- function(i) {
            c(gettextf("Non-topic package-anchored link(s) in Rd file '%s':",
                       names(xs)[i]),
              .pretty_format(unique(xs[[i]])),
              "")
        }
        c(if(length(y <- x$undeclared))
              sprintf(ngettext(length(y),
                               "Undeclared package %s in Rd xrefs",
                               "Undeclared packages %s in Rd xrefs"),
                      paste(sQuote(y), collapse = ", ")),
          if(length(y <- x$unavailable))
              sprintf(ngettext(length(y),
                               "Package unavailable to check Rd xrefs: %s",
                               "Packages unavailable to check Rd xrefs: %s"),
                      paste(sQuote(y), collapse = ", ")),
          if(length(y <- x$unknown))
              sprintf(ngettext(length(y),
                               "Unknown package %s in Rd xrefs",
                               "Unknown packages %s in Rd xrefs"),
                      paste(sQuote(y), collapse = ", ")),
          unlist(lapply(seq_along(xb), .fmtb)),
          unlist(lapply(seq_along(xs), .fmts)),
          if(length(xb) || length(xs))
              strwrap(gettextf("See section 'Cross-references' in the 'Writing R Extensions' manual."))
          )
    } else {
        character()
    }
}

### * .check_package_datasets

.check_package_datasets <-
function(pkgDir)
{
    ## Load every dataset shipped in <pkgDir>/data and scan all character
    ## data (recursively, including names and attributes) for encoding
    ## issues.  Returns a "check_package_datasets" object holding counts
    ## of strings marked latin1 / UTF-8 / bytes, and a matrix of
    ## non-ASCII strings whose encoding is unmarked ("unknown").
    ## Run in the C locale so byte-level checks are predictable; the
    ## caller's locale and warning settings are restored on exit.
    oLC_ct <- Sys.getlocale("LC_CTYPE"); on.exit(Sys.setlocale("LC_CTYPE", oLC_ct))
    Sys.setlocale("LC_CTYPE", "C")
    oop <- options(warn = -1)
    on.exit(options(oop), add = TRUE)
    ## Recursively inspect one object 'x' from dataset 'ds', accumulating
    ## counts and offending strings in the enclosing frame via <<-.
    check_one <- function(x, ds)
    {
        if(!length(x)) return()
        ## avoid as.list methods
        if(is.list(x)) lapply(unclass(x), check_one, ds = ds)
        if(is.character(x)) {
            xx <- unclass(x)
            enc <- Encoding(xx)
            latin1 <<- latin1 + sum(enc == "latin1")
            utf8 <<- utf8 + sum(enc == "UTF-8")
            bytes <<- bytes + sum(enc == "bytes")
            ## strings with no declared encoding: record those which are
            ## not pure ASCII (detected at C level)
            unk <- xx[enc == "unknown"]
            ind <- .Call(C_check_nonASCII2, unk)
            if(length(ind)) {
                non_ASCII <<- c(non_ASCII, unk[ind])
                where <<- c(where, rep.int(ds, length(ind)))
            }
        }
        ## attributes (and the attribute names themselves) can carry
        ## character data too
        a <- attributes(x)
        if(!is.null(a)) {
            lapply(a, check_one, ds = ds)
            check_one(names(a), ds)
        }
        invisible()
    }

    sink(tempfile()) ## suppress startup messages to stdout
    on.exit(sink(), add = TRUE)
    files <- list_files_with_type(file.path(pkgDir, "data"), "data")
    files <- unique(basename(file_path_sans_ext(files)))
    ans <- vector("list", length(files))
    dataEnv <- new.env(hash=TRUE)
    names(ans) <- files
    old <- setwd(pkgDir)

    ## formerly used .try_quietly which stops on error
    ## Evaluate 'expr' with all output captured; on error, emit 'msg'
    ## plus the condition message and an abbreviated call sequence via
    ## message(), but do not stop -- remaining datasets are still tried.
    .try <- function (expr, msg) {
        oop <- options(warn = 1)
        on.exit(options(oop))
        outConn <- file(open = "w+")
        sink(outConn, type = "output")
        sink(outConn, type = "message")
        tryCatch(withRestarts(withCallingHandlers(expr, error = {
            function(e) invokeRestart("grmbl", e, sys.calls())
        }), grmbl = function(e, calls) {
            ## drop the handler frames so the traceback shows the code
            ## that actually failed
            n <- length(sys.calls())
            calls <- calls[-seq.int(length.out = n - 1L)]
            calls <- rev(calls)[-c(1L, 2L)]
            tb <- lapply(calls, deparse)
            message(msg, conditionMessage(e), "\nCall sequence:\n",
                    paste(c(utils::head(.eval_with_capture(traceback(tb))$output, 5),
                            "  ..."),
                          collapse = "\n"),
                    "\n")
        }), error = identity, finally = {
            sink(type = "message")
            sink(type = "output")
            close(outConn)
        })
    }

    for(f in files) {
        msg <- sprintf("Error loading dataset %s: ", sQuote(f))
        .try(utils::data(list = f, package = character(), envir = dataEnv), msg)
    }
    setwd(old)

    non_ASCII <- where <- character()
    latin1 <- utf8 <- bytes <- 0L
    ## avoid messages about loading packages that started with r48409
    ## (and some more ...)
    ## add try() to ensure that all datasets are looked at
    ## (if not all of each dataset).
    for(ds in ls(envir = dataEnv, all.names = TRUE)) {
        if(inherits(suppressWarnings(suppressMessages(try(check_one(get(ds, envir = dataEnv), ds), silent = TRUE))),
                    "try-error")) {
            msg <- sprintf("Error loading dataset %s:\n ", sQuote(ds))
            message(msg, geterrmessage())
        }
    }
    ## one row per (string, dataset) pair, deduplicated
    unknown <- unique(cbind(non_ASCII, where))
    structure(list(latin1 = latin1, utf8 = utf8, bytes = bytes,
                   unknown = unknown),
              class = "check_package_datasets")
}

format.check_package_datasets <-
function(x, ...)
{
    ## Format the results of .check_package_datasets() as report lines.
    ## Quote without sQuote: the dataset checks may have mucked about
    ## with locales, so plain ASCII quotes are used here.
    iconv0 <- function(s, ...) paste0("'", iconv(s, ...), "'")

    suppress_notes <-
        config_val_to_logical(Sys.getenv("_R_CHECK_PACKAGE_DATASETS_SUPPRESS_NOTES_",
                                         "TRUE"))

    ## One note per non-zero count, unless notes are suppressed.
    note <- function(n, singular, plural)
        if(n && !suppress_notes) sprintf(ngettext(n, singular, plural), n)

    out <- c(character(),
             note(x$latin1,
                  "Note: found %d marked Latin-1 string",
                  "Note: found %d marked Latin-1 strings"),
             note(x$utf8,
                  "Note: found %d marked UTF-8 string",
                  "Note: found %d marked UTF-8 strings"),
             ## "bytes" strings were reported as a Warning before being
             ## demoted to a Note (elevated again elsewhere in 4.5.0).
             note(x$bytes,
                  "Note: found %d string marked as \"bytes\"",
                  "Note: found %d strings marked as \"bytes\""))
    ## Non-ASCII strings with unmarked encoding are always a Warning.
    if(nr <- nrow(x$unknown)) {
        msg <- ngettext(nr,
                        "Warning: found non-ASCII string",
                        "Warning: found non-ASCII strings",
                        domain = NA)
        out <- c(out, msg,
                 paste0(iconv0(x$unknown[, 1L], "", "ASCII", sub = "byte"),
                        " in object '", x$unknown[, 2L], "'"))
    }
    out
}

### * .check_package_datasets2

.check_package_datasets2 <-
function(fileName, pkgname)
{
    ## Load a single dataset from an installed package into a scratch
    ## environment, and report (via message) if nothing was created or
    ## if loading it changed the search path.
    searchBefore <- search()
    envir <- new.env(hash = TRUE)
    suppressMessages(utils::data(list = fileName, package = pkgname,
                                 envir = envir))
    if (length(ls(envir)) == 0L)
        message("No dataset created in 'envir'")
    if (!identical(search(), searchBefore))
        message("Search path was changed")
    invisible(NULL)
}

### * .check_package_compact_datasets

.check_package_compact_datasets <-
function(pkgDir, thorough = FALSE)
{
    ## Check that files in <pkgDir>/data are stored efficiently: flag
    ## large (> 100Kb) files saved uncompressed or in ASCII, check for a
    ## sufficient R (>= 2.10) dependency when bzip2/xz compression is
    ## used, and, if 'thorough', re-save copies to estimate achievable
    ## size savings.  Returns a "check_package_compact_datasets" object.
    msg <- NULL
    rdas <- checkRdaFiles(file.path(pkgDir, "data"))
    row.names(rdas) <- basename(row.names(rdas))
    ## large files saved inefficiently
    problems <- with(rdas, (ASCII | compress == "none") & (size > 1e5))
    uses_recent <- any(rdas$compress %in% c("bzip2", "xz"))
    if (uses_recent) {
        ## bzip2/xz-compressed saves need R >= 2.10 to load, so the
        ## package must declare a suitable R dependency.
        deps_ok <- FALSE
        Rdeps <- .split_description(.read_description(file.path(pkgDir, "DESCRIPTION")))$Rdepends2
        for (dep in Rdeps) {
            if (dep$op %in% c(">=", ">") &&
                dep$version >= package_version("2.10")) {
                deps_ok <- TRUE
                break
            }
        }
        if (!deps_ok)
            msg <- "Warning: package needs dependence on R (>= 2.10)"
    }
    ## Re-saving is only reported for >= 100Kb of data, and files
    ## already using bzip2/xz are assumed to be optimized.
    if (sum(rdas$size) < 1e5 || uses_recent)
        thorough <- FALSE
    sizes <- improve <- NULL
    if (thorough) {
        files <- Sys.glob(c(file.path(pkgDir, "data", "*.rda"),
                            file.path(pkgDir, "data", "*.RData")))
        ## Exclude .RData, which this may or may not match
        files <- files[!endsWith(files, "/.RData")]
        if (length(files)) {
            ## re-save copies in a temporary directory and compare
            cpdir <- tempfile('cp')
            dir.create(cpdir)
            file.copy(files, cpdir)
            resaveRdaFiles(cpdir)
            rdas2 <- checkRdaFiles(cpdir)
            row.names(rdas2) <- basename(row.names(rdas2))
            ## report only files whose storage changed and where the
            ## saving is material (> 10Kb original, >= 10% smaller)
            changed <- (rdas2$ASCII != rdas$ASCII) |
                       (rdas2$compress != rdas$compress)
            changed <- changed & (rdas$size > 1e4) &
                       (rdas2$size < 0.9 * rdas$size)
            sizes <- c(sum(rdas$size), sum(rdas2$size))
            improve <- data.frame(old_size = rdas$size,
                                  new_size = rdas2$size,
                                  compress = rdas2$compress,
                                  row.names = row.names(rdas))[changed, ]
        }
    }
    structure(list(rdas = rdas[problems, 1:3], msg = msg,
                   sizes = sizes, improve = improve),
              class = "check_package_compact_datasets")
}

print.check_package_compact_datasets <-
function(x, ...)
{
    ## Print method for the compact-datasets checks: report large
    ## inefficiently-saved files, any R-dependency warning, and (if
    ## measured) the savings achievable with --resave-data.
    ## Human-readable sizes: bytes up to 1Kb, then Kb / Mb / Gb.
    human_size <- function(bytes) {
        out <- paste0(bytes, "b")
        big <- bytes >= 1024
        out[big] <- sprintf("%.0fKb", bytes[big]/1024)
        big <- bytes >= 1024^2
        out[big] <- sprintf("%.1fMb", bytes[big]/(1024^2))
        big <- bytes >= 1024^3
        out[big] <- sprintf("%.1fGb", bytes[big]/1024^3)
        out
    }
    nr <- nrow(x$rdas)
    if(nr) {
        writeLines(ngettext(nr,
                            "Warning: large data file saved inefficiently:",
                            "Warning: large data files saved inefficiently:",
                            domain = NA))
        rdas <- x$rdas
        rdas$size <- human_size(rdas$size)
        print(rdas)
    }
    if(!is.null(x$msg)) writeLines(x$msg)
    s <- x$sizes
    ## Suggest --resave-data only for savings of >= 100Kb and >= 10%.
    if(!is.null(s) && s[1L] - s[2L] > 1e5 && s[2L]/s[1L] < 0.9) {
        writeLines(c("",
                     "Note: significantly better compression could be obtained",
                     "      by using R CMD build --resave-data"))
        if(nrow(x$improve)) {
            improve <- x$improve
            improve$old_size <- human_size(improve$old_size)
            improve$new_size <- human_size(improve$new_size)
            print(improve)
        }
    }
    invisible(x)
}

### * .check_package_compact_sysdata

.check_package_compact_sysdata <-
function(pkgDir, thorough = FALSE)
{
    ## Like .check_package_compact_datasets(), but for the single
    ## R/sysdata.rda file: flag it if it is large (> 100Kb) yet saved
    ## uncompressed or in ASCII, check for a sufficient R (>= 2.10)
    ## dependency when bzip2/xz compression is used, and, if 'thorough',
    ## re-save a copy to estimate possible size savings.
    ## Returns a "check_package_compact_datasets" object, so the data/
    ## check's print method is reused.
    msg <- NULL
    files <- file.path(pkgDir, "R", "sysdata.rda")
    rdas <- checkRdaFiles(files)
    row.names(rdas) <- basename(row.names(rdas))
    ## large files saved inefficiently
    problems <- with(rdas, (ASCII | compress == "none") & (size > 1e5))
    if (any(rdas$compress %in% c("bzip2", "xz"))) {
        ## bzip2/xz-compressed saves need R >= 2.10 to load, so check
        ## the declared R dependency.
        OK <- FALSE
        Rdeps <- .split_description(.read_description(file.path(pkgDir, "DESCRIPTION")))$Rdepends2
        for(dep in Rdeps) {
            if(!(dep$op %in% c(">=", ">"))) next
            if(dep$version >= package_version("2.10")) {OK <- TRUE; break;}
        }
        if(!OK) msg <- "Warning: package needs dependence on R (>= 2.10)"
    }
    if (sum(rdas$size) < 1e5 || # we don't report unless we get a 1e5 reduction
        any(rdas$compress %in% c("bzip2", "xz"))) # assume already optimized
        thorough <- FALSE
    if (thorough) {
        ## re-save a copy in a temporary directory and compare sizes
        cpdir <- tempfile('cp')
        dir.create(cpdir)
        file.copy(files, cpdir)
        resaveRdaFiles(cpdir)
        rdas2 <- checkRdaFiles(cpdir)
        row.names(rdas2) <- basename(row.names(rdas2))
        ## report only when storage changed and the saving is material
        ## (> 10Kb originally, at least 10% smaller)
        diff2 <- (rdas2$ASCII != rdas$ASCII) | (rdas2$compress != rdas$compress)
        diff2 <- diff2 & (rdas$size > 1e4) & (rdas2$size < 0.9*rdas$size)
        sizes <- c(sum(rdas$size), sum(rdas2$size))
        improve <- data.frame(old_size = rdas$size,
                              new_size = rdas2$size,
                              compress = rdas2$compress,
                              row.names = row.names(rdas))[diff2, ]
    } else sizes <- improve <- NULL
    structure(list(rdas = rdas[problems, 1:3], msg = msg,
                   sizes = sizes, improve = improve),
              class = "check_package_compact_datasets")
}


### * .check_package_subdirs

## used by R CMD build
.check_package_subdirs <-
function(dir, doDelete = FALSE)
{
    ## Check (and, with doDelete = TRUE, remove) files in the R, man,
    ## demo and inst/doc subdirectories of package source directory
    ## 'dir' which do not belong there.  Used by R CMD build.
    ## Returns a "subdir_tests" object: a named list with one character
    ## vector of invalid file names per subdirectory.
    ## (Fixed stray tab indentation and trailing whitespace on the
    ## save_files line; no behavior change.)
    OS_subdirs <- c("unix", "windows")

    ## List the plain files in 'dir'; for R and man also descend into
    ## the OS-specific unix/windows subdirectories.
    mydir <- function(dir)
    {
        d <- list.files(dir, all.files = TRUE, full.names = FALSE)
        if(!length(d)) return(d)
        if(basename(dir) %in% c("R", "man"))
            for(os in OS_subdirs) {
                os_dir <- file.path(dir, os)
                if(dir.exists(os_dir))
                    d <- c(d,
                           file.path(os,
                                     list.files(os_dir,
                                                all.files = TRUE,
                                                full.names = FALSE)))
            }
        d[file_test("-f", file.path(dir, d))]
    }

    if(!dir.exists(dir))
        stop(gettextf("directory '%s' does not exist", dir), domain = NA)
    else
        dir <- file_path_as_absolute(dir)

    wrong_things <- list(R = character(), man = character(),
                         demo = character(), `inst/doc` = character())

    code_dir <- file.path(dir, "R")
    if(dir.exists(code_dir)) {
        all_files <- mydir(code_dir)
        ## Under Windows, need a Makefile.win for methods.
        R_files <- c("sysdata.rda", "Makefile.win", "Makefile.ucrt",
                     list_files_with_type(code_dir, "code",
                                          full.names = FALSE,
                                          OS_subdirs = OS_subdirs))
        wrong <- setdiff(all_files, R_files)
        ## now configure might generate files in this directory
        generated <- which(endsWith(wrong, ".in"))
        if(length(generated)) wrong <- wrong[-generated]
        if(length(wrong)) {
            wrong_things$R <- wrong
            if(doDelete) unlink(file.path(dir, "R", wrong))
        }
    }

    man_dir <- file.path(dir, "man")
    if(dir.exists(man_dir)) {
        all_files <- mydir(man_dir)
        man_files <- list_files_with_type(man_dir, "docs",
                                          full.names = FALSE,
                                          OS_subdirs = OS_subdirs)
        wrong <- setdiff(all_files, man_files)
        if(length(wrong)) {
            wrong_things$man <- wrong
            if(doDelete) unlink(file.path(dir, "man", wrong))
        }
    }

    demo_dir <- file.path(dir, "demo")
    if(dir.exists(demo_dir)) {
        all_files <- mydir(demo_dir)
        demo_files <- list_files_with_type(demo_dir, "demo",
                                           full.names = FALSE)
        ## a demo 'foo.R' (or 'foo.r') may have saved output 'foo.Rout.save'
        save_files <- paste0(sub("r$", "R", demo_files), "out.save")
        wrong <- setdiff(all_files,
                         c("00Index", demo_files, save_files))
        if(length(wrong)) {
            wrong_things$demo <- wrong
            if(doDelete) unlink(file.path(dir, "demo", wrong))
        }
    }

    ## check installed vignette material
    subdir <- file.path("inst", "doc")
    vigns <-
        .package_vignettes_via_call_to_R(dir = dir, subdirs = subdir)
    if (!is.null(vigns) && length(vigns$docs)) {
        vignettes <- basename(vigns$docs)

        ## Add vignette output files, if they exist
        tryCatch({
            vigns <-
                .package_vignettes_via_call_to_R(dir = dir,
                                                 subdirs = subdir,
                                                 output = TRUE)
            vignettes <- c(vignettes, basename(vigns$outputs))
        }, error = function(ex) {})

        ## 'the file names should start with an ASCII letter and be comprised
        ## entirely of ASCII letters or digits or hyphen or underscore'
        ## Do this in a locale-independent way.
        OK <- grep("^[ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz][ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789._-]+$", vignettes)
        wrong <- vignettes
        if(length(OK)) wrong <- wrong[-OK]
        if(length(wrong)) wrong_things$`inst/doc` <- wrong
    }

    class(wrong_things) <- "subdir_tests"
    wrong_things
}

format.subdir_tests <-
function(x, ...)
{
    ## One message block per non-empty component of x, naming the
    ## offending subdirectory and listing its invalid file names.
    fmt_one <- function(i)
        c(sprintf("Subdirectory '%s' contains invalid file names:",
                  names(x)[i]),
          .pretty_format(x[[i]]))

    nonempty <- which(lengths(x) > 0L)
    as.character(unlist(lapply(nonempty, fmt_one)))
}

### * .check_package_ASCII_code

## FIXME: respect_quotes=TRUE does not work well for multi-line quotes
.check_package_ASCII_code <-
function(dir, respect_quotes = FALSE) # by default also look inside quotes
{
    ## Report the R code files (and the NAMESPACE file) under 'dir'
    ## which contain non-ASCII characters.  Offending paths, relative to
    ## 'dir', are printed to stdout and returned invisibly.
    if(!dir.exists(dir))
        stop(gettextf("directory '%s' does not exist", dir), domain = NA)

    dir <- file_path_as_absolute(dir)
    code_files <- c(file.path(dir, "NAMESPACE"),
                    list_files_with_type(file.path(dir, "R"), "code",
                                         OS_subdirs = c("unix", "windows")))
    bad <- character()
    for(f in code_files) {
        lines <- readLines(f, warn = FALSE)
        ## non-ASCII detection is done at C level
        if (.Call(C_check_nonASCII, lines, respect_quotes))
            bad <- c(bad, f)
    }
    if(length(bad)) {
        ## strip "<dir>/" so the report shows package-relative paths
        bad <- substring(bad, nchar(dir) + 2L)
        cat(bad, sep = "\n")
    }
    invisible(bad)
}

### * .check_package_code_syntax

.check_package_code_syntax <-
function(dir)
{
    ## Parse all R code files under 'dir' (including the unix/windows
    ## OS-specific subdirectories) and collect syntax errors and parse
    ## warnings.  Returns a "check_package_code_syntax" object: a list
    ## with one entry per problematic file, each a list with components
    ## File, Error and Warnings.
    if(!dir.exists(dir))
        stop(gettextf("directory '%s' does not exist", dir), domain = NA)
    else
        dir <- file_path_as_absolute(dir)
    dir_name <- basename(dir)

    ## DESCRIPTION lives in the parent of the code directory; use its
    ## Encoding field (if any) to choose a suitable locale below.
    dfile <- file.path(dirname(dir), "DESCRIPTION")
    enc <- if(file.exists(dfile))
        .read_description(dfile)["Encoding"] else NA

    ## This was always run in the C locale < 2.5.0
    ## However, what chars are alphabetic depends on the locale,
    ## so as from R 2.5.0 we try to set a locale.
    ## Any package with no declared encoding should have only ASCII R
    ## code.
    oLC_ct <- Sys.getlocale("LC_CTYPE"); on.exit(Sys.setlocale("LC_CTYPE", oLC_ct))
    if(!is.na(enc)) {  ## try to use the declared encoding
        if(.Platform$OS.type == "windows" && !l10n_info()[["UTF-8"]]) {
            ## "C" is in fact "en"
            switch(enc,
                   "latin2" = Sys.setlocale("LC_CTYPE", 'polish'),
                   Sys.setlocale("LC_CTYPE", "C")
                   )
        } else {
            ## R_ENCODING_LOCALES can map encodings to locale names,
            ## as colon-separated "encoding=locale" pairs
            loc <- Sys.getenv("R_ENCODING_LOCALES", NA_character_)
            if(!is.na(loc)) {
                loc <- strsplit(strsplit(loc, ":")[[1L]], "=")
                nm <- lapply(loc, `[[`, 1L)
                loc <- lapply(loc, `[[`, 2L)
                names(loc) <- nm
                if(!is.null(l <- loc[[enc]]))
                    Sys.setlocale("LC_CTYPE", l)
                else
                    Sys.setlocale("LC_CTYPE", "C")

            } else if(l10n_info()[["UTF-8"]]) {
                ## the hope is that the conversion to UTF-8 works and
                ## so we can validly test the code in the current locale.
            } else {
                ## these are the POSIX forms, but of course not all Unixen
                ## abide by POSIX.  These locales need not exist, but
                ## do in glibc.
                switch(enc,
                       "latin1" = Sys.setlocale("LC_CTYPE", "en_US"),
                       "utf-8"  =,  # not valid, but used
                       "UTF-8"  = Sys.setlocale("LC_CTYPE", "en_US.UTF-8"),
                       "latin2" = Sys.setlocale("LC_CTYPE", "pl_PL"),
                       "latin9" = Sys.setlocale("LC_CTYPE",
                       "fr_FR.iso885915@euro"),
                       Sys.setlocale("LC_CTYPE", "C")
                      )
            }
        }
    }

    ## Parse one file, returning NULL if clean, otherwise a list with
    ## the display path, the parse error (if any) and parse warnings.
    collect_parse_woes <- function(f) {
        .error <- .warnings <- character()
        file <- file.path(dir, f)
        if(!is.na(enc) &&
           (Sys.getlocale("LC_CTYPE") %notin% c("C", "POSIX"))) {
            ## re-encode from the declared encoding to the current
            ## locale before parsing
            lines <- iconv(readLines(file, warn = FALSE), from = enc, to = "",
                           sub = "byte")
            withCallingHandlers(tryCatch(str2expression(lines),
                                         error = function(e)
                                         .error <<- conditionMessage(e)),
                                warning = function(e) {
                                    .warnings <<- c(.warnings,
                                                    conditionMessage(e))
                                    tryInvokeRestart("muffleWarning")
                                })
        } else {
            withCallingHandlers(tryCatch(parse(file),
                                         error = function(e)
                                         .error <<- conditionMessage(e)),
                                warning = function(e) {
                                    .warnings <<- c(.warnings,
                                                    conditionMessage(e))
                                    tryInvokeRestart("muffleWarning")
                                })
        }
        ## (We show offending file paths starting with the base of the
        ## given directory as this provides "nicer" output ...)
        if(length(.error) || length(.warnings))
            list(File = file.path(dir_name, f),
                 Error = .error, Warnings = .warnings)
        else
            NULL
    }

    out <-
        lapply(list_files_with_type(dir, "code", full.names = FALSE,
                                    OS_subdirs = c("unix", "windows")),
               collect_parse_woes)
    structure(out[lengths(out) > 0L],
              class = "check_package_code_syntax")
}

print.check_package_code_syntax <-
function(x, ...)
{
    ## Print parse errors and warnings per file, separating successive
    ## file entries with a blank line.
    for(i in seq_along(x)) {
        if(i > 1L) writeLines("")
        entry <- x[[i]]
        if(length(entry$Error)) {
            ## drop the "<call>: " prefix and indent continuation lines
            msg <- gsub("\n", "\n  ", sub("[^:]*: *", "", entry$Error),
                        perl = TRUE, useBytes = TRUE)
            writeLines(c(sprintf("Error in file '%s':", entry$File),
                         paste0("  ", msg)))
        }
        nw <- length(entry$Warnings)
        if(nw)
            writeLines(c(sprintf(ngettext(nw,
                                          "Warning in file %s:",
                                          "Warnings in file %s:"),
                                 sQuote(entry$File)),
                         paste0("  ", gsub("\n\n", "\n  ", entry$Warnings,
                                           perl = TRUE, useBytes = TRUE))))
    }
    invisible(x)
}

### * .check_package_code_shlib

.check_package_code_shlib <-
function(dir)
{
    ## Flag calls to library.dynam() / library.dynam.unload() in the
    ## package code whose chname argument carries a shared-object
    ## extension (.so/.sl/.dll): the extension must be omitted.
    is_bad_dynam_call <- function(e) {
        ((length(e) > 1L)
            && (length(fn <- as.character(e[[1L]])) == 1L)
            && (fn %in% c("library.dynam", "library.dynam.unload"))
            && (length(arg <- e[[2L]]) == 1L)
            && is.character(arg)
            && grepl("\\.(so|sl|dll)$", arg))
    }

    found <- Filter(length,
                    .find_calls_in_package_code(dir, is_bad_dynam_call,
                                                recursive = TRUE))

    ## Because we really only need this for calling from R CMD check, we
    ## produce output here in case we found something.
    if(length(found))
        writeLines(c(unlist(Map(.format_calls_in_file, found, names(found))),
                     ""))
    ## (Could easily provide format() and print() methods ...)

    invisible(found)
}

### * .check_package_code_startup_functions

.check_package_code_startup_functions <-
function(dir)
{
    ## Check the .First.lib/.onAttach/.onLoad functions defined in the
    ## package code under 'dir': validate their argument lists and look
    ## for calls which should not appear in startup code.  Returns a
    ## "check_package_code_startup_functions" object (a per-file list of
    ## per-function findings); attribute ".First.lib" is set when the
    ## obsolete .First.lib mechanism was found.
    bad_call_names <-
        unlist(.bad_call_names_in_startup_functions)

    ## fcode is the function() call; fname the startup function's name.
    .check_startup_function <- function(fcode, fname) {
        out <- list()
        nms <- names(fcode[[2L]])
        ## Check names of formals.
        ## Allow anything containing ... (for now); otherwise, insist on
        ## length two with names starting with lib and pkg, respectively.
        if(("..." %notin% nms) &&
           ((length(nms) != 2L) ||
            any(substr(nms, 1L, 3L) != c("lib", "pkg"))))
            out$bad_arg_names <- nms
        ## Look at all calls (not only at top level).
        calls <- .find_calls(fcode[[3L]], recursive = TRUE)
        if(!length(calls)) return(out)
        cnames <- .call_names(calls)
        ## And pick the ones which should not be there ...
        ## some names are disallowed only in specific startup functions
        bcn <- bad_call_names
        if(fname == ".onAttach") bcn <- c(bcn, "library.dynam")
        if(fname == ".onLoad") bcn <- c(bcn, "packageStartupMessage")
        ind <- (cnames %in% bcn)
        if(any(ind)) {
            calls <- calls[ind]
            cnames <- cnames[ind]
            ## Exclude library(help = ......) calls.
            pos <- which(cnames == "library")
            if(length(pos)) {
                pos <- pos[vapply(calls[pos],
                                  function(e)
                                      any(names(e)[-1L] == "help"),
                                  NA)]
                ## Could also match.call(base::library, e) first ...
                if(length(pos)) {
                    calls <- calls[-pos]
                    cnames <- cnames[-pos]
                }
            }
            if(length(calls)) {
                out$bad_calls <-
                    list(calls = calls, names = cnames)
            }
        }
        out
    }

    calls <- .find_calls_in_package_code(dir,
                                         .worker =
                                         .get_startup_function_calls_in_file)
    ## record whether the obsolete .First.lib mechanism is used
    FL <- unlist(lapply(calls, `[[`, ".First.lib"))
    ## keep only files and functions with actual findings
    calls <- Filter(length,
                    lapply(calls,
                           function(e)
                           Filter(length,
                                  Map(.check_startup_function,
                                      e, names(e)))))
    if(length(FL)) attr(calls, ".First.lib") <- TRUE
    class(calls) <- "check_package_code_startup_functions"
    calls
}

format.check_package_code_startup_functions <-
function(x, ...)
{
    ## Format the findings of .check_package_code_startup_functions():
    ## per-file/per-function reports followed by summary advice for each
    ## category of problem found.
    res <- if(!is.null(attr(x, ".First.lib"))) "NB: .First.lib is obsolete and will not be used in R >= 3.0.0" else character()
    if(length(x)) {

        ## Flatten out doubly recursive list of functions within list of
        ## files structure for computing summary messages.
        y <- unlist(x, recursive = FALSE)

        has_bad_wrong_args <-
            "bad_arg_names" %in% unlist(lapply(y, names))
        ## names of all disallowed calls found, across all functions
        calls <-
            unique(unlist(lapply(y,
                                 function(e) e[["bad_calls"]][["names"]])))
        has_bad_calls_for_load <-
            any(calls %in% .bad_call_names_in_startup_functions$load)
        has_bad_calls_for_output <-
            any(calls %in% .bad_call_names_in_startup_functions$output)
        has_unsafe_calls <-
            any(calls %in% .bad_call_names_in_startup_functions$unsafe)

        ## one section per file, listing its offending functions
        .fmt_entries_for_file <- function(e, f) {
            c(gettextf("File %s:", sQuote(f)),
              unlist(Map(.fmt_entries_for_function, e, names(e))),
              "")
        }

        ## report wrong argument lists and the deparsed disallowed calls
        .fmt_entries_for_function <- function(e, f) {
            c(if(length(bad <- e[["bad_arg_names"]])) {
                gettextf("  %s has wrong argument list %s",
                         f, sQuote(paste(bad, collapse = ", ")))
            },
              if(length(bad <- e[["bad_calls"]])) {
                  c(gettextf("  %s calls:", f),
                    paste0("    ",
                           unlist(lapply(bad[["calls"]], function(e)
                                         paste(deparse(e), collapse = "")))))
              })
        }

        res <-
            c(res,
              unlist(Map(.fmt_entries_for_file, x, names(x)),
                     use.names = FALSE),
              if(has_bad_wrong_args)
              strwrap(gettextf("Package startup functions should have two arguments with names starting with %s and %s, respectively.",
                               sQuote("lib"), sQuote("pkg")),
                      exdent = 2L),
              if(has_bad_calls_for_load)
              strwrap(gettextf("Package startup functions should not change the search path."),
                      exdent = 2L),
              if(has_bad_calls_for_output)
              strwrap(gettextf("Package startup functions should use %s to generate messages.",
                               sQuote("packageStartupMessage")),
                      exdent = 2L),
              if(has_unsafe_calls)
              strwrap(gettextf("Package startup functions should not call %s.",
                               sQuote("installed.packages")),
                      exdent = 2L),
              gettextf("See section %s in '%s'.",
                       sQuote("Good practice"), "?.onAttach")
              )
    }
    res
}

## Call names disallowed in package startup functions, grouped by the
## advice given in format.check_package_code_startup_functions():
## 'load' calls change the search path, 'output' calls should be
## packageStartupMessage() instead, and 'unsafe' calls are those which
## should not be made at startup at all.
.bad_call_names_in_startup_functions <-
    list(load = c("library", "require"),
         output = c("cat", "message", "print", "writeLines"),
         unsafe = c("installed.packages", "utils::installed.packages"))

.get_startup_function_calls_in_file <-
function(file, encoding = NA)
{
    ## Scan the top-level expressions of 'file' for definitions of the
    ## form  .First.lib/.onAttach/.onLoad <- function(...) ...  (also
    ## with '='), returning a named list which maps each startup
    ## function name found to its function() call.
    exprs <- .parse_code_file(file, encoding)
    startup_names <- c(".First.lib", ".onAttach", ".onLoad")

    ## Use a custom gatherer rather than .find_calls() with a suitable
    ## predicate so that we record the name of the startup function in
    ## which the calls were found.
    found <- list()
    for(e in exprs) {
        if(length(e) <= 2L || !is.name(e[[1L]]))
            next
        if(!(as.character(e[[1L]]) %in% c("<-", "=")))
            next
        lhs <- as.character(e[[2L]])
        if(length(lhs) != 1L || !(lhs %in% startup_names))
            next
        rhs <- e[[3L]]
        if(is.call(rhs) &&
           (length(fn <- as.character(rhs[[1L]])) == 1L) &&
           (fn == "function"))
            found <- c(found, structure(list(rhs), names = lhs))
    }
    found
}

.call_names <-
function(x)
{
    ## Deparse the function part (first element) of each call in the
    ## list 'x', giving one string per call.
    vapply(x, FUN.VALUE = character(1L),
           FUN = function(cl) deparse1(cl[[1L]]))
}


### * .check_package_code_unload_functions

.check_package_code_unload_functions <-
function(dir)
{
    ## Check the .Last.lib/.onDetach functions in the R code of the
    ## package rooted at 'dir': their argument lists, and the calls they
    ## make.  Returns a classed list of per-file, per-function problems;
    ## additionally, when a .Last.lib was found but is not exported via
    ## the NAMESPACE file, the attribute ".Last.lib" is set to TRUE.

    ## Calls which unload functions should not make.
    bad_call_names <- "library.dynam.unload"

    ## Analyze one unload function.  'fcode' is the (unevaluated)
    ## 'function' call, so fcode[[2L]] holds the formals and fcode[[3L]]
    ## the body; 'fname' is currently unused.
    .check_unload_function <- function(fcode, fname) {
        out <- list()
        nms <- names(fcode[[2L]])
        ## Check names of formals.
        ## Allow anything containing ... (for now); otherwise, insist on
        ## length one with names starting with lib.
        if("..." %notin% nms && (length(nms) != 1L || !startsWith(nms, "lib")))
            out$bad_arg_names <- nms
        ## Look at all calls (not only at top level).
        calls <- .find_calls(fcode[[3L]], recursive = TRUE)
        if(!length(calls)) return(out)
        cnames <- .call_names(calls)
        ## And pick the ones which should not be there ...
        ind <- cnames %in% bad_call_names
        if(any(ind))
            out$bad_calls <- list(calls = calls[ind], names = cnames[ind])
        out
    }

    ## All .Last.lib/.onDetach definitions found, grouped by file.
    calls <- .find_calls_in_package_code(dir,
                                         .worker =
                                         .get_unload_function_calls_in_file)
    ## Record whether any .Last.lib definitions exist at all (needed
    ## below for the export check, before filtering drops clean ones).
    LL <- unlist(lapply(calls, `[[`, ".Last.lib"))
    ## Keep only files/functions for which problems were found.
    calls <- Filter(length,
                    lapply(calls,
                           function(e)
                           Filter(length,
                                  Map(.check_unload_function,
                                      e, names(e)))))
    ## A .Last.lib only runs on detach if it is exported: check the
    ## NAMESPACE exports and export patterns, and flag via an attribute
    ## if it is not exported.  (Parse failures of NAMESPACE are ignored
    ## here.)
    if(length(LL) &&
       !inherits(tryCatch(nsInfo <-
                              parseNamespaceFile(basename(dir),
                                                 dirname(dir)),
                          error = identity),
                 "error")) {
        code_objs <- ".Last.lib"
        OK <- intersect(code_objs, nsInfo$exports)
        for(p in nsInfo$exportPatterns)
            OK <- c(OK, grep(p, code_objs, value = TRUE))
        if(!length(OK)) attr(calls, ".Last.lib") <- TRUE
    }
    class(calls) <- "check_package_code_unload_functions"
    calls
}

format.check_package_code_unload_functions <-
function(x, ...)
{
    ## Format the results of .check_package_code_unload_functions():
    ## optionally a note about an unexported .Last.lib, then per-file
    ## listings of problematic unload functions, followed by summary
    ## advice for each kind of problem actually found.
    res <- if(!is.null(attr(x, ".Last.lib"))) "NB: .Last.lib will not be used unless it is exported" else character()
    if(length(x)) {

        ## Flatten out doubly recursive list of functions within list of
        ## files structure for computing summary messages.
        y <- unlist(x, recursive = FALSE)

        has_bad_wrong_args <-
            "bad_arg_names" %in% unlist(lapply(y, names))
        ## Only give the advice about library.dynam.unload() when a bad
        ## call was actually recorded.  (This used to test length(call),
        ## i.e. the length of the base function 'call', which is always
        ## true.)
        has_bad_calls <-
            "bad_calls" %in% unlist(lapply(y, names))

        ## Header line plus per-function entries for one file.
        .fmt_entries_for_file <- function(e, f) {
            c(gettextf("File %s:", sQuote(f)),
              unlist(Map(.fmt_entries_for_function, e, names(e))),
              "")
        }

        ## The problems recorded for a single unload function.
        .fmt_entries_for_function <- function(e, f) {
            c(if(length(bad <- e[["bad_arg_names"]])) {
                gettextf("  %s has wrong argument list %s",
                         f, sQuote(paste(bad, collapse = ", ")))
            },
              if(length(bad <- e[["bad_calls"]])) {
                  c(gettextf("  %s calls:", f),
                    paste0("    ",
                           unlist(lapply(bad[["calls"]], function(e)
                                         paste(deparse(e), collapse = "")))))
              })
        }

        res <-
            c(res,
              unlist(Map(.fmt_entries_for_file, x, names(x)),
                     use.names = FALSE),
              if(has_bad_wrong_args)
              strwrap(gettextf("Package detach functions should have one argument with name starting with %s.", sQuote("lib")),
                      exdent = 2L),
              if(has_bad_calls)
              strwrap(gettextf("Package detach functions should not call %s.",
                               sQuote("library.dynam.unload")),
                      exdent = 2L),
              gettextf("See section %s in '%s'.",
                       sQuote("Good practice"), "?.Last.lib")
              )
    }
    res
}

.get_unload_function_calls_in_file <-
function(file, encoding = NA)
{
    ## Collect the function definitions assigned at top level to
    ## .Last.lib or .onDetach in 'file', as a list of the (unevaluated)
    ## 'function' calls named by the unload function they define.
    exprs <- .parse_code_file(file, encoding)

    ## Use a custom gatherer rather than .find_calls() with a suitable
    ## predicate so that we record the name of the unload function in
    ## which the calls were found.
    defines_unload_fun <- function(e) {
        (length(e) > 2L) &&
            is.name(op <- e[[1L]]) &&
            (as.character(op) %in% c("<-", "=")) &&
            (length(lhs <- as.character(e[[2L]])) == 1L) &&
            (lhs %in% c(".Last.lib", ".onDetach")) &&
            is.call(rhs <- e[[3L]]) &&
            (length(fhead <- as.character(rhs[[1L]])) == 1L) &&
            (fhead == "function")
    }
    defs <- Filter(defines_unload_fun, exprs)
    out <- lapply(defs, `[[`, 3L)
    if(length(out))
        names(out) <- vapply(defs, function(e) as.character(e[[2L]]), "")
    out
}

### * .check_package_code_tampers

.check_package_code_tampers <-
function(dir)
{
    ## Check the R code in package source directory 'dir' for calls
    ## which tamper with locked bindings or other packages' namespaces:
    ## unlockBinding() (directly or via .Internal) and
    ## assignInNamespace() targeting anything other than the package
    ## itself.  Results are written out directly (for R CMD check) and
    ## returned invisibly.
    dfile <- file.path(dir, "DESCRIPTION")
    pkgname <- if(file.exists(dfile))
        .read_description(dfile)["Package"] else ""

    predicate <- function(e) {
        if(length(e) <= 1L) return(FALSE)
        if(as.character(e[[1L]])[1L] %in% "unlockBinding") {
            ## Need the 'env' argument to decide: with too few arguments
            ## the call is malformed and would not run anyway.
            if(length(e) < 3L) return(FALSE)
            e3 <- as.character(e[[3L]])
            if (e3[[1L]] == "asNamespace") e3 <- as.character(e[[3L]][[2L]])
            ## as.character() may give more than one string (e.g. when
            ## the argument is a call), so reduce with any(): flag the
            ## call unless it clearly targets the package itself.
            return(any(e3 != pkgname))
        }
        if((as.character(e[[1L]])[1L] %in% ".Internal") &&
           is.call(e2 <- e[[2L]]) &&
           as.character(e2[[1L]])[1L] == "unlockBinding") return(TRUE)
        if(as.character(e[[1L]])[1L] %in% "assignInNamespace") {
            ## The namespace is the third argument ('ns'), i.e. e[[4L]].
            if(length(e) < 4L) return(FALSE)
            e3 <- as.character(e[[4L]])
            if (e3[[1L]] == "asNamespace") e3 <- as.character(e[[4L]][[2L]])
            ## See above for the use of any().
            return(any(e3 != pkgname))
        }
        FALSE
    }

    x <- Filter(length,
                .find_calls_in_package_code(dir, predicate,
                                            recursive = TRUE))

    ## Because we really only need this for calling from R CMD check, we
    ## produce output here in case we found something.
    if(length(x))
        writeLines(unlist(Map(.format_calls_in_file, x, names(x))))
    ## (Could easily provide format() and print() methods ...)

    invisible(x)
}

### * .check_package_code_assign_to_globalenv

.check_package_code_assign_to_globalenv <-
function(dir)
{
    ## Find assign() calls in the package code which target the global
    ## environment (via 'envir' or 'pos'), except for assignments to
    ## .Random.seed.  Returns a classed list of offending calls by file.
    is_global_assign <- function(e) {
        if(!is.call(e) ||
           (length(fname <- as.character(e[[1L]])) != 1L) ||
           (fname != "assign"))
            return(FALSE)
        ## Drop '...' so that match.call() can work on the rest.
        e <- e[as.character(e) != "..."]
        ## Capture assignments to global env unless to .Random.seed.
        ## (This may fail for conditionalized code not meant for R
        ## [e.g., argument 'where'].)
        mc <- tryCatch(match.call(base::assign, e), error = identity)
        if(inherits(mc, "error") || identical(mc$x, ".Random.seed"))
            return(FALSE)
        env <- mc$envir
        if(!is.null(env) &&
           identical(tryCatch(eval(env), error = identity),
                     globalenv()))
            return(TRUE)
        pos <- mc$pos
        if(!is.null(pos) &&
           identical(tryCatch(eval(call("as.environment", pos)),
                              error = identity),
                     globalenv()))
            return(TRUE)
        FALSE
    }

    calls <- Filter(length,
                    .find_calls_in_package_code(dir, is_global_assign,
                                                recursive = TRUE))
    class(calls) <- "check_package_code_assign_to_globalenv"
    calls
}

format.check_package_code_assign_to_globalenv <-
function(x, ...)
{
    ## One header line followed by the per-file call listings; nothing
    ## when no offending calls were found.
    if(!length(x))
        return(character())
    header <- "Found the following assignments to the global environment:"
    c(header,
      unlist(Map(.format_calls_in_file, x, names(x))))
}

### * .check_package_code_attach

.check_package_code_attach <-
function(dir)
{
    ## Gather all calls to attach() in the package's R code, as a
    ## classed list of calls by file.
    is_attach_call <- function(e) {
        fname <- as.character(e[[1L]])
        (length(fname) == 1L) && (fname == "attach")
    }

    calls <- Filter(length,
                    .find_calls_in_package_code(dir, is_attach_call,
                                                recursive = TRUE))
    class(calls) <- "check_package_code_attach"
    calls
}

format.check_package_code_attach <-
function(x, ...)
{
    ## One header line followed by the per-file call listings; nothing
    ## when no attach() calls were found.
    if(!length(x))
        return(character())
    header <- "Found the following calls to attach():"
    c(header,
      unlist(Map(.format_calls_in_file, x, names(x))))
}

### * .check_package_code_data_into_globalenv

.check_package_code_data_into_globalenv <-
function(dir)
{
    ## Find calls to data() in the package code which load datasets into
    ## the global environment.  Returns a classed list of calls by file.
    loads_into_globalenv <- function(e) {
        if(!is.call(e) ||
           (length(fname <- as.character(e[[1L]])) != 1L) ||
           (fname != "data"))
            return(FALSE)
        ## As data() has usage
        ##   data(..., list = character(), package = NULL, lib.loc = NULL,
        ##        verbose = getOption("verbose"), envir = .GlobalEnv))
        ## argument 'envir' must be matched exactly, and calls which
        ## only have the last four arguments do not load any data.
        env <- e$envir
        tab <- c("package", "lib.loc", "verbose", "envir")
        nms <- names(e)
        if(!is.null(nms))
            e <- e[!(nms %in% tab)]
        if(length(e) <= 1L) return(FALSE)
        if(is.null(env)) return(TRUE)
        if(is.name(env)) return(as.character(env) == ".GlobalEnv")
        is.call(env) && (as.character(env[[1L]]) == "globalenv")
    }

    calls <- Filter(length,
                    .find_calls_in_package_code(dir, loads_into_globalenv,
                                                recursive = TRUE))
    class(calls) <- "check_package_code_data_into_globalenv"
    calls
}

format.check_package_code_data_into_globalenv <-
function(x, ...)
{
    ## One header line followed by the per-file call listings; nothing
    ## when no offending data() calls were found.
    if(!length(x))
        return(character())
    header <- "Found the following calls to data() loading into the global environment:"
    c(header,
      unlist(Map(.format_calls_in_file, x, names(x))))
}

### * .check_package_code_class_is_string

## Could easily make this return something classed with suitable
## format() and print() methods ...

.check_package_code_class_is_string <-
function(dir) {
    ## Detect if() conditions which compare class() to a string
    ## constant (class(x) == "foo" or class(x) != "foo"), possibly
    ## wrapped in parentheses, negation, or binary logical operators.
    ## Offending calls are reported via writeLines() and returned
    ## invisibly.
    cond_compares_class <- function(e) {
        if(is.call(e) &&
           (length(e) >= 2L) &&
           (length(op <- as.character(e[[1L]])) == 1L)) {
            if(op %in% c("(", "!"))
                return(cond_compares_class(e[[2L]]))
            if(op %in% c("||", "&&", "|", "&"))
                return(cond_compares_class(e[[2L]]) ||
                       cond_compares_class(e[[3L]]))
            if(op %in% c("==", "!=") &&
               is.call(lhs <- e[[2L]]) &&
               (as.character(lhs[[1L]])[1L] == "class") &&
               is.character(e[[3L]]))
                return(TRUE)
        }
        FALSE
    }
    is_bad_if <- function(e) {
        if(is.call(e) &&
           (length(e) >= 2L) &&
           (as.character(e[[1L]])[1L] == "if"))
            return(cond_compares_class(e[[2L]]))
        FALSE
    }
    x <- Filter(length,
                .find_calls_in_package_code(dir, is_bad_if,
                                            recursive = TRUE))
    if(length(x)) {
        msgs <- sprintf("File %s: %s",
                        sQuote(rep.int(names(x), lengths(x))),
                        vapply(unlist(x),
                               function(e)
                                   sprintf("if (%s) ...", deparse1(e[[2L]])),
                               ""))
        writeLines(c("Found if() conditions comparing class() to string:",
                     msgs,
                     "Use inherits() (or maybe is()) instead."))
    }
    invisible(x)
}

### * .check_packages_used

## Check an installed package ('package') or a package source directory
## ('dir') for mismatches between the dependencies declared in
## DESCRIPTION/NAMESPACE and the packages actually used in the code:
## library()/require()/loadNamespace()/requireNamespace() calls not
## declared, '::'/':::' references not declared as imports, declared
## Imports never used, Depends not imported from, and '::'/':::'
## references to missing or unexported objects.  Returns a classed list
## ("check_packages_used") consumed by format.check_packages_used().
.check_packages_used <-
function(package, dir, lib.loc = NULL)
{
    ## Argument handling.
    ns <- NULL
    if(!missing(package)) {
        if(length(package) != 1L)
            stop("argument 'package' must be of length 1")
        dir <- find.package(package, lib.loc)
        ## Using package installed in @code{dir} ...
        code_dir <- file.path(dir, "R")
        if(!dir.exists(code_dir))
            stop(gettextf("directory '%s' does not contain R code",
                          dir),
                 domain = NA)
        if(basename(dir) != "base")
            .load_namespace_quietly(package, dirname(dir))
        code_env <- asNamespace(package)
        dfile <- file.path(dir, "DESCRIPTION")
        db <- .read_description(dfile)
        ## fake installs do not have this.
        nsfile <- file.path(dir, "Meta", "nsInfo.rds")
        if (file.exists(nsfile)) ns <- readRDS(nsfile)
        else {
            nsfile <- file.path(dir, "NAMESPACE")
            if(file.exists(nsfile))
                ns <- parseNamespaceFile(basename(dir), dirname(dir))
        }
    }
    else if(!missing(dir)) {
        ## Using sources from directory @code{dir} ...
        if(!dir.exists(dir))
            stop(gettextf("directory '%s' does not exist", dir),
                 domain = NA)
        else
            dir <- file_path_as_absolute(dir)
        db <- .get_package_metadata(dir)
        nsfile <- file.path(dir, "NAMESPACE")
        if(file.exists(nsfile) &&
           inherits(tryCatch(ns <- parseNamespaceFile(basename(dir),
                                                      dirname(dir)),
                             error = identity),
                    "error"))
            ns <- NULL
        ## Concatenate all source code files into one temporary file for
        ## parsing below.
        code_dir <- file.path(dir, "R")
        if(dir.exists(code_dir)) {
            file <- tempfile()
            on.exit(unlink(file))
            if(!file.create(file)) stop("unable to create ", file)
            if(!all(.file_append_ensuring_LFs(file,
                                              list_files_with_type(code_dir,
                                                                   "code"))))
                stop("unable to write code files")
        } else return(invisible())
    }
    ## Dependencies declared in the DESCRIPTION metadata.
    pkg_name <- db["Package"]
    depends <- .get_requires_from_package_db(db, "Depends")
    imports <- imports0 <- .get_requires_from_package_db(db, "Imports")
    suggests <- .get_requires_from_package_db(db, "Suggests")
    enhances <- .get_requires_from_package_db(db, "Enhances")

    ## it is OK to refer to yourself and non-S4 standard packages
    standard_package_names <-
        setdiff(.get_standard_package_names()$base,
                c("methods", "stats4"))
    ## It helps to know if non-default standard packages are require()d
    ## but safer to list them: compiler & parallel got included for years
    ## Some people depend on 'base'!
    default_package_names <-
        c("base", "datasets", "grDevices", "graphics", "stats", "utils")
    depends_suggests <- c(depends, suggests, enhances, pkg_name, default_package_names)
    imports <- c(imports, depends, suggests, enhances, pkg_name,
                 standard_package_names)
    ## the first argument could be named, or could be a variable name.
    ## we just have a stop list here.
    common_names <- c("pkg", "pkgName", "package", "pos", "dep_name")

    ## Accumulators filled in (via <<-) by the recursive code walker
    ## below.
    bad_exprs <- bad_deps <- bad_imps <- bad_prac <- character()
    bad_imports <- all_imports <- imp2 <- imp2f <- imp3 <- imp3f <- character()
    uses_methods <- FALSE
    ## Recursively walk an expression, recording problematic
    ## library()/require()/loadNamespace()/requireNamespace() calls,
    ## '::'/':::' references, and whether S4 (setClass/setMethod) is
    ## used.
    find_bad_exprs <- function(e) {
        if(is.call(e) || is.expression(e)) {
            Call <- deparse(e[[1L]])[1L]
            if(Call %in% c("clusterEvalQ", "parallel::clusterEvalQ")) return()
            if((Call %in%
                c("library", "require", "loadNamespace", "requireNamespace"))
               && (length(e) >= 2L)) {
                ## We need to remove '...': OTOH the argument could be NULL
                keep <- vapply(e, function(x) deparse(x)[1L] != "...", NA)
                mc <- match.call(baseenv()[[Call]], e[keep])
                if(!is.null(pkg <- mc$package)) {
                    ## <NOTE>
                    ## Using code analysis, we really don't know which
                    ## package was called if character.only = TRUE and
                    ## the package argument is not a string constant.
                    ## (BTW, what if character.only is given a value
                    ## which is an expression evaluating to TRUE?)
                    dunno <- FALSE
                    if((Call %in% c("loadNamespace",
                                    "requireNamespace"))) {
                        if(!identical(class(pkg), "character"))
                            dunno <- TRUE
                    } else {
                        if(!identical(class(pkg), "character") &&
                           !is.null(co <- mc$character.only) &&
                           !isFALSE(co))
                            dunno <- TRUE
                    }
                    ## </NOTE>
                    ## <FIXME> could be inside substitute or a variable
                    ## and is in e.g. R.oo
                    if(!dunno) {
                        pkg <- as.character(pkg)
                        if(Call %in% c("loadNamespace",
                                       "requireNamespace")) {
                            if(pkg %notin%
                               c(imports, depends_suggests, common_names))
                                bad_imps <<- c(bad_imps, pkg)
                        } else {
                            if(pkg %notin% c(depends_suggests, common_names))
                                bad_exprs <<- c(bad_exprs, pkg)
                            if(pkg %in% depends)
                                bad_deps <<- c(bad_deps, pkg)
                            ## assume calls to itself are to clusterEvalQ etc
                            else if (pkg != pkg_name)
                                bad_prac <<- c(bad_prac, pkg)
                        }
                    }
                }
            } else if(Call %in% "::") {
                pkg <- deparse(e[[2L]])
                all_imports <<- c(all_imports, pkg)
                if(pkg %notin% imports)
                    bad_imports <<- c(bad_imports, pkg)
                else {
                    imp2 <<- c(imp2, pkg)
                    imp2f <<- c(imp2f, deparse(e[[3L]]))
                }
            } else if(Call %in% ":::") {
                pkg <- deparse(e[[2L]])
                all_imports <<- c(all_imports, pkg)
                imp3 <<- c(imp3, pkg)
                imp3f <<- c(imp3f, deparse(e[[3L]]))
                if(pkg %notin% imports)
                    bad_imports <<- c(bad_imports, pkg)
            } else if(Call %in% c("setClass", "setMethod")) {
                uses_methods <<- TRUE
            } else if((Call %in% c("<-", "<<-")) &&
                      is.call(e[[2L]]) &&
                      is.call(e21 <- e[[2L]][[1L]]) &&
                      (deparse(e21[[1L]])[1L] %in% c("::", ":::"))) {
                ## For complex assignments like
                ##    pkg::fun(......) <- rhs
                ## need to look for replacement function 'fun<-' in pkg
                ## (PR#17613).
                e[[2L]][[1L]][[3L]] <-
                    as.name(paste0(deparse(e21[[3L]])[1L], "<-"))
            }
            for(i in seq_along(e)) Recall(e[[i]])
        } else if (is.pairlist(e))
            for(i in seq_along(e)) Recall(e[[i]])

    }

    ## Obtain the expressions to scan: from the installed namespace (and
    ## its S4 methods) in the 'package' case, or by parsing the
    ## concatenated sources in the 'dir' case.
    if(!missing(package)) {
        ## <FIXME>
        ## Suggested way of checking for S4 metadata.
        ## Change to use as envir_has_S4_metadata() once this makes it
        ## into base or methods.
        if(length(objects(code_env, all.names = TRUE,
                          pattern = "^[.]__[CT]_")))
            uses_methods <- TRUE
        ## </FIXME>
        exprs <- lapply(ls(envir = code_env, all.names = TRUE),
                        function(f) {
                            f <- get(f, envir = code_env) # get is expensive
                            if(typeof(f) == "closure") pairlist(formals(f), body(f)) # else NULL
                        })
        if(.isMethodsDispatchOn()) {
            ## Also check the code in S4 methods.
            ## This may find things twice.
            for(f in .get_S4_generics(code_env)) {
                mlist <- .get_S4_methods_list(f, code_env)
                exprs <- c(exprs, lapply(mlist, body))
            }
        }
    }
    else {
        enc <- db["Encoding"]
        if(!is.na(enc) &&
           (Sys.getlocale("LC_CTYPE") %notin% c("C", "POSIX"))) {
            ## FIXME: what if conversion fails on e.g. UTF-8 comments
            con <- file(file, encoding=enc)
            on.exit(close(con))
        } else con <- file
        exprs <-
            tryCatch(parse(file = con, n = -1L),
                     error = function(e)
                     stop(gettextf("parse error in file '%s':\n%s",
                                   file,
                                   .massage_file_parse_error_message(conditionMessage(e))),
                               domain = NA, call. = FALSE))
    }
    for(i in seq_along(exprs)) find_bad_exprs(exprs[[i]])

    ## Imports declared in the NAMESPACE file count as used.
    if(length(ns)) {
        imp <- c(ns$imports, ns$importClasses, ns$importMethods)
        if (length(imp)) {
            imp <- sapply(imp, function(x) x[[1L]])
            all_imports <- unique(c(imp, all_imports))
        }
    } else imp <- character()
    ## Declared Imports which were never actually imported from.
    bad_imp <- setdiff(imports0, all_imports)

    ## All the non-default packages need to be imported from.
    depends_not_import <- setdiff(depends, c(imp, default_package_names))

    methods_message <-
        if(uses_methods && "methods" %notin% c(depends, imports))
            gettext("package 'methods' is used but not declared")
        else ""

    ## Additional names accepted as exports when checking '::'/':::'
    ## targets: presumably platform- or build-specific objects (e.g.
    ## Windows-only utils functions) not present in the namespaces
    ## loaded here.
    extras <- list(
        base = c("Sys.junction", "shell", "shell.exec"),
        grDevices = c("X11.options", "X11Font", "X11Fonts", "quartz",
        "quartz.options", "quartz.save", "quartzFont", "quartzFonts",
        "bringToTop", "msgWindow", "win.graph", "win.metafile", "win.print",
        "windows", "windows.options", "windowsFont", "windowsFonts"),
        parallel = c("mccollect", "mcparallel", "mc.reset.stream", "mcaffinity"),
        utils = c("nsl", "DLL.version", "Filters",
        "choose.dir", "choose.files", "getClipboardFormats",
        "getIdentification", "getWindowsHandle", "getWindowsHandles",
        "getWindowTitle", "loadRconsole", "readClipboard",
        "readRegistry", "setStatusBar", "setWindowTitle",
        "shortPathName", "win.version", "winDialog",
        "winDialogString", "winMenuAdd", "winMenuAddItem",
        "winMenuDel", "winMenuDelItem", "winMenuNames",
        "winMenuItems", "writeClipboard", "zip.unpack",
        "winProgressBar", "getWinProgressBar", "setWinProgressBar",
        "setInternet2", "arrangeWindows"),
        RODBC = c("odbcConnectAccess", "odbcConnectAccess2007",
        "odbcConnectDbase", "odbcConnectExcel", "odbcConnectExcel2007")
        )
    imp2un <- character()
    if(length(imp2)) { ## Try to check these are exported
        names(imp2f) <- imp2
        imp2 <- unique(imp2)
        imps <- split(imp2f, names(imp2f))
        for (p in names(imps)) {
            ## some people have these quoted:
            this <- imps[[p]]
            this <- sub('^"(.*)"$', "\\1", this)
            this <- sub("^'(.*)'$", "\\1", this)
            if (p %in% "base") {
                this <- setdiff(this, ls(baseenv(), all.names = TRUE))
                if(length(this))
                    imp2un <- c(imp2un, paste(p, this, sep = "::"))
                next
            }
            ns <- .getNamespace(p)
            value <- if(is.null(ns)) {
                ## this could be noisy
                tryCatch(suppressWarnings(suppressMessages(loadNamespace(p))),
                         error = function(e) e)
            } else NULL
            if (!inherits(value, "error")) {
                ns <- asNamespace(p)
                exps <- c(ls(envir = .getNamespaceInfo(ns, "exports"),
                             all.names = TRUE),
                          ls(envir = .getNamespaceInfo(ns, "lazydata"),
                             all.names = TRUE),
                          extras[[p]])
                this2 <- setdiff(this, exps)
                if(length(this2))
                    imp2un <- c(imp2un, paste(p, this2, sep = "::"))
            }
        }
    }

    ## Classify the ':::' references: imp32 = should have been '::',
    ## imp3f = unexported but existing objects, imp3ff = missing
    ## objects, unknown = namespaces which could not be loaded.
    names(imp3f) <- imp3
    ## Eliminate some methods ::: self-calls which we know are in fact
    ## necessary.
    if(pkg_name == "methods") {
        imp3f <- imp3f[(imp3 != "methods") |
                       (imp3f %notin% c(".class1",
                                        ".missingMethod",
                                        ".selectDotsMethod",
                                        ".setDummyField",
                                        ".InhSlotNames"))]
        imp3 <- names(imp3f)
    }
    imp3 <- unique(imp3)
    imp3self <- pkg_name %in% imp3
    imp3selfcalls <- as.vector(imp3f[names(imp3f) == pkg_name])
    imp3 <- setdiff(imp3, pkg_name)
    if(length(imp3)) {
        imp3f <- imp3f[names(imp3f) %in% imp3]
        imps <- split(imp3f, names(imp3f))
        imp32 <- imp3 <- imp3f <- imp3ff <- unknown <- character()
        for (p in names(imps)) {
            this <- imps[[p]]
            this <- sub('^"(.*)"$', "\\1", this)
            this <- sub("^'(.*)'$", "\\1", this)
            if (p %in% "base") {
                imp32 <- c(imp32, paste(p, this, sep = ":::"))
                next
            }
            ns <- .getNamespace(p)
            value <- if(is.null(ns)) {
                ## this could be noisy
                tryCatch(suppressWarnings(suppressMessages(loadNamespace(p))),
                         error = function(e) e)
            } else NULL
            if (inherits(value, "error")) {
                unknown <- c(unknown, p)
            } else {
                 exps <- c(ls(envir = getNamespaceInfo(p, "exports"),
                              all.names = TRUE), extras[[p]])
                 this2 <- this %in% exps
                 if (any(this2))
                     imp32 <- c(imp32, paste(p, this[this2], sep = ":::"))
                 if (any(!this2)) {
                     imp3 <- c(imp3, p)
                     this <- this[!this2]
                     pp <- ls(envir = asNamespace(p), all.names = TRUE)
                     this2 <- this %in% pp
                     if(any(this2))
                         imp3f <- c(imp3f, paste(p, this[this2], sep = ":::"))
                     if(any(!this2))
                         imp3ff <- c(imp3ff, paste(p, this[!this2], sep = ":::"))
                 }
            }
        }
        if(length(imp3f)) {
            ## remove other packages which have the same maintainer,
            ## but report references to itself.  Unless they should be :: .
            maintainers <-
                sapply(strsplit(imp3f, ":::", fixed = TRUE),
                       function(p) {
                           dfile <- system.file("DESCRIPTION", package = p[[1L]])
                           if(dfile == "") return("")
                           unname(.read_description(dfile)["Maintainer"])
                       })
            imp3f <- imp3f[(maintainers != db["Maintainer"])]
        }
    } else imp32 <- imp3f <- imp3ff <- unknown <- character()
    ## An unexported function only available on Windows, used in tools
    imp3ff <- setdiff(sort(unique(imp3ff)), "utils:::unpackPkgZip")
    ## Assemble the classed results list consumed by
    ## format.check_packages_used().
    res <- list(others = unique(bad_exprs),
                bad_practice = unique(bad_prac),
                imports = unique(bad_imports),
                imps = unique(bad_imps),
                in_depends = unique(bad_deps),
                unused_imports = bad_imp,
                depends_not_import = depends_not_import,
                imp2un = sort(unique(imp2un)),
                imp32 = sort(unique(imp32)),
                imp3 = imp3, imp3f = sort(unique(imp3f)),
                imp3ff = imp3ff, imp3self = imp3self,
                imp3selfcalls = sort(unique(imp3selfcalls)),
                imp3unknown = unknown,
                methods_message = methods_message)
    class(res) <- "check_packages_used"
    res
}

format.check_packages_used <-
function(x, ...)
{
    incoming <-
        identical(Sys.getenv("_R_CHECK_PACKAGES_USED_CRAN_INCOMING_NOTES_",
                             "FALSE"),
                  "TRUE")
    ignore_unused_imports <-
        config_val_to_logical(Sys.getenv("_R_CHECK_PACKAGES_USED_IGNORE_UNUSED_IMPORTS_",
                                         "FALSE"))
                                        # ^^^^^ rather "TRUE" ??
    c(character(),
      if(length(xx <- x$imports)) {
          if(length(xx) > 1L) {
              c(gettext("'::' or ':::' imports not declared from:"),
                .pretty_format(sort(xx)))
          } else {
              gettextf("'::' or ':::' import not declared from: %s", sQuote(xx))
          }
      },
      if(length(xx <- x$others)) {
          if(length(xx) > 1L) {
              c(gettext("'library' or 'require' calls not declared from:"),
                .pretty_format(sort(xx)))
          } else {
              gettextf("'library' or 'require' call not declared from: %s",
                       sQuote(xx))
          }
      },
      if(length(xx <- x$imps)) {
          if(length(xx) > 1L) {
              c(gettext("'loadNamespace' or 'requireNamespace' calls not declared from:"),
                .pretty_format(sort(xx)))
          } else {
              gettextf("'loadNamespace' or 'requireNamespace' call not declared from: %s",
                       sQuote(xx))
          }
      },
      if(length(xx <- x$in_depends)) {
          msg <- "  Please remove these calls from your code."
          if(length(xx) > 1L) {
              c(gettext("'library' or 'require' calls to packages already attached by Depends:"),
                .pretty_format(sort(xx)), msg)
          } else {
              c(gettextf("'library' or 'require' call to %s which was already attached by Depends.",
                         sQuote(xx)), msg)
          }
      },
      if(length(xx <- x$bad_practice)) {
          msg <-
              "  Please use :: or requireNamespace() instead.\n  See section 'Suggested packages' in the 'Writing R Extensions' manual."
          if(length(xx) > 1L) {
              c(gettext("'library' or 'require' calls in package code:"),
                .pretty_format(sort(xx)), msg)
          } else {
              c(gettextf("'library' or 'require' call to %s in package code.",
                         sQuote(xx)), msg)
          }
      },

      if(length(xx <- x$unused_imports) && !ignore_unused_imports) {
          msg <- "  All declared Imports should be used."
          if(length(xx) > 1L) {
              c(gettext("Namespaces in Imports field not imported from:"),
                .pretty_format(sort(xx)), msg)
          } else {
              c(gettextf("Namespace in Imports field not imported from: %s",
                       sQuote(xx)), msg)
          }
      },
      if(length(xx <- x$depends_not_import)) {
          msg <- c("  These packages need to be imported from (in the NAMESPACE file)",
                   "  for when this namespace is loaded but not attached.")
          if(length(xx) > 1L) {
              c(gettext("Packages in Depends field not imported from:"),
                .pretty_format(sort(xx)), msg)
          } else {
              c(gettextf("Package in Depends field not imported from: %s",
                         sQuote(xx)), msg)
          }
      },
      if(length(xx <- x$imp2un)) {
          if(length(xx) > 1L) {
              c(gettext("Missing or unexported objects:"),
                .pretty_format(sort(xx)))
          } else {
              gettextf("Missing or unexported object: %s", sQuote(xx))
          }
      },
      if(length(xx <- x$imp32)) { ## ' ' seems to get converted to dir quotes
          msg <- "See the note in ?`:::` about the use of this operator."
          msg <- strwrap(paste(msg, collapse = " "), indent = 2L, exdent = 2L)
          if(length(xx) > 1L) {
              c(gettext("':::' calls which should be '::':"),
                .pretty_format(sort(xx)), msg)
          } else {
              c(gettextf("':::' call which should be '::': %s",
                         sQuote(xx)), msg)
          }
      },
      if(length(xx <- x$imp3ff)) {
           if(length(xx) > 1L) {
              c(gettext("Missing objects imported by ':::' calls:"),
                .pretty_format(sort(xx)))
          } else {
              gettextf("Missing object imported by a ':::' call: %s",
                       sQuote(xx))
          }
     },
      if(length(xxx <- x$imp3f)) { ## ' ' seems to get converted to dir quotes
          msg <- "See the note in ?`:::` about the use of this operator."
          msg <- strwrap(paste(msg, collapse = " "), indent = 2L, exdent = 2L)
          if(incoming) {
              z <- sub(":::.*", "", xxx)
              base <- unlist(.get_standard_package_names()[c("base", "recommended")])
              if (any(z %in% base))
                  msg <- c(msg,
                           "  Including base/recommended package(s):",
                           .pretty_format(intersect(base, z)))
          }
          if(length(xxx) > 1L) {
              c(gettext("Unexported objects imported by ':::' calls:"),
                .pretty_format(sort(xxx)), msg)
          } else  if(length(xxx)) {
              c(gettextf("Unexported object imported by a ':::' call: %s",
                         sQuote(xxx)), msg)
          }
      },
      if(isTRUE(x$imp3self)) {
          msg <-
              c("There are ::: calls to the package's namespace in its code.",
                "A package almost never needs to use ::: for its own objects:")
          c(strwrap(paste(msg, collapse = " "), indent = 0L, exdent = 2L),
            .pretty_format(sort(x$imp3selfcalls)))
      },
      if(length(xx <- x$imp3unknown)) {
          msg <- "See the note in ?`:::` about the use of this operator."
          msg <- strwrap(paste(msg, collapse = " "), indent = 2L, exdent = 2L)
          if(length(xx) > 1L) {
              c(gettext("Unavailable namespaces imported from by ':::' calls:"),
                .pretty_format(sort(xx)), msg)
          } else {
              c(gettextf("Unavailable namespace imported from by a ':::' call: %s",
                         sQuote(xx)), msg)
          }
      },
      if(length(xx <- x$data)) {
          if(length(xx) > 1L) {
              c(gettext("'data(package=)' calls not declared from:"),
                .pretty_format(sort(xx)))
          } else {
              gettextf("'data(package=)' call not declared from: %s",
                       sQuote(xx))
          }
      },
      if(nzchar(x$methods_message)) {
          x$methods_message
      })
}

### * .check_packages_used_in_examples

.check_packages_used_helper <-
function(db, files)
{
    ## Scan R code for uses of packages not declared in the package
    ## metadata 'db' (a DESCRIPTION database as from .read_description()).
    ## 'files' is either a character vector of file paths or a connection
    ## to parse (see .check_packages_used_in_examples).
    ## Returns an object of class "check_packages_used" with components:
    ##   others       - undeclared targets of library()/require()/
    ##                  loadNamespace()/requireNamespace() calls
    ##   imports      - undeclared targets of '::' / ':::' references
    ##   data         - undeclared targets of 'data(package = )' calls
    ##   methods_message - always "" here (filled in by other checkers)
    ##   parse_errors - files which failed to parse
    pkg_name <- db["Package"]
    depends <- .get_requires_from_package_db(db, "Depends")
    imports <- .get_requires_from_package_db(db, "Imports")
    suggests <- .get_requires_from_package_db(db, "Suggests")
    enhances <- .get_requires_from_package_db(db, "Enhances")

    ## it is OK to refer to yourself and standard packages
    standard_package_names <- .get_standard_package_names()$base
    depends_suggests <- c(depends, imports, suggests, enhances, pkg_name,
                          standard_package_names)
    ## the first argument could be named, or could be a variable name.
    ## we just have a stop list here.
    common_names <- c("pkg", "pkgName", "package", "pos")

    ## Accumulators, updated via <<- from the recursive walker below.
    parse_errors <-
    bad_exprs <- character()
    bad_imports <- character()
    bad_data <- character()
    ## Recursively walk one parsed expression and record offending
    ## package names in the enclosing frame's accumulators.
    find_bad_exprs <- function(e) {
        if(is.call(e) || is.expression(e)) {
            Call <- deparse(e[[1L]])[1L]
            if((Call %in%
               c("library", "require", "loadNamespace", "requireNamespace"))
               && (length(e) >= 2L)) {
                ## We need to remove '...': OTOH the argument could be NULL
                keep <- vapply(e,
                               function(x) deparse(x)[1L] != "...",
                               NA)
                mc <- match.call(baseenv()[[Call]], e[keep])
                if(!is.null(pkg <- mc$package)) {
                    ## <NOTE>
                    ## Using code analysis, we really don't know which
                    ## package was called if character.only = TRUE and
                    ## the package argument is not a string constant.
                    ## (Btw, what if character.only is given a value
                    ## which is an expression evaluating to TRUE?)
                    dunno <- FALSE
                    if((Call %in% c("loadNamespace",
                                    "requireNamespace"))) {
                        if(!identical(class(pkg), "character"))
                            dunno <- TRUE
                    } else {
                        if(!identical(class(pkg), "character") &&
                           !is.null(co <- mc$character.only) &&
                           !isFALSE(co))
                            dunno <- TRUE
                    }
                    if(!dunno) {
                        pkg <- as.character(pkg)
                        ## Skip names from the stop list of common
                        ## variable names used as the first argument.
                        if(pkg %notin% c(depends_suggests, common_names))
                            bad_exprs <<- c(bad_exprs, pkg)
                    }
                }
            } else if(Call %in%  "::") {
                pkg <- deparse(e[[2L]])
                if(! pkg %in% depends_suggests)
                    bad_imports <<- c(bad_imports, pkg)
            } else if(Call %in%  ":::") {
                pkg <- deparse(e[[2L]])
                if(! pkg %in% depends_suggests)
                    bad_imports <<- c(bad_imports, pkg)
            } else if((Call %in% "data" && length(e) >= 3L) ||
                      (Call %in% c("utils::data", "utils:::data"))) {
                mc <- match.call(utils::data, e)
                if(is.character(pkg <- mc$package) && pkg %notin% depends_suggests)
                    bad_data <<- c(bad_data, pkg)
            }

            ## Recurse into all sub-expressions.
            for(i in seq_along(e)) Recall(e[[i]])
        }
    }

    ## A character vector is a set of file paths; anything else is
    ## assumed to be a connection (used for examples with re-encoding).
    if (is.character(files)) {
        for (f in files) {
            tryCatch({
                        ## This can give errors because the vignette etc
                        ## need not be in the session encoding.
                        exprs <- parse(file = f, n = -1L)
                        for(i in seq_along(exprs)) find_bad_exprs(exprs[[i]])
                     },
                     error = function(e) {
                         ## so ignore 'invalid multibyte character' errors.
                         msg <- .massage_file_parse_error(e)
                         if(!startsWith(msg, "invalid multibyte character"))
                         {
                             parse_errors <<- c(parse_errors, f)
                             warning(gettextf("parse error in file '%s':\n%s",
                                              f, msg),
                                     domain = NA, call. = FALSE)
                         }
                     })
        }
    } else {
        ## called for examples with translation
        tryCatch({
            exprs <- parse(file = files, n = -1L)
            for(i in seq_along(exprs)) find_bad_exprs(exprs[[i]])
        },
                 error = function(e)
                 warning(gettextf("parse error in file '%s':\n%s",
                                  summary(files)$description,
                                  .massage_file_parse_error_message(conditionMessage(e))),
                         domain = NA, call. = FALSE))
    }

    res <- list(others = unique(bad_exprs),
                imports = unique(bad_imports),
                data = unique(bad_data),
                methods_message = ""
              , parse_errors = unique(parse_errors)
            )
    class(res) <- "check_packages_used"
    res
}

.check_packages_used_in_examples <-
function(package, dir, lib.loc = NULL)
{
    ## Check that packages used in the examples of an installed package
    ## ('package') or a source package ('dir') are declared in the
    ## DESCRIPTION dependency fields.  Returns the "check_packages_used"
    ## object from .check_packages_used_helper(), or invisible(NULL)
    ## if the package has no examples.

    ## Argument handling.
    if(!missing(package)) {
        if(length(package) != 1L)
            stop("argument 'package' must be of length 1")
        dir <- find.package(package, lib.loc)
        dfile <- file.path(dir, "DESCRIPTION")
        db <- .read_description(dfile)
    }
    else if(!missing(dir)) {
        ## Using sources from directory @code{dir} ...
        if(!dir.exists(dir))
            stop(gettextf("directory '%s' does not exist", dir), domain = NA)
        else
            dir <- file_path_as_absolute(dir)
        dfile <- file.path(dir, "DESCRIPTION")
        db <- .read_description(dfile)
    }
    else
        ## Formerly fell through with 'db' and 'dir' undefined, giving
        ## an obscure "object 'db' not found" error: be explicit, as in
        ## the other checkers (.check_T_and_F etc).
        stop("you must specify 'package' or 'dir'")
    pkg_name <- db["Package"]

    ## Collect all example code into one file for parsing.
    file <- .createExdotR(pkg_name, dir, silent = TRUE,
                          commentDonttest = FALSE,
                          installed = !missing(package))
    if (is.null(file)) return(invisible(NULL)) # e.g., no examples
    enc <- db["Encoding"]
    if(!is.na(enc) &&
       (Sys.getlocale("LC_CTYPE") %notin% c("C", "POSIX"))) {
        ## Avoid conversion failing on e.g. UTF-8 comments
        ## con <- file(file, encoding = enc)
        lines <- iconv(readLines(file, warn = FALSE),
                       from = "UTF-8", to = "", sub = "byte")
        con <- textConnection(lines)
        on.exit(close(con), add = TRUE)
    } else con <- file

    .check_packages_used_helper(db, con)
}


### * .check_packages_used_in_tests

.check_packages_used_in_tests <-
function(dir, testdir, lib.loc = NULL)
{
    ## Check that packages used in the test code of the source package
    ## rooted at 'dir' (test files in subdirectory 'testdir', usually
    ## "tests") are declared in its DESCRIPTION dependency fields.
    ## 'lib.loc' is currently unused here.
    ## Returns the "check_packages_used" object from
    ## .check_packages_used_helper(), possibly filtered against the
    ## repository package list.

    ## Argument handling.
    ## Using sources from directory @code{dir} ...
    if(!dir.exists(dir))
        stop(gettextf("directory '%s' does not exist", dir), domain = NA)
    else
        dir <- file_path_as_absolute(dir)
    dfile <- file.path(dir, "DESCRIPTION")
    db <- .read_description(dfile)

    ## Work inside the test directory; restore the old cwd on exit.
    testsrcdir <- file.path(dir, testdir)
    od <- setwd(testsrcdir)
    on.exit(setwd(od))
    Rinfiles <- list.files(".", pattern = "\\.Rin$")
    Rfiles <- list.files(".", pattern = "\\.[rR]$")
    ## Optionally (env-var gated) also scan sources in the subdirectories
    ## used by common testing frameworks, but only for testdir "tests".
    if(testdir != "tests") {
        use_subdirs <- FALSE
    } else {
        use_subdirs <-
            Sys.getenv("_R_CHECK_PACKAGES_USED_IN_TESTS_USE_SUBDIRS_",
                       "FALSE")
        use_subdirs <- config_val_to_logical(use_subdirs)
        if(use_subdirs) {
            subdirs <- c("testthat", "testit", "unitizer", "RUnit")
            subdirs <- subdirs[dir.exists(subdirs)]
            if(length(subdirs)) {
                Rfiles <-
                    c(Rfiles,
                      unlist(lapply(subdirs, list.files,
                                    pattern = "\\.[rR]$",
                                    full.names = TRUE),
                             use.names = FALSE))
            } else {
                use_subdirs <- FALSE
            }
        }
    }
    res <- .check_packages_used_helper(db, c(Rinfiles, Rfiles))
    ## res[1:3] are the 'others', 'imports' and 'data' components.
    if(any(lengths(bad <- res[1L : 3L]))) {
        ## Filter results against available package names to avoid (too
        ## many) false positives.
        ## <FIXME>
        ## Should really standardize getting available packages when
        ## checking.
        repos <- .get_standard_repository_URLs()
        available <- utils::available.packages(repos = repos)
        if (nrow(available)) # cannot filter when offline
        res[1L : 3L] <- lapply(bad, intersect, available[, "Package"])
    }
    res
}

### * .check_packages_used_in_vignettes

.check_packages_used_in_vignettes <-
function(package, lib.loc = NULL)
{
    ## Check that packages used in the vignette code of installed
    ## package 'package' are declared in its DESCRIPTION dependency
    ## fields.  A single package name is required.
    if(missing(package) || length(package) != 1L)
        stop("argument 'package' must be of length 1")

    pkgdir <- find.package(package, lib.loc)
    ## FIXME: use Meta directory.
    desc <- .read_description(file.path(pkgdir, "DESCRIPTION"))

    ## Collect the (tangled) R source files of all vignettes in 'doc'.
    vigns <- .package_vignettes_via_call_to_R(dir = pkgdir,
                                              subdirs = "doc",
                                              source = TRUE)
    srcs <- unique(as.character(unlist(vigns$sources)))

    .check_packages_used_helper(desc, srcs)
}

### * .check_T_and_F

## T and F checking, next generation.
##
## What are we really trying to do?
##
## In R, T and F are "just" variables which upon startup are bound to
## TRUE and FALSE, respectively, in the base package/namespace.  Hence,
## if code uses "global" variables T and F and dynamic lookup is in
## place (for packages, if they do not have a namespace), there may be
## trouble in case T or F were redefined.  So we'd like to warn about
## these cases.
##
## A few things to note:
## * Package code top-level bindings *to* T and F are not a problem for
##   packages installed for lazy-loading (as the top-level T and F get
##   evaluated "appropriately" upon installation).
## * Code in examples using "global" T and F is always a problem, as
##   this is evaluated in the global environment by examples().
## * There is no problem with package code using T and F as local
##   variables.
## * Functions in a namespace will always find the T or F in the
##   namespace, imports or base, never in the global environment.
##
## Our current idea is the following.  Function findGlobals() in
## codetools already provides a way to (approximately) determine the
## globals.  So we can try to get these and report them.
##
## Note that findGlobals() only works on closures, so we definitely miss
## top-level assignments to T or F.  This could be taken care of rather
## easily, though.
##
## Note also that we'd like to help people find where the offending
## globals were found.  Seems that codetools currently does not offer a
## way of recording e.g. the parent expression, so we do our own thing
## based on the legacy checkTnF code.

.check_T_and_F <-
function(package, dir, lib.loc = NULL)
{
    ## Find possibly-global uses of T/F (instead of TRUE/FALSE) in the
    ## code (and, optionally, the examples) of an installed package
    ## ('package') or a source package ('dir').  See the long comment
    ## block above for the rationale.  Returns an object of class
    ## "check_T_and_F" with components 'bad_closures' (offending
    ## function names) and 'bad_examples' (offending Rd files).

    ## Seems that checking examples has several problems, and can result
    ## in "strange" diagnostic output.  Let's more or less disable this
    ## for the time being.
    check_examples <-
        isTRUE(as.logical(Sys.getenv("_R_CHECK_RD_EXAMPLES_T_AND_F_")))


    bad_closures <- character()
    bad_examples <- character()

    ## Names of closures in 'env' whose globals (per codetools) include
    ## T or F.
    find_bad_closures <- function(env) {
        x <- lapply(as.list(env, all.names = TRUE, sorted = TRUE),
                    function(v) {
                        if (typeof(v) == "closure")
                            codetools::findGlobals(v)
                    })
        names(x)[vapply(x, function(s) any(s %in% c("T", "F")), NA)]
    }

    ## Wrap each example text into a dummy function, evaluate the
    ## definition, and scan it with find_bad_closures(); texts which
    ## fail to parse/evaluate are silently skipped.
    find_bad_examples <- function(txts) {
        env <- new.env(hash = TRUE) # might be many
        x <- lapply(txts,
                    function(txt) {
                        tryCatch({
                            eval(str2expression(
                                       paste("FOO <- function() {",
                                             paste(txt, collapse = "\n"),
                                             "}",
                                             collapse = "\n")),
                                 env)
                            find_bad_closures(env)
                        },
                                 error = function(e) character())
                    })
        names(txts)[lengths(x) > 0L]
    }

    if(!missing(package)) {
        if(length(package) != 1L)
            stop("argument 'package' must be of length 1")
        dir <- find.package(package, lib.loc)
        if(check_examples)
            example_texts <-
                .get_example_texts_from_example_dir(file.path(dir, "R-ex"))
    }
    else {
        ## The dir case.
        if(missing(dir))
            stop("you must specify 'package' or 'dir'")
        dir <- file_path_as_absolute(dir)
        code_dir <- file.path(dir, "R")
        ## Only packages without a namespace use dynamic lookup, so only
        ## those have their code scanned (see comments above).
        if(!packageHasNamespace(basename(dir), dirname(dir))
           && dir.exists(code_dir)) {
            code_env <- new.env(hash = TRUE)
            dfile <- file.path(dir, "DESCRIPTION")
            meta <- if(file_test("-f", dfile))
                .read_description(dfile)
            else
                character()
            .source_assignments_in_code_dir(code_dir, code_env, meta)
            bad_closures <- find_bad_closures(code_env)
        }
        if(check_examples)
            example_texts <- .get_example_texts_from_source_dir(dir)
    }

    if(check_examples)
        bad_examples <- find_bad_examples(example_texts)

    out <- list(bad_closures = bad_closures,
                bad_examples = bad_examples)
    class(out) <- "check_T_and_F"
    out
}

.get_example_texts_from_example_dir <-
function(dir)
{
    ## Read the extracted example files (the "R-ex" directory of an
    ## installed package) as one string per file, named by file path.
    ## Returns NULL if 'dir' does not exist.
    if(!dir.exists(dir)) return(NULL)
    files <- list_files_with_exts(dir, "R")
    read_one <- function(f)
        paste(readLines(f, warn = FALSE), collapse = "\n")
    setNames(lapply(files, read_one), files)
}

.get_example_texts_from_source_dir <-
function(dir)
{
    ## Extract the example code from the Rd sources of the source
    ## package rooted at 'dir'; NULL if there is no 'man' directory.
    if(dir.exists(file.path(dir, "man")))
        lapply(Rd_db(dir = dir), .Rd_get_example_code)
    else
        NULL
}

format.check_T_and_F <-
function(x, ...)
{
    ## Format the results of .check_T_and_F() as a character vector of
    ## diagnostic lines (empty when nothing was found).
    out <- character()
    if(length(x$bad_closures) > 0L) {
        header <- ngettext(length(x$bad_closures),
                           "Found possibly global 'T' or 'F' in the following function:",
                           "Found possibly global 'T' or 'F' in the following functions:"
                           )
        out <- c(out, strwrap(header), .pretty_format(x$bad_closures))
    }
    if(length(x$bad_examples) > 0L) {
        header <- ngettext(length(x$bad_examples),
                           "Found possibly global 'T' or 'F' in the examples of the following Rd file:",
                           "Found possibly global 'T' or 'F' in the examples of the following Rd files:"
                           )
        out <- c(out, strwrap(header), paste0("  ", x$bad_examples))
    }
    out
}

### * .check_bogus_return

## Find bogus 'return' statements probably intended as a return() call.
## This uses codetools::findGlobals() to find functions which rely on a
## global variable "return".
## The code is derived from .check_T_and_F above.

.check_bogus_return <-
function(package, dir, lib.loc = NULL)
{
    ## Find functions which use a global variable "return" -- almost
    ## certainly a bare 'return' statement that was meant to be a
    ## return() call.  Works on an installed package ('package') or a
    ## source package ('dir').  Returns an object of class
    ## "check_bogus_return" with component 'bad_closures'.
    bad_closures <- character()

    ## Names of closures in 'env' whose global *variables* (not calls,
    ## hence merge = FALSE) include "return".
    find_bad_closures <- function(env) {
        x <- lapply(as.list(env, all.names = TRUE, sorted = TRUE),
                    function(v) {
                        if (typeof(v) == "closure")
                            codetools::findGlobals(v, merge = FALSE)$variables
                    })
        names(x)[vapply(x, function(s) any(s %in% "return"), NA)]
    }

    if(!missing(package)) {
        if(length(package) != 1L)
            stop("argument 'package' must be of length 1")
        dir <- find.package(package, lib.loc)
        ## Base packages are exempt; others are scanned via their
        ## (quietly loaded) namespace.
        if(package %notin% .get_standard_package_names()$base) {
            .load_namespace_quietly(package, dirname(dir))
            code_env <- asNamespace(package)
            bad_closures <- find_bad_closures(code_env)
        }
    }
    else {
        ## The dir case.
        if(missing(dir))
            stop("you must specify 'package' or 'dir'")
        dir <- file_path_as_absolute(dir)
        code_dir <- file.path(dir, "R")
        if(dir.exists(code_dir)) {
            ## Source the package code into a scratch environment and
            ## scan the resulting top-level closures.
            code_env <- new.env(hash = TRUE)
            dfile <- file.path(dir, "DESCRIPTION")
            meta <- if(file_test("-f", dfile))
                .read_description(dfile)
            else
                character()
            .source_assignments_in_code_dir(code_dir, code_env, meta)
            bad_closures <- find_bad_closures(code_env)
        }
    }

    out <- list(bad_closures = bad_closures)
    class(out) <- "check_bogus_return"
    out
}

format.check_bogus_return <-
function(x, ...)
{
    ## Format the results of .check_bogus_return() as a character
    ## vector of diagnostic lines (empty when nothing was found).
    out <- character()
    if(length(x$bad_closures) > 0L) {
        header <- ngettext(length(x$bad_closures),
                           "Possibly missing '()' after 'return' in the following function:",
                           "Possibly missing '()' after 'return' in the following functions:"
                           )
        out <- c(out, strwrap(header), .pretty_format(x$bad_closures))
    }
    out
}


### * .check_dotInternal

.check_dotInternal <-
function(package, dir, lib.loc = NULL, details = TRUE)
{
    ## Find .Internal() calls in the code of an installed package
    ## ('package') or a source package ('dir'): in plain closures, S4
    ## methods and reference class methods (the latter two only in the
    ## installed case).  If 'details', also collect the names of the
    ## .Internal functions called.  Returns an object of class
    ## "check_dotInternal".
    bad_closures <- character()

    ## Names of closures in 'env' whose globals include ".Internal".
    find_bad_closures <- function(env) {
        objects_in_env <- as.list(env, all.names = TRUE, sorted = TRUE)
        x <- lapply(objects_in_env,
                    function(v) {
                        if (typeof(v) == "closure")
                            codetools::findGlobals(v)
                    })
        names(x)[vapply(x, function(s) any(s %in% ".Internal"), NA)]
    }

    ## S4 generics (in code_env, captured from the enclosing frame)
    ## any of whose non-default methods call .Internal.
    find_bad_S4methods <- function(env) {
        gens <- .get_S4_generics(code_env)
        x <- lapply(gens, function(f) {
            tab <- get(methods:::.TableMetaName(f, attr(f, "package")),
                       envir = code_env)
            ## The S4 'system' does **copy** base code into packages ....
            any(unlist(eapply(tab, function(v) !inherits(v, "derivedDefaultMethod") &&
                                   any(codetools::findGlobals(v) %in% ".Internal"))))
        })
        gens[unlist(x)]
    }

    ## Reference classes (from .get_ref_classes()) any of whose methods
    ## call .Internal.
    find_bad_refClasses <- function(refs) {
        cl <- names(refs)
        x <- lapply(refs, function(z) {
            any(vapply(z,
                       function(v)
                           any(codetools::findGlobals(v) %in%
                               ".Internal"),
                       NA))
        })
        cl[unlist(x)]
    }


    bad_S4methods <- list()
    bad_refs <- character()
    if(!missing(package)) {
        if(length(package) != 1L)
            stop("argument 'package' must be of length 1")
        dir <- find.package(package, lib.loc)
        ## Base packages are exempt (they legitimately use .Internal).
        if(package %notin% .get_standard_package_names()$base) {
            .load_namespace_quietly(package, dirname(dir))
            code_env <- asNamespace(package)
            bad_closures <- find_bad_closures(code_env)
            if(.isMethodsDispatchOn()) {
                bad_S4methods <- find_bad_S4methods(code_env)
                refs <- .get_ref_classes(code_env)
                if(length(refs)) bad_refs <- find_bad_refClasses(refs)
            }
        }
    }
    else {
        ## The dir case.
        if(missing(dir))
            stop("you must specify 'package' or 'dir'")
        dir <- file_path_as_absolute(dir)
        code_dir <- file.path(dir, "R")
        if(dir.exists(code_dir)) {
            ## Source the package code into a scratch environment.
            code_env <- new.env(hash = TRUE)
            dfile <- file.path(dir, "DESCRIPTION")
            meta <- if(file_test("-f", dfile))
                .read_description(dfile)
            else
                character()
            .source_assignments_in_code_dir(code_dir, code_env, meta)
            bad_closures <- find_bad_closures(code_env)
        }
    }

    ## Optionally record which .Internal functions were called, by
    ## re-scanning the offending closures for .Internal call sites.
    internals <- character()
    if (length(bad_closures) && details) {
        lapply(bad_closures, function(o) {
            v <- get(o, envir = code_env)
            calls <- .find_calls(v, recursive = TRUE)
            if(!length(calls)) return()
            calls <- calls[.call_names(calls) == ".Internal"]
            ## Strip the argument list, keeping only the name of the
            ## .Internal function being invoked.
            calls2 <- lapply(calls, `[`, 2L)
            calls3 <-
                sapply(calls2, function(x) sub("\\(.*", "", deparse(x)[1L]))
            internals <<- c(internals, calls3)
        })
    }
    out <- list(bad_closures = bad_closures, internals = internals,
                bad_S4methods = bad_S4methods, bad_refs = bad_refs)
    class(out) <- "check_dotInternal"
    out
}

format.check_dotInternal <-
function(x, ...)
{
    ## Format the results of .check_dotInternal() as a character vector
    ## of diagnostic lines (empty when nothing was found).
    out <- character()
    if(length(x$bad_closures)) {
        header <- ngettext(length(x$bad_closures),
                           "Found a .Internal call in the following function:",
                           "Found .Internal calls in the following functions:"
                           )
        out <- c(strwrap(header), .pretty_format(x$bad_closures))
        uniq <- unique(x$internals)
        if (length(uniq))
            out <- c(out, "with calls to .Internal functions",
                     .pretty_format(sort(uniq)))
    }
    if(length(x$bad_S4methods)) {
        header <- ngettext(length(x$bad_S4methods),
                           "Found a .Internal call in methods for the following S4 generic:",
                           "Found .Internal calls in methods for the following S4 generics:"
                           )
        out <- c(out, strwrap(header), .pretty_format(x$bad_S4methods))
    }
    if(length(x$bad_refs)) {
        header <- ngettext(length(x$bad_refs),
                           "Found a .Internal call in methods for the following reference class:",
                           "Found .Internal calls in methods for the following reference classes:"
                           )
        out <- c(out, strwrap(header), .pretty_format(x$bad_refs))
    }
    out
}

### * .check_namespace

.check_namespace <-
function(dir)
{
    ## Try parsing the NAMESPACE file of the package rooted at 'dir';
    ## on failure, report and re-signal the parse error.
    dir <- file_path_as_absolute(dir)
    pkg <- basename(dir)
    lib <- dirname(dir)
    res <- tryCatch(parseNamespaceFile(pkg, lib),
                    error = function(e) {
                        writeLines("Invalid NAMESPACE file, parsing gives:")
                        stop(e)
                    })
    invisible(res)
}

### * .check_citation

.check_citation <-
function(cfile, dir = NULL)
{
    ## Check a CITATION file 'cfile'.  With 'dir' (an installed
    ## package directory) given, actually read the citation against the
    ## package metadata and report read errors.  Otherwise, perform a
    ## standalone field-level check of the BibTeX-style entries.
    ## Diagnostics are written to stdout; returns invisible(NULL).
    cfile <- file_path_as_absolute(cfile)

    if(!is.null(dir)) {
        meta <- utils::packageDescription(basename(dir), dirname(dir))
        db <- .read_citation_quietly(cfile, meta)
        ## Report (but do not signal) errors from reading the citation.
        if(inherits(db, "error")) {
            msg <- conditionMessage(db)
            call <- conditionCall(db)
            if(is.null(call))
                msg <- c("Error: ", msg)
            else
                msg <- c("Error in ", deparse(call), ": ", msg)
            writeLines(paste(msg, collapse = ""))
        }
        return(invisible())
    }

    ## Standalone case: if cfile is in an 'inst' directory, pick up the
    ## package metadata from the directory above it (for the Encoding).
    meta <- if(basename(dir <- dirname(cfile)) == "inst")
        as.list(.get_package_metadata(dirname(dir)))
    else
        NULL

    db <- tryCatch(suppressMessages(get_CITATION_entry_fields(cfile,
                                                              meta$Encoding)),
                   error = identity)

    if(inherits(db, "error")) {
        writeLines(conditionMessage(db))
        return(invisible())
    }

    if(!NROW(db)) return(invisible())

    ## For each entry, the missing required fields, or (apparently)
    ## NA_character_ for an invalid entry type -- TODO confirm against
    ## find_missing_required_BibTeX_fields().
    bad <- Map(find_missing_required_BibTeX_fields, db$Entry, db$Fields,
               USE.NAMES = FALSE)
    ## ind flags entries whose result is exactly NA_character_.
    ind <- vapply(bad, identical, NA_character_, FUN.VALUE = NA)
    if(length(pos <- which(ind))) {
        entries <- db$Entry[pos]
        entries <-
            ifelse(nchar(entries) < 20L,
                   entries,
                   paste(substr(entries, 1L, 20L), "[TRUNCATED]"))
        writeLines(sprintf("entry %d: invalid type %s",
                           pos, sQuote(entries)))
    }
    ## Valid entry types with one or more required fields missing.
    pos <- which(!ind & (lengths(bad) > 0L))
    if(length(pos)) {
        writeLines(strwrap(sprintf("entry %d (%s): missing required field(s) %s",
                                   pos,
                                   db$Entry[pos],
                                   vapply(bad[pos],
                                          function(s)
                                          paste(sQuote(s),
                                                collapse = ", "),
                                          "")),
                           indent = 0L, exdent = 2L))
    }
}

### * .check_package_parseRd

## FIXME: could use dumped files, except for use of encoding = "ASCII"
## FIXME: could use dumped files, except for use of encoding = "ASCII"
.check_package_parseRd <-
function(dir, silent = FALSE, def_enc = FALSE, minlevel = -1)
{
    ## Run checkRd() over all Rd files of the source package rooted at
    ## 'dir' (plus inst/NEWS.Rd if present), printing problems of level
    ## >= 'minlevel' and a summary line for files which failed outright.
    ## 'silent' suppresses error messages; 'def_enc' says whether the
    ## encoding is to be taken as definitively declared.
    ## Returns invisible(NULL).
    if(file.exists(dfile <- file.path(dir, "DESCRIPTION"))) {
        enc <- read.dcf(dfile)[1L, ]["Encoding"]
        if(is.na(enc)) enc <- "ASCII"
        else def_enc <- TRUE
    } else enc <- "ASCII"
    macros <- loadPkgRdMacros(dir)
    ## UGLY! FIXME: add (something like) 'dir' as argument to checkRd() below!
    oenv <- Sys.getenv("_R_RD_MACROS_PACKAGE_DIR_", unset = NA)
    ## Restore (or unset) the env var on exit.
    on.exit(if (!is.na(oenv)) Sys.setenv("_R_RD_MACROS_PACKAGE_DIR_" = oenv)
            else Sys.unsetenv("_R_RD_MACROS_PACKAGE_DIR_"))
    Sys.setenv("_R_RD_MACROS_PACKAGE_DIR_" = normalizePath(dir))

    pg <- list_files_with_type(file.path(dir, "man"), "docs", full.names = TRUE,
                               OS_subdirs = c("unix", "windows"))
    ## NEWS.Rd (if any) is checked first, with fixed UTF-8 encoding.
    if(file.exists(nfile <- file.path(dir, "inst", "NEWS.Rd")))
        pg <- c(nfile, pg)
    bad <- character()
    for (f in pg) {
        ## FIXME: this may not work for no/fake install if the expressions
        ## involve the package under check.
        tmp <- tryCatch(suppressMessages(
            if (f == nfile)
                checkRd(f, encoding = "UTF-8", def_enc = TRUE,
                        stages = "install")
            else
                checkRd(f, encoding = enc, def_enc = def_enc,
                        macros = macros,
                        stages = c("build", "install", "render"))
        ), error = identity)
        if(inherits(tmp, "error")) {
            bad <- c(bad, f)
            if(!silent) message(geterrmessage())
        } else print(tmp, minlevel = minlevel)
    }
    ## Report failed files by base name only.
    if(length(bad)) bad <- sQuote(sub(".*/", "", bad))
    if(length(bad) > 1L)
        cat("problems found in ", paste(bad, collapse=", "), "\n", sep = "")
    else if(length(bad))
        cat("problem found in ", bad, "\n", sep = "")
    invisible()
}


### * .check_depdef

.check_depdef <-
function(package, dir, lib.loc = NULL, WINDOWS = FALSE)
{
    ## Find uses of deprecated, defunct, or platform-specific device
    ## functions in the code of an installed package ('package') or a
    ## source package ('dir').  'WINDOWS' says whether the Windows-only
    ## devices are acceptable.  Returns an object of class
    ## "check_depdef".
    bad_depr <- c("plclust")

    bad_def <- c("La.eigen", "tetragamma", "pentagamma",
                 "package.description", "gammaCody",
                 "manglePackageName", ".readRDS", ".saveRDS",
                 "mem.limits", "trySilent", "traceOn", "traceOff",
                 "print.coefmat", "anovalist.lm", "lm.fit.null",
                 "lm.wfit.null", "glm.fit.null", "tkcmd",
                 "tkfile.tail", "tkfile.dir", "tkopen", "tkclose",
                 "tkputs", "tkread", "Rd_parse", "CRAN.packages",
                 "zip.file.extract",
                 "real", "as.real", "is.real",
                 ".find.package", ".path.package")

    ## X11 may not work on even a Unix-alike: it needs X support
    ## (optional) at install time and an X server at run time.
    bad_dev <- c("quartz", "x11", "X11")
    if(!WINDOWS)
        bad_dev <- c(bad_dev,  "windows", "win.graph", "win.metafile", "win.print")

    bad <- c(bad_depr, bad_def, bad_dev)
    bad_closures <- character()
    found <- character()

    ## Names of closures in 'env' using any of the 'bad' globals; as a
    ## side effect, accumulates the offending closures' global names in
    ## 'found' (via <<-) for the per-category breakdown below.
    find_bad_closures <- function(env) {
        objects_in_env <- as.list(env, all.names = TRUE, sorted = TRUE)
        x <- lapply(objects_in_env,
                    function(v) {
                        if (typeof(v) == "closure")
                            codetools::findGlobals(v)
                    })
        names(x)[vapply(x,
                        function(s) {
                            res <- any(s %in% bad)
                            if(res) found <<- c(found, s)
                            res
                        },
                        NA)]
    }

    ## S4 generics (in code_env, from the enclosing frame) with methods
    ## using 'bad' globals.
    ## NOTE(review): the inherits() condition below looks inverted
    ## relative to find_bad_S4methods() in .check_dotInternal (which
    ## uses !inherits(...) && ...) -- confirm which is intended.
    find_bad_S4methods <- function(env) {
        gens <- .get_S4_generics(code_env)
        x <- lapply(gens, function(f) {
            tab <- get(methods:::.TableMetaName(f, attr(f, "package")),
                       envir = code_env)
            ## The S4 'system' does **copy** base code into packages ....
            any(unlist(eapply(tab, function(v) {
                if(!inherits(v, "derivedDefaultMethod")) FALSE
                else {
                    s <- codetools::findGlobals(v)
                    found <<- c(found, s)
                    any(s %in% bad)
                }
            })))
        })
        gens[unlist(x)]
    }

    ## Reference classes with methods using 'bad' globals.
    find_bad_refClasses <- function(refs) {
        cl <- names(refs)
        x <- lapply(refs, function(z) {
            any(vapply(z,
                       function(v) {
                           s <- codetools::findGlobals(v)
                           found <<- c(found, s)
                           any(s %in% bad)
                       },
                       NA))
        })
        cl[unlist(x)]
    }


    ## FIXME: these are set but not used.
    bad_S4methods <- list()
    bad_refs <- character()
    if(!missing(package)) {
        if(length(package) != 1L)
            stop("argument 'package' must be of length 1")
        dir <- find.package(package, lib.loc)
        ## Base packages are exempt; others are scanned via their
        ## (quietly loaded) namespace.
        if(package %notin% .get_standard_package_names()$base) {
            .load_namespace_quietly(package, dirname(dir))
            code_env <- asNamespace(package)
            bad_closures <- find_bad_closures(code_env)
            if(.isMethodsDispatchOn()) {
                bad_S4methods <- find_bad_S4methods(code_env)
                refs <- .get_ref_classes(code_env)
                if(length(refs)) bad_refs <- find_bad_refClasses(refs)
            }
        }
    }
    else {
        ## The dir case.
        if(missing(dir))
            stop("you must specify 'package' or 'dir'")
        dir <- file_path_as_absolute(dir)
        code_dir <- file.path(dir, "R")
        if(dir.exists(code_dir)) {
            ## Source the package code into a scratch environment.
            code_env <- new.env(hash = TRUE)
            dfile <- file.path(dir, "DESCRIPTION")
            meta <- if(file_test("-f", dfile))
                .read_description(dfile)
            else
                character()
            .source_assignments_in_code_dir(code_dir, code_env, meta)
            bad_closures <- find_bad_closures(code_env)
        }
    }

    ## Break the accumulated globals down by category.
    found <- sort(unique(found))
    deprecated <- found[found %in% bad_depr]
    defunct <- found[found %in% bad_def]
    devices <- found[found %in% bad_dev]

    out <- list(bad_closures = bad_closures, deprecated = deprecated,
                defunct = defunct, devices = devices)
    class(out) <- "check_depdef"
    out
}

format.check_depdef <-
function(x, ...)
{
    ## Render the results of the deprecated/defunct check as a character
    ## vector: one wrapped header (singular/plural chosen via ngettext)
    ## plus the pretty-printed offender names per non-empty component,
    ## with optional trailing advice text.
    section <- function(items, msg1, msg2, extra = NULL) {
        if(!length(items)) return(character())
        c(strwrap(ngettext(length(items), msg1, msg2)),
          .pretty_format(items),
          extra)
    }
    c(section(x$bad_closures,
              "Found an obsolete/platform-specific call in the following function:",
              "Found an obsolete/platform-specific call in the following functions:"),
      section(x$bad_S4methods,
              "Found an obsolete/platform-specific call in methods for the following S4 generic:",
              "Found an obsolete/platform-specific call in methods for the following S4 generics:"),
      section(x$bad_refs,
              "Found an obsolete/platform-specific call in methods for the following reference class:",
              "Found an obsolete/platform-specific call in methods for the following reference classes:"),
      section(x$deprecated,
              "Found the deprecated function:",
              "Found the deprecated functions:"),
      section(x$defunct,
              "Found the defunct/removed function:",
              "Found the defunct/removed functions:"),
      section(x$devices,
              "Found the platform-specific device:",
              "Found the platform-specific devices:",
              strwrap(paste("dev.new() is the preferred way to open a new device,",
                            "in the unlikely event one is needed.",
                            collapse = " "))))
}

### * .check_package_CRAN_incoming

## localOnly means to skip tests requiring Internet access.
## These are all done first.

.check_package_CRAN_incoming <-
function(dir, localOnly = FALSE, pkgSize = NA)
{
    out <- list()
    class(out) <- "check_package_CRAN_incoming"

    meta <- .get_package_metadata(dir, FALSE)
    lic_info <- analyze_license(meta["License"])
    ## Use later to indicate changes from FOSS to non-FOSS licence.
    foss <- lic_info$is_verified
    ## Record to notify about components extending a base license which
    ## permits extensions.
    if(length(extensions <- lic_info$extensions) &&
       ((length(components <- extensions$components) != 1L) ||
        (.license_component_is_for_stub_and_ok(components,
                                               dir) != 0L)) &&
       any(ind <- extensions$extensible)) {
        out$extensions <- extensions$components[ind]
        out$pointers <-
            Filter(length,
                   lapply(lic_info$pointers,
                          function(p) {
                              fp <- file.path(dir, p)
                              if(file_test("-f", fp)) {
                                  lines <- readLines(fp, warn = FALSE)
                                  ## Should this use the package
                                  ## encoding?
                                  ## (no, as we have LICENSE files with
                                  ## copyright signs in ASCII packages)
                                  pos <- grep("[^[:blank:]]", lines,
                                              useBytes = TRUE)
                                  c(p, if(len <- length(pos)) {
                                           lines[seq.int(from = pos[1L],
                                                         to = pos[len])]
                                  })
                              } else NULL
                          }))
    }

    out$Maintainer <- meta["Maintainer"]
    ## pick out 'display name'
    display <- gsub("<.*", "", as.vector(out$Maintainer))
    display <- sub("[[:space:]]+$", "",
                   sub("^[[:space:]]+", "", display, useBytes = TRUE),
                   useBytes = TRUE)
    ## RFC 5322 allows '.' in the display name, but 2822 did not.
    ## ',' separates email addresses.
    if(grepl("[,]", display, useBytes = TRUE) &&
       !grepl('^".*"$', display, useBytes = TRUE))
        out$Maintainer_needs_quotes <- TRUE
    if(!nzchar(display))
        out$empty_Maintainer_name <- TRUE
    ## Try to catch bad maintainer fields which give more than one
    ## person.  In principle, the field should be of the form
    ##   DISPLAY-NAME <ANGLE-ADDR>
    ## with the former (for simplicity) either a single quoted string,
    ## or several atoms.  (There are cases where <ANGLE-ADDR> does not
    ## follow whitespace, so simple tokenizing via scan() does not quite
    ## work.)
    ## TRUE iff 's' parses as a single address of the form
    ##   DISPLAY-NAME <ANGLE-ADDR>
    ## with nothing left over after the (first) angle-addr part,
    ## FALSE otherwise (including no match at all).
    check_maintainer_address <- function(s) {
        re <- "^[[:space:]]*([^<]*|\"([^\"]|\\\\\")*\")[[:space:]]*(<[^>]+>)[[:space:]]*(.*)[[:space:]]*$"
        m <- unlist(regmatches(s, regexec(re, s)))
        ## m[5L] is the trailing "rest" capture; a valid single address
        ## matches and leaves it empty.  ## && (m[2L] != "")
        ## (Testing m[2L] as well would check for a non-empty
        ## display-name, which we already do separately.)
        if(length(m)) m[5L] == "" else FALSE
    }
    ## NOTE: perhaps whitespace should be canonicalized further above?
    maintainer <- gsub("\n", " ", meta["Maintainer"], fixed = TRUE)
    if((maintainer != "ORPHANED") &&
         !check_maintainer_address(maintainer))
        out$Maintainer_invalid_or_multi_person <- TRUE

    ver <- meta["Version"]
    if(is.na(ver))
        stop("Package has no 'Version' field", call. = FALSE)
    if(grepl("(^|[.-])0[0-9]+", ver))
        out$version_with_leading_zeroes <- ver
    if((ver == "@VERSION@") &&
       !is.na(meta["Priority"]) &&
       (meta["Priority"] == "base"))
        ver <- meta["Version"] <- format(getRversion())
    unlisted_version <- unlist(package_version(ver))
    if(any(unlisted_version >= 1234 &
           unlisted_version != as.integer(format(Sys.Date(), "%Y"))) &&
       !config_val_to_logical(Sys.getenv("_R_CHECK_CRAN_INCOMING_SKIP_LARGE_VERSION_",
                                         "FALSE")))
        out$version_with_large_components <- ver

    ## Spell-check the DESCRIPTION file of the package in 'dir' with the
    ## aspell program (en_US master, en_GB extra dictionary, plus R's
    ## bundled "en_stats" dictionary), ignoring single-quoted spans,
    ## foo()/pkg::foo() references and <URL>/<DOI:>/<arXiv:> markup.
    ## If 'meta' is non-NULL, a temporary DESCRIPTION is written from it
    ## and checked instead ('dir' is rebound to the temporary directory,
    ## which is cleaned up on exit).
    .aspell_package_description_for_CRAN <- function(dir, meta = NULL) {
        if(!is.null(meta)) {
            dir.create(dir <- tempfile(pattern = "aspell"))
            on.exit(unlink(dir, recursive = TRUE))
            .write_description(meta, file.path(dir, "DESCRIPTION"))
        }
        ## Perl regexps (lookbehind/lookahead) for text to skip.
        ignore <-
            list(c("(?<=[ \t[:punct:]])'[^']*'(?=[ \t[:punct:]])",
                   "(?<=[ \t[:punct:]])([[:alnum:]]+::)?[[:alnum:]_.]*\\(\\)(?=[ \t[:punct:]])",
                   "(?<=[<])(https?://|DOI:|doi:|arXiv:)[^>]+(?=[>])"),
                 perl = TRUE)
        utils:::aspell_package_description(dir,
                                           ignore = ignore,
                                           control =
                                               c("--master=en_US",
                                                 "--add-extra-dicts=en_GB"),
                                           program = "aspell",
                                           dictionaries = "en_stats")
    }

    language <- meta["Language"]
    if((is.na(language) ||
        (language == "en") ||
        startsWith(language, "en-")) &&
       config_val_to_logical(Sys.getenv("_R_CHECK_CRAN_INCOMING_USE_ASPELL_",
                                        "FALSE"))) {
        a <- tryCatch(.aspell_package_description_for_CRAN(dir),
                      error = identity)
        if(inherits(a, "error"))
            out$aspell_package_description_error <- conditionMessage(a)
        else if(NROW(a))
            out$spelling <- a
    }

    ## Parse DESCRIPTION field 'field' of metadata vector 'desc' via
    ## str_parse_logic(), returning 'default' when the field is unset.
    ## (NOTE(review): exact coercion semantics live in str_parse_logic,
    ## defined elsewhere in this file.)
    parse_description_field <- function(desc, field, default)
        str_parse_logic(desc[field], default=default)

    ## Check for possibly mis-spelled field names.
    nms <- names(meta)
    stdNms <- .get_standard_DESCRIPTION_fields()
    nms <- nms[nms %notin% stdNms &
               !grepl(paste0("^(",
                             paste(c("X-CRAN",
                                     "X-schema.org",
                                     "Repository/R-Forge",
                                     "VCS/",
                                     "Config/"),
                                   collapse = "|"),
                             ")"),
                      nms)]
    if(length(nms) && ## Allow maintainer notes  <stdName>Note :
       length(nms <- nms[nms %notin% paste0(stdNms,"Note")]))
        out$fields <- nms


    uses <-  BUGS <- ACM <- character()
    for (field in c("Depends", "Imports", "Suggests")) {
        p <- strsplit(meta[field], " *, *")[[1L]]
        ## multicore has been superseded by parallel.  Almost all of
        ## snow has been too, so it should be optional.
        ## <NOTE>
        ## However, as of 2024-08 there are still 40 and 64 CRAN
        ## packages which at least suggest snow or doSNOW, respectively,
        ## apparently needing the functionality of snow not ported to
        ## parallel (MPI, ...).  Hence better to no longer NOTE their
        ## use.
        ##   p2 <- grep("^(multicore|snow|doSNOW)( |\\(|$)", p, value = TRUE)
        ##   uses <- c(uses, p2)
        ## </NOTE>
        ## BRugs and R2OpenBUGS have a SystemRequirements of OpenBUGS.
        ## which requires ix86 (not x86-64) and currently installs
        ## only on Linux using a compiler supporting -m32.
        ## Some of R2WinBUGS requires a version of BUGS, but not all.
        ##
        ## mzR has a long record of not installing: in 2023 with neither
        ## gcc 13 nor clang 16.  xcms and MSnbase require it.
        p2 <- grep("^(BRugs|R2OpenBUGS|mzR|xcms|MSnbase)( |\\(|$)",
                   p, value = TRUE)
        BUGS <- c(BUGS, p2)
        p2 <- grep("^(Akima|tripack)( |\\(|$)", p, value = TRUE)
        ACM <- c(ACM, p2)
    }
    if (length(uses))
        out$uses <- sort(unique(gsub("[[:space:]]+", " ", uses)))
    if (length(BUGS)) ## and other non-portable packages
        out$BUGS <- sort(unique(gsub("[[:space:]]+", " ", BUGS)))
    if (length(ACM))
        out$ACM <- sort(unique(gsub("[[:space:]]+", " ", ACM)))

    ## Check for non-Sweave vignettes (as indicated by the presence of a
    ## 'VignetteBuilder' field in DESCRIPTION) without
    ## 'build/vignette.rds'.

    vds <- character()
    if(!is.na(meta["VignetteBuilder"])) {
        if(!file.exists(vds <- file.path(dir, "build", "vignette.rds")))
            out$missing_vignette_index <- TRUE
        else
            vds <- readRDS(vds)[, "File"]
    }

    ## Check for missing build/{partial.rdb,pkgname.pdf}
    ## Code similar to build.R, but we really need step = 2 here as this
    ## may throw errors ...
    Rdb <- tryCatch(.build_Rd_db(dir, stages = NULL,
                                 os = c("unix", "windows"), step = 2),
                    error = identity)
    if(inherits(Rdb, "error"))
        out$Rd_db_build_error <- conditionMessage(Rdb)
    else if(length(Rdb)) {
        names(Rdb) <-
            substring(names(Rdb), nchar(file.path(dir, "man")) + 2L)
        Rdb0 <- Rdb
        containsBuildSexprs <-
            which(vapply(Rdb,
                         function(Rd) any(getDynamicFlags(Rd)["build"]),
                         NA))
        if(length(containsBuildSexprs)) {
            built_file <- file.path(dir, "build", "partial.rdb")
            if(!file.exists(built_file))
                out$missing_partial_rdb <- TRUE
            else {
                ## Merge in the partial db: there could be build Sexprs
                ## giving install/render Sexprs ...
                built <- readRDS(built_file)
                pos <- match(basename(names(Rdb)), names(built), 0L)
                Rdb[pos > 0L] <- built[pos]
            }
        }
        containsLaterSexprs <-
            any(vapply(Rdb,
                       function(Rd)
                           any(.Rd_get_Sexpr_build_time_info(Rd)["later"]),
                       NA))
        if(containsLaterSexprs &&
           !file.exists(file.path(dir, "build", "stage23.rdb")))
            out$missing_stage23_rdb <- TRUE
        ## (Could checks whether this really contain all possibly unsafe
        ## install/render Sexprs.)
        if(containsLaterSexprs &&
           config_val_to_logical(Sys.getenv("_R_CHECK_NOTE_MISSING_MANUAL_PDF",
                                            "FALSE")) &&
           !file.exists(file.path(dir, "build",
                                  paste0( meta[["Package"]], ".pdf"))))
            out$missing_manual_pdf <- TRUE
        ## Also check for \keyword and \concept entries which use Rd
        ## markup or (likely) give multiple index terms.
        ## This could be moved to .check_Rd_metadata() ...
        .fmt <- function(x) {
            Map(function(f, e) {
                    e <- vapply(e, .Rd_deparse, "")
                    c(paste0("  File ", sQuote(f), ":"),
                      paste0("    ",
                             gsub("\n",
                                  "\n      ",
                                  ifelse(nchar(e) < 50L,
                                         e,
                                         paste(substr(e, 1L, 50L),
                                               "[TRUNCATED]")))))
                },
                names(x), x)
        }
        bad <- lapply(Rdb,
                      function(Rd) {
                          Rd <- Rd[RdTags(Rd) %in% c("\\keyword", "\\concept")]
                          Rd[vapply(Rd,
                                    function(e)
                                        any(unlist(RdTags(e)) != "TEXT"),
                                    NA)]
                      })
        bad <- Filter(length, bad)
        if(length(bad))
            out$Rd_keywords_or_concepts_with_Rd_markup <- .fmt(bad)
        bad <- lapply(Rdb,
                      function(Rd) {
                          Rd <- Rd[RdTags(Rd) %in% c("\\keyword", "\\concept")]
                          Rd[grepl("[,;\t\n]",
                                   trimws(vapply(Rd, paste, "",
                                                 collapse = "\n"))) &
                             !vapply(Rd,
                                     function(e)
                                         any(unlist(RdTags(e)) != "TEXT"),
                                     NA)]
                  })
        bad <- Filter(length, bad)
        if(length(bad))
            out$Rd_keywords_or_concepts_more_than_one <- .fmt(bad)
        ## Also check for URLs which should use \doi with the DOI name.
        .fmt <- function(x) {
            Map(function(f, e) {
                    c(paste0("  File ", sQuote(f), ":"),
                      paste0("    ", e))
                },
                names(x), x)
        }
        bad <- lapply(Rdb0,
                      function(Rd) {
                          grep("https?://(dx[.])?doi[.]org/10",
                               .get_urls_from_Rd(Rd),
                               value = TRUE)
                      })
        bad <- Filter(length, bad)
        if(length(bad))
            out$Rd_URLs_which_should_use_doi <- .fmt(bad)
    }


    ## Check for vignette source (only) in old-style 'inst/doc' rather
    ## than 'vignettes'.
    vign_dir <- file.path(dir, "vignettes")
    if(length(vds)) {
        sources <- setdiff(list.files(file.path(dir, "inst", "doc")),
                           list.files(vign_dir))
        sources <- intersect(vds, sources)
    } else {
        pattern <- vignetteEngine("Sweave")$pattern
        sources <- setdiff(list.files(file.path(dir, "inst", "doc"),
                                      pattern = pattern),
                           list.files(vign_dir, pattern = pattern))
    }

    if(length(sources)) {
        out$have_vignettes_dir <- dir.exists(vign_dir)
        out$vignette_sources_only_in_inst_doc <- sources
    }

    ## Check for Java files without sources (in the right place)
    ## NB: this is only a basic check: that directory need
    ## not contain all (or any) of the sources.
    ## We might in due course want to prompt looking into it.
    if (foss && !dir.exists(file.path(dir, "java"))) {
        allfiles <- list.files(file.path(dir, "inst"),
                               full.names = TRUE, recursive = TRUE)
        allfiles <- c(allfiles,  # misused by ndtv, sisus
                      list.files(file.path(dir, "exec"), full.names = TRUE))
        javafiles <- grep(".*[.](class|jar)$", allfiles, value = TRUE)
        if(length(javafiles)) out$javafiles <- javafiles
    }

    ## Check for installing Java source files
    {
        dotjava <- list.files(file.path(dir, "inst"), pattern = ".*[.]java$",
                              full.names = TRUE, recursive = TRUE)
        dotjava <- c(dotjava,  # misused by ndtv
                     list.files(file.path(dir, "exec"), pattern = ".*[.]java$",
                                full.names = TRUE))
        if(length(dotjava)) out$dotjava <- dotjava
    }

    ## Check CITATION file for CRAN needs.
    ## Check the package CITATION file 'cfile' (with DESCRIPTION
    ## metadata 'meta') for CRAN publication issues.  Returns a
    ## (possibly empty) list of findings: offending calls made when the
    ## package is installed locally, errors/warnings while reading the
    ## file with a reduced library search path, problems formatting the
    ## resulting bibentries, and use of outdated personList()/citEntry().
    .check_citation_for_CRAN <- function(cfile, meta) {
        ## For publishing on CRAN, we need to be able to correctly
        ## process package CITATION files without having the package
        ## installed (actually, using only the base and recommended
        ## packages), which we cannot perfectly emulate when checking.
        ## The best we can easily do is reduce the library search path
        ## to the system and site library.  If the package is not
        ## installed there, check directly; otherwise, check for
        ## offending calls likely to cause trouble.
        ## Note however that in most cases, the issue is calling
        ## packageDescription() to get the package metadata, instead of
        ## using 'meta' as passed to readCitationFile() since R 2.8.0.
        ## Unfortunately, when the package is not installed,
        ## packageDescription() only warns and returns NA, or a vector
        ## of NAs if called with specific fields.  Subscripting the
        ## return value using $ will fail (as this needs lists);
        ## subscripting by other means, or using specific fields,
        ## incorrectly results in NAs.
        ## Hence, we also catch and report all warnings ...
        ## Temporarily empty the user library path; restored on exit.
        libpaths <- .libPaths()
        .libPaths(character())
        on.exit(.libPaths(libpaths))
        out <- list()
        installed <- nzchar(system.file(package = meta["Package"]))
        if(installed) {
            ## Ignore pre-2.8.0 compatibility calls to
            ## packageDescription() inside
            ##   if(!exists("meta") || is.null(meta))
            ccalls <- .parse_code_file(cfile, meta["Encoding"])
            ind <- vapply(ccalls,
                          function(e) {
                              is.call(e) &&
                              (length(e) == 3L) &&
                              identical(deparse(e[[1L]]), "if") &&
                              identical(deparse(e[[2L]]),
                                        "!exists(\"meta\") || is.null(meta)")
                          },
                          NA)
            if(any(ind))
                ccalls <- ccalls[!ind]
            ccalls <- .find_calls(ccalls, recursive = TRUE)
            ## Calls which are trouble when processing CITATION without
            ## the package installed.
            cnames <-
                intersect(unique(.call_names(ccalls)),
                          c("packageDescription", "library", "require"))
            if(length(cnames))
                out$citation_calls <- cnames
        }
        ## Read the CITATION file, collecting (rather than emitting)
        ## all warnings, and capturing a read error as a condition.
        .warnings <- character()
        cinfo <-
            withCallingHandlers(tryCatch(utils::readCitationFile(cfile,
                                                                 meta),
                                         error = identity),
                                warning = function(e) {
                                    .warnings <<- c(.warnings,
                                                    conditionMessage(e))
                                    tryInvokeRestart("muffleWarning")
                                })
        if(inherits(cinfo, "error")) {
            if(installed)
                out$citation_error_reading_if_installed <-
                    conditionMessage(cinfo)
            else
                out$citation_error_reading_if_not_installed <-
                    conditionMessage(cinfo)
            return(out)
        }
        if(length(.warnings))
            out$citation_trouble_when_reading <- unique(.warnings)
        ## If we can successfully read in the citation file, also check
        ## whether we can at least format the bibentries we obtained.
        cfmt <- tryCatch(format(cinfo, style = "text"),
                         warning = identity, error = identity)
        ## This only finds unbalanced braces by default, with messages
        ##   unexpected END_OF_INPUT ... { no }
        ##   unexpected '}'          ... } no {
        ## One can also find 'unknown Rd macros' by setting env var
        ## _R_UTILS_FORMAT_BIBENTRY_VIA_RD_PERMISSIVE_ to something
        ## true, and perhaps we should do this here.
        if(inherits(cfmt, "condition"))
            out$citation_problem_when_formatting <-
                conditionMessage(cfmt)

        ## Also capture calls to outdated personList() and citEntry()
        ## ('ccalls' was already computed above in the installed case).
        if(!installed) {
            ccalls <- .find_calls(.parse_code_file(cfile,
                                                   meta["Encoding"]),
                                  recursive = TRUE)
        }
        cnames <- .call_names(ccalls)
        if(any(cnames %in% c("personList", "as.personList")))
            out$citation_has_calls_to_personList_et_al <- TRUE
        if(any(cnames == "citEntry"))
            out$citation_has_calls_to_citEntry <- TRUE

        out
    }

    if(file.exists(cfile <- file.path(dir, "inst", "CITATION"))) {
        cinfo <- .check_citation_for_CRAN(cfile, meta)
        if(length(cinfo))
            out[names(cinfo)] <- cinfo
        ## Simply
        ##   out <- c(out, cinfo)
        ## strips the class attribute from out ...
    }

    ## Check Authors@R.
    if(!is.na(aar <- meta["Authors@R"]) &&
       ## DESCRIPTION is fully checked later on, so be careful.
       !inherits(aar <- tryCatch(str2expression(aar), error = identity),
                 "error")) {
        bad <- ((length(aar) != 1L) || !is.call(aar <- aar[[1L]]))
        if(!bad) {
            cname <- as.character(aar[[1L]])
            bad <-
                ((cname != "person") &&
                 ((cname != "c") ||
                  !all(vapply(aar[-1L],
                              function(e) {
                                  (is.call(e) &&
                                   is.name(x <- e[[1L]]) &&
                                   (as.character(x) == "person"))
                              },
                              FALSE))))
        }
        if(bad)
            out$authors_at_R_calls <- aar
        else {
            ## Catch messages about deprecated arguments in person() calls.
            aar <- meta["Authors@R"]
            aut <- tryCatch(.eval_with_capture(utils:::.read_authors_at_R_field(aar)),
                            error = identity)
            if(!inherits(aut, "error") && length(msg <- aut$message))
                out$authors_at_R_message <- msg
        }
    }
    if(is.na(meta["Authors@R"])) {
        aar <-
            utils:::.authors_at_R_field_from_author_and_maintainer(meta["Author"],
                                                                   maintainer)
        aar <- format(aar, style = "R")
        out$authors_at_R_missing <-
            paste(c("No Authors@R field in DESCRIPTION.",
                    "Please add one, modifying",
                    paste(c("  Authors@R:",
                            rep.int(strrep(" ", 12L),
                                    length(aar) - 1L)),
                          aar),
                    "as necessary."),
                  collapse = "\n")
    }

    ## Check Author field.
    auth <- trimws(as.vector(meta["Author"]))
    if(grepl("^Author *:", auth))
        out$author_starts_with_Author <- TRUE
    if(grepl("^(Authors@R *:|person *\\(|c *\\()", auth))
        out$author_should_be_authors_at_R <- auth

    ## Check Title field.
    title <- trimws(as.vector(meta["Title"]))
    title <- gsub("[\n\t]", " ", title)
    package <- meta["Package"]
    if (tolower(title) == tolower(package)) {
        out$title_is_name <- TRUE
    } else {
        if(grepl(paste0("^",
                        gsub(".", "[.]", package, fixed = TRUE),
                        "[ :]"), title, ignore.case = TRUE))
            out$title_includes_name <- TRUE
        language <- meta["Language"]
        if(is.na(language) ||
           (language == "en") ||
           startsWith(language, "en-")) {
            title2 <- toTitleCase(title)
            ## Keep single quoted elements unchanged.
            p <- "(^|(?<=[ \t[:punct:]]))'[^']*'($|(?=[ \t[:punct:]]))"
            m <- gregexpr(p, title, perl = TRUE)
            regmatches(title2, m) <- regmatches(title, m)
            if(title != title2)
                out$title_case <- c(title, title2)
        }
    }

    ## Check Description field.
    descr <- trimws(as.vector(meta["Description"]))
    descr <- gsub("[\n\t]", " ", descr)
    package <- meta["Package"]
    if(grepl(paste0("^['\"]?", package), ignore.case = TRUE, descr))
        out$descr_bad_start <- TRUE
    if(grepl("^(The|This|A|In this|In the) package", descr))
        out$descr_bad_start <- TRUE
    if(!isTRUE(out$descr_bad_start) && !grepl("^['\"]?[[:upper:]]", descr))
        out$descr_bad_initial <- TRUE
    descr <- strwrap(descr)
    if(any(ind <- grepl("(^|[^</\"])https?://", descr))) {
        ## Could try to filter out the matches for DOIs and arXiv ids
        ## noted differently below: not entirely straightforward when
        ## matching wrapped texts for to ease reporting ...
        out$descr_bad_URLs <- descr[ind]
    }
    if(any(ind <- grepl(paste(c("https?:.*doi.org/",
                                "(^|[^<])doi:",
                                "<doi[^:]",
                                "<10[.]"),
                              collapse = "|"),
                        descr, ignore.case = TRUE)))
        out$descr_bad_DOIs <- descr[ind]
    else if(any(ind <- grepl(
           # almost all others are publisher URLs that should be replaced by DOI markup
           "<https?:.*/10\\.\\d{4,}/.*?>",
           descr, ignore.case = TRUE)))
       out$descr_replace_by_DOI <- descr[ind]
    if(any(ind <- grepl(paste(c("<(arXiv|arxiv):(([[:alpha:].-]+/)?[[:digit:].]+)(v[[:digit:]]+)?([[:space:]]*\\[[^]]+\\])?>",
                                "https?://arxiv.org",
                                "(^|[^<])arxiv:",
                                "<arxiv[^:]"),
                              collapse = "|"),
                        descr, ignore.case = TRUE)))
        out$descr_bad_arXiv_ids <- descr[ind]

    ## Check URL field
    if(!is.na(v <- meta["URL"]) &&
       length(z <- .bad_DESCRIPTION_URL_field_parts(v)))
        out$url_field_parts <- z

    ## Check BugReports field
    if(!is.na(v <- meta["BugReports"])) {
        ## Should be a single URL: this is checked in check_meta()
        ## inside .check_packages().
        z <- parse_URI_reference(v)
        if((endsWith(tolower(z$authority), "github.com") ||
            endsWith(tolower(z$authority), "gitlab.com")) &&
           basename(z$path) != "issues") {
            w <- sprintf("%s/issues", sub("/$", "", v))
            out$bugreports <-
                paste(c("The BugReports field in DESCRIPTION has",
                        sprintf("  %s", v),
                        "which should likely be",
                        sprintf("  %s", w),
                        "instead."),
                      collapse = "\n")
        }
    }

    skip_dates <-
        config_val_to_logical(Sys.getenv("_R_CHECK_CRAN_INCOMING_SKIP_DATES_",
                                         "FALSE"))

    ## Check Date
    date <- trimws(as.vector(meta["Date"]))
    if(!is.na(date)) {
        dd <- strptime(date, "%Y-%m-%d", tz = "GMT")
        if (is.na(dd)) out$bad_date <- TRUE
        else if(!skip_dates && (as.Date(dd) < Sys.Date() - 31))
            out$old_date <- TRUE
    }

    ## Check build time stamp
    ptime <- trimws(as.vector(meta["Packaged"]))
    if(is.na(ptime)) {
        out$build_time_stamp_msg <-
            "The build time stamp is missing."
    } else {
        ts <- strptime(ptime, "%Y-%m-%d", tz = "GMT")
        if(is.na(ts)) {
            out$build_time_stamp_msg <-
                "The build time stamp has invalid/outdated format."
        }
        else if(!skip_dates && (as.Date(ts) < Sys.Date() - 31)) {
            out$build_time_stamp_msg <-
                "This build time stamp is over a month old."
        }
    }

    ## Check DESCRIPTION placeholders
    placeholders <-
        c(if(!is.na(x <- tolower(meta["Title"])) &&
             startsWith(x, "what the package does"))
              x,
          if(!is.na(x <- meta["Author"]) &&
             (x == "Who wrote it"))
              x,
          if(!is.na(x <- meta["Maintainer"]) &&
             (startsWith(x, "Who to complain to") ||
              startsWith(x, "The package maintainer")))
              x,
          if(!is.na(x <- tolower(meta["Description"])) &&
             (startsWith(x, "what the package does") ||
              startsWith(x, "more about what it does")))
              x)
    if(length(placeholders))
        out$placeholders <- placeholders

    if(!is.na(enc <- meta["Encoding"]) && (enc != "UTF-8"))
        out$encoding <- enc

    ## Are there non-ASCII characters in the R source code without a
    ## package encoding in DESCRIPTION?
    ## Note that checking always runs .check_package_ASCII_code() which
    ## however ignores comments.  Ideally, the checks would be merged,
    ## with the comment checking suitably conditionalized.
    ## Note also that this does not catch the cases where non-ASCII
    ## content in R source code cannot be re-encoded using a given
    ## package encoding.  Ideally, this would be checked for as well.
    if(is.na(meta["Encoding"])) {
        ## A variation on showNonASCII():
        find_non_ASCII_lines <- function(f) {
            ## Report the lines of file 'f' which contain non-ASCII
            ## content, formatted as "<lineno>: <text>" with non-ASCII
            ## bytes rendered as "<xx>" escapes.
            lines <- readLines(f, warn = FALSE)
            reenc <- iconv(lines, "latin1", "ASCII")
            ## A line is flagged when it cannot be represented in ASCII
            ## (NA from iconv) or the re-encoding changed it.
            bad <- is.na(reenc) | (reenc != lines)
            if(!any(bad))
                return(character())
            paste0(which(bad),
                   ": ",
                   iconv(lines[bad], "latin1", "ASCII", sub = "byte"))
        }
        code_files <- c(file.path(dir, "NAMESPACE"),
                        file.path(dir, "inst", "CITATION"))
        code_files <- code_files[file.exists(code_files)]
        if(dir.exists(file.path(dir, "R"))) {
            OS_subdirs <- c("unix", "windows")
            code_files <-
                c(code_files,
                  list_files_with_type(file.path(dir, "R"),
                                       "code",
                                       OS_subdirs = OS_subdirs))
        }
        names(code_files) <-
            file_path_relative_to(code_files, dir, parent = FALSE)
        lines <- Filter(length, lapply(code_files, find_non_ASCII_lines))
        if(length(lines))
            out$R_files_non_ASCII <- lines
    }

    if(file.exists(fp <- file.path(dir, "R",
                                   paste0(basename(dir),
                                          "-internal.R")))) {
        exprs <- parse(fp)
        tst <- function(e) {
            ## Is 'e' an assignment of the form '.Random.seed <- ...'?
            if(!is.call(e))
                return(FALSE)
            op <- as.character(e[[1L]])
            ## Check the operator before touching e[[2L]]: calls with no
            ## arguments have no second element.
            if((length(op) != 1L) || (op != "<-"))
                return(FALSE)
            target <- as.character(e[[2L]])
            (length(target) == 1L) && (target == ".Random.seed")
        }
        if(any(vapply(exprs, tst, NA)))
            out$R_files_set_random_seed <- basename(fp)
    }

    if(!is.na(size <- as.numeric(pkgSize)) &&
       size > as.numeric(Sys.getenv("_R_CHECK_CRAN_INCOMING_TARBALL_THRESHOLD_",
                                    unset = "5e6")))
        out$size_of_tarball <- size

    ## Check URLs.
    remote <-
        (!localOnly &&
         !config_val_to_logical(Sys.getenv("_R_CHECK_CRAN_INCOMING_SKIP_URL_CHECKS_IF_REMOTE_",
                                           "FALSE")))
    check_urls_in_parallel <-
        config_val_to_logical(Sys.getenv("_R_CHECK_CRAN_INCOMING_CHECK_URLS_IN_PARALLEL_",
                                         "FALSE"))
    if(!capabilities("libcurl") && remote)
        out$no_url_checks <- TRUE
    else if(is.null(out$Rd_db_build_error)) {
        ## Skip if building the Rd db failed.
        udb <- url_db_from_package_sources(dir)
        bad <- tryCatch(check_url_db(udb,
                                     remote = remote,
                                     parallel = check_urls_in_parallel),
                        error = identity)
        if(inherits(bad, "error")) {
            out$bad_urls <- bad
        } else if(NROW(bad)) {
            ## When checking a new submission, take the canonical CRAN
            ## package URL as ok, and signal variants using http instead
            ## of https as non-canonical instead of showing "not found".
            prefix <- "https://cran.r-project.org/package="
            ncp <- nchar(prefix)
            ind <- ((substring(tolower(bad$URL), 1L, ncp) == prefix) &
                    (substring(bad$URL, ncp + 1L) == package))
            if(any(ind))
                bad <- bad[!ind, ]
            prefix <- "http://cran.r-project.org/package="
            ncp <- nchar(prefix)
            ind <- ((substring(tolower(bad$URL), 1L, ncp) == prefix) &
                    (substring(bad$URL, ncp + 1L) == package))
            if(any(ind))
                bad[ind, c("Status", "Message")] <- ""
            if(NROW(bad)) {
                ## Drop non-OK results which are "basically ok" or one
                ## can do nothing about.
                dom <- parse_URI_reference(bad$URL)$authority
                val <- bad$Status
                ind <- ((endsWith(dom, "shinyapps.io") &
                         (val == "202")) |
                        (endsWith(dom, "linkedin.com") &
                         (val == "999")))
                if(any(ind))
                    bad <- bad[!ind, ]
            }
            if(NROW(bad))
                out$bad_urls <- bad
        }
        if(config_val_to_logical(Sys.getenv("_R_CHECK_CRAN_INCOMING_CHECK_FILE_URIS_",
                                            "FALSE"))) {
            ## Also check file URIs in packages.
            ## These only make sense relative to their parent.
            ## We could integrate this check into check_url_db() by e.g.
            ## passing the top-level package dir via a suitable env var,
            ## but this is not quite straightforward as the check code
            ## aggregates parents according to URI.
            urls <- udb$URL
            parts <- parse_URI_reference(urls)
            ind <- (parts[, "scheme"] %in% c("", "file"))
            fpaths1 <- fpaths0 <- parts[ind, "path"]
            parents <- udb[ind, "Parent"]
            ## Help files, vignettes (and more) can be accessed via the
            ## dynamic HTML help system.  This employs an internal HTTP
            ## server which handles
            ##   /doc/html /demo /library
            ## and relative paths from help system components resolving
            ## to such.
            ## (Note that these will not work in general, e.g. for the
            ## pdf refmans.)
            if(any(ind <- (startsWith(fpaths0, "../") &
                           grepl("^(inst/doc|man|demo)", parents)))) {
                ## Vignettes have document root
                ##   /library/<pkg>/doc
                ## Help pages have
                ##   /library/<pkg>/html
                foo <- rep.int("/library/<pkg>/<sub>", sum(ind))
                bar <- fpaths0[ind]
                while(length(pos <- which(startsWith(bar, "../")))) {
                    foo[pos] <- dirname(foo[pos])
                    bar[pos] <- substring(bar[pos], 4L)
                }
                fpaths1[ind] <- foo
            }
            fpaths1[grepl("^(/doc/html|/demo|/library)", fpaths1)] <- ""
            fpaths1[(fpaths1 == "index.html") &
                    startsWith(parents, "inst/doc")] <- ""
            ## (Of course, one could verify that the special cased paths
            ## really exist.)
            ppaths <- dirname(parents)
            pos <- which(!file.exists(file.path(ifelse(nzchar(ppaths),
                                                       file.path(dir,
                                                                 ppaths),
                                                       dir),
                                                fpaths1)))
            if(length(pos))
                out$bad_file_URIs <-
                    cbind(fpaths0[pos], parents[pos])
        }
        if(remote) {
            ## Also check arXiv pseudo URIs not yet converted to arXiv
            ## DOIs.
            pat <- "<(arXiv|arxiv):(([[:alpha:].-]+/)?[[:digit:].]+)(v[[:digit:]]+)?([[:space:]]*\\[[^]]+\\])?>"
            dsc <- meta["Description"]
            ids <- .gregexec_at_pos(pat, dsc, gregexpr(pat, dsc), 3L)
            if(length(ids)) {
                ini <- "10.48550/arXiv."
                ddb <- doi_db(paste0(ini, ids),
                              rep.int("DESCRIPTION", length(ids)))
                bad <- tryCatch(check_doi_db(ddb,
                                             parallel =
                                                 check_urls_in_parallel),
                                error = identity)
                if(!inherits(bad, "error") && length(bad))
                    out$bad_arXiv_ids <-
                        substring(bad$DOI, nchar(ini) + 1L)
            }
            ## Also check ORCID iDs.
            odb <- .ORCID_iD_db_from_package_sources(dir)
            if(NROW(odb)) {
                ## Only look at things that may be valid: the others are
                ## complained about elsewhere.
                ind <- grepl(.ORCID_iD_variants_regexp, odb[, 1L])
                odb <- odb[ind, , drop = FALSE]
            }
            if(NROW(odb) && requireNamespace("curl", quietly = TRUE)) {
                ids <- .ORCID_iD_canonicalize(odb[, 1L])
                pos <- which(!.ORCID_iD_is_alive(ids))
                if(length(pos))
                    out$bad_ORCID_iDs <- odb[pos, , drop = FALSE]
            }
            ## Also check ROR IDs.
            rdb <- .ROR_ID_db_from_package_sources(dir)
            if(NROW(rdb)) {
                ## Only look at things that may be valid: the others are
                ## complained about elsewhere.
                ind <- grepl(.ROR_ID_variants_regexp, rdb[, 1L])
                rdb <- rdb[ind, , drop = FALSE]
            }
            if(NROW(rdb) && requireNamespace("curl", quietly = TRUE)) {
                ids <- .ROR_ID_canonicalize(rdb[, 1L])
                pos <- which(!.ROR_ID_is_alive(ids))
                if(length(pos))
                    out$bad_ROR_IDs <- rdb[pos, , drop = FALSE]
            }
        }
    }

    ## Checks from here down require Internet access, so drop out now if we
    ## don't want that.
    if (localOnly)
        return(out)

    urls <- .get_standard_repository_URLs()

    ## If a package has a FOSS license, check whether any of its strong
    ## recursive dependencies restricts use.
    if(foss) {
        available <-
            utils::available.packages(utils::contrib.url(urls, "source"),
                                      filters = c("R_version", "duplicates"))
        ## We need the current dependencies of the package (so batch
        ## upload checks will not necessarily do "the right thing").
        package <- meta["Package"]
        depends <- c("Depends", "Imports", "LinkingTo")
        ## Need to be careful when merging the dependencies of the
        ## package (in case it is not yet available).
        if(package %in% rownames(available)) {
            available[package, depends] <- meta[depends]
        } else {
            entry <- rbind(meta[colnames(available)])
            rownames(entry) <- package
            available <- rbind(available, entry)
        }
        ldb <- analyze_licenses(available[, "License"], available)
        depends <- unlist(package_dependencies(package, available,
                                               recursive = TRUE))
        ru <- ldb$restricts_use
        pnames_restricts_use_TRUE <- rownames(available)[!is.na(ru) & ru]
        pnames_restricts_use_NA <- rownames(available)[is.na(ru)]
        bad <- intersect(depends, pnames_restricts_use_TRUE)
        if(length(bad))
            out$depends_with_restricts_use_TRUE <- bad
        bad <- intersect(depends, pnames_restricts_use_NA)
        if(length(bad))
            out$depends_with_restricts_use_NA <- bad
        bv <- parse_description_field(meta, "BuildVignettes", TRUE)
        if (!bv) out$foss_with_BuildVignettes <- TRUE
    }

    ## We do not want to use utils::available.packages() for now, as
    ## this unconditionally filters according to R version and OS type.
    ## <FIXME>
    ## This is no longer true ...
    ## </FIXME>
    .repository_db <- function(u) {
        ## Download and parse the compressed PACKAGES index of the
        ## repository with base URL 'u', keeping the standard repository
        ## db fields plus "Path", and recording the repository URL in an
        ## extra column of each row.
        con <- gzcon(url(sprintf("%s/src/contrib/PACKAGES.gz", u), "rb"))
        on.exit(close(con))
        fields <- c(.get_standard_repository_db_fields(), "Path")
        ## Hopefully all these fields are ASCII, or we would need to
        ## re-encode.
        pkgs <- read.dcf(con, fields)
        cbind(pkgs, Repository = u)
    }
    db <- tryCatch(lapply(urls, .repository_db), error = identity)
    if(inherits(db, "error")) {
        message("NB: need Internet access to use CRAN incoming checks")
        ## Actually, all repositories could be local file:// mirrors.
        return(out)
    }
    db <- do.call(rbind, db)

    ## Note that .get_standard_repository_URLs() puts the CRAN master first.
    CRAN <- urls[1L]

    ## Check for CRAN repository db overrides and possible conflicts.
    con <- url(sprintf("%s/src/contrib/PACKAGES.in", CRAN))
    odb <- read.dcf(con)
    close(con)
    ## For now (2022-09-22), PACKAGES.in is all ASCII, so there is no
    ## need to re-encode.  Eventually, it might be in UTF-8 ...
    entry <- odb[odb[, "Package"] == meta["Package"], ]
    entry <- entry[!is.na(entry) &
                   (names(entry) %notin% c("Package", "X-CRAN-History"))]
    if(length(entry)) {
        ## Check for conflicts between package license implications and
        ## repository overrides.  Note that the license info predicates
        ## are logicals (TRUE, NA or FALSE) and the repository overrides
        ## are character ("yes", missing or "no").
        if(!is.na(iif <- lic_info$is_FOSS) &&
           !is.na(lif <- entry["License_is_FOSS"]) &&
           ((lif == "yes") != iif))
            out$conflict_in_license_is_FOSS <- lif
        if(!is.na(iru <- lic_info$restricts_use) &&
           !is.na(lru <- entry["License_restricts_use"]) &&
           ((lru == "yes") != iru))
            out$conflict_in_license_restricts_use <- lru

        ## Wrap each element of 's' for display, indenting first lines
        ## by 2 and continuation lines by 4; one string per element.
        fmt <- function(s) {
            wrap1 <- function(e)
                paste(strwrap(e, indent = 2L, exdent = 4L),
                      collapse = "\n")
            unlist(lapply(s, wrap1))
        }
        nms <- names(entry)
        ## Report all overrides for visual inspection.
        entry <- fmt(sprintf("  %s: %s", nms, entry))
        names(entry) <- nms
        out$overrides <- entry
        fields <- intersect(names(meta), nms)
        if(length(fields)) {
            ## Find fields where package metadata and repository
            ## overrides are in conflict.
            ind <- ! unlist(Map(identical,
                                fmt(sprintf("  %s: %s", fields, meta[fields])),
                                entry[fields]))
            if(any(ind))
                out$conflicts <- fields[ind]
        }
    }

    archive_db <- CRAN_archive_db()
    packages_in_CRAN_archive <- names(archive_db)

    ## Package names must be unique within standard repositories when
    ## ignoring case.
    package <- meta["Package"]
    packages <- db[, "Package"]
    if(package %notin% packages) out$new_submission <- TRUE
    clashes <- character()
    pos <- which((tolower(packages) == tolower(package)) &
                 (packages != package))
    if(length(pos))
        clashes <-
            sprintf("%s [%s]", packages[pos], db[pos, "Repository"])
    ## If possible, also catch clashes with archived CRAN packages
    ## (which might get un-archived eventually).
    if(length(packages_in_CRAN_archive)) {
        pos <- which((tolower(packages_in_CRAN_archive) ==
                      tolower(package)) &
                     (packages_in_CRAN_archive != package))
        if(length(pos)) {
            clashes <-
                c(clashes,
                  sprintf("%s [CRAN archive]",
                          packages_in_CRAN_archive[pos]))
        }
    }
    if(length(clashes))
        out$bad_package <- list(package, clashes)

    ## Is this duplicated from another repository?
    repositories <- db[(packages == package) &
                       (db[, "Repository"] != CRAN),
                       "Repository"]
    if(length(repositories))
        out$repositories <- repositories

    ## Does this have strong dependencies not in mainstream
    ## repositories?  This should not happen, and hence is not compared
    ## against possibly given additional repositories.
    strong_dependencies <-
        setdiff(unique(c(.extract_dependency_package_names(meta["Depends"]),
                         .extract_dependency_package_names(meta["Imports"]),
                         .extract_dependency_package_names(meta["LinkingTo"]))),
                c(.get_standard_package_names()$base, db[, "Package"]))
    if(length(strong_dependencies)) {
        out$strong_dependencies_not_in_mainstream_repositories <-
            strong_dependencies
    }

    ## Does this have Suggests or Enhances not in mainstream
    ## repositories?
    suggests_or_enhances <-
        setdiff(unique(c(.extract_dependency_package_names(meta["Suggests"]),
                         .extract_dependency_package_names(meta["Enhances"]))),
                c(.get_standard_package_names()$base, db[, "Package"]))
    if(length(suggests_or_enhances)) {
        out$suggests_or_enhances_not_in_mainstream_repositories <-
            suggests_or_enhances
    }
    if(!is.na(aurls <- meta["Additional_repositories"])) {
        aurls <- .read_additional_repositories_field(aurls)
        ## Get available packages separately for each given URL, so that
        ## we can spot the ones which do not provide any packages.
        adb <-
            tryCatch(lapply(aurls,
                            function(u) {
                                utils::available.packages(utils::contrib.url(u,
                                                                             "source"),
                                                          filters =
                                                              c("R_version",
                                                                "duplicates"))
                            }),
                     error = identity)
        if(inherits(adb, "error")) {
            out$additional_repositories_analysis_failed_with <-
                conditionMessage(adb)
        } else {
            ## Check for additional repositories with no packages.
            ind <- vapply(adb, NROW, 0L) == 0L
            if(any(ind))
                out$additional_repositories_with_no_packages <-
                    aurls[ind]
            ## Merge available packages dbs and remove duplicates.
            adb <- do.call(rbind, adb)
            adb <- utils:::available_packages_filters_db$duplicates(adb)
            ## Ready.
            dependencies <- unique(c(strong_dependencies, suggests_or_enhances))
            pos <- match(dependencies, rownames(adb), nomatch = 0L)
            ind <- (pos > 0L)
            tab <- matrix(character(), nrow = 0L, ncol = 3L)
            if(any(ind))
                tab <- rbind(tab,
                             cbind(dependencies[ind],
                                   "yes",
                                   adb[pos[ind], "Repository"]))
            ind <- !ind
            if(any(ind))
                tab <- rbind(tab,
                             cbind(dependencies[ind],
                                   "no",
                                   "?"))
            ## Map Repository fields to URLs, and determine unused
            ## URLs.
            ## Note that available.packages() possibly adds Path
            ## information in the Repository field, so matching
            ## given contrib URLs to these fields is not trivial.
            unused <- character()
            for(u in aurls) {
                cu <- utils::contrib.url(u, "source")
                ind <- startsWith(tab[, 3L], cu)
                if(any(ind)) {
                    tab[ind, 3L] <- u
                } else {
                    unused <- c(unused, u)
                }
            }
            if(length(unused))
                tab <- rbind(tab, cbind("?", "?", unused))
            dimnames(tab) <- NULL
            out$additional_repositories_analysis_results <- tab
        }
    }

    ## Check DOIs.
    if(capabilities("libcurl") &&
       !config_val_to_logical(Sys.getenv("_R_CHECK_CRAN_INCOMING_SKIP_DOI_CHECKS_",
                                         "FALSE"))) {
        bad <- tryCatch(check_doi_db(doi_db_from_package_sources(dir,
                                                                 Rd = TRUE),
                                     parallel = check_urls_in_parallel),
                        error = identity)
        if(inherits(bad, "error") || NROW(bad))
            out$bad_dois <- bad
    }

    ## Is this an update for a package already on CRAN?
    db <- db[(packages == package) &
             (db[, "Repository"] == CRAN) &
             is.na(db[, "Path"]), , drop = FALSE]
    ## This drops packages in version-specific subdirectories.
    ## It also does not know about archived versions.
    if(!NROW(db)) {
        if(package %in% packages_in_CRAN_archive) {
            out$CRAN_archive <- TRUE
            v_m <- package_version(meta["Version"])
            v_a <- sub("^.*_(.*)\\.tar.gz$", "\\1",
                       basename(rownames(archive_db[[package]])))
            v_a <- max(package_version(v_a, strict = FALSE),
                       na.rm = TRUE)
            if(v_m <= v_a)
                out$bad_version <- list(v_m, v_a)
        }
        if(!foss)
            out$bad_license <- meta["License"]
        return(out)
    }

    ## Checks from this point down should be for a package already on CRAN

    ## For now, there should be no duplicates ...

    ## Package versions should be newer than what we already have on CRAN.

    v_m <- package_version(meta["Version"])
    v_d <- max(package_version(db[, "Version"]))
    if((v_m <= v_d) &&
       !config_val_to_logical(Sys.getenv("_R_CHECK_CRAN_INCOMING_SKIP_VERSIONS_",
                                         "FALSE")))
        out$bad_version <- list(v_m, v_d)
    if((v_m$major == v_d$major) && (v_m$minor >= v_d$minor + 10))
        out$version_with_jump_in_minor <- list(v_m, v_d)

    ## Check submission recency and frequency.
    current_db <- CRAN_current_db()
    mtimes <- c(current_db[match(package,
                                 sub("_.*", "", rownames(current_db)),
                                 nomatch = 0L),
                           "mtime"],
                archive_db[[package]]$mtime)
    if(length(mtimes)) {
        deltas <- Sys.Date() - as.Date(sort(mtimes, decreasing = TRUE))
        ## Number of days since last update.
        recency <- as.numeric(deltas[1L])
        if(recency < 7)
            out$recency <- recency
        ## Number of updates in past 6 months.
        frequency <- sum(deltas <= 180)
        if(frequency > 6)
            out$frequency <- frequency
    }

    ## Watch out for maintainer changes.
    ## Note that we cannot get the maintainer info from the PACKAGES
    ## files.
    db <- tryCatch(CRAN_package_db(), error = identity)
    if(inherits(db, "error")) return(out)

    meta1 <- db[db[, "Package"] == package, ]
    ## this can have multiple entries, e.g. for recommended packages.
    meta0 <- unlist(meta1[1L, ])
    m_m <- as.vector(meta["Maintainer"]) # drop name
    m_d <- meta0["Maintainer"]
    # There may be white space differences here
    m_m_1 <- gsub("[[:space:]]+", " ", m_m)
    m_d_1 <- gsub("[[:space:]]+", " ", m_d)
    if(!all(m_m_1 == m_d_1)) {
        ## strwrap is used below, so we need to worry about encodings.
        ## m_d is in UTF-8 already
        if(Encoding(m_m) == "latin1") m_m <- iconv(m_m, "latin1")
        out$new_maintainer <- list(m_m, m_d)
    }

    l_d <- meta0["License"]
    if(!foss && analyze_license(l_d)$is_verified)
        out$new_license <- list(meta["License"], l_d)

    ## for incoming check we may want to check for GNU make in
    ## SystemRequirements here in order to auto-accept packages once
    ## this was already accepted before
    if(config_val_to_logical(Sys.getenv("_R_CHECK_CRAN_INCOMING_NOTE_GNU_MAKE_",
                                        "FALSE"))){
        SysReq <- meta["SystemRequirements"]
        if(!is.na(SysReq) && grepl("GNU [Mm]ake", SysReq)) {
            out$GNUmake <- TRUE
        }
    }

    ## Re-check for some notes if enabled and current version was published recently enough.
    if(!inherits(year <- tryCatch(format(as.Date(meta0["Published"]), "%Y"),
                                     error = identity),
                    "error")){
        ## Possibly misspelled words: keep only those which are new
        ## relative to the published version:
        if(NROW(a <- out$spelling)
           && config_val_to_logical(Sys.getenv("_R_CHECK_CRAN_INCOMING_ASPELL_RECHECK_MAYBE_",
                                               "TRUE"))
           && (year >=
               as.numeric(Sys.getenv("_R_CHECK_CRAN_INCOMING_ASPELL_RECHECK_START_",
                                     "2013")))) {
            a0 <- .aspell_package_description_for_CRAN(meta = meta0)
            out$spelling <- a[a$Original %notin% a0$Original, ]
        }

        # Possible title_includes_name note: only report it if the title
        # actually changed.
        if(NROW(out$title_includes_name)
            && config_val_to_logical(Sys.getenv("_R_CHECK_CRAN_INCOMING_TITLE_INCLUDES_NAME_RECHECK_MAYBE_",
                                "TRUE"))
            && (year >= as.numeric(Sys.getenv("_R_CHECK_CRAN_INCOMING_TITLE_INCLUDES_NAME_RECHECK_START_",
                                     "2016")))
            && meta0["Title"] == meta["Title"]) {
                out$title_includes_name <- NULL
                }

        # Possible title case problems: only report them if the title
        # actually changed.
        if(NROW(out$title_case)
            && config_val_to_logical(Sys.getenv("_R_CHECK_CRAN_INCOMING_TITLE_CASE_RECHECK_MAYBE_",
                                           "TRUE"))
            && (year >= as.numeric(Sys.getenv("_R_CHECK_CRAN_INCOMING_TITLE_CASE_RECHECK_START_",
                                 "2016")))
            && meta0["Title"] == meta["Title"]) {
                out$title_case <- NULL
        }

        # Possible bad start of the Description text: only report it if it
        # is new relative to the published version:
        if(NROW(out$descr_bad_start)
            && config_val_to_logical(Sys.getenv("_R_CHECK_CRAN_INCOMING_DESCR_BAD_START_RECHECK_MAYBE_",
                                           "TRUE"))
            && (year >= as.numeric(Sys.getenv("_R_CHECK_CRAN_INCOMING_DESCR_BAD_START_RECHECK_START_",
                                 "2016")))) {
                descr0 <- trimws(as.vector(meta0["Description"]))
                descr0 <- gsub("[\n\t]", " ", descr0)
                if(grepl(paste0("^['\"]?", package), ignore.case = TRUE, descr0)
                        || grepl("^(The|This|A|In this|In the) package", descr0)){
                    out$descr_bad_start <- NULL
                }
        }

        # Possible GNU make usage: only report it if it is new relative to
        # the published version.
        if(NROW(out$GNUmake)
            && config_val_to_logical(Sys.getenv("_R_CHECK_CRAN_INCOMING_GNU_MAKE_RECHECK_MAYBE_",
                                 "TRUE"))
            && (year >= as.numeric(Sys.getenv("_R_CHECK_CRAN_INCOMING_GNU_MAKE_RECHECK_START_",
                                 "2015")))) {
                SysReq0 <- meta0["SystemRequirements"]
                if(!is.na(SysReq0) && grepl("GNU [Mm]ake", SysReq0)) {
                    out$GNUmake <- NULL
                }
        }
    }

    out
}

format.check_package_CRAN_incoming <-
function(x, ...)
{
    ## Collapse a character vector into one newline-separated string,
    ## or character() when there is nothing to report.
    fmt <- function(x) {
        if(length(x) == 0L)
            character()
        else
            paste(x, collapse = "\n")
    }

    c(character(),
      if(length(x$Maintainer))
          sprintf("Maintainer: %s", sQuote(lines2str(x$Maintainer, " ")))
      else
          "No maintainer field in DESCRIPTION file",
      fmt(c(if(isTRUE(x$Maintainer_invalid_or_multi_person))
                "The maintainer field is invalid or specifies more than one person",
            if(isTRUE(x$empty_Maintainer_name))
                'The maintainer field lacks a name',
            if(isTRUE(x$Maintainer_needs_quotes))
                'The display-name part of the maintainer field should be enclosed in ""')
          ),
      if(length(x$new_submission))
          "New submission",
      if(length(y <- x$bad_package))
          sprintf("Conflicting package names (submitted: %s, existing: %s)",
                  y[[1L]], y[[2L]]),
      if(length(y <- x$repositories))
          sprintf("Package duplicated from %s", y),
      if(length(y <- x$CRAN_archive))
          "Package was archived on CRAN",
      fmt(c(if(length(y <- x$bad_version))
                sprintf("Insufficient package version (submitted: %s, existing: %s)",
                        y[[1L]], y[[2L]]),
            if(length(y <- x$version_with_leading_zeroes))
                sprintf("Version contains leading zeroes (%s)", y),
            if(length(y <- x$version_with_large_components))
                sprintf("Version contains large components (%s)", y),
            if(length(y <- x$version_with_jump_in_minor))
                sprintf("Version jumps in minor (submitted: %s, existing: %s)",
                        y[[1L]], y[[2L]]))),
      fmt(c(if(length(y <- x$recency))
                sprintf("Days since last update: %d", y),
            if(length(y <- x$frequency))
                sprintf("Number of updates in past 6 months: %d", y))),
      if(length(y <- x$new_maintainer))
          paste(c("New maintainer:",
                  strwrap(y[[1L]], indent = 2L, exdent = 4L),
                  "Old maintainer(s):",
                  strwrap(y[[2L]], indent = 2L, exdent = 4L)),
                collapse = "\n"),
      fmt(c(if(length(y <- x$bad_license))
                sprintf("Non-FOSS package license (%s)", y),
            if(length(y <- x$new_license))
                paste(c("Change to non-FOSS package license.",
                        "New license:",
                        strwrap(y[[1L]], indent = 2L, exdent = 4L),
                        "Old license:",
                        strwrap(y[[2L]], indent = 2L, exdent = 4L)),
                      collapse = "\n"),
            if(length(y <- x$extensions)) {
                paste(c("License components with restrictions and base license permitting such:",
                        paste0("  ", y),
                        unlist(lapply(x$pointers,
                                      function(e) {
                                          c(sprintf("File '%s':", e[1L]),
                                            paste0("  ", e[-1L]))
                                      }))),
                      collapse = "\n")
            })),
      if(length(y <- x$aspell_package_description_error)) {
          paste(y, collapse = "\n")
      },
      if(NROW(y <- x$spelling)) {
          s <- split(sprintf("%d:%d", y$Line, y$Column), y$Original)
          paste(c("Possibly misspelled words in DESCRIPTION:",
                  sprintf("  %s (%s)",
                          names(s),
                          lapply(s, paste, collapse = ", "))),
                collapse = "\n")
      },
      if(isTRUE(x$foss_with_BuildVignettes)) {
          "FOSS licence with BuildVignettes: false"
      },
      if(length(y <- x$fields)) {
          paste(c("Unknown, possibly misspelled, fields in DESCRIPTION:",
                  sprintf("  %s", paste(sQuote(y), collapse = " "))),
                collapse = "\n")
      },
      fmt(c(if(length(y <- x$overrides)) {
                paste(c("CRAN repository db overrides:", y),
                      collapse = "\n")
            },
            if(length(y <- x$conflicts)) {
                paste(sprintf("CRAN repository db conflicts: %s",
                              sQuote(y)),
                      collapse = "\n")
            },
            if(length(y <- x$conflict_in_license_is_FOSS)) {
                sprintf("Package license conflicts with %s override",
                        sQuote(paste("License_is_FOSS:", y)))
            },
            if(length(y <- x$conflict_in_license_restricts_use)) {
                sprintf("Package license conflicts with %s override",
                        sQuote(paste("License_restricts_use:", y)))
            })),
      fmt(c(if(length(y <- x$depends_with_restricts_use_TRUE)) {
                paste(c("Package has a FOSS license but eventually depends on the following",
                        if(length(y) > 1L)
                            "packages which restrict use:"
                        else
                            "package which restricts use:",
                        strwrap(paste(y, collapse = ", "),
                                indent = 2L, exdent = 4L)),
                      collapse = "\n")
            },
            if(length(y <- x$depends_with_restricts_use_NA)) {
                paste(c("Package has a FOSS license but eventually depends on the following",
                        if(length(y) > 1L)
                            "packages which may restrict use:"
                        else
                            "package which may restrict use:",
                        strwrap(paste(y, collapse = ", "),
                                indent = 2L, exdent = 4L)),
                      collapse = "\n")
            })),
      fmt(c(if(length(y <- x$strong_dependencies_not_in_mainstream_repositories)) {
                  paste(c("Strong dependencies not in mainstream repositories:",
                          strwrap(paste(y, collapse = ", "),
                                  indent = 2L, exdent = 4L)),
                        collapse = "\n")
              },
              if(length(y <- x$suggests_or_enhances_not_in_mainstream_repositories)) {
                  paste(c("Suggests or Enhances not in mainstream repositories:",
                          strwrap(paste(y, collapse = ", "),
                                  indent = 2L, exdent = 4L)),
                        collapse = "\n")
              },
              if(length(y <- x$additional_repositories_analysis_failed_with)) {
                  paste(c("Using Additional_repositories specification failed with message:",
                          paste0("  ", y)),
                        collapse = "\n")
              },
              if(length(y <- x$additional_repositories_analysis_results)) {
                  paste(c("Availability using Additional_repositories specification:",
                          sprintf("  %s   %s   %s",
                                  format(y[, 1L], justify = "left"),
                                  format(y[, 2L], justify = "right"),
                                  format(y[, 3L], justify = "left"))),
                        collapse = "\n")
              },
              if(length(y <- x$additional_repositories_with_no_packages)) {
                  paste(c("Additional repositories with no packages:",
                          paste0("  ", y)),
                        collapse = "\n")
              })),
      if(length(y <- x$uses)) {
          paste(if(length(y) > 1L)
                "Uses the superseded packages:" else
                "Uses the superseded package:",
                paste(sQuote(y), collapse = ", "))
      },
      if(length(y <- x$BUGS)) {
          paste(if(length(y) > 1L)
                "Uses the non-portable packages:" else
                "Uses the non-portable package:",
                paste(sQuote(y), collapse = ", "))
      },
      if(length(y <- x$ACM)) {
          paste(if(length(y) > 1L)
                "Uses the ACM-licensed packages:" else
                "Uses the ACM-licensed package:",
                paste(sQuote(y), collapse = ", "))
      },
      if(length(y <- x$authors_at_R_calls)) {
          "Authors@R field should be a call to person(), or combine such calls."
      },
      if(length(y <- x$authors_at_R_message)) {
          paste(c("Authors@R field gives persons with deprecated or bad elements:",
                  paste0("  ", y)),
                collapse = "\n")
      },
      if(length(y <- x$authors_at_R_missing)) y,
      if(length(y <- x$author_starts_with_Author)) {
          "Author field starts with 'Author:'."
      },
      if(length(y <- x$author_should_be_authors_at_R)) {
          paste(c("Author field should be Authors@R.  Current value is:",
                  paste0("  ", gsub("\n", "\n  ", y, fixed=TRUE))),
                collapse = "\n")
      },
      if(length(y <- x$vignette_sources_only_in_inst_doc)) {
          if(isFALSE(x$have_vignettes_dir))
              paste(c("Vignette sources in 'inst/doc' with no 'vignettes' directory:",
                      strwrap(paste(sQuote(y), collapse = ", "),
                              indent = 2L, exdent = 2L),
                      "A 'vignettes' directory is required as from R 3.1.0"),
                    collapse = "\n")
          else
              paste(c("Vignette sources in 'inst/doc' missing from the 'vignettes' directory:",
                      strwrap(paste(sQuote(y), collapse = ", "),
                              indent = 2L, exdent = 2L)),
                    collapse = "\n")
      },
      if(length(y <- x$missing_vignette_index)) {
          "Package has a VignetteBuilder field but no prebuilt vignette index."
      },
      if(length(y <- x$Rd_db_build_error)) {
          paste(c("Reading Rd files failed with message:",
                  paste0("  ", y)),
                collapse = "\n")
      },
      fmt(c(if(length(y <- x$missing_partial_rdb)) {
                paste(strwrap("Package has help file(s) containing build-stage \\Sexpr{} expressions but no 'build/partial.rdb' file."),
                      collapse = "\n")
            },
            if(length(y <- x$missing_stage23_rdb)) {
                paste(strwrap("Package has help file(s) containing later-stage \\Sexpr{} expressions but no 'build/stage23.rdb' file."),
                      collapse = "\n")
            },
            if(length(y <- x$missing_manual_pdf)) {
                paste(strwrap("Package has help file(s) containing install/render-stage \\Sexpr{} expressions but no prebuilt PDF manual."),
                      collapse = "\n")
            })),
      fmt(c(if(length(y <- x$dotjava)) {
                "Package installs .java files."
            },
            if(length(y <- x$javafiles)) {
                "Package has FOSS license, installs .class/.jar but has no 'java' directory."
            })),
      fmt(c(if(length(y <- x$citation_calls)) {
                paste(c("Package CITATION file contains call(s) to:",
                        strwrap(paste(y, collapse = ", "),
                                indent = 2L, exdent = 4L)),
                      collapse = "\n")
            },
            if(length(y <- x$citation_error_reading_if_installed)) {
                paste(c("Reading CITATION file fails with",
                        paste0("  ", gsub("\n", "\n  ", y))),
                      collapse = "\n")
            },
            if(length(y <- x$citation_error_reading_if_not_installed)) {
                paste(c("Reading CITATION file fails with",
                        paste0("  ", gsub("\n", "\n  ", y)),
                        "when package is not installed."),
                      collapse = "\n")
            },
            if(length(y <- x$citation_trouble_when_reading)) {
                paste(c("Problems when reading CITATION file:",
                        paste0("  ", y)),
                      collapse = "\n")
            },
            if(length(y <- x$citation_problem_when_formatting)) {
                paste(c("Problems when formatting CITATION entries:",
                        paste0("  ", y)),
                      collapse = "\n")
            },
            if(isTRUE(x$citation_has_calls_to_personList_et_al)) {
                paste(strwrap("Package CITATION file contains call(s) to old-style personList() or as.personList().  Please use c() on person objects instead."),
                      collapse = "\n")
            },
            if(isTRUE(x$citation_has_calls_to_citEntry)) {
                paste(strwrap("Package CITATION file contains call(s) to old-style citEntry().  Please use bibentry() instead."),
                      collapse = "\n")
            }
            )),
      fmt(c(if(length(y <- x$bad_urls)) {
                if(inherits(y, "error"))
                    paste(c("Checking URLs failed with message:",
                            paste0("  ", conditionMessage(y))),
                          collapse = "\n")
                else
                    paste(c(if(length(y) > 1L)
                                "Found the following (possibly) invalid URLs:"
                            else
                                "Found the following (possibly) invalid URL:",
                            paste0("  ", gsub("\n", "\n    ", format(y), fixed=TRUE))),
                          collapse = "\n")
            },
            if(length(y) && any(nzchar(z <- y$CRAN))) {
                ul <- tolower(z)
                indp <- (grepl("^https?://cran.r-project.org/web/packages",
                               ul) &
                         !grepl("^https?://cran.r-project.org/web/packages/[.[:alnum:]_]+(html|pdf|rds)$",
                                ul))
                indv <- grepl("https?://cran.r-project.org/web/views/[[:alnum:]]+[.]html$",
                              ul)
                paste(c(if(any(indp)) {
                            c("The canonical URL of the CRAN page for a package is ",
                              "  https://CRAN.R-project.org/package=pkgname")
                        },
                        if(any(indv)) {
                            c("The canonical URL of the CRAN page for a task view is ",
                              "  https://CRAN.R-project.org/view=viewname")
                        },
                        if(any(nzchar(z) & !indp & !indv)) {
                            "Canonical CRAN.R-project.org URLs use https."
                        }),
                      collapse = "\n")
            },
            if(length(y) && any(nzchar(y$New)) &&
               config_val_to_logical(Sys.getenv("_R_CHECK_URLS_SHOW_301_STATUS_", "FALSE"))) {
                paste(strwrap("For content that is 'Moved Permanently', please change http to https, add trailing slashes, or replace the old by the new URL."),
                      collapse = "\n")
            },
            if(length(y) && any(nzchar(y$Spaces))) {
                "  Spaces in an http[s] URL should probably be replaced by %20"
            },
            if(length(y) && any(ind <- nzchar(z <- y$R))) {
                ul <- tolower(z[ind])
                elts <- unique(sub("^http://([^.]+)[.].*", "\\1", ul))
                paste(sprintf("  Canonical %s.R-project.org URLs use https.",
                              elts),
                      collapse = "\n")
            },
            if(length(y) &&
               any(ind <-
                       (grepl(re_or(c("^https://pubmed.ncbi.nlm.nih.gov/[0-9]+",
                                      "^https://www.ncbi.nlm.nih.gov/pmc/articles/PMC[0-9]+/$",
                                      "^https://academic.oup.com/.*(/[0-9]*){4}$",
                                      "^https://www.sciencedirect.com/science/article")),
                              y$URL)))) {
                ## <FIXME>
                ## Ideally we would complain about such URLs in general
                ## and not only when the URL checks were not OK.
                paste(c("Please use DOIs for the following publisher URLs:",
                        paste0("  ", y$URL[ind])),
                      collapse = "\n")
                ## </FIXME>
            },
            if(length(y <- x$no_url_checks) && y) {
                "Checking URLs requires 'libcurl' support in the R build."
            })),
      if(length(y <- x$bad_file_URIs)) {
          paste(c(if(NROW(y) > 1L)
                      "Found the following (possibly) invalid file URIs:"
                  else
                      "Found the following (possibly) invalid file URI:",
                  sprintf("  URI: %s\n    From: %s", y[, 1L], y[, 2L])),
                collapse = "\n")
      },
      fmt(if(length(y <- x$bad_dois)) {
              if(inherits(y, "error"))
                  paste(c("Checking DOIs failed with message:",
                           paste0("  ", conditionMessage(y))),
                        collapse = "\n")
              else
                  paste(c(if(length(y) > 1L)
                              "Found the following (possibly) invalid DOIs:"
                          else
                              "Found the following (possibly) invalid DOI:",
                          paste0("  ", gsub("\n", "\n    ", format(y),
                                            fixed = TRUE))),
                        collapse = "\n")
          }),
      fmt(if(length(y <- x$bad_arXiv_ids)) {
              paste(c(if(length(y) > 1L)
                          "The Description field contains the following (possibly) invalid arXiv ids:"
                      else
                          "The Description field contains the following (possibly) invalid arXiv id:",
                      paste0("  ", gsub("\n", "\n    ", format(y),
                                        fixed = TRUE))),
                    collapse = "\n")
          }),
      fmt(if(length(y <- x$bad_ORCID_iDs)) {
              paste(c(if(NROW(y) > 1L)
                          "Found the following (possibly) invalid ORCID iDs:"
                      else
                          "Found the following (possibly) invalid ORCID iD:",
                      sprintf("  iD: %s\t(from: %s)",
                              unlist(y[, 1L]),
                              vapply(y[, 2L], paste, "",
                                     collapse = ", "))),
                    collapse = "\n")
          }),
      fmt(if(length(y <- x$bad_ROR_IDs)) {
              paste(c(if(NROW(y) > 1L)
                          "Found the following (possibly) invalid ROR IDs:"
                      else
                          "Found the following (possibly) invalid ROR IDs:",
                      sprintf("  ID: %s\t(from: %s)",
                              unlist(y[, 1L]),
                              vapply(y[, 2L], paste, "",
                                     collapse = ", "))),
                    collapse = "\n")
          }),
      fmt(if(length(y <- x$encoding))
              c(sprintf("Package encoding '%s' is deprecated.", y),
                "Please change to UTF-8 for non-ASCII content.")),
      if(length(y <- x$R_files_non_ASCII)) {
          paste(c("No package encoding and non-ASCII characters in the following R files:",
                  paste0("  ", names(y), "\n    ",
                         vapply(y, paste, "", collapse = "\n    "),
                         collapse = "\n")),
                collapse = "\n")
      },
      if(length(y <- x$R_files_set_random_seed)) {
          paste(c(sprintf("File '%s' sets .Random.seed.",
                          file.path("R", y)),
                  "This is usually neither needed nor wanted."),
                collapse = "\n")
      },
      fmt(c(if(length(x$title_is_name)) {
                "The Title field is just the package name: provide a real title."
            },
            if(length(x$title_includes_name)) {
                "The Title field starts with the package name."
            },
            if(length(y <- x$title_case)) {
                paste(c("The Title field should be in title case. Current version is:",
                        paste0("  ", sQuote(y[1L])),
                        "In title case that is:",
                        paste0("  ", sQuote(y[2L]))),
                      collapse = "\n")
            })),
      fmt(c(if(length(x$descr_bad_initial)) {
                "The Description field should start with a capital letter."
            },
            if(length(x$descr_bad_start)) {
                "The Description field should not start with the package name,\n  'This package' or similar."
            },
            if(length(y <- x$descr_bad_URLs)) {
                paste(c("The Description field contains",
                        paste0("  ", y),
                        "Please enclose URLs in angle brackets (<...>)."),
                      collapse = "\n")
            },
            if(length(y <- x$descr_bad_DOIs)) {
                paste(c("The Description field contains",
                        paste0("  ", y),
                        "Please write DOIs as <doi:prefix/suffix>."),
                      collapse = "\n")
            },
            if(length(y <- x$descr_bad_arXiv_ids)) {
                paste(c("The Description field contains",
                        paste0("  ", y),
                        "Please refer to arXiv e-prints via their arXiv DOI <doi:10.48550/arXiv.YYMM.NNNNN>."),
                      collapse = "\n")
            },
           if(length(y <- x$descr_replace_by_DOI)) {
               paste(c("The Description field contains",
                       paste0("  ", y),
                       "Please use permanent DOI markup for linking to publications as in <doi:prefix/suffix>."),
                       collapse = "\n")
            }
           )),
      fmt(c(if(length(y <- x$url_field_parts)) {
                paste(c("The URL field contains the following bad parts:",
                        paste0("  ", y),
                        strwrap("The URL field should be a list of URLs separated by commas or whitespace.")),
                      collapse = "\n")
            })),
      if(length(y <- x$bugreports)) y,
      fmt(c(if(length(x$GNUmake)) {
                "GNU make is a SystemRequirements."
            })),
      fmt(c(if(length(x$bad_date)) {
                "The Date field is not in ISO 8601 yyyy-mm-dd format."
            },
            if(length(x$old_date)) {
                "The Date field is over a month old."
            })),
      if(length(y <- x$build_time_stamp_msg)) y,
      if(length(y <- x$placeholders)) {
          paste(c("DESCRIPTION fields with placeholder content:",
                  paste0("  ",
                         unlist(strsplit(formatDL(y,
                                                  style = "list",
                                                  indent = 2L),
                                         "\n", fixed = TRUE)))),
                collapse = "\n")
      },
      if(length(y <- x$size_of_tarball))
          paste("Size of tarball:", y, "bytes"),
      fmt(c(if(length(y <- x$Rd_keywords_or_concepts_with_Rd_markup))
                paste(c("Found the following \\keyword or \\concept entries with Rd markup:",
                        unlist(y)),
                      collapse = "\n"),
            if(length(y <- x$Rd_keywords_or_concepts_more_than_one))
                paste(c("Found the following \\keyword or \\concept entries",
                        "which likely give several index terms:",
                        unlist(y)),
                      collapse = "\n"))),
      fmt(c(if(length(y <- x$Rd_URLs_which_should_use_doi))
                paste(c("Found the following URLs which should use \\doi (with the DOI name only):",
                        unlist(y)),
                      collapse = "\n")))
      )
}

print.check_package_CRAN_incoming <-
function(x, ...)
{
    ## Print the formatted CRAN incoming check results, separating the
    ## individual issues by blank lines; print nothing if there are no
    ## results.  Returns 'x' invisibly, as print methods should.
    msgs <- format(x, ...)
    if(length(msgs))
        writeLines(paste(msgs, collapse = "\n\n"))
    invisible(x)
}

### * .check_Rd_metadata

.check_Rd_metadata <-
function(package, dir, lib.loc = NULL)
{
    ## Package-level Rd metadata checks: Rd names and aliases must be
    ## unique within a package, every file needs an \alias, and
    ## "<pkg>-package" overview aliases must match the package name.

    ## Note that we cannot use Rd_aliases(), as this does
    ##   if(length(aliases))
    ##       sort(unique(unlist(aliases, use.names = FALSE)))

    out <- structure(list(), class = "check_Rd_metadata")

    if(!missing(package)) {
        if(length(package) != 1L)
            stop("argument 'package' must be of length 1")
        dir <- find.package(package, lib.loc)
        rds <- file.path(dir, "Meta", "Rd.rds")
        ## The metadata db should always exist for an installed package
        ## (potentially with 0 rows); otherwise leave it NULL.
        meta <- if(file_test("-f", rds)) readRDS(rds) # else NULL
    } else {
        package <- .get_package_metadata(dir)["Package"]
        meta <- Rd_contents(Rd_db(dir = dir))
    }

    if(NROW(meta) == 0L)
        return(out)

    rd_files   <- meta$File
    rd_names   <- meta$Name
    rd_aliases <- meta$Aliases

    ## More than one file documenting the same \name?
    by_name <- split(rd_files, rd_names)
    dups <- by_name[lengths(by_name) > 1L]
    if(length(dups))
        out$files_with_duplicated_names <- dups

    ## More than one file providing the same \alias?
    alias_counts <- lengths(rd_aliases)
    by_alias <- split(rep.int(rd_files, alias_counts),
                      unlist(rd_aliases, use.names = FALSE))
    dups <- by_alias[lengths(by_alias) > 1L]
    if(length(dups))
        out$files_with_duplicated_aliases <- dups

    ## Files with no \alias at all.
    no_alias <- rd_files[alias_counts == 0L]
    if(length(no_alias))
        out$files_without_aliases <- no_alias

    ## "<pkg>-package" aliases not matching the package name are
    ## invalid.  The package's own overview alias is allowed, compared
    ## case-insensitively: package names in standard repositories are
    ## unique ignoring case.
    all_aliases <- unlist(rd_aliases)
    names(all_aliases) <- rep.int(rd_files, alias_counts) # again ...
    pkg_aliases <- all_aliases[endsWith(all_aliases, "-package")]
    own_alias <- sprintf("%s-package", package)
    if(own_alias %in% pkg_aliases)
        pkg_aliases <-
            pkg_aliases[tolower(pkg_aliases) != tolower(own_alias)]
    if(length(pkg_aliases))
        out$files_with_bad_package_aliases <-
            split(pkg_aliases, names(pkg_aliases))

    out
}

format.check_Rd_metadata <-
function(x, ...)
{
    ## Format the results of .check_Rd_metadata() for reporting: one
    ## character vector, empty when there is nothing to report.
    c(character(),
      ## Use the exact element name assigned by .check_Rd_metadata()
      ## ('files_with_duplicated_names'): the previous
      ## 'x$files_with_duplicated_name' only worked via the partial
      ## matching of '$'.
      if(length(bad <- x$files_with_duplicated_names)) {
          unlist(lapply(names(bad),
                 function(nm) {
                     c(gettextf("Rd files with duplicated name '%s':",
                                nm),
                       .pretty_format(bad[[nm]]))
                 }))
      },
      if(length(bad <- x$files_with_duplicated_aliases)) {
          unlist(lapply(names(bad),
                 function(nm) {
                     c(gettextf("Rd files with duplicated alias '%s':",
                                nm),
                       .pretty_format(bad[[nm]]))
                 }))
      },
      if(length(bad <- x$files_without_aliases)) {
          c(gettext("Rd files without \\alias:"),
            .pretty_format(bad))
      },
      if(length(bad <- x$files_with_bad_package_aliases)) {
          unlist(lapply(names(bad),
                        function(nm) {
                            c(gettextf("Invalid package aliases in Rd file '%s':",
                                       nm),
                              .pretty_format(bad[[nm]]))
                        }))
      })
}

## * checkRdContents

## NOTE: this checks displayed content, not Rd_contents() metadata

checkRdContents <- # was  .check_Rd_contents <-
function(package, dir, lib.loc = NULL, chkInternal = NULL)
{
    ## Content checks on the Rd objects of a package: a missing
    ## \description section, \arguments items with an all-whitespace
    ## description, and auto-generated (prompt-style) content which
    ## still needs editing.
    out <- structure(list(), class = "checkRdContents") # was "check_Rd_contents"

    ## Argument handling.
    if(!missing(package)) {
        if(length(package) != 1L)
            stop("argument 'package' must be of length 1")
        ## Using package installed in @code{dir} ...
        dir <- find.package(package, lib.loc)
    } else {
        if(missing(dir))
            stop("you must specify 'package' or 'dir'")
        ## Using sources from directory @code{dir} ...
        if(!dir.exists(dir))
            stop(gettextf("directory '%s' does not exist", dir),
                 domain = NA)
        dir <- file_path_as_absolute(dir)
    }

    ## Anything which is not a scalar TRUE/FALSE means "check internal
    ## pages, but treat them specially".
    check_internal_specially <- FALSE
    if(!isTRUE(chkInternal) && !isFALSE(chkInternal))
        chkInternal <- check_internal_specially <- TRUE

    db <- if(!missing(package))
              Rd_db(package, lib.loc = dirname(dir))
          else
              Rd_db(dir = dir)

    for(fname in names(db)) {
        rd <- db[[fname]]

        ## If !chkInternal, exclude internal Rd objects from further
        ## computations.  Otherwise, maybe treat them specially, and
        ## ignore arguments with no description.
        internal <- "internal" %in% .Rd_get_metadata(rd, "keyword")
        if(internal && !chkInternal) next
        special <- internal && check_internal_specially

        ## \description is mandatory except for package overviews.
        no_description <-
            !identical("package", .Rd_get_doc_type(rd)) &&
            !("\\description" %in% RdTags(rd))

        ## Arguments with no description.
        empty_args <- if(special) NULL else {
            arg_table <- .Rd_get_argument_table(rd)
            arg_table[grepl("^[[:space:]]*$", arg_table[, 2L]), 1L]
        }

        ## Autogenerated Rd content which needs editing.
        autogen <- .Rd_get_offending_autogenerated_content(rd)

        if(no_description || length(empty_args) || length(autogen))
            out[[fname]] <-
                list(missing_description = no_description,
                     arguments_with_no_description = empty_args,
                     offending_autogenerated_content = autogen)
    }

    out
}

format.checkRdContents <-
function(x, ...)
{
    ## Shorten long content to a single display line, keeping either
    ## the head or (for \details entries) the tail.
    glimpse <- function(s, tail = FALSE) {
        s <- lines2str(s, " ")
        nc <- nchar(s)
        if(nc <= 63L)
            s
        else if(tail)
            paste0("...", substr(s, nc - 59L, nc))
        else
            paste0(substr(s, 1L, 60L), "...")
    }
    ## Per-file report: empty argument descriptions and auto-generated
    ## content which still needs editing.
    fmt1 <- function(nm) {
        y <- x[[nm]]
        c(character(),
          if(length(bad_args <- y[["arguments_with_no_description"]])) {
              c(gettextf("Argument items with no description in Rd file '%s':",
                         nm),
                .pretty_format(bad_args))
          },
          if(length(autogen <- y[["offending_autogenerated_content"]])) {
              c(gettextf("Auto-generated content requiring editing in Rd file '%s':",
                         nm),
                sprintf("  %s: %s", autogen[, 1L],
                        sQuote(mapply(glimpse, autogen[, 2L],
                                      autogen[, 1L] == "\\details"))))
          })
    }

    no_descr <- which(vapply(x, function(e) e[["missing_description"]],
                             logical(1L)))
    c(character(),
      if(length(no_descr))
          c(gettext("Rd files without \\description:"),
            .pretty_format(names(no_descr))),
      as.character(unlist(lapply(names(x), fmt1))))
}

### * .check_Rd_line_widths

.check_Rd_line_widths <-
function(dir, limit = c(usage = 95, examples = 105), installed = FALSE)
{
    ## Check the widths of lines in the \usage and \examples sections
    ## of a package's Rd files (rendered to text) against 'limit'.
    ## 'dir' is a source directory, or an installed package directory
    ## when 'installed' is TRUE.
    db <- if(installed)
              Rd_db(basename(dir), lib.loc = dirname(dir))
          else
              Rd_db(dir = dir)
    structure(find_wide_Rd_lines_in_Rd_db(db, limit, installed),
              limit = limit,
              class = "check_Rd_line_widths")
}

format.check_Rd_line_widths <-
function(x, ...)
{
    ## Report, per Rd file, the \usage/\examples lines exceeding the
    ## check limits, truncating very long lines for display.
    if(!length(x)) return(character())

    shorten <- function(s)
        ifelse(nchar(s) > 140L,
               paste(substr(s, 1, 140L), "... [TRUNCATED]"),
               s)

    ## Rd2txt() by default adds a section indent of 5 also incorporated
    ## in the limits used for checking.  But users actually look at the
    ## line widths in their source Rd file, so remove the indent when
    ## formatting for reporting check results.
    ## (This should reduce confusion as long as we only check the line
    ## widths in verbatim type sections.)
    limit <- attr(x, "limit") - 5L
    sections <- names(limit)

    fmt1 <- function(nm) {
        entry <- x[[nm]]
        body <- unlist(lapply(sections,
                              function(s) {
                                  wide <- entry[[s]]
                                  if(!length(wide))
                                      character()
                                  else
                                      c(sprintf("  \\%s lines wider than %d characters:",
                                                s, limit[s]),
                                        shorten(wide))
                              }),
                       use.names = FALSE)
        c(sprintf("Rd file '%s':", nm), body, "")
    }

    as.character(unlist(lapply(names(x), fmt1)))
}

find_wide_Rd_lines_in_Rd_db <-
function(x, limit = NULL, installed = FALSE)
{
    ## Apply the per-object width check to every Rd object in the db,
    ## keeping only the files which actually have wide lines.
    Filter(length,
           lapply(x, find_wide_Rd_lines_in_Rd_object, limit, installed))
}

find_wide_Rd_lines_in_Rd_object <-
function(x, limit = NULL, installed = FALSE)
{
    ## Render the requested sections of a single Rd object to text and
    ## return, per section, the lines wider than the given limit(s).
    ##
    ## 'limit' is named by section (without the backslash); each value
    ## is either one width, or two widths where exceeding the larger
    ## one is reported as "warn" and only the smaller one as "note".
    if(is.null(limit))
        limit <- list(usage = c(79, 95), examples = c(87, 105))
    sections <- names(limit)
    if(is.null(sections))
        stop("no Rd sections specified")
    ## Installed packages ship Rd not yet prepared for the render stage.
    if (installed) x <- prepare_Rd(x, stages = "render")
    y <- Map(function(s, l) {
        ## Capture the rendered text of the section via a local text
        ## connection; close it again on exit.
        out <- NULL
        zz <- textConnection("out", "w", local = TRUE)
        on.exit(close(zz))
        pos <- which(RdTags(x) == s)
        ## measure length in chars, not in bytes after substitutions
        ## NOTE(review): only the first matching section is rendered;
        ## if the section is absent pos[1L] is NA -- presumably Rd2txt
        ## copes with x[NA], confirm before restructuring.
        Rd2txt(x[pos[1L]], out = zz, fragment = TRUE, outputEncoding = "UTF-8")
        ## Width without trailing whitespace.
        nc <- nchar(sub("[ \t]+$", "", out))
        if(length(l) > 1L) {
            ## Two limits: split offenders into "warn" (beyond the
            ## larger limit) and "note" (only beyond the smaller one).
            ind_warn <- (nc > max(l))
            ind_note <- (nc > min(l)) & !ind_warn
            Filter(length,
                   list(warn = out[ind_warn], note = out[ind_note]))
        } else {
            ## Single limit: just the offending lines.
            out[nc > l]
        }
    },
             paste0("\\", sections),
             limit)
    names(y) <- sections
    ## Drop sections with no wide lines at all.
    Filter(length, y)
}


### * .find_charset

.find_charset <-
function()
{
    ## Write the character set of the current locale to stdout,
    ## preferring the "UTF-8" flag reported by l10n_info().
    info <- l10n_info()
    charset <- if(info[["UTF-8"]]) "UTF-8" else utils::localeToCharset()
    cat("charset: ", charset, "\n", sep = "")
    invisible()
}


### * Utilities

### ** as.alist.call

as.alist.call <-
function(x)
{
    ## Turn a call into an alist-style argument list: unnamed elements
    ## become named entries whose value is the missing argument, as in
    ## alist().
    elements <- as.list(x)
    nms <- names(elements)
    unnamed <- if(is.null(nms)) seq_along(elements) else which(nms == "")
    if(length(unnamed)) {
        ## Name the unnamed entries after their deparsed contents, and
        ## replace their values by the empty (missing-argument) symbol.
        names(elements)[unnamed] <-
            vapply(elements[unnamed], paste, "", collapse = " ")
        elements[unnamed] <-
            rep.int(list(quote(expr = )), length(unnamed))
    }
    elements
}

### ** as.alist.symbol

as.alist.symbol <-
function(x)
{
    ## Wrap the symbol into a zero-argument call and convert that.
    as.alist.call(as.call(list(x)))
}

### ** .arg_names_from_call

.arg_names_from_call <-
function(x)
{
    ## Character names of the elements of a call: the name where one is
    ## given, the deparsed element otherwise.
    out <- as.character(x)
    nms <- names(x)
    if(!is.null(nms)) {
        named <- nzchar(nms)
        out[named] <- nms[named]
    }
    out
}

### ** .dquote_method_markup

## See the notes below.
## An alternative and possibly more efficient implementation could be
## based using gregexpr(re, txt), massaging the matches and merging with
## the non-matched parts.

.dquote_method_markup <-
function(txt, re)
{
    ## Wrap every match of 're' in 'txt' into double quotes, escaping
    ## (the first occurrence of) a double quote and then a backslash
    ## inside the match.  Non-matching text is passed through as is.
    result <- ""
    repeat {
        m <- regexpr(re, txt)
        if(m == -1L) break                     # no further match
        last <- m + attr(m, "match.length") - 1L
        piece <- substring(txt, m, last)
        piece <- sub("\"", "\\\"", piece, fixed = TRUE)
        piece <- sub("\\", "\\\\", piece, fixed = TRUE)
        result <- sprintf("%s%s\"%s\"", result,
                          substring(txt, 1L, m - 1L), piece)
        txt <- substring(txt, last + 1L)       # continue after match
    }
    paste0(result, txt)
}

### ** .format_calls_in_file

.format_calls_in_file <-
function(calls, f)
{
    ## Format a list of calls found in file 'f' for a diagnostic
    ## report: a header line naming the file followed by the deparsed
    ## calls, indented by two spaces.
    deparsed <- vapply(calls,
                       function(e) paste(deparse(e), collapse = "\n"),
                       "")
    c(gettextf("File %s:", sQuote(f)),
      paste0("  ", deparsed))
}

### ** .functions_to_be_ignored_from_usage

.functions_to_be_ignored_from_usage <-
function(package_name)
{
    ## Functions whose \usage entries should not be checked.
    ## Assignment is always skipped; a few base packages document
    ## additional language constructs or group generics.
    extra <- switch(package_name,
                    base = c("(", "{", "function", "if", "for", "while",
                             "repeat", "Math", "Ops", "Summary",
                             "Complex", "matrixOps"),
                    utils = "?",
                    methods = "@",
                    character())
    c("<-", "=", extra)
}

### ** get_S4_generics_with_methods

## FIXME: make option of methods::getGenerics()
## JMC agreed & proposed argument  'excludeEmpty = FALSE'
get_S4_generics_with_methods <-
function(env, verbose = getOption("verbose"))
{
    ## Names of the S4 generics in 'env' which have at least one
    ## method there.  Generics for which hasMethods() itself errors
    ## (e.g. because of a missing importFrom()) are dropped with a
    ## warning suggesting a NAMESPACE fix.
    env <- as.environment(env)
    ##  Filter(function(g) methods::isGeneric(g, where = env),
    ##         methods::getGenerics(env))
    r <- methods::getGenerics(env)
    ## The braced condition computes 'ok' (which generics do have
    ## methods) as a side effect; it is TRUE iff some generic has none.
    if(length(r) && {
        hasM <- lapply(r, function(g)
                       tryCatch(methods::hasMethods(g, where = env),
                                error = identity))
        if(any(hasErr <- vapply(hasM, inherits, NA, what = "error"))) {
            dq <- function(ch) paste0('"', ch ,'"')
            rErr <- r[hasErr]
            pkgs <- r@package[hasErr]
            ## FIXME: This warning should not happen here when called
            ## from R CMD check, but rather be part of a new "check"
            ## there !
            warning(gettextf("Generics 'g' in 'env' %s where '%s' errors: %s\nMay need something like\n\n%s\nin NAMESPACE.",
                             format(env),
                             "hasMethods(g, env)",
                             paste(sQuote(rErr), collapse = ", "),
                             paste0("  importFrom(",
                                    paste(dq(pkgs), dq(rErr), sep =", "),
                                    ")\n")
                             ),
                    domain = NA)
            ## Drop the erroring generics from further consideration.
            hasM <- hasM[!hasErr]
        }
        !all(ok <- unlist(hasM))
    }) {
        if(verbose)
            message(sprintf(ngettext(sum(!ok),
                                     "Generic without any methods in %s: %s",
                                     "Generics without any methods in %s: %s"),
                            format(env),
                            paste(sQuote(r[!ok]), collapse = ", ")),
                    domain = NA)
        r[ok]
    }
    else as.vector(r)# for back-compatibility and current ..../tests/reg-S4.R
}

### ** .get_S4_generics

## For several QC tasks, we need to compute on "all S4 methods in/from a
## package".  These days, this can straightforwardly be accomplished by
## looking at all methods tables in the package environment or namespace.
## Somewhat historically, we organize our computations by first using
## using methods::getGenerics() to find all S4 generics the package has
## methods for, and then iterating over these.  To make this work
## conveniently, we wrap around methods::getGenerics() to rewrite its
## "ObjectsWithPackage" result into a (currently unclassed) list of
## generic-name-with-package-name-attribute objects, and wrap around
## methods::findMethods() to perform lookup based on this information
## (rather than the genericFunction object itself), and also rewrite the
## MethodsList result into a simple list.

.get_S4_generics <-
function(env)
{
    ## All S4 generics visible in 'env', as a list named by generic
    ## name, each element being the generic's name carrying the
    ## defining package in a "package" attribute.
    env <- as.environment(env)
    generics <- suppressMessages(methods::getGenerics(env))
    nms <- generics@.Data
    pkgs <- generics@package
    out <- vector("list", length(nms))
    for(i in seq_along(nms)) {
        g <- nms[[i]]
        attr(g, "package") <- pkgs[[i]]
        out[[i]] <- g
    }
    names(out) <- nms
    out
}

### ** .get_S4_methods_list

.get_S4_methods_list <-
function(f, env)
{
    ## Get S4 methods in environment env for f a structure with the name
    ## of the S4 generic and its package in the corresponding attribute.

    ## For the QC computations, we really only want the S4 methods
    ## defined in a package, so we try to exclude derived default
    ## methods as well as methods inherited from other environments.

    env <- as.environment(env)

    ## <FIXME>
    ## Use methods::findMethods() once this gets a package argument.
    ## This will return a listOfMethods object: turn this into a simple
    ## list of methods named by hash-collapsed signatures.
    ## NOTE(review): methods:::.TableMetaName() is an internal helper;
    ## presumably it gives the name under which the methods table for
    ## generic 'f' is stored in 'env' -- confirm against the methods
    ## package sources.
    tab <- get(methods:::.TableMetaName(f, attr(f, "package")), envir = env)
    ## 'sorted = TRUE' gives a deterministic signature order.
    mlist <- as.list(tab, all.names = TRUE, sorted = TRUE)
    ## </FIXME>

    ## First, derived default methods (signature w/ "ANY").
    if(any(ind <- vapply(mlist, methods::is, NA, "derivedDefaultMethod")))
        mlist <- mlist[!ind]

    if(length(mlist)) {
        ## Determining the methods defined in a package from the package
        ## env or the associated namespace seems rather tricky.  What we
        ## seem to observe is the following.
        ## * If there is a namespace N, methods defined in the package
        ##   have N as their environment, for both the package env and
        ##   the associated namespace.
        ## * If there is no namespace, methods defined in the package
        ##   have an environment E which is empty and has globalenv() as
        ##   its parent.  (If the package defines generics, these seem
        ##   to have E as their parent env.)
        ## However, in the latter case, there seems no way to infer E
        ## from the package env.  In the old days predating methods
        ## tables, we compared methods in the package env with those in
        ## its parent env, and excluded the ones already found there.
        ## This no longer works, so we exclude "at least" all methods
        ## with a namespace environment (as these cannot come from a
        ## package with no namespace).

        namespace <- if(isNamespace(env)) env else .get_namespace_from_package_env(env)
        mlist <- if(!is.null(namespace))
            Filter(function(m) identical(environment(m), namespace), mlist)
        else
            Filter(function(m) environmentName(environment(m)) == "", mlist)
    }

    mlist
}

.get_ref_classes <-
function(env)
{
    ## Reference classes defined in 'env', as a list (named by class)
    ## combining, for each class, its methods and its non-default
    ## active-binding fields (accessor functions).
    env <- as.environment(env)
    classes <- methods::getClasses(env)
    is_ref <- vapply(classes,
                     function(cl)
                         methods::is(methods::getClass(cl, where = env),
                                     "refClassRepresentation"),
                     NA)
    classes <- classes[is_ref]
    if(!length(classes))
        return(list())
    out <- lapply(classes, function(cl) {
        def <- methods::getClass(cl, where = env)
        fields <- def@fieldPrototypes
        is_accessor <- vapply(fields,
                              function(f)
                                  methods::is(f, "activeBindingFunction") &&
                                  !methods::is(f, "defaultBindingFunction"),
                              NA)
        c(as.list(def@refMethods), as.list(fields)[is_accessor])
    })
    names(out) <- classes
    out
}

.get_namespace_from_package_env <-
function(env)
{
    ## Map a package environment (with a "name" attribute of the form
    ## "package:PKG") to the corresponding loaded namespace, or NULL
    ## when the name attribute is missing or empty.
    name <- attr(env, "name", exact = TRUE)
    package <- sub(".*:([^_]*).*", "\\1", name)
    if(length(package) && nzchar(package))
        .getNamespace(as.name(package))
}


### ** .is_call_from_replacement_function_usage

.is_call_from_replacement_function_usage <-
function(x)
{
    ## Does call 'x' look like a replacement function usage, i.e.
    ## GENERIC(args) <- value ?
    if(length(x) != 3L) return(FALSE)
    identical(x[[1L]], quote(`<-`)) &&
        length(x[[2L]]) > 1L &&
        is.symbol(x[[3L]])
}

### ** .make_siglist

.make_siglist <-
function(x)
{
    ## Argument 'x' should be a named list of methods as obtained by
    ## methods::findMethods() or .get_S4_methods_list().
    ## Turn the hash-collapsed signature names into comma-separated
    ## signature lists.
    chartr("#", ",", names(x))
}

### ** .make_signatures

.make_signatures <-
function(cls)
{
    ## Collapse each class signature in 'cls' into a single
    ## "#"-separated string.
    ## Note that (thanks JMC), when comparing signatures, the signature
    ## has to be stripped of trailing "ANY" elements (which are always
    ## implicit) or padded to a fixed length.
    collapsed <- unlist(lapply(cls, paste, collapse = "#"))
    sub("(#ANY)*$", "", collapsed)
}

### ** .massage_file_parse_error_message
.massage_file_parse_error_message <-
function(x)
{
    ## Strip the leading "file/name: " part (where file/name may be
    ## "<text>") from a parse error message.
    sub("^[^:]+:[[:space:]]*", "", x)
}

## get rid of "file/name" where file/name maybe "<text>"
## new *classed* parseError messages look like
##    "function '(' not supported in RHS call of a pipe (filename:1:8)"
.massage_file_parse_error <- function(e) { # 'e' : the error itself
    ## Remove the file name from a parse error message: classed
    ## "parseError" conditions embed it as "(filename:LINE:COL)",
    ## older messages prefix it as "filename: ".
    msg <- conditionMessage(e)
    if(!inherits(e, "parseError"))
        return(sub("^[^:]+:[[:space:]]*", "", msg))
    sub("\\([^:]+:(.*)\\)", "(\\1)", msg)
}


### ** .package_env

.package_env <-
function(package_name)
{
    ## The search-path environment of an attached package.
    as.environment(sprintf("package:%s", package_name))
}

### ** .parse_text_as_much_as_possible

.parse_text_as_much_as_possible <-
function(txt)
{
    ## Parse 'txt', and on failure parse as many of its lines as
    ## possible: repeatedly try increasingly long runs of consecutive
    ## lines, dropping the first line whenever no run starting there
    ## parses.  Returns an expression with the unparseable lines in
    ## attribute "bad_lines".
    fun <- function(txt) {
        if(!l10n_info()$MBCS && identical(Encoding(txt), "UTF-8"))
            parse(text = txt, encoding = "UTF-8")
        else
            str2expression(txt)
    }
    exprs <- tryCatch(fun(txt), error = identity)
    if(!inherits(exprs, "error")) return(exprs)
    exprs <- expression()
    lines <- unlist(strsplit(txt, "\n"))
    bad_lines <- character()
    while((n <- length(lines))) {
        i <- 1L; txt <- lines[1L]
        while(inherits(yy <- tryCatch(fun(txt), error = identity),
                       "error")
              && (i < n)) {
            ## Join with a newline, not paste()'s default space:
            ## otherwise a trailing '#' comment on one line would
            ## swallow all subsequently appended lines.
            i <- i + 1L; txt <- paste(txt, lines[i], sep = "\n")
        }
        if(inherits(yy, "error")) {
            bad_lines <- c(bad_lines, lines[1L])
            lines <- lines[-1L]
        }
        else {
            exprs <- c(exprs, yy)
            lines <- lines[-seq_len(i)]
        }
    }
    attr(exprs, "bad_lines") <- bad_lines
    exprs
}

### ** .parse_usage_as_much_as_possible

.parse_usage_as_much_as_possible <-
function(x)
{
    ## Parse the contents of a \usage Rd fragment as far as possible:
    ## drop comments and \special, substitute "..." for dots markup,
    ## quote S3/S4 method markup so that it parses, mark unescaped
    ## backslashes, and hand the transformed text to
    ## .parse_text_as_much_as_possible().
    if(!length(x)) return(expression())
    ## Drop specials and comments.
    ## Note that when used from the QC functions, building the Rd db
    ## already dropped all parts that are not rendered, including
    ## comments.
    x <- .Rd_drop_nodes_with_tags(x, c("COMMENT", "\\special"))
    ## Transform \dots and \ldots to '...'.
    ## For many years we did
    ##   txt <- .Rd_deparse(x, tag = FALSE)
    ##   txt <- gsub("\\\\l?dots", "...", txt)
    ## but as <https://bugs.r-project.org/show_bug.cgi?id=18574>
    ## reports, this also incorrectly transforms dots markup in value
    ## strings.  So we really need to transform while we have the Rd
    ## structure available.  This could use something like
    ##   f <- function(e) {
    ##     switch(attr(e, "Rd_tag"),
    ##            "\\special" =,
    ##            "COMMENT" = NULL,
    ##            "\\ldots" =,
    ##            "\\dots" = structure("...", Rd_tag = "TEXT"),
    ##            e)
    ## }
    ## (including dropping specials and comments, and actually things
    ## inside \usage are RCODE and not TEXT).
    ## For now, we use as.character.Rd() on the Rd which should allow to
    ## identify the macros as literals.
    attr(x, "Rd_tag") <- "Rd"
    txt <- as.character.Rd(x)
    txt[txt %in% c("\\dots", "\\ldots")] <- "..."
    txt <- paste(txt, collapse = "")
    ## Ideally we would also make the subsequent S3/S4 method markup
    ## transformations before collapsing ...
    txt <- .dquote_method_markup(txt, .S3_method_markup_regexp)
    txt <- .dquote_method_markup(txt, .S4_method_markup_regexp)
    ## \usage is only 'verbatim-like'
    ## now any valid escape by \ is
    ##   \a \b \f \n \r \t \u \U \v \x \' \" \\ or \octal
    txt <- gsub("(^|[^\\])\\\\($|[^abfnrtuUvx0-9'\"\\])",
                "\\1<unescaped bksl>\\2", txt)
    ## and since this may overlap, try again
    txt <- gsub("(^|[^\\])\\\\($|[^abfnrtuUvx0-9'\"\\])",
                "\\1<unescaped bksl>\\2", txt)
    .parse_text_as_much_as_possible(txt)
}

### ** .pretty_format

.strwrap22 <- function(x, collapse = " ")
{
    ## Collapse 'x' into a single string and wrap it with a two-space
    ## indent on every line.
    strwrap(paste(x, collapse = collapse), indent = 2L, exdent = 2L)
}

.pretty_format <-
function(x, collapse = " ", q = getOption("useFancyQuotes"))
{
    ## Quote the elements of 'x' and wrap them, indented, for display.
    .strwrap22(sQuote(x, q = q), collapse = collapse)
}

.pretty_format2 <-
function(msg, x, collapse = ", ", useFancyQuotes = FALSE)
{
    ## Format 'msg' followed by the quoted elements of 'x': on a single
    ## line when the result stays within 75 characters, otherwise as a
    ## wrapped, indented block below 'msg'.
    wrapped <- strwrap(paste(sQuote(x, q = useFancyQuotes),
                             collapse = collapse),
                      exdent = 2L)
    fits <- length(wrapped) == 1L &&
        nchar(msg) + nchar(wrapped) + 1L <= 75L
    if(fits)
        paste(msg, wrapped)
    else ## discard 'wrapped' and rewrap with an indent instead
        c(msg, .pretty_format(x, collapse = collapse, q = useFancyQuotes))
}

### ** .pretty_print

.pretty_print <-
function(x, collapse = " ")
{
    ## Print the elements of 'x', wrapped and indented by two spaces.
    writeLines(.strwrap22(x, collapse = collapse))
}


### ** .strip_backticks

.strip_backticks <-
function(x)
{
    ## Remove all backticks (quoting of non-syntactic names).
    gsub("`", "", x, fixed = TRUE)
}

### ** .transform_S3_method_markup

.transform_S3_method_markup <-
function(x)
{
    ## Rewrite \method{GENERIC}{CLASS} markup in pseudo function names
    ## obtained from parsing \usage texts into the corresponding method
    ## name GENERIC.CLASS.
    ## Note how we deal with S3 replacement methods found.
    ## These come out named "\method{GENERIC}{CLASS}<-" which we
    ## need to turn into 'GENERIC<-.CLASS'.
    re <- sprintf("%s(<-)?", .S3_method_markup_regexp)
    ## Note that this is really only called on "function" names obtained
    ## by parsing the \usage texts, so that the method regexps possibly
    ## augmented by '<-' fully match if they match.
    ## We should be able to safely strip all backticks; alternatively,
    ## we could do something like
    ##   cl <- .strip_backticks(sub(re, "\\4", x))
    ##   sub(re, sprintf("\\3\\5.%s", cl), x)
    ## Groups in 're': 3 = GENERIC, 4 = CLASS, 5 = optional '<-'.
    .strip_backticks(sub(re, "\\3\\5.\\4", x))
}

### ** .transform_S4_method_markup

.transform_S4_method_markup <-
function(x)
{
    ## Normalize \S4method{GENERIC}{SIGLIST} markup in pseudo function
    ## names obtained from parsing \usage texts, moving a trailing '<-'
    ## (replacement methods) onto the generic name.
    ## Groups in 're': 2 = GENERIC, 3 = SIGLIST, 7 = optional '<-'.
    re <- sprintf("%s(<-)?", .S4_method_markup_regexp)
    ## We should be able to safely strip all backticks; alternatively,
    ## we could do something like
    ##   sl <- .strip_backticks(sub(re, "\\3", x))
    ##   sub(re, sprintf("\\\\S4method{\\2\\7}{%s}", sl), x)
    .strip_backticks(sub(re, "\\\\S4method{\\2\\7}{\\3}", x))
}

### ** .S3_method_markup_regexp

## For matching \(S3)?method{GENERIC}{CLASS}.
## GENERIC can be
## * a syntactically valid name
## * one of $ [ [[ @
## * one of the binary operators
##   + - * / ^ < <= > >= != == | & %something%
## * unary !
## (as supported by Rdconv).
## CLASS can be a syntactic name (we could be more precise about the
## fact that these must start with a letter or '.'), or anything quoted
## by backticks (not containing backticks itself for now).  Arguably,
## non-syntactic class names should best be avoided, but R has always
## had them at least for
## R> class(bquote({.}))
## [1] "{"
## R> class(bquote((.)))
## [1] "("

## <NOTE>
## Handling S3/S4 method markup is somewhat tricky.
## When using R to parse the usage entries, we turn the
##   \METHOD{GENERIC}{CLASS_OR_SIGLIST}(args)
## markup into (something which parses to) a function call by suitably
## quoting the \METHOD{GENERIC}{CLASS_OR_SIGLIST} part.  In case of a
## replacement method
##   \METHOD{GENERIC}{CLASS_OR_SIGLIST}(args) <- value
## parsing results in a
##   \METHOD{GENERIC}{CLASS_OR_SIGLIST}<-
## pseudo name, which needs to be transformed to
##   \METHOD{GENERIC<-}{CLASS_OR_SIGLIST}
## We currently use double quoting for the parse step.  As we also allow
## for non-syntactic class names quoted by backticks, this means that
## double quotes and backslashes need to be escaped.  Alternatively, we
## could strip backticks right away and quote by backticks, but then the
## replacement method transformation would need different regexps.
## </NOTE>

.S3_method_markup_regexp <-
    ## Capture groups: 1 = entire markup, 2 = optional "S3",
    ## 3 = GENERIC, 4 = CLASS.
    sprintf("(\\\\(S3)?method\\{(%s)\\}\\{(%s)\\})",
            paste(c("[._[:alnum:]]*",
                    ## Subscripting
                    "\\$", "\\[\\[?", "\\@",
                    ## Binary operators and unary '!'.
                    "\\+", "\\-", "\\*", "\\/", "\\^",
                    "<=?", ">=?", "!=?", "==", "\\&", "\\|",
                    "\\%[[:alnum:][:punct:]]*\\%"),
                  collapse = "|"),
            ## CLASS: a syntactic name or anything quoted by backticks.
            "[._[:alnum:]]+|`[^`]+`")

### ** .S4_method_markup_regexp

## For matching \S4method{GENERIC}{SIGLIST}.
## SIGLIST can be a comma separated list of CLASS specs as above.

.S4_method_markup_regexp <-
    ## Capture groups: 1 = entire markup, 2 = GENERIC, 3 = SIGLIST
    ## (groups 4-6 inside the SIGLIST alternative are incidental).
    sprintf("(\\\\S4method\\{(%s)\\}\\{(%s)\\})",
            paste(c("[._[:alnum:]]*",
                    ## Subscripting
                    "\\$", "\\[\\[?",
                    ## Binary operators and unary '!'.
                    "\\+", "\\-", "\\*", "\\/", "\\^",
                    "<=?", ">=?", "!=?", "==", "\\&", "\\|",
                    "\\%[[:alnum:][:punct:]]*\\%"),
                  collapse = "|"),
            ## SIGLIST: comma-separated CLASS specs as above.
            "(([._[:alnum:]]+|`[^`]+`),)*([._[:alnum:]]+|`[^`]+`)")

### ** .valid_maintainer_field_regexp

.make_RFC_2822_email_address_regexp <-
function()
{
    ## Build a regexp matching RFC 2822 email addresses.
    ## Local part: dot-separated runs of ASCII letters, digits and the
    ## characters
    ##   ! # $ % * / ? | ^ { } ` ~ & ' + = _ -
    ## (dots must not be leading, trailing or repeated), or a quoted
    ## string.
    ## Domain part: dot-separated runs of ASCII letters, digits and
    ## hyphen.
    ## We could also check that the local and domain parts are no
    ## longer than 64 and 255 characters, respectively.
    ## See https://en.wikipedia.org/wiki/Email_address.
    alnum <-
        "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789"
    ## Be careful to arrange the hyphens to come last in the range spec.
    local_char <- sprintf("[%s%s]", alnum, "!#$%*/?|^{}`~&'+=_-")
    domain_char <- sprintf("[%s%s]", alnum, "-")
    sprintf("(\\\".+\\\"|(%s+\\.)*%s+)@(%s+\\.)*%s+",
            local_char, local_char, domain_char, domain_char)
}

## Matches a valid Maintainer field: anything ending in an RFC 2822
## email address in angle brackets, or "ORPHANED".
.valid_maintainer_field_regexp <-
    sprintf("^[[:space:]]*(.*<%s>|ORPHANED)[[:space:]]*$",
            .make_RFC_2822_email_address_regexp())

### ** .Rd_get_offending_autogenerated_content

.Rd_get_offending_autogenerated_content <-
function(x)
{
    ## Find placeholder content left over from the prompt(),
    ## promptData(), promptPackage() and promptMethods() skeletons in
    ## Rd object 'x' (and optionally auto-generated \keyword entries).
    ## Returns NULL, or a 2-column character matrix giving, for each
    ## finding, the location and the offending text.
    out <- NULL

    ## prompt[Data]():
    s <- .Rd_get_section(x, "title")
    if(length(s)) {
        s <- .Rd_deparse(s, tag = FALSE)
        ## FIXME: some start with "~~ ", see FIXME in methods::promptMethods
        if(startsWith(trimws(s), "A Capitalized Title"))
            out <- rbind(out, c("\\title", s))
    }
    ## catch dummy texts generated by R < 2.9.0 (still some on internal pages)
    s <- .Rd_get_section(x, "details")
    if(length(s)) {
        s <- .Rd_deparse(.Rd_drop_comments(s), tag = FALSE)
        if(grepl("If necessary, more details than the description above",
                 s, fixed = TRUE))
            out <- rbind(out, c("\\details", s))
    }
    tab <- .Rd_get_argument_table(x)
    if(length(tab)) {
        descs <- trimws(tab[, 2L])
        ind <- descs == sprintf("~~Describe \\code{%s} here~~", tab[, 1L])
        if(any(ind))
            out <- rbind(out,
                         cbind(sprintf("\\arguments: %s", tab[ind, 1L]),
                               tab[ind, 2L]))
    }

    ## promptPackage(): (catch some dummy texts generated by R < 4.4.0)
    s <- .Rd_get_section(x, "details")
    if(length(s) && length(s) <= 11 &&
       any(grepl("~~ An overview of how to use the package",
                 unlist(s[RdTags(s) == "TEXT"]), fixed = TRUE)))
        out <- rbind(out, c("\\details", .Rd_deparse(s, tag = FALSE)))
    ## the following three also match Rcpp's manual-page-stub.Rd skeleton
    s <- .Rd_get_section(x, "references")
    if(length(s)) {
        s <- .Rd_deparse(.Rd_drop_comments(s), tag = FALSE)
        if(grepl("[Ll]iterature or other references for background information",
                 gsub("[[:space:]]+", " ", s)))
            out <- rbind(out, c("\\references", s))
    }
    s <- .Rd_get_section(x, "seealso")
    if(length(s) && length(s) <= 5) {
        s <- .Rd_deparse(.Rd_drop_comments(s), tag = FALSE)
        if(grepl("Optional links to other man pages",
                 s, fixed = TRUE))
            out <- rbind(out, c("\\seealso", s))
    }
    s <- .Rd_get_section(x, "examples")
    if(length(s)) {
        s <- .Rd_deparse(.Rd_drop_comments(s), tag = FALSE)
        if(grepl("Optional simple examples of the most important functions",
                 s, fixed = TRUE))
            out <- rbind(out, c("\\examples", s))
    }

    ## promptMethods():
    ## <FIXME>
    ## r50996 changed to produce commented descriptions, so this is obsolete
    tab <- .Rd_get_methods_description_table(x)
    if(length(tab)) {
        descriptions <- trimws(tab[, 2L])
        ## /data/rsync/PKGS/coin/man/initialize-methods.Rd
        ind <- descriptions == "~~describe this method here"
        if(any(ind))
            out <- rbind(out,
                         cbind(sprintf("section 'Methods', description of item '%s'",
                                       tab[ind, 1L]),
                               tab[ind, 2L]))
    }
    ## </FIXME>

    ## Optionally (environment variable gated) flag auto-generated
    ## \keyword entries as well.
    if(config_val_to_logical(Sys.getenv("_R_CHECK_RD_CONTENTS_KEYWORDS_",
                                        "FALSE"))) {
        k <- .Rd_get_metadata(x, "keyword")
        k <- k[k %in% .Rd_keywords_auto]
        if(length(k)) {
            out <- rbind(out,
                         cbind("\\keyword", k))
        }
    }

    out
}


### ** .check_pragmas

.check_pragmas <-
function(dir)
{
    ## Check a source package for disallowed pragmas in src and inst/include
    ## Try (not very hard) to avoid ones which are commented out (RcppParallel)
    ## One could argue for recording all uses of #pragma ... diagnostic
    ## There are also
    ##   #pragma warning (disable:4996)
    ##   #pragma warning(push, 0)
    ## which seem intended for MSVC++ and hence not relevant here.
    ## Returns the files containing diagnostic-suppressing pragmas as a
    ## "check_pragmas" object, with attributes "warn" (files matching
    ## the stricter pattern 'pat2') and "port" (files suppressing
    ## GCC-only warnings not accepted by clang).
    found <- warn <- port <- character()
    od <- setwd(dir); on.exit(setwd(od))
    ff <- dir(c('src', 'inst/include'),
              pattern = "[.](c|cc|cpp|h|hh|hpp)$",
              full.names = TRUE, recursive = TRUE)
    pat <- "^\\s*#pragma (GCC|clang) diagnostic ignored"
    ## -Wmissing-field-initializers looks important but is not part of -Wall
    pat2 <- "^\\s*#pragma (GCC|clang) diagnostic ignored[^-]*[-]W(uninitialized|float-equal|array-bound|format)"
    ## gcc8 -W warnings not accepted by clang 7
    ## found by listing with gcc -Q --help=warning and testing with clang.
    nonport <-
        c("abi-tag", "aggressive-loop-optimizations", "aliasing",
          "align-commons", "aligned-new", "alloc-size-larger-than",
          "alloc-zero", "alloca", "alloca-larger-than", "ampersand",
          "argument-mismatch", "array-temporaries",
          "assign-intercept", "attribute-alias", "bool-compare",
          "bool-operation", "builtin-declaration-mismatch",
          "c-binding-type", "c90-c99-compat", "c99-c11-compat",
          "cast-function-type", "catch-value",
          "character-truncation", "chkp", "class-memaccess",
          "clobbered", "compare-reals", "conditionally-supported",
          "conversion-extra", "coverage-mismatch", "designated-init",
          "discarded-array-qualifiers", "discarded-qualifiers",
          "do-subscript", "duplicated-branches", "duplicated-cond",
          "format-contains-nul", "format-overflow",
          "format-signedness", "format-truncation", "frame-address",
          "frame-larger-than", "free-nonheap-object",
          "function-elimination", "hsa", "if-not-aligned",
          "implicit-interface", "implicit-procedure",
          "inherited-variadic-ctor", "int-in-bool-context",
          "integer-division", "intrinsic-shadow", "intrinsics-std",
          "invalid-memory-model", "jump-misses-init", "larger-than",
          "line-truncation", "literal-suffix", "logical-op",
          "lto-type-mismatch", "maybe-uninitialized",
          "memset-elt-size", "misleading-indentation",
          "missing-attributes", "missing-parameter-type",
          "multiple-inheritance", "multistatement-macros",
          "namespaces", "noexcept", "non-template-friend",
          "nonnull-compare", "normalized", "old-style-declaration",
          "openmp-simd", "override-init",
          "override-init-side-effects", "packed-bitfield-compat",
          "packed-not-aligned", "placement-new", "pmf-conversions",
          "pointer-compare", "property-assign-default", "psabi",
          "real-q-constant", "realloc-lhs", "realloc-lhs-all",
          "restrict", "return-local-addr", "scalar-storage-order",
          "shadow-compatible-local", "shadow-local",
          "sized-deallocation", "sizeof-pointer-div", "stack-usage",
          "strict-null-sentinel", "stringop-overflow",
          "stringop-truncation", "subobject-linkage",
          "suggest-attribute", "suggest-final-methods",
          "suggest-final-types", "suggest-override", "surprising",
          "switch-unreachable", "sync-nand", "tabs",
          "target-lifetime", "templates", "terminate", "traditional",
          "traditional-conversion", "trampolines",
          "undefined-do-loop", "underflow",
          "unsafe-loop-optimizations", "unsuffixed-float-constants",
          "unused-but-set-parameter", "unused-but-set-variable",
          "unused-dummy-argument", "use-without-only",
          "useless-cast", "vector-operation-performance",
          "virtual-inheritance", "virtual-move-assign",
          "vla-larger-than", "zerotrip")
    pat3 <- paste0("^\\s*#pragma (GCC|clang) diagnostic[^-]*[-]W(",
                   paste(nonport, collapse="|"), ")")
    for(f in ff) {
        ## Read each file once, not once per pattern.
        lines <- readLines(f, warn = FALSE)
        if(!any(grepl(pat, lines, perl = TRUE, useBytes = TRUE)))
            next
        found <- c(found, f)
        if(any(grepl(pat2, lines, perl = TRUE, useBytes = TRUE)))
            warn <- c(warn, f)
        if(any(grepl(pat3, lines, perl = TRUE, useBytes = TRUE)))
            port <- c(port, f)
    }
    structure(found, class = "check_pragmas", warn = warn, port = port)
}

print.check_pragmas <-
function(x, ...)
{
    ## Print method for "check_pragmas" results: lists the files found
    ## to contain diagnostic-suppressing pragmas (nothing is printed
    ## when there are none).
    if(length(x)) {
        if(length(x) == 1L)
            writeLines("File which contains pragma(s) suppressing diagnostics:")
        else
            writeLines("Files which contain pragma(s) suppressing diagnostics:")
        .pretty_print(x)
    }
    ## Return invisibly, as print methods should, so the value is not
    ## auto-printed a second time at top level.
    invisible(x)
}

### ** .check_S3_methods_needing_delayed_registration

.check_S3_methods_needing_delayed_registration <-
function(package, lib.loc = NULL)
{
    ## Check an installed package for apparent S3 methods for generics
    ## exported by its suggested packages: such methods need delayed
    ## registration.  Returns an object of class
    ## "check_S3_methods_needing_delayed_registration" with elements
    ##   mat: Package/Generic/Method matrix of apparent methods,
    ##   bad: suggested packages which could not be loaded,
    ##   reg: methods already using delayed registration (if any).
    mat <- matrix(character(), 0L, 3L,
                  dimnames = list(NULL,
                                  c("Package", "Generic", "Method")))
    out <- list(mat = mat, bad = character())
    class(out) <- "check_S3_methods_needing_delayed_registration"

    if(length(package) != 1L)
        stop("argument 'package' must be of length 1")

    ## Early exits consistently return the (empty) classed result
    ## object rather than NULL, so callers can always format() it.
    if(package == "base") return(out)

    dir <- find.package(package, lib.loc)
    if(!dir.exists(file.path(dir, "R"))) return(out)

    db <- .read_description(file.path(dir, "DESCRIPTION"))
    suggests <- unname(.get_requires_from_package_db(db, "Suggests"))
    if(!length(suggests)) return(out)

    ## Methods already using delayed registration: the 4th field of
    ## the S3method() directives in the NAMESPACE file.
    reg <- parseNamespaceFile(package, dirname(dir))$S3methods
    reg <- reg[!is.na(reg[, 4L]), , drop = FALSE]
    if(length(reg))
        out$reg <- cbind(Package = reg[, 4L],
                         Generic = reg[, 1L],
                         Class = reg[, 2L],
                         Method = reg[, 3L])

    .load_namespace_quietly(package, dirname(dir))
    code_env <- asNamespace(package)

    ok <- vapply(suggests, requireNamespace, quietly = TRUE,
                 FUN.VALUE = NA)
    out$bad <- suggests[!ok]

    suggests <- suggests[ok]
    generics <- lapply(suggests, .get_S3_generics_in_ns_exports)

    packages <- rep.int(suggests, lengths(generics))
    generics <- unlist(generics, use.names = FALSE)

    code_env <- .package_env(package)
    objects_in_code <- sort(names(code_env))
    functions_in_code <-
        Filter(function(f) is.function(code_env[[f]]),
               objects_in_code)

    ## Look only at the *additional* generics in suggests.
    ind <- (generics %notin%
            c(Filter(function(f) .is_S3_generic(f, code_env),
                     functions_in_code),
              .get_S3_generics_in_env(parent.env(code_env)),
              .get_S3_generics_in_base()))
    if(!all(ind)) {
        generics <- generics[ind]
        packages <- packages[ind]
    }

    methods_stop_list <- nonS3methods(basename(dir))
    methods <- lapply(generics,
                      function(g) {
                          i <- startsWith(functions_in_code,
                                          paste0(g, "."))
                          setdiff(functions_in_code[i],
                                  methods_stop_list)
                      })
    len <- lengths(methods)
    ind <- (len > 0L)

    if(!any(ind)) return(out)

    len <- len[ind]
    out$mat <-
        cbind(Package = rep.int(packages[ind], len),
              Generic = rep.int(generics[ind], len),
              Method = unlist(methods[ind], use.names = FALSE))
    out
}

format.check_S3_methods_needing_delayed_registration <-
function(x, ...)
{
    ## Format the check results for printing: a block for suggested
    ## packages which could not be loaded, one for apparent S3 methods
    ## needing delayed registration, and one for methods already using
    ## delayed registration.
    out <- character()
    bad <- x$bad
    if(length(bad))
        out <- c(out,
                 "Suggested packages not available for checking:",
                 strwrap(paste(bad, collapse = " "), indent = 2L))
    mat <- x$mat
    if(length(mat))
        out <- c(out,
                 "Apparent S3 methods needing delayed registration:",
                 sprintf("  %s %s %s",
                         format(c("Package", mat[, 1L])),
                         format(c("Generic", mat[, 2L])),
                         format(c("Method", mat[, 3L]))))
    reg <- x$reg
    if(length(reg))
        out <- c(out,
                 "S3 methods using delayed registration:",
                 sprintf("  %s %s %s %s",
                         format(c("Package", reg[, 1L])),
                         format(c("Generic", reg[, 2L])),
                         format(c("Class", reg[, 3L])),
                         format(c("Method", reg[, 4L]))))
    out
}

.get_S3_generics_in_ns_exports <-
function(ns)
{
    ## Names of apparent S3 generics among the exported names of
    ## namespace 'ns'.
    env <- asNamespace(ns)
    exported <- intersect(names(env), getNamespaceExports(env))
    .get_S3_generics_in_env(env, sort(exported))
}

### ** .check_package_datalist

.check_package_datalist <-
function(package, lib.loc = NULL)
{
    ## Compare the data sets listed in a package's 'datalist' file with
    ## those actually present in its 'data' directory, in both
    ## directions, for both file names and object names.
    canonicalize <- function(ans) {
        ans <- lapply(ans, sort)
        ans[order(names(ans))]
    }
    with_datalist <- canonicalize(list_data_in_pkg(package, lib.loc))
    without_datalist <-
        canonicalize(list_data_in_pkg(package, lib.loc,
                                      use_datalist = FALSE))
    out <- list()
    if(!identical(with_datalist, without_datalist)) {
        out <- Filter(length,
                      list(n12 = setdiff(names(with_datalist),
                                         names(without_datalist)),
                           n21 = setdiff(names(without_datalist),
                                         names(with_datalist)),
                           e12 = setdiff(unlist(with_datalist),
                                         unlist(without_datalist)),
                           e21 = setdiff(unlist(without_datalist),
                                         unlist(with_datalist))))
    }
    class(out) <- "check_package_datalist"
    out
}

format.check_package_datalist <-
function(x, ...)
{
    ## Format the results of .check_package_datalist() for printing:
    ## a header plus wrapped names for each non-empty discrepancy.
    section <- function(y, header)
        if(length(y)) c(header, .strwrap22(y, " "))
    c(character(),
      section(x$n12,
              "Data files in 'datalist' not in 'data' directory:"),
      section(x$n21,
              "Data files in 'data' directory not in 'datalist':"),
      section(x$e12,
              "Data objects in 'datalist' not in 'data' directory:"),
      section(x$e21,
              "Data objects in 'data' directory not in 'datalist':"))
}

### ** .bad_DESCRIPTION_URL_field_parts

.bad_DESCRIPTION_URL_field_parts <-
function(s)
{
    ## Find the parts of a DESCRIPTION 'URL' field which are not
    ## recognized as URLs, ignoring tolerated svn:// and doi: entries.
    if(is.na(s)) return(character())
    urls <- .get_urls_from_DESCRIPTION_URL_field(s)
    ## Split the field on (optionally parenthesized-comment followed
    ## by) comma/whitespace separators.
    parts <- strsplit(s,
                      "[[:space:]]*(\\([^)]*\\))?([,[:space:]]+|$)")[[1L]]
    if(length(urls) == length(parts))
        return(character())
    bad <- parts %w/o% urls
    bad[!grepl("^<?(svn://|doi:)", bad)]
}


### Local variables: ***
### mode: outline-minor ***
### outline-regexp: "### [*]+" ***
### End: ***
##  File src/library/tools/R/Rconcordance.R
##  Part of the R package, https://www.R-project.org
##
## Copyright (C) 1995-2016 The R Core Team
## Copyright (C) 2022 Duncan Murdoch
##
##  This program is free software; you can redistribute it and/or modify
##  it under the terms of the GNU General Public License as published by
##  the Free Software Foundation; either version 2 of the License, or
##  (at your option) any later version.
##
##  This program is distributed in the hope that it will be useful,
##  but WITHOUT ANY WARRANTY; without even the implied warranty of
##  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
##  GNU General Public License for more details.
##
##  A copy of the GNU General Public License is available at
##  https://www.R-project.org/Licenses/


# There are three kinds of objects used when working with
# concordances.  
#
# 1.  The "activeConcordance" is an environment used to build a 
#     concordance one string at a time.  
# 2.  The "Rconcordance" is a list object.
# 3.  "String concordances" are representations of concordance
#     objects suitable for embedding in text files.

# This function produces an activeConcordance.

activeConcordance <- function(srcfile = NA_character_) 
    local({
	## Mutable state of the concordance being built:
	## - lastSrcref: most recent srcref recorded via saveSrcref()
	## - srcLinenum: source line number for each output line so far
	## - srcFile:    source filename passed in
	## - offset:     number of leading output lines with no source
	## - lastText:   most recently added text (used by finish())
	lastSrcref <- NULL
	srcLinenum <- integer()
	srcFile <- srcfile
	offset <- 0
	lastText <- ""
		
	## Record the srcref of 'node' for the next addToConcordance()
	## call.
	saveSrcref <- function(node) {
	    # Node may be a function which we don't
	    # want to evaluate unnecessarily
	    lastSrcref <<- utils::getSrcref(node)
	}
		
	## Append output 'text', associating the output lines it
	## contributes with the source line of the last saved srcref.
	addToConcordance <- function(text) {
	    if (any(nchar(text) > 0)) {
	    	lastText <<- text
	        concordanceUsed <- length(srcLinenum)	
	        newlines <- sum(nchar(gsub("[^\n]", "", text)))
	        srcLine <- utils::getSrcLocation(lastSrcref, "line")
	        if (!is.null(srcLine)) {
	    	
	    	    # Do we have a later node on the same output
	    	    # line? Save it instead
	            if (concordanceUsed)
	            	if (text[1] != "\n")
	                    concordanceUsed <- concordanceUsed - 1
	                else
	                    newlines <- newlines - 1
	            # Save the current line(s)
	    	    srcLinenum[concordanceUsed + seq_len(1 + newlines)] <<- srcLine
	        } else if (!concordanceUsed)
	            ## No srcref seen yet: output lines emitted before any
	            ## source just grow the leading offset.
	            offset <<- offset + newlines
	    }
	}
		
	## Finalize and return the accumulated "Rconcordance" object.
	finish <- function() {
            ## Drop the last line if it is empty
            lastText <<- lastText[[length(lastText)]]
	    if (length(srcLinenum) && (len <- nchar(lastText)) && substr(lastText, len, len) == "\n")
		srcLinenum <- srcLinenum[-length(srcLinenum)]
	    
	    structure(list(offset = offset, srcLine = srcLinenum,
	    	           srcFile = srcFile), 
	    	      class = "Rconcordance")
	}
		
	structure(environment(), class = "activeConcordance")
    })

print.activeConcordance <- function(x, ...) {
    ## Show the saved srcref and last text, then the concordance built
    ## so far; returns 'x' invisibly.
    for (field in c("lastSrcref", "lastText")) {
        cat(field, ":", sep = "")
        print(x[[field]])
    }
    print(x$finish())
    invisible(x)
}

print.Rconcordance <- function(x, ...) {
    ## Print a concordance as a table mapping output line numbers (the
    ## row names, shifted by the offset) to source file and line.
    tab <- data.frame(srcFile = x$srcFile, srcLine = x$srcLine)
    rownames(tab) <- x$offset + seq_len(nrow(tab))
    print(tab)
    invisible(x)
}


# This function takes a location in a file and uses a concordance
# object to find the corresponding location in the source for that
# file.

matchConcordance <- function(linenum, concordance) {
    ## Map line numbers in a generated file back to locations in its
    ## source using 'concordance'.  Returns a character matrix with
    ## columns "srcFile" and "srcLine"; lines at or before the leading
    ## offset have no source and map to empty strings.
    if (!all(c("offset", "srcLine", "srcFile") %in% names(concordance)))
        stop("concordance is not valid")
    linenum <- as.numeric(linenum)
    srcLines <- concordance$srcLine
    srcFiles <- rep_len(concordance$srcFile, length(srcLines))

    ## Lines within the offset have no source location; mask their
    ## indices so the vectorized lookup below cannot misfire.
    none <- linenum <= concordance$offset
    idx <- linenum - concordance$offset
    idx[none] <- NA

    result <- cbind(srcFile = as.character(srcFiles[idx]),
                    srcLine = as.character(srcLines[idx]))
    result[none, ] <- ""
    result
}

# This function converts concordance objects to string representations
# of them.  

## The string has three or four parts, separated by colons:
## 1.  The output .tex filename
## 2.  The input .Rnw filename
## 3.  Optionally, the starting line number of the output coded as "ofs nn",
##     where nn is the offset to the first output line.  This is omitted if nn is 0.
## 4.  The input line numbers corresponding to each output line.
##     This are compressed using the following simple scheme:
##     The first line number, followed by
##     a run-length encoded diff of the rest of the line numbers.

as.character.Rconcordance <- function(x,
			      targetfile = "",
			      ...) {
    ## Convert a concordance to its string representation, one string
    ## per contiguous run of entries from the same source file:
    ##   concordance:<targetfile>:<srcfile>:[ofs <n>:]<first> <rle...>
    ## where <rle...> is the run-length encoded diff of the source line
    ## numbers (see the format description above).
    concordance <- x
    offset <- concordance$offset
    src <- concordance$srcLine

    result <- character()

    srcfile <- rep_len(concordance$srcFile, length(src))

    while (length(src)) {
        ## First source line of the current chunk.
        first <- src[1]
        ## The chunk is the leading run of entries in the same file.
        if (length(unique(srcfile)) > 1)
            n <- which(srcfile != srcfile[1])[1] - 1
        else
            n <- length(srcfile)

        ## Run-length encode the line-number diffs within the chunk.
        vals <- with(rle(diff(src[seq_len(n)])), as.numeric(rbind(lengths, values)))
        result <- c(result, paste0("concordance:",
               targetfile, ":",
               srcfile[1], ":",
               if (offset) paste0("ofs ", offset, ":"),
               ## BUG FIX: this previously used concordance$srcLine[1],
               ## which is wrong for every chunk after the first (it
               ## always re-emitted the first line of the whole
               ## concordance); use the chunk's own first line.
               first, " ",
               paste(vals, collapse = " ")
               ))
        offset <- offset + n
        drop <- seq_len(n)
        src <- src[-drop]
        srcfile <- srcfile[-drop]
    }
    result
}

as.Rconcordance <- function(x, ...) {
    ## Generic: convert 'x' (e.g. the lines of a file containing string
    ## concordances) to an "Rconcordance" object.
    UseMethod("as.Rconcordance")  
}

# This takes concordance strings and combines them
# into one concordance object.

as.Rconcordance.default <- function(x, ...) {
    ## Extract all "concordance:" records from 'x' (stripping leading
    ## junk such as comment markers, and trailing non-digits) and merge
    ## them into a single concordance object; NULL if none are found.
    cleaned <- sub("^.*(concordance){1}?", "concordance",
                   sub("[^[:digit:]]*$", "", x))
    strings <- grep("^concordance:", cleaned, value = TRUE)
    if (!length(strings))
        return(NULL)
    result <- stringToConcordance(strings[1])
    for (s in strings[-1])
        result <- addConcordance(result, s)
    result
}

# Windows paths may include colons in the filenames
# if drive letters are used.  This looks for drive letters that
# have been split from the rest of the path and reattaches
# them.

fixWindowsConcordancePaths <- function(split) {
    ## Repair a concordance record that was split on ":" when Windows
    ## drive letters (e.g. "c:/path") contributed extra colons: a
    ## stranded single-letter entry is rejoined with the path entry
    ## that follows it.
    if (length(split) <= 4)
        return(split)
    # We are looking for a drive letter which should have been at the start
    # of the 2nd or 3rd entry, but will be in an entry by itself
    
    driveletter <- grep("^[a-zA-Z]$", split[2:4]) + 1
    ofs <- grep("^ofs [[:digit:]]+$", split[4:length(split)]) + 3
    
    # The drive letter can't precede the offset record
    driveletter <- setdiff(driveletter, ofs - 1)
    
    if (!length(driveletter))
        return(split)
    
    if (!length(ofs) # no ofs record but length is 5 or more
        || length(split) >= 6) {
        ## Rejoin a drive letter split off the targetfile (entry 2) ...
        if (2 %in% driveletter) {
            split <- c(split[1],
                       paste(split[2], split[3], sep=":"),
                       split[4:length(split)])
            driveletter <- driveletter - 1
        }
        ## ... and/or one split off the source file (entry 3).
        if (3 %in% driveletter) {
            split <- c(split[1:2],
                       paste(split[3], split[4], sep=":"),
                       split[5:length(split)])
        }
    }
    split
}

# This takes one concordance string and produces a single concordance
# object

stringToConcordance <- function(s) {
    ## Parse a single concordance string (see as.character.Rconcordance
    ## for the format) into an "Rconcordance" object.
    parts <- strsplit(s, ":")[[1]]
    if (.Platform$OS.type == "windows")
        parts <- fixWindowsConcordancePaths(parts)
    targetfile <- parts[2]          # not used here, kept for clarity
    srcFile <- parts[3]
    ## An optional "ofs nn" field may precede the line-number data.
    if (length(parts) == 4) {
    	ofs <- 0
    	vi <- 4
    } else {
    	ofs <- as.integer(sub("^ofs ([0-9]+)", "\\1", parts[4]))
    	vi <- 5
    }
    nums <- as.integer(strsplit(parts[vi], " ")[[1]])
    firstline <- nums[1]
    ## Remaining numbers: run-length encoded diffs (length, value pairs).
    enc <- matrix(nums[-1], nrow = 2)
    diffs <- inverse.rle(structure(list(lengths = enc[1, ],
                                        values = enc[2, ]),
                                   class = "rle"))
    structure(list(offset = ofs, srcFile = srcFile,
                   srcLine = c(firstline, firstline + cumsum(diffs))),
    	      class = "Rconcordance")
}

# This modifies an existing concordance object to incorporate
# one new concordance string

addConcordance <- function(conc, s) {
    ## Merge the concordance string 's' into the existing concordance
    ## 'conc', overwriting the output-line range the new entry covers.
    extra <- stringToConcordance(s)
    if (!is.null(extra)) {
        conc$srcFile <- rep_len(conc$srcFile, length(conc$srcLine))
        idx <- extra$offset + seq_along(extra$srcLine)
        conc$srcFile[idx] <- extra$srcFile
        conc$srcLine[idx] <- extra$srcLine
    }
    conc
}

# This modifies an existing concordance by following links specified
# in a previous one.

followConcordance <- function(concordance, prevConcordance) {
    ## Compose two concordances: where 'concordance' maps output lines
    ## to lines of an intermediate file covered by 'prevConcordance',
    ## follow the link through to the original source.  Lines not
    ## covered by 'prevConcordance' keep their current mapping.
    if (is.null(prevConcordance))
        return(concordance)

    curLines <- concordance$srcLine
    curFile <- rep_len(concordance$srcFile, length(curLines))

    prevLines <- prevConcordance$srcLine
    prevFile <- rep_len(prevConcordance$srcFile, length(prevLines))

    ## Shift the previous mapping so it can be indexed directly by
    ## intermediate line number: pad the first 'offset' slots with NA.
    ofs <- prevConcordance$offset
    if (ofs) {
        prevLines <- c(rep(NA_integer_, ofs), prevLines)
        prevFile <- c(rep(NA_character_, ofs), prevFile)
    }
    ## Pad out to the largest line number we will look up.
    pad <- max(curLines) - length(prevLines)
    if (pad > 0) {
        prevLines <- c(prevLines, rep(NA_integer_, pad))
        prevFile <- c(prevFile, rep(NA_character_, pad))
    }

    ## NA after lookup means "not covered by prev": keep current info.
    unknown <- is.na(prevLines[curLines])
    concordance$srcFile <- ifelse(unknown, curFile, prevFile[curLines])
    concordance$srcLine <- ifelse(unknown, curLines, prevLines[curLines])
    concordance
}
#  File src/library/tools/R/Rd.R
#  Part of the R package, https://www.R-project.org
#
#  Copyright (C) 1995-2025 The R Core Team
#
#  This program is free software; you can redistribute it and/or modify
#  it under the terms of the GNU General Public License as published by
#  the Free Software Foundation; either version 2 of the License, or
#  (at your option) any later version.
#
#  This program is distributed in the hope that it will be useful,
#  but WITHOUT ANY WARRANTY; without even the implied warranty of
#  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
#  GNU General Public License for more details.
#
#  A copy of the GNU General Public License is available at
#  https://www.R-project.org/Licenses/

### * Rd_info

Rd_info <-
function(file, encoding = "unknown")
{
    ## Compute the metadata (name, type, title, aliases, concepts,
    ## keywords, declared encoding) of a single parsed Rd object.
    ## Despite the argument name, 'file' must already be a parsed "Rd"
    ## object; its recorded source filename is used in error messages.
    if(inherits(file, "Rd")) {
        Rd <- file
        description <- attr(attr(Rd, "srcref"), "srcfile")$filename
    } else
        stop("Rd object required")

    aliases <- .Rd_get_metadata(Rd, "alias")
    concepts <- .Rd_get_metadata(Rd, "concept")
    ## Keywords auto-inserted by prompt() and friends are placeholders,
    ## not real keywords: drop them.
    keywords <- .Rd_get_metadata(Rd, "keyword") %w/o% .Rd_keywords_auto

    ## Could be none or more than one ... argh.
    Rd_type <- .Rd_get_doc_type(Rd)
    encoding <- c(.Rd_get_metadata(Rd, "encoding"), "")[1L]

    ## A missing or empty \name is a hard error.
    Rd_name <- .Rd_get_name(Rd)
    if(!length(Rd_name)) {
        msg <-
            c(gettextf("missing/empty %s field in '%s'",
                       "\\name",
                       description),
              gettextf("Rd files must have a non-empty %s.",
                       "\\name"),
              gettext("See chapter 'Writing R documentation' in manual 'Writing R Extensions'."))
        stop(paste(msg, collapse = "\n"), domain = NA)
    }

    ## Likewise for \title.
    Rd_title <- .Rd_get_title(Rd)
    if(!nzchar(Rd_title)) {
        msg <-
            c(gettextf("missing/empty \\title field in '%s'",
                       description),
              gettext("Rd files must have a non-empty \\title."),
              gettext("See chapter 'Writing R documentation' in manual 'Writing R Extensions'."))
        stop(paste(msg, collapse = "\n"), domain = NA)
    }

    list(name = Rd_name, type = Rd_type, title = Rd_title,
         aliases = aliases, concepts = concepts, keywords = keywords,
         encoding = encoding)
}

### * Rd_contents

Rd_contents <-
function(db)
{
    ## Compute a contents data frame from an Rd db.
    ## NB: the Encoding column is the encoding declared in the file,
    ## not the one in effect after parsing.
    if(!length(db))
        return(list2DF(list(File = character(),
                            Name = character(),
                            Type = character(),
                            Title = character(),
                            Encoding = character(),
                            Aliases = list(),
                            Concepts = list(),
                            Keywords = list())))

    fields <- c("Name", "Type", "Title", "Aliases", "Concepts",
                "Keywords", "Encoding")
    ## A list matrix with one row per Rd object: Rd_info() returns its
    ## components in exactly the order of 'fields'.
    info <- matrix(vector("list", length(db) * length(fields)),
                   nrow = length(db),
                   dimnames = list(NULL, fields))
    for(i in seq_along(db))
        info[i, ] <- Rd_info(db[[i]])

    list2DF(list(File = basename(names(db)),
                 Name = unlist(info[ , "Name"]),
                 Type = unlist(info[ , "Type"]),
                 Title = .Rd_format_title(unlist(info[ , "Title"])),
                 Encoding = unlist(info[ , "Encoding"]),
                 Aliases = info[ , "Aliases"],
                 Concepts = info[ , "Concepts"],
                 Keywords = info[ , "Keywords"]))
}

### * .write_Rd_contents_as_RDS

.write_Rd_contents_as_RDS <-
function(contents, outFile)
{
    ## Serialize the Rd contents db to @file{outFile}.
    ## <NOTE>
    ## To cope with possible future changes in the format of the
    ## contents db, consider a version attribute and/or a formal class.
    ## </NOTE>
    saveRDS(contents, file = outFile, compress = TRUE)
}

### * .write_Rd_contents_as_DCF

if(FALSE) {
## Disabled code, retained for reference only: the RDS variant above is
## what is actually used.
.write_Rd_contents_as_DCF <-
function(contents, packageName, outFile)
{
    ## Write a @file{CONTENTS} DCF file from an Rd contents db.
    ## Note that these files currently have @samp{URL:} entries which
    ## contain the package name, whereas @code{Rd_contents()} works on
    ## collections of Rd files which do not necessarily all come from
    ## the same package ...

    ## If the contents is 'empty', return immediately.  (Otherwise,
    ## e.g. URLs would not be right ...)
    if(!NROW(contents)) return()

    ## <NOTE>
    ## This has 'html' hard-wired.
    ## Note that slashes etc. should be fine for URLs.
    URLs <- paste0("../../../library/", packageName, "/html/",
                   file_path_sans_ext(contents[ , "File"]),
                   ".html")
    ## </NOTE>

    if(is.data.frame(contents))
        contents <-
            cbind(contents$Name,
                  vapply(contents$Aliases, paste, "", collapse = " "),
                  vapply(contents$Keywords, paste, "", collapse = " "),
                  contents$Title)
    else
        contents <-
            contents[, c("Name", "Aliases", "Keywords", "Title"),
                     drop = FALSE]

    cat(paste(c("Entry:", "Aliases:", "Keywords:", "Description:",
                "URL:"),
              t(cbind(contents, URLs))),
        sep = c("\n", "\n", "\n", "\n", "\n\n"),
        file = outFile)
}
}

### * .build_Rd_index

.build_Rd_index <-
function(contents, type = NULL)
{
    ## Build an Rd 'index' containing Rd "names" (see below) and titles,
    ## maybe subscripted according to the Rd type (\docType).

    ## 'Keywords' is a list column: each element holds the keyword
    ## vector of one Rd object.
    keywords <- contents[ , "Keywords"]

    if(!is.null(type)) {
        idx <- contents[ , "Type"] %in% type
        ## Argh.  Ideally we only want to subscript according to
        ## \docType.  Maybe for 2.0 ...
        if(type == "data")
            idx <- idx | keywords == "datasets"
        ## (Note: we really only want the Rd objects which have
        ## 'datasets' as their *only* keyword.)
        contents <- contents[idx, , drop = FALSE]
        keywords <- keywords[idx]
    }

    ## Drop all Rd objects marked as 'internal' from the index.
    idx <- (vapply(keywords,
                   function(x) match("internal", x, 0L),
                   0L) == 0L)
    ## Display topic: the \name, or a preferred alias as chosen by
    ## .Rd_topic_for_display().
    topic <- as.character(unlist(Map(.Rd_topic_for_display,
                                     contents[idx, "Name"],
                                     contents[idx, "Aliases"])))
    index <- data.frame(Topic = topic,
                        Title = contents[idx, "Title"])
    if(nrow(index)) {
        ## Handle entries with missing topic: should these perhaps be 
        ## dropped?
        index$Topic[is.na(index$Topic)] <- ""
        ## Sort by topic.
        index <- index[order(index$Topic), ]
    }
    index
}

### * Rdindex

Rdindex <-
function(RdFiles, outFile = "", type = NULL,
         width = 0.9 * getOption("width"), indent = NULL)
{
    ## Create @file{INDEX} or @file{data/00Index} style files from Rd
    ## files: the R replacement for the removed @code{R CMD Rdindex}.
    ## Called from R CMD build.

    ## Compatibility with the old interface: a single directory means
    ## "all Rd files below it", preferring its man/ subdirectory.
    if((length(RdFiles) == 1L) && dir.exists(RdFiles)) {
        docsDir <- RdFiles
        manDir <- file.path(docsDir, "man")
        if(dir.exists(manDir))
            docsDir <- manDir
        RdFiles <- list_files_with_type(docsDir, "docs")
    }

    if(outFile == "") {
        outFile <- stdout()
    } else if(is.character(outFile)) {
        outFile <- file(outFile, "w")
        on.exit(close(outFile))
    }
    if(!inherits(outFile, "connection"))
        stop("argument 'outFile' must be a character string or connection")

    db <- .build_Rd_db(files = RdFiles, stages = "build")
    index <- .build_Rd_index(Rd_contents(db), type = type)
    writeLines(formatDL(index, width = width, indent = indent), outFile)
}

### * Rd_db

Rd_db <-
function(package, dir, lib.loc = NULL, stages = "build")
{
    ## Build an Rd 'data base' from an installed package or the unpacked
    ## package sources as a list containing the parsed Rd objects.

    ## <NOTE>
    ## We actually also process platform conditionals.
    ## If this was to be changed, we could also need to arrange that Rd
    ## objects in *all* platform specific subdirectories are included.
    ## </NOTE>

    ## Argument handling.
    if(!missing(package)) {
        if(length(package) != 1L)
            stop("argument 'package' must be of length 1")
        dir <- find.package(package, lib.loc)
        ## For an installed package, we have (even when there are no help pages)
        ## help/package.rd[bx]
        ##    with a DB of the parsed (and platform processed, see above) Rd objects.
        db_file <- file.path(dir, "help", package)
        if(file_test("-f", paste0(db_file, ".rdx"))) {
            db <- fetchRdDB(db_file)
            ## Restore the original source paths as names; paths.rds
            ## stores them with a common prefix length in attribute
            ## "first" to save space.
            pathfile <- file.path(dir, "help", "paths.rds")
            if(file.exists(pathfile)) {
                paths <- readRDS(pathfile)
                if(!is.null(first <- attr(paths, "first")))
                    paths <- substring(paths, first)
                names(db) <- paths
            }
        } else # should not happen for packages installed with R >= 2.10.0
            stop(sprintf("installed help of package %s is corrupt",
                         sQuote(package)), domain = NA)
    }
    else {
        if(missing(dir))
            stop("you must specify 'package' or 'dir'")
        ## Using sources from directory @code{dir} ...
        if(!dir.exists(dir))
            stop(gettextf("directory '%s' does not exist", dir),
                 domain = NA)
        else
            dir <- file_path_as_absolute(dir)
        ## Partially processed Rd objects may have been left by
        ## R CMD build; .build_Rd_db() reuses them when up to date.
        built_file <- file.path(dir, "build", "partial.rdb")
        later_file <- file.path(dir, "build", "stage23.rdb")
        db <- .build_Rd_db(dir,
                           stages = stages,
                           built_file = built_file,
                           later_file = later_file)
        if(length(db)) {
            ## Name entries by their path below the man/ directory.
            first <- nchar(file.path(dir, "man")) + 2L
            names(db) <- substring(names(db), first)
        }
    }

    db
}

.build_Rd_db <-
function(dir = NULL, files = NULL,
         encoding = "unknown", db_file = NULL,
         stages = c("build", "install"), os = .OStype(), step = 3L,
         built_file = NULL, later_file = NULL, macros = character())
{
    ## Workhorse for Rd_db(): parse (and platform/stage process) the Rd
    ## sources of a package into a named list of Rd objects, reusing
    ## cached results from 'db_file', 'built_file' and 'later_file'
    ## when they are still newer than the corresponding sources.
    if(!is.null(dir)) {
        dir <- file_path_as_absolute(dir)
        macros0 <- loadPkgRdMacros(dir)
        man_dir <- file.path(dir, "man")
        if(!dir.exists(man_dir))
            return(structure(list(), names = character()))
        if(is.null(files))
            files <- list_files_with_type(man_dir, "docs", OS_subdirs=os)
        encoding <- .get_package_metadata(dir, FALSE)["Encoding"]
        if(is.na(encoding)) encoding <- "unknown"
    } else if(!is.null(files))
        macros0 <- initialRdMacros()
    else
        stop("you must specify 'dir' or 'files'")

    ## Additional macros may be passed in as text: parse them on top of
    ## the package/system macros.
    if(length(macros)) {
        con <- textConnection(macros)
        macros <- loadRdMacros(con, macros0)
        close(con)
    } else {
        macros <- macros0
    }

    ## Parse and process a single Rd file; 'f' may also be an already
    ## parsed Rd object taken from one of the cache files below.
    .fetch_Rd_object <- function(f, stages) {
        ## This calls parse_Rd if f is a filename
        Rd <- prepare_Rd(f, encoding = encoding,
                         defines = os,
                         stages = stages, warningCalls = FALSE,
                         stage2 = step > 1L, stage3 = step > 2L,
                         macros = macros)
        structure(Rd, prepared = step)
    }

    if(!is.null(db_file) && file_test("-f", db_file)) {
        ## message("updating database of parsed Rd files")
        db <- fetchRdDB(sub("\\.rdx$", "", db_file))
        db_names <- names(db) <-
            readRDS(file.path(dirname(db_file), "paths.rds"))
        ## Files in the db in need of updating:
        indf <- (files %in% db_names) & file_test("-nt", files, db_file)
        ## Also files not in the db:
        indf <- indf | (files %notin% db_names)

        ## Db elements missing from files:
        ind <- (db_names %notin% files) | (db_names %in% files[indf])
	if(any(ind))
            db <- db[!ind]
	files <- files[indf]
    } else
    	db <- list()

    ## The built_file is a file of partially processed Rd objects, where
    ## build time \Sexprs have been evaluated.  We'll put the object in
    ## place of its filename to continue processing.
    ## Similarly for later_file.

    basenames <- basename(files)    
    names(files) <- files
    files <- as.list(files)
    
    if(!is.null(built_file) && file_test("-f", built_file)) {
 	built <- readRDS(built_file)
 	names_built <- names(built)
        ## Hmm ... why are we doing this?
 	if ("install" %in% stages) {
 	    this_os <- grepl(paste0("^", os, "/"), names_built)
 	    name_only <- basename(names_built[this_os])
 	    built[name_only] <- built[this_os]
 	    some_os <- grepl("/", names(built))
 	    built <- built[!some_os]
 	    names_built <- names(built)
 	}
 	built[names_built %notin% basenames] <- NULL
 	if (length(built)) {
 	    which <- match(names(built), basenames)
 	    if (all(file_test("-nt", built_file, names(files)[which]))) {
	    	files[which] <- built
	    }
	}
    }
    if("later" %in% stages) {
        ## Reuse stage-2/3 processed objects if they are newer than the
        ## corresponding sources.
        if(!is.null(later_file) && file_test("-f", later_file)) {
            later <- readRDS(later_file)
            names_later <- names(later)
            later[names_later %notin% basenames] <- NULL
            if (length(later)) {
                which <- match(names(later), basenames)
                if (all(file_test("-nt", later_file, names(files)[which]))) {
                    files[which] <- later
                }
            }
        }
        stages <- stages[stages != "later"]
    }

    if(length(files)) {
        ## message("building database of parsed Rd files")
        db1 <- lapply(files, .fetch_Rd_object, stages)
        names(db1) <- names(files)
        db <- c(db, db1)
    }

    db
}

### * Rd_aliases

## Called from undoc and .check_Rd_xrefs
Rd_aliases <-
function(package, dir, lib.loc = NULL)
{
    ## Get the Rd aliases (topics) from an installed package or the
    ## unpacked package sources.
    if(!missing(package)) {
        ## Installed package: use the contents db in Meta/Rd.rds.
        dir <- find.package(package, lib.loc)
        rds <- file.path(dir, "Meta", "Rd.rds")
        if(!file_test("-f", rds))
            return(character())
        ## <NOTE>
        ## Alternatively, the aliases could be read from the help index
        ## ('help/AnIndex') -- which is how index.search() finds
        ## individual topics -- and earlier versions of this code (then
        ## part of undoc()) did just that.
        ## </NOTE>
        aliases <- readRDS(rds)$Aliases
        if(length(aliases)) sort(unlist(aliases)) else character()
    } else if(dir.exists(file.path(dir, "man"))) {
        ## Package sources: collect \alias metadata from the Rd db.
        aliases <- lapply(Rd_db(dir = dir), .Rd_get_metadata, "alias")
        if(length(aliases))
            sort(unique(unlist(aliases, use.names = FALSE)))
        else character()
    } else
        character()
}

### .build_Rd_xref_db

.build_Rd_xref_db <-
function(package, dir, lib.loc = NULL)
{
    ## Compute a db of the cross-references in each Rd file of a
    ## package (installed, or unpacked sources).
    db <- if(missing(package)) Rd_db(dir = dir)
          else Rd_db(package, lib.loc = lib.loc)
    lapply(db, .Rd_get_xrefs)
}

### * .Rd_get_metadata

.Rd_get_metadata <-
function(x, kind)
{
    ## Collect the unique (trimmed) text of all top-level \<kind>
    ## entries of Rd object 'x'.
    keep <- x[RdTags(x) == sprintf("\\%s", kind)]
    if(!length(keep))
        return(character())
    ## <NOTE>
    ## WRE says that
    ##   Each @code{\concept} entry should give a @emph{single}
    ##   index term (word or phrase), and not use any Rd markup.
    ## but at least for now \I{...} wrappers (used for spell checking)
    ## are tolerated: strip the USERMACRO node they parse into.
    if(kind == "concept")
        keep <- lapply(keep, function(e) {
            if((length(e) > 1L) &&
               identical(attr(e[[1L]], "Rd_tag"), "USERMACRO") &&
               identical(attr(e[[1L]], "macro"), "\\I"))
                e[-1L]
            else
                e
        })
    ## </NOTE>
    unique(trimws(vapply(keep, paste, "", collapse = "\n")))
}

### * .Rd_keywords_auto

## Placeholder keywords auto-inserted by the prompt*() helpers; these
## are filtered out when collecting real \keyword metadata.
.Rd_keywords_auto <-
    c("~kwd1", "~kwd2",                  # prompt.default() in R < 4.0.0
      "~~ other possible keyword(s) ~~") # promptMethods()

### * .Rd_get_section

.Rd_get_section <-
function(x, which, predefined = TRUE)
{
    ## Extract the first section 'which' from Rd object 'x': either a
    ## predefined section (\details, \arguments, ...) or a user-defined
    ## \section{title}{body} whose title text equals 'which'.
    if(predefined) {
        x <- x[RdTags(x) == paste0("\\", which)]
    } else {
        ## User-defined sections are parsed into lists of length 2, with
        ## the elements the title and the body, respectively.
        x <- x[RdTags(x) == "\\section"]
        if(length(x)) {
            hit <- sapply(x, function(e) .Rd_get_text(e[[1L]])) == which
            x <- lapply(x[hit], `[[`, 2L)
        }
    }
    if(length(x)) structure(x[[1L]], class = "Rd") else x
}

### * .Rd_deparse

.Rd_deparse <-
function(x, tag = TRUE)
{
    ## Deparse Rd object 'x' back to a single Rd source string; with
    ## tag = FALSE, x's own Rd tag is suppressed first.
    ## <NOTE>
    ## This should eventually get an option controlling whether to
    ## escape Rd special characters as needed (thus providing valid Rd)
    ## or not.
    ## It might also be useful to have an option for dropping comments.
    ## </NOTE>
    if(!tag)
        attr(x, "Rd_tag") <- "Rd"
    paste(as.character.Rd(x), collapse = "")
}

### * .Rd_drop_comments

.Rd_drop_comments <-
function(x)
    ## Recursively remove all Rd comment ("COMMENT") nodes from 'x'.
    .Rd_drop_nodes_with_tags(x, "COMMENT")

### * .Rd_drop_nodes_with_tags

.Rd_drop_nodes_with_tags <-
function(x, tags)
{
    ## Recursively remove all nodes whose Rd tag is in 'tags',
    ## preserving the attributes of the retained list containers.
    walk <- function(node) {
        if(!is.list(node))
            return(node)
        attrs <- attributes(node)
        node <- lapply(node[is.na(match(RdTags(node), tags))], walk)
        attributes(node) <- attrs
        node
    }
    walk(x)
}

### * .Rd_drop_nodes

.Rd_drop_nodes <-
function(x, predicate)
{
    ## Recursively remove all nodes for which 'predicate' returns TRUE,
    ## preserving the attributes of the retained list containers.
    walk <- function(node) {
        if(!is.list(node))
            return(node)
        attrs <- attributes(node)
        keep <- !vapply(node, predicate, NA)
        node <- lapply(node[keep], walk)
        attributes(node) <- attrs
        node
    }
    walk(x)
}

### * .Rd_find_nodes_with_tags

.Rd_find_nodes_with_tags <-
function(x, tags)
{
    ## Recursively collect all nodes of 'x' whose Rd tag is one of
    ## 'tags'.  Note: the children of 'x' are tested, not 'x' itself.
    found <- list()
    visit <- function(node) {
        if(any(attr(node, "Rd_tag") == tags))
            found[[length(found) + 1L]] <<- node
        if(is.list(node))
            lapply(node, visit)
    }
    lapply(x, visit)
    found
}

### * .Rd_find_nodes

.Rd_find_nodes <-
function(x, predicate)
{
    ## Recursively collect all nodes of 'x' for which 'predicate'
    ## returns TRUE.  Note: the children of 'x' are tested, not 'x'.
    found <- list()
    visit <- function(node) {
        if(predicate(node))
            found[[length(found) + 1L]] <<- node
        if(is.list(node))
            lapply(node, visit)
    }
    lapply(x, visit)
    found
}

### * .Rd_apply

## A first shot at recursively transforming nodes in Rd objects: nodes
## transformed to NULL will get dropped.
## E.g., to drop comments and specials, one could also do
##   .Rd_apply(x,
##             function(e) {
##                 switch(attr(e, "Rd_tag"),
##                        "\\special" =,
##                        "COMMENT" = NULL,
##                        e)
##             })

.Rd_apply <- function(x, f) {
    ## Recursively transform the nodes of Rd object 'x' with 'f';
    ## nodes transformed to NULL are dropped.  Attributes of list
    ## containers are preserved.
    recurse <- function(node) {
        if(is.list(node)) {
            attrs <- attributes(node)
            ## Transform all children, drop the NULLs, then recurse.
            node <- lapply(node, f)
            node <- lapply(Filter(Negate(is.null), node), recurse)
            attributes(node) <- attrs
        }
        ## <FIXME>
        ## Should we do f(node) if not is.list(node)?
        node
        ## </FIXME>
    }
    recurse(x)
}

### * .Rd_get_Sexpr_build_time_info

## Determine whether Rd has \Sexprs which R CMD build needs to handle at
## build stage (expand into the partial Rd db), "later" (build
## refman.pdf) or "never" (\Sexprs from \PR or \doi can always safely
## be expanded). Needs unprocessed install \Sexprs.

.Rd_get_Sexpr_build_time_info <-
function(x)
{
    ## Classify the \Sexpr macros in Rd object 'x' by when they must be
    ## handled: "build" (expand into the partial Rd db), "later" (when
    ## building refman.pdf), or "never" (\PR / \doi expansions are
    ## always safe).  Needs unprocessed install-stage \Sexprs.
    y <- getDynamicFlags(x)
    if(!y["\\Sexpr"])
        ## No \Sexpr at all.
        c("\\Sexpr" = FALSE,
          build = FALSE,
          later = FALSE,
          never = FALSE)
    else if(!any(y[c("install", "render")]))
        ## Only build-stage \Sexprs.
        c("\\Sexpr" = TRUE,
          build = TRUE,
          later = FALSE,
          never = FALSE)
    else {
        ## Mixed stages: classify each \Sexpr node individually.
        nodes <- .Rd_find_nodes_with_tags(x, "\\Sexpr")
        btinfo <-
            vapply(nodes,
                   function(e) {
                       flags <- getDynamicFlags(e)
                       if(flags["build"])
                           return("build")
                       else if(flags["install"]) {
                           ## \PR and \doi expansions are harmless and
                           ## can always be expanded.
                           s <- trimws(paste(as.character(e),
                                             collapse = ""))
                           if(startsWith(s, "tools:::Rd_expr_PR(") ||
                              startsWith(s, "tools:::Rd_expr_doi("))
                               return("never")
                       }
                       "later"
                   },
                   "")
        c("\\Sexpr" = TRUE,
          y["build"],
          later = any(btinfo == "later"),
          never = any(btinfo == "never"))
    }
}

### * .Rd_get_argument_names

.Rd_get_argument_names <-
function(x)
{
    ## Names documented in the \arguments section of Rd object 'x':
    ## item tags split on commas, with \dots/\ldots mapped to "..."
    ## and escaped underscores unescaped.
    sec <- .Rd_get_section(x, "arguments")
    if (!length(sec))
        return(character())
    nms <- unlist(strsplit(.Rd_get_item_tags(sec), ", *"))
    nms <- gsub("\\\\l?dots", "...", nms)
    nms <- gsub("\\_", "_", nms, fixed = TRUE)
    trimws(nms)
}

### * .Rd_get_argument_table

.Rd_get_argument_table <-
function(x)
{
    ## Two-column character matrix of (name, description) pairs taken
    ## from the two-argument \item tags at the top level of the
    ## \arguments section (non-recursive).
    none <- matrix(character(), 0L, 2L)
    sec <- .Rd_get_section(x, "arguments")
    if (!length(sec)) return(none)
    items <- sec[RdTags(sec) == "\\item"]
    if (!length(items)) return(none)
    ## Deparse both arguments of each \item, skipping comments.
    deparse_no_comments <- function(block)
        .Rd_deparse(block[RdTags(block) != "COMMENT"])
    rows <- lapply(items[lengths(items) == 2L], vapply, FUN.VALUE = "",
                   deparse_no_comments)
    matrix(unlist(rows), ncol = 2L, byrow = TRUE)
}

### * .Rd_get_item_tags

.Rd_get_item_tags <-
function(x)
{
    ## Deparse the first argument of each two-argument \item tag at the
    ## top level of 'x' (non-recursive).
    items <- x[RdTags(x) == "\\item"]
    items <- items[lengths(items) == 2L]
    tags <- lapply(items, function(it) .Rd_deparse(it[[1L]]))
    as.character(unlist(tags))
}

### * .Rd_get_example_code

.Rd_get_example_code <-
function(x)
{
    ## Extract the \examples code of Rd object 'x' as a single string
    ## (or character() when there is no \examples section).
    x <- .Rd_get_section(x, "examples")
    if(!length(x)) return(character())

    ## Need to remove everything inside \dontrun (and drop comments),
    ## and "undefine"
    ##   \dontdiff \dontshow \donttest \testonly
    ## (which is achieved by changing the Rd tag to "Rd").

    ## <FIXME>
    ## Remove eventually.
    x <- .Rd_drop_comments(x)
    ## </FIXME>

    recurse <- function(e) {
        ## "Undefine" the wrapper tags listed above: keep their
        ## contents, padded with newlines so the code stays separated.
        if(!is.null(tag <- attr(e, "Rd_tag"))
           && tag %in% c("\\dontdiff", "\\dontshow", "\\donttest",
                         "\\testonly")) {
            e <- c(list(tagged("\n", "RCODE")),
                   e,
                   list(tagged("\n", "RCODE")))
            attr(e, "Rd_tag") <- "Rd"
        }
        if(is.list(e)) {
            ## Drop \dontrun blocks entirely; recurse into the rest,
            ## preserving the node's own tag.
            structure(lapply(e[is.na(match(RdTags(e), "\\dontrun"))],
                             recurse),
                      Rd_tag = attr(e, "Rd_tag"))
        }
        else e
    }

    y <- recurse(x)
    attr(y, "Rd_tag") <- "Rd"
    y <- as.character.Rd(y)
    ## Map \dots/\ldots to "..." and unescape \% and \{ (lookbehind
    ## keeps literal "\\%"/"\\{" sequences intact).
    y[y %in% c("\\dots", "\\ldots")] <- "..."
    y <- psub("(?<!\\\\)\\\\([%{])", "\\1", y)
    paste(y, collapse = "")
}

### * .Rd_get_methods_description_table

.Rd_get_methods_description_table <-
function(x)
{
    ## Two-column character matrix of (signature, description) pairs
    ## from the \describe block of a "Methods" section, as used in S4
    ## methods documentation.  Returns a 0x2 matrix when there is no
    ## such section, no \describe, or no \item entries.
    y <- matrix(character(), 0L, 2L)
    x <- .Rd_get_section(x, "Methods", FALSE)
    if(!length(x)) return(y)
    x <- .Rd_get_section(x, "describe")
    if(!length(x)) return(y)
    x <- x[RdTags(x) == "\\item"]
    if(!length(x)) return(y)
    ## Use vapply() with a character FUN.VALUE (as in
    ## .Rd_get_argument_table()) rather than sapply(), so the result
    ## type is stable.
    x <- lapply(x[lengths(x) == 2L], vapply, FUN.VALUE = "", .Rd_deparse)
    matrix(unlist(x), ncol = 2L, byrow = TRUE)
}

### * .Rd_get_doc_type

.Rd_get_doc_type <-
function(x)
{
    ## The docType of Rd object 'x': prefer the "meta" attribute, then
    ## the \docType metadata, defaulting to "" (NULL entries vanish in
    ## the c() call, so the first available candidate wins).
    candidates <- c(attr(x, "meta")$docType,
                    .Rd_get_metadata(x, "docType"),
                    "")
    candidates[1L]
}

### * .Rd_get_name

.Rd_get_name <-
function(x)
{
    ## The \name of Rd object 'x', trimmed, or character() when there
    ## is none.
    nm <- .Rd_get_section(x, "name")
    if (!length(nm))
        return(character())
    ## The name should really be plain text, so as.character() should
    ## be fine as well ...
    trimws(.Rd_deparse(nm, tag = FALSE))
}

### * .Rd_get_title

.Rd_get_title <-
function(x)
{
    ## The \title of Rd object 'x' as a single string ("" when there
    ## is no title section).
    sec <- .Rd_get_section(x, "title")
    pieces <- if (length(sec)) {
                  txt <- .Rd_get_text(sec)
                  txt[nzchar(txt)]
              } else character()
    paste(pieces, collapse = " ")
}

### * .Rd_get_text

# Return display form of text, encoded in UTF-8.  Note that
# textConnection converts to the local encoding, and we convert back,
# so unrepresentable characters will be lost

## FIXME: use out = tempfile(), like .Rd_get_latex.

.Rd_get_text <-
function(x) {
    ## Return the display form of Rd content 'x' as UTF-8 text.
    # Handle easy cases first: already plain text.
    if (is.character(x)) return(c(x))

    # We'd like to use capture.output here, but don't want to depend
    # on utils, so we duplicate some of it: render via Rd2txt() into a
    # local text connection collected in 'rval'.
    rval <- NULL
    file <- textConnection("rval", "w", local = TRUE)

    # Render with plain quotes and without underlined titles; restore
    # both settings (and the sink and connection) afterwards.
    save <- options(useFancyQuotes = FALSE)
    Rdsave <- Rd2txt_options(underline_titles = FALSE)
    sink(file)
    # finally: undo the sink and option changes even if Rd2txt() fails.
    tryCatch(Rd2txt(x, fragment=TRUE),
             finally = {sink()
                        options(save)
                        Rd2txt_options(Rdsave)
                        close(file)})

    # textConnection() converts to the local encoding; convert back to
    # UTF-8 (unrepresentable characters will be lost).
    if (is.null(rval)) rval <- character()
    else enc2utf8(rval)
}

### * .Rd_get_xrefs

.Rd_get_xrefs <-
function(x)
{
    ## Collect all \link and \linkS4class cross-references in Rd
    ## object 'x' as a character matrix with columns "Target" (the
    ## displayed text) and "Anchor" (the \link option, or "=<t>-class"
    ## for \linkS4class).
    rows <- list()
    add <- function(val) rows[[length(rows) + 1L]] <<- val
    walk <- function(node) {
        tg <- attr(node, "Rd_tag")
        if (identical(tg, "\\link")) {
            if (length(node)) {        # mvbutils has empty links
                target <- paste(trimws(unlist(node)), collapse = " ")
                opt <- attr(node, "Rd_option")
                add(c(target, if (is.null(opt)) "" else as.character(opt)))
            } else add(c("", ""))
        } else if (identical(tg, "\\linkS4class")) {
            target <- if (length(node)) as.character(node[[1L]]) else ""
            add(c(target, sprintf("=%s-class", target)))
        }
        if (is.list(node)) lapply(node, walk)
    }
    lapply(x, walk)
    out <- do.call(rbind, rows)
    if (is.null(out)) out <- matrix(character(), 0L, 2L)
    dimnames(out) <- list(NULL, c("Target", "Anchor"))
    out
}

### * .Rd_get_names_from_Rd_db

.Rd_get_names_from_Rd_db <-
function(db)
{
    ## The \name entries of the Rd objects in Rd db 'db', failing with
    ## a useful message when some are missing or empty.
    nms <- lapply(db, .Rd_get_name)
    bad <- lengths(nms) == 0L
    if (any(bad)) {
        ## If the Rd db was obtained from an installed package, we know
        ## that all Rd objects must have a \name entry---otherwise,
        ## Rd_info() and hence installing the package Rd contents db
        ## would have failed.  For Rd dbs created from a package source
        ## directory, the names attribute gives the Rd file paths, so
        ## that we can point to the files with missing \name entries.
        paths <- names(db)
        if (is.null(paths)) {
            ## This should not happen: we cannot refer to the bad Rd
            ## objects because we do not know their names, and have no
            ## idea which file they came from ...
            stop("cannot deal with Rd objects with missing/empty names")
        }
        stop(sprintf(ngettext(sum(bad),
                              "missing/empty \\name field in Rd file\n%s",
                              "missing/empty \\name field in Rd files\n%s"),
                     paste0("  ", paths[bad], collapse = "\n")),
             call. = FALSE, domain = NA)
    }
    unlist(nms)
}

### * ..Rd_get_equations_from_Rd

.Rd_get_equations_from_Rd <-
function(x)
{
    ## 5-column character matrix (tag, latex, ascii, beg, end) with one
    ## row per \eqn/\deqn in Rd object 'x'; 'ascii' and the source
    ## positions are NA when unavailable.
    nodes <- .Rd_find_nodes_with_tags(x, c("\\eqn", "\\deqn"))
    if (!length(nodes)) return(matrix(character(), 0L, 5L))
    one_row <- function(e) {
        ## % is treated verbatim in the first arg of equations as per
        ## "Exceptions to special character handling" in parseRd.pdf.
        latex <- .Rd_deparse(e[[1L]], tag = FALSE)
        ascii <- if (length(e) > 1L)
                     trimws(.Rd_deparse(e[[2L]], tag = FALSE))
                 else
                     NA_character_
        loc <- attr(e, "srcref")
        span <- if (is.null(loc)) rep.int(NA_character_, 2L)
                else loc[c(1L, 3L)]
        c(attr(e, "Rd_tag"), latex, ascii, span)
    }
    do.call(rbind, lapply(nodes, one_row))
}

### * .Rd_get_equations_from_Rd_db

.Rd_get_equations_from_Rd_db <-
function(x)
{
    ## 6-column matrix (file, tag, latex, ascii, beg, end) collecting
    ## all equations in the Rd db 'x'.
    if (!length(x)) return(matrix(character(), 0L, 6L))
    eqs <- lapply(x, .Rd_get_equations_from_Rd)
    files <- rep.int(names(eqs), vapply(eqs, nrow, 0L))
    ## deparse.level = 0 keeps the result free of column names.
    cbind(files, do.call(rbind, eqs), deparse.level = 0L)
}

### * .Rd_format_title

.Rd_format_title <-
function(x)
{
    ## Although R-exts says about the Rd title slot that
    ## <QUOTE>
    ##   This should be capitalized, not end in a period, and not use
    ##   any markup (which would cause problems for hypertext search).
    ## </QUOTE>
    ## some Rd files have LaTeX-style markup, including
    ## * LaTeX-style single and double quotation
    ## * Medium and punctuation dashes
    ## * Escaped ampersand.
    ## Hence we try getting rid of these, applying the replacement
    ## rules in order (each rule: pattern, replacement, fixed?).
    rules <- list(list("(``|'')", "\"", FALSE),
                  list("`", "'", TRUE),
                  list("([[:alnum:]])--([[:alnum:]])", "\\1-\\2", FALSE),
                  list("\\&", "&", TRUE),
                  list("---", "--", TRUE))
    for (r in rules)
        x <- gsub(r[[1L]], r[[2L]], x, fixed = r[[3L]])
    ## Also remove leading and trailing whitespace.
    trimws(x)
}

### * .Rd_topic_for_display

.Rd_topic_for_display <-
function(name, aliases)
{
    ## Prefer 'name' when it is itself one of the aliases; otherwise
    ## fall back to the first alias.
    if (match(name, aliases, nomatch = 0L) > 0L) name else aliases[1L]
}

### * fetchRdDB

fetchRdDB <-
function(filebase, key = NULL)
{
    ## Fetch parsed Rd objects from the lazy-load database 'filebase'.
    ## With a 'key', return that single Rd object (visibly); otherwise
    ## return the whole db as a named list, invisibly.
    worker <- function(db) {
        vals <- db$vals
        vars <- db$vars
        datafile <- db$datafile
        compressed <- db$compressed
        envhook <- db$envhook

        get_one <- function(k)
            lazyLoadDBfetch(vals[k][[1L]], datafile, compressed, envhook)

        if (!length(key)) {
            all <- lapply(vars, get_one)
            names(all) <- vars
            return(all)
        }
        if (!(key %in% vars))
            stop(gettextf("No help on %s found in RdDB %s",
                          sQuote(key), sQuote(filebase)),
                 domain = NA)
        get_one(key)
    }
    res <- lazyLoadDBexec(filebase, worker)
    if (length(key)) res else invisible(res)
}

### * loadRdMacros

## The macros argument can be TRUE, in which case a new environment is
## created with an empty parent, or the result of a previous call to this
## function, in which case it becomes the parent, or a filename, in
## which case that file is loaded first, then the new file into a child
## environment. 

## It is not safe to save this environment, as changes to the parser may
## invalidate its contents.

loadRdMacros <- function(file, macros = TRUE) {
    ## Load Rd macro definitions from 'file'.  'macros' can be TRUE (a
    ## new environment with an empty parent is created), the result of
    ## a previous call (which becomes the parent), or a filename (that
    ## file is loaded first, then 'file' into a child environment).
    ## Returns the "macros" environment attached to the parsed file.
    # New macros are loaded into a clean environment
    if (is.logical(macros) && !macros)
    	stop("'macros' must be TRUE or must specify existing macros")
    Rd <- parse_Rd(file, fragment = TRUE, macros = macros, warningCalls = FALSE)
    ## Sanity check: besides macro definitions, only comments and
    ## whitespace are allowed.  Initialize 'bad' before the loop so
    ## that an empty file does not leave it undefined (which would
    ## make the 'if (bad)' below fail with "object 'bad' not found").
    bad <- FALSE
    for(entry in Rd) {
        bad <- TRUE
	if (is.list(entry)) break
	tag <- attr(entry, "Rd_tag")
	switch(tag,
	    TEXT = if (any(grepl("[^[:space:]]", entry, perl = TRUE, useBytes=TRUE)))
		      break
		   else
		      bad <- FALSE,
	    USERMACRO =,
	    "\\newcommand" =,
	    "\\renewcommand" =,
	    COMMENT = bad <- FALSE,
	    break
	)
    }
    if (bad)
	warning(gettextf("Macro file %s should only contain Rd macro definitions and comments",
	                 file))
    attr(Rd, "macros")
}

### * initialRdMacros

initialRdMacros <- function(pkglist = NULL,
                            macros = file.path(R.home("share"), "Rd", "macros", "system.Rd")
                            ) {
    ## Build the initial Rd macro environment: from the packages named
    ## in the comma-separated 'pkglist' if given (warning about missing
    ## ones), otherwise from the 'macros' file (by default the system
    ## macros).
    if (!length(pkglist)) {
        if (is.character(macros))
            macros <- loadRdMacros(file = macros)
        return(macros)
    }
    pkgs <- trimws(unlist(strsplit(pkglist, ",")))
    for (pkg in pkgs) {
        fp <- system.file(package = pkg)
        if (fp == "")
            warning(gettextf("Rd macro package '%s' is not installed.",
                             pkg),
                    call. = FALSE)
        else if (dir.exists(file.path(fp, "help", "macros")))
            macros <- loadPkgRdMacros(fp, macros)
        else
            warning(gettextf("No Rd macros in package '%s'.", pkg),
                    call. = FALSE)
    }
    macros
}

### * loadPkgRdMacros

loadPkgRdMacros <- function(pkgdir, macros = NULL) {
    ## Load the Rd macros for the package at 'pkgdir': first those of
    ## the packages named in its RdMacros DESCRIPTION field, then its
    ## own macro files from man/macros (source) or help/macros
    ## (installed).
    pkglist <- .get_package_metadata(pkgdir)["RdMacros"]
    if (is.na(pkglist))
        pkglist <- NULL

    macros <- if (is.null(macros))
                  initialRdMacros(pkglist)
              else
                  initialRdMacros(pkglist, macros)

    macrodirs <- file.path(pkgdir, c("man", "help"), "macros")
    files <- unlist(lapply(macrodirs, list.files,
                           pattern = "\\.Rd$", full.names = TRUE))

    for (f in files)
        macros <- loadRdMacros(f, macros)

    macros
}

### * check_math_rendering_in_Rd_db

check_math_rendering_in_Rd_db <-
function(db, eq = NULL, katex = .make_KaTeX_checker()) {
    ## Check whether the \eqn/\deqn LaTeX in Rd db 'db' can be rendered
    ## via the 'katex' checker (which returns a list with an "error"
    ## component, empty on success).  Returns a character matrix with
    ## columns (path, pos, msg), one row per failing equation.
    if(is.null(eq))
        eq <- .Rd_get_equations_from_Rd_db(db)
    ## Now eq is a 6-column matrix with
    ##   file tag latex ascii beg end
    ## where tag is \eqn or \deqn.
    out <- matrix(character(), 0L, 3L)
    results <- lapply(eq[, 3L], katex)
    msg <- vapply(results, `[[`, "", "error")
    ind <- nzchar(msg)
    if(any(ind)) {
        msg <- msg[ind]
        msg <- sub("^KaTeX parse error: (.*) at position.*:",
                   "\\1 in",
                   msg)
        msg <- sub("^KaTeX parse error: ", "", msg)
        ## KaTeX uses
        ##   COMBINING LOW LINE  (U+0332)
        ##   HORIZONTAL ELLIPSIS (U+2026)
        ## for formatting parse errors.  These will not work in
        ## non-UTF-8 locales and not well in UTF-8 ones, so change as
        ## necessary ...
        msg <- gsub("\u2026", "...", msg)
        msg <- gsub("\u0332", "", msg)
        ## Build the position column: "" when the source reference is
        ## missing, ":beg" for one-line equations, ":beg-end" for
        ## ranges.
        l1 <- eq[ind, 5L]
        l2 <- eq[ind, 6L]
        tst <- (l1 == l2)
        pos <- is.na(tst)
        l1[pos] <- ""
        pos <- which(!pos)
        l1[pos] <- paste0(":", l1[pos])
        ## Keep indices relative to the full vectors here: the previous
        ## 'which(!tst[pos])' gave subset-relative indices, which
        ## mis-attached the "-end" suffix whenever NA positions
        ## preceded a multi-line equation.
        pos <- pos[!tst[pos]]
        l1[pos] <- paste0(l1[pos], "-", l2[pos])
        out <- cbind(eq[ind, 1L], l1, msg)
    }
    colnames(out) <- c("path", "pos", "msg")
    out
}

### * base_Rd_metadata_db

base_Rd_metadata_db <-
function(kind, verbose = TRUE, Ncpus = getOption("Ncpus", 1L)) 
{
    .package_apply(.get_standard_package_names()$base,
                   function(p) {
                       lapply(Rd_db(p, lib.loc = .Library),
                              .Rd_get_metadata, kind)
                   },
                   verbose = verbose, Ncpus = Ncpus)
}

### * base_aliases_db

base_aliases_db <-
function(verbose = FALSE, Ncpus = getOption("Ncpus", 1L))
{
    ## Alias metadata for all base packages.
    base_Rd_metadata_db("alias", verbose = verbose, Ncpus = Ncpus)
}
    
### * base_keyword_db

base_keyword_db <-
function(verbose = FALSE, Ncpus = getOption("Ncpus", 1L))
{
    ## Keyword metadata for all base packages.
    base_Rd_metadata_db("keyword", verbose = verbose, Ncpus = Ncpus)
}

### * base_rdxrefs_db

base_rdxrefs_db <-
function(verbose = FALSE, Ncpus = getOption("Ncpus", 1L))
{
    ## Rd cross-references for all base packages, with a "Source"
    ## column giving the Rd file each xref came from.
    one_package <- function(p) {
        xrefs <- lapply(Rd_db(p, lib.loc = .Library), .Rd_get_xrefs)
        counts <- vapply(xrefs, NROW, 0L)
        cbind(do.call(rbind, xrefs),
              Source = rep.int(names(xrefs), counts))
    }
    .package_apply(.get_standard_package_names()$base, one_package,
                   verbose = verbose, Ncpus = Ncpus)
}

### * .Rd_xrefs_with_missing_package_anchors

.Rd_xrefs_with_missing_package_anchors <-
function(dir, level = 1)
{
    ## Find the Rd xrefs with non-anchored targets not in the package
    ## itself or the installed packages with the given new-style levels
    ## (base: 1, recommended: 2, others: 3)
    ## Note that we use 'dir' as the path to package sources (and not
    ## the installed package), and hence use the package Rd db for both
    ## aliases and rdxrefs.
    ## Returns the unique bad (Target, Anchor, Source) rows, or NULL
    ## when there are none.

    db <- Rd_db(dir = dir)
    if(!length(db)) return()
    aliases <- lapply(db, .Rd_get_metadata, "alias")
    rdxrefs <- lapply(db, .Rd_get_xrefs)
    ## Add a "Source" column with the Rd file of each xref.
    rdxrefs <- cbind(do.call(rbind, rdxrefs),
                     Source = rep.int(names(rdxrefs),
                                      vapply(rdxrefs,
                                             NROW,
                                             0L)))
    ## "=target" anchors (from \link[=bar]{foo}) really name the target
    ## topic: move them into the Target column and clear the anchor.
    anchors <- rdxrefs[, "Anchor"]
    if(any(ind <- startsWith(anchors, "=")))
        rdxrefs[ind, 1L : 2L] <- cbind(sub("^=", "", anchors[ind]), "")
    ## Package-anchored xrefs are not checked here: keep only the
    ## non-anchored ones and compare their targets against the
    ## package's own aliases plus the installed-package HTML links.
    rdxrefs <- rdxrefs[!nzchar(rdxrefs[, "Anchor"]), , drop = FALSE]
    aliases <- c(unlist(aliases, use.names = FALSE),
                 names(findHTMLlinks(level = level)))
    if(any(ind <- is.na(match(rdxrefs[, "Target"], aliases))))
        unique(rdxrefs[ind, , drop = FALSE])
    else NULL
}

### * .Rd_metadata_db_to_data_frame

.Rd_metadata_db_to_data_frame <-
function(x, kind)
{
    ## Flatten a per-package list of per-Rd-object metadata vectors
    ## into a two-column data frame with columns 'kind' and "Source"
    ## (of the form "pkg::file").
    one_package <- function(meta, pkg) {
        src <- sprintf("%s::%s", pkg, names(meta))
        cbind(unlist(meta, use.names = FALSE),
              rep.int(src, lengths(meta)))
    }
    parts <- Map(one_package, x, names(x), USE.NAMES = FALSE)
    out <- as.data.frame(do.call(rbind, parts))
    names(out) <- c(kind, "Source")
    out
}
        
### * .Rd_aliases_db_to_data_frame

.Rd_aliases_db_to_data_frame <-
function(x)
{
    ## Aliases db as a data frame with columns "Alias" and "Source".
    .Rd_metadata_db_to_data_frame(x, kind = "Alias")
}

### * .Rd_keyword_db_to_data_frame

.Rd_keyword_db_to_data_frame <-
function(x)
{
    ## Keyword db as a data frame with columns "Keyword" and "Source".
    .Rd_metadata_db_to_data_frame(x, kind = "Keyword")
}

### * .Rd_rdxrefs_db_to_data_frame

.Rd_rdxrefs_db_to_data_frame <-
function(x)
{
    ## Bind the per-package xref matrices in 'x' into one data frame,
    ## prefixing each Source entry with "pkg::".
    qualify <- function(d, pkg) {
        d$Source <- sprintf("%s::%s", pkg, d$Source)
        d
    }
    dfs <- lapply(x, as.data.frame)
    do.call(rbind, Map(qualify, dfs, names(x), USE.NAMES = FALSE))
}

### Local variables: ***
### mode: outline-minor ***
### outline-regexp: "### [*]+" ***
### End: ***
#  File src/library/tools/R/Rd2ex.R
#  Part of the R package, https://www.R-project.org
#
#  Copyright (C) 1995-2024 The R Core Team
#
#  This program is free software; you can redistribute it and/or modify
#  it under the terms of the GNU General Public License as published by
#  the Free Software Foundation; either version 2 of the License, or
#  (at your option) any later version.
#
#  This program is distributed in the hope that it will be useful,
#  but WITHOUT ANY WARRANTY; without even the implied warranty of
#  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
#  GNU General Public License for more details.
#
#  A copy of the GNU General Public License is available at
#  https://www.R-project.org/Licenses/

## This warns on multiple \examples sections, never fails.

Rd2ex <-
    function(Rd, out="", defines=.Platform$OS.type, stages="render",
             outputEncoding="UTF-8", commentDontrun = TRUE, commentDonttest = FALSE, ...)
{
    ## Convert the \examples section of 'Rd' to an R script written to
    ## 'out' (a file name, "" for stdout, or a connection).  \dontrun
    ## chunks are commented out ("##D ") when 'commentDontrun',
    ## \donttest chunks when 'commentDonttest'.
    ## This warns on multiple \examples sections, never fails.
    ## Returns the output description, invisibly.

    ## writeLines() after converting from UTF-8 to 'outputEncoding',
    ## falling back to "<xx>" byte escapes for unrepresentable chars.
    WriteLines <- function(x, con, outputEncoding, ...) {
        if (outputEncoding != "UTF-8") {
            x <- iconv(x, "UTF-8", outputEncoding,  mark=FALSE)
            if (anyNA(x))
                x <- iconv(x, "UTF-8", outputEncoding, sub="byte", mark=FALSE)
        }
        writeLines(x, con, useBytes = TRUE, ...)
    }

    dropNewline <- FALSE # drop next char if newline

    ## of0()/of1(): write text to 'con' with no separator, honouring a
    ## pending 'dropNewline'.
    of0 <- function(...)
        of1(paste0(...))
    of1 <- function(text) {
        if (dropNewline && length(text)) {
            text[1L] <- psub("^\n", "", text[1L])
            dropNewline <<- FALSE
        }
        WriteLines(text, con, outputEncoding, sep = "")
    }
    ## Wrap a "### ..." header comment to at most 73 columns.
    wr <- function(x)
	paste0("###", strwrap(remap(x), 73L, indent=1L, exdent=3L), collapse="\n")

    ## Strip simple Rd markup for use in comments.
    remap <- function(x) {
        if(!length(x)) return(x)
        ## \link, \var are untouched in comments: e.g. is.R
        x <- psub("\\\\(link|var)\\{([^}]+)\\}", "\\2", x)
        ## not valid in perl: use lookbehind instead.
        ## x <- gsub("(^|[^\\])\\\\([%{])", "\\1\\2", x)
        x <- psub("(?<!\\\\)\\\\([%{])", "\\1", x)
        ## FIXME:  Previously said "Want to leave file bytes unchanged"
        x
    }

    ## Render one Rd node of the \examples section, commenting each
    ## line with 'prefix' (e.g. "##D " inside commented \dontrun).
    render <- function(x, prefix = "")
    {
        ## Emit a \dontrun/\donttest/... chunk bracketed by its labels
        ## ('txt': one name, or explicit begin/end markers), commented
        ## out when 'comment'.  'xtra1': render a single-line chunk
        ## inline without the bracketing labels.
	renderDont <- function(txt, comment, label=TRUE, xtra1=comment) {
            if (length(txt) == 1L) {
                label1 <- paste0(txt, ": ")
                label2 <- paste0("End(", txt, ")")
            } else {
                label1 <- txt[1L]
                label2 <- txt[2L]
            }
	    if (label)
		of0("## ", label1)
	    ## Special case for one line.
	    if (xtra1 && length(x) == 1L) {
		render(x[[1L]], prefix)
	    } else {
		if (!grepl("^\n", x[[1L]][1L], perl = TRUE) &&
		    RdTags(x)[1L] != "COMMENT") {
		    writeLines("", con)
		    render(x[[1L]], paste0(if (comment) "##D ", prefix))
		} else render(x[[1L]], prefix)
		for(i in seq_along(x)[-1]) ## `` i in 2:length(x) ''
		    render(x[[i]], paste0(if (comment) "##D ", prefix))
		last <- x[[length(x)]]
		if (!grepl("\n$", last[length(last)], perl = TRUE))
		    writeLines("", con)
		if (label) {
		    of0("## ", label2, "\n")
		    dropNewline <<- TRUE
		}
	    }
	}
	tag <- attr(x, "Rd_tag")
	if(tag %in% c("\\dontshow", "\\testonly")) {
	    renderDont("Don't show", comment=FALSE)
	} else if (tag == "\\dontdiff") {
	    renderDont(c("IGNORE_RDIFF_BEGIN", "IGNORE_RDIFF_END"), comment=FALSE)
	} else if (tag == "\\dontrun") {
	    renderDont("Not run", commentDontrun, label=commentDontrun)
	} else if (tag == "\\donttest") {
	    renderDont("No test", commentDonttest, xtra1=FALSE)
	} else if (tag == "COMMENT") {
            ## % can escape a whole line (e.g. beavers.Rd) or
            ## be trailing when we want a NL
            ## This is not right (leading spaces?) but it may do
            if(attr(x, "srcref")[2L] == 1L) dropNewline <<- TRUE
        } else if (tag %in% c("\\dots", "\\ldots")) {
            of1("...")
        } else if (tag == "\\if" || tag == "\\ifelse") {
            ## Render the matching branch of \if/\ifelse only.
            if (testRdConditional("example", x, Rdfile))
            	for(i in seq_along(x[[2L]])) render(x[[2L]][[i]], prefix)
            else if (tag == "\\ifelse")
            	for(i in seq_along(x[[3L]])) render(x[[3L]][[i]], prefix)
        } else if (tag == "\\out") {
            for (i in seq_along(x))
            	of1(x[[i]])
        } else if (tag %in% c("USERMACRO", "\\newcommand", "\\renewcommand")) {
            # do nothing
        } else {
            txt <- unlist(x)
            of0(prefix, remap(txt))
        }
    }

    Rd <- prepare_Rd(Rd, defines=defines, stages=stages, ...)
    Rdfile <- attr(Rd, "Rdfile")
    sections <- RdTags(Rd)

    ## FIXME should we skip empty \examples sections?
    where <- which(sections == "\\examples")
    if(length(where)) {
        ## Resolve 'out' into the output connection 'con'.
	if (is.character(out)) {
	    if(out == "") {
		con <- stdout()
	    } else {
		con <- file(out, "wt")
		on.exit(close(con))
	    }
        } else {
            con <- out
            out <- summary(con)$description
        }

        if(length(where) > 1L)
            warning("more than one \\examples section, using the first")
        ex <- Rd[[ where[1L] ]]
        exl <- unlist(ex)
        ## Do we need to output an encoding?
        if(length(exl) && any(Encoding(exl) != "unknown")) {
            if(any(f <- sections == "\\encoding")) {
                encoding <- unlist(Rd[which(f)])[1L]
                ## FIXME: which should win here?
                if(nzchar(outputEncoding))
                    encoding <- outputEncoding
                else
                    outputEncoding <- encoding
                of0("### Encoding: ", encoding, "\n") #
            }
        }
        ## Header comments: name, title, aliases and keywords.
        nameblk <- sections == "\\name"
        if (any(nameblk)) {
            ## perl wrapped here, but it seems unnecessary
            name <- as.character(Rd[[ which.max(nameblk) ]])
            of0("### Name: ", name, "\n")
        }
        title <- .Rd_format_title(.Rd_get_title(Rd))
        if (!length(title))
            title <- "No title found"
        of0(wr(paste0("Title: ", title)), "\n")
        aliasblks <- sections == "\\alias"
        if (any(aliasblks)) {
            aliases <- unlist(Rd[aliasblks])
            ## Quote aliases containing spaces.
            sp <- grep(" ", aliases, fixed = TRUE)
            aliases[sp] <- paste0("'", aliases[sp], "'")
            of0(wr(paste0("Aliases: ", paste(aliases, collapse=" "))),
                "\n")
        }
        keyblks <- sections == "\\keyword"
        if (any(keyblks)) {
            ## some people have only empty keyword blocks.
            keys <- trimws(unlist(Rd[keyblks])) %w/o% .Rd_keywords_auto
            if(length(keys)) {
                of0(wr(paste("Keywords: ",
                             paste0(keys, collapse=" "))), "\n")
            }
        }
        writeLines(c("", "### ** Examples"), con)
        for (i in seq_along(ex)) render(ex[[i]])
        of1("\n\n\n")
    }
    invisible(out)
}
#  File src/library/tools/R/Rd2HTML.R
#
#  Copyright (C) 1995-2024 The R Core Team
#  Part of the R package, https://www.R-project.org
#
#  This program is free software; you can redistribute it and/or modify
#  it under the terms of the GNU General Public License as published by
#  the Free Software Foundation; either version 2 of the License, or
#  (at your option) any later version.
#
#  This program is distributed in the hope that it will be useful,
#  but WITHOUT ANY WARRANTY; without even the implied warranty of
#  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
#  GNU General Public License for more details.
#
#  A copy of the GNU General Public License is available at
#  https://www.R-project.org/Licenses/

## also used by Rd2latex, but only 'topic' and 'dest'
## also used by Rd2latex, but only 'topic' and 'dest'
get_link <- function(arg, tag, Rdfile) {
    ## Decompose a \link or \linkS4class into its components:
    ##   'topic' -- the text to display (used by Rd2latex, also as
    ##              \index entry),
    ##   'dest'  -- the topic to link to (unless for option [pkg:bar]),
    ##   'pkg'/'targetfile' -- non-NULL for package-anchored links,
    ##     where 'targetfile' is the topic/file to link to in HTML help.
    ## Supported forms:
    ##   \link{foo}          show foo, link to topic foo
    ##   \link[=bar]{foo}    show foo, link to *topic* bar
    ##   \link[pkg]{bar}     show bar, link to topic/file bar in pkg
    ##   \link[pkg:bar]{foo} show foo, link to topic/file bar in pkg
    ## As from 2.10.0, look for topic 'bar' if file not found; as from
    ## 4.1.0, prefer topic 'bar' over file 'bar' ('targetfile' is then
    ## a misnomer).  As from 4.5.0, markup is allowed in the link text
    ## for the "=" and "pkg:" forms.

    textOnly <- all(RdTags(arg) == "TEXT")
    shown <- paste(unlist(arg), collapse = "")
    target <- shown
    pkg <- NULL
    targetfile <- NULL
    option <- attr(arg, "Rd_option")

    if (is.null(option)) {
        if (!textOnly)
            stopRd(arg, Rdfile, "Bad \\link topic -- must be text")
    } else {
        if (!identical(attr(option, "Rd_tag"), "TEXT"))
    	    stopRd(option, Rdfile, "Bad \\link option -- must be text")
        option <- as.character(option)
        if (startsWith(option, "=")) {
    	    target <- psub1("^=", "", option)
        } else if (grepl(":", option, fixed = TRUE)) {
    	    targetfile <- psub1("^[^:]*:", "", option)
    	    pkg <- psub1(":.*", "", option)
        } else {
            if (!textOnly)
                stopRd(arg, Rdfile, "Bad \\link[pkg]{topic} -- argument must be text")
            targetfile <- target
            pkg <- option
        }
    }

    if (tag == "\\linkS4class")
        target <- paste0(target, "-class")
    list(topic = shown, dest = target, pkg = pkg, targetfile = targetfile)
}

## translation of Utils.pm function of the same name, plus "unknown"
## translation of Utils.pm function of the same name, plus "unknown"
mime_canonical_encoding <- function(encoding)
{
    ## Map encoding names to their canonical MIME form; ""/"unknown"
    ## are taken from the locale.
    encoding[encoding %in% c("", "unknown")] <-
        utils::localeToCharset()[1L]
    encoding <- tolower(encoding)
    encoding <- sub("iso_8859-([0-9]+)", "iso-8859-\\1", encoding)
    encoding <- sub("iso8859-([0-9]+)", "iso-8859-\\1", encoding)
    ## Table of common aliases and their canonical names.
    aliases <- c(latin1 = "iso-8859-1",
                 latin2 = "iso-8859-2",
                 latin3 = "iso-8859-3",
                 latin4 = "iso-8859-4",
                 cyrillic = "iso-8859-5",
                 arabic = "iso-8859-6",
                 greek = "iso-8859-7",
                 hebrew = "iso-8859-8",
                 latin5 = "iso-8859-9",
                 latin6 = "iso-8859-10",
                 latin8 = "iso-8859-14",
                 "latin-9" = "iso-8859-15",
                 latin10 = "iso-8859-16",
                 utf8 = "utf-8",
                 ascii = "us-ascii") # "us-ascii" from W3C validator
    hit <- match(encoding, names(aliases))
    encoding[!is.na(hit)] <- aliases[hit[!is.na(hit)]]
    encoding
}

htmlify <- function(x) {
    ## Escape plain text for HTML output, with typographic replacements
    ## for TeX-style dashes and quotes.  The rules are applied in
    ## order, which matters: "&" must come first, "---" before "--",
    ## the paired quotes before the single-backtick fallback, and the
    ## '"\\{"' special case before the plain '"' escape.
    rules <- list(list(fsub, "&", "&amp;"),
                  list(fsub, "---", "&mdash;"),
                  list(fsub, "--", "&ndash;"),
                  list(fsub, "``", "&ldquo;"),
                  list(fsub, "''", "&rdquo;"),
                  list(psub, "`([^']+)'", "&lsquo;\\1&rsquo;"),
                  list(fsub, "`", "'"),
                  list(fsub, "<", "&lt;"),
                  list(fsub, ">", "&gt;"),
                  list(fsub, '"\\{"', '"{"'),
                  list(fsub, '"', '&quot;'))
    for (r in rules)
        x <- r[[1L]](r[[2L]], r[[3L]], x)
    x
}

vhtmlify <- function(x, inEqn = FALSE) { # code version
    ## HTML-escape verbatim/code text: only &, <, > and the '"\\{"'
    ## special case -- unlike htmlify(), no typographic replacements.
    x <- fsub("&", "&amp;", x)
    x <- fsub("<", "&lt;", x)
    x <- fsub(">", "&gt;", x)
    x <- fsub('"\\{"', '"{"', x)
    ## http://htmlhelp.com/reference/html40/entities/symbols.html
    ## Inside \eqn/\deqn, additionally map TeX-style symbol commands to
    ## HTML entities.
    if(inEqn) {
        x <- psub("\\\\(Alpha|Beta|Gamma|Delta|Epsilon|Zeta|Eta|Theta|Iota|Kappa|Lambda|Mu|Nu|Xi|Omicron|Pi|Rho|Sigma|Tau|Upsilon|Phi|Chi|Psi|Omega|alpha|beta|gamma|delta|epsilon|zeta|eta|theta|iota|kappa|lambda|mu|nu|xi|omicron|pi|rho|sigma|tau|upsilon|phi|chi|psi|omega|le|ge|sum|prod)", "&\\1;", x)
        x <- psub("\\\\(dots|ldots)", "&hellip;", x)
        x <- fsub("\\infty", "&infin;", x)
        x <- fsub("\\sqrt", "&radic;", x)
    }
    x
}

shtmlify <- function(s) {
    ## Minimal HTML escaping: just &, < and > (in that order, so that
    ## "&" is escaped before entities are introduced).
    for (r in list(c("&", "&amp;"),
                   c("<", "&lt;"),
                   c(">", "&gt;")))
        s <- fsub(r[[1L]], r[[2L]], s)
    s
}

## URL encode for use in href attributes.
## URL encode for use in href attributes.
urlify <- function(x, reserved = FALSE, repeated = FALSE)
{
    ## Percent-encode 'x' for use in href attributes, like
    ##   utils::URLencode(x, reserved)
    ## with '&' replaced by '&amp;' where appropriate.  'reserved' may
    ## be
    ##   FALSE -- leave the RFC 3986 reserved characters alone (and
    ##            escape "&" as "&amp;"),
    ##   TRUE  -- percent-encode the reserved characters as well,
    ##   a string -- the reserved characters to percent-encode, or,
    ##            when starting with "^", those NOT to percent-encode.
    ## Unless 'repeated', input already containing %XX escapes is
    ## assumed encoded and only gets "&" escaped.
    ##
    ## RFC 3986 gen-delims + sub-delims, arranged for use in (negated)
    ## regexp character classes; see also
    ## <https://url.spec.whatwg.org/#valid-url-string>.
    delims <- "][!$&'()*+,;=:/?@#"

    if (!repeated && grepl("%[[:xdigit:]]{2}", x, useBytes = TRUE))
        return(gsub("&", "&amp;", x, fixed = TRUE))

    ## Percent-encode one character (possibly several bytes).
    pct <- function(ch)
        paste0("%", toupper(as.character(charToRaw(ch))), collapse = "")
    singles <- unlist(strsplit(x, ""))
    encoded <- vapply(singles, pct, "")

    ## 'keep': the characters exempt from percent-encoding (beyond the
    ## always-safe unreserved set); 'escAmp': escape "&" afterwards?
    if (is.character(reserved)) {
        spec <- paste(reserved, collapse = "")
        keep <- if (startsWith(spec, "^"))
                    substring(spec, 2L)
                else
                    rawToChar(setdiff(charToRaw(delims),
                                      charToRaw(spec)))
        escAmp <- any(charToRaw(keep) == charToRaw("&"))
    } else if (!reserved) {
        keep <- delims
        escAmp <- TRUE
    } else {
        keep <- ""
        escAmp <- FALSE
    }

    unsafe <- paste0("[^",
                     keep,
                     "ABCDEFGHIJKLMNOPQRSTUVWXYZ",
                     "abcdefghijklmnopqrstuvwxyz0123456789._~-",
                     "]")
    out <- paste(ifelse(grepl(unsafe, singles), encoded, singles),
                 collapse = "")
    if (escAmp)
        out <- gsub("&", "&amp;", out, fixed = TRUE)
    out
}

urlify_email_address <- function(x)
{
    ## As per RFC 6068
    ## <https://www.rfc-editor.org/rfc/rfc6068#section-2> we must
    ## percent encode "%", the gen-delims "/?#[]" and the sub-delims
    ## "&;=".
    urlify(x, reserved = "][#?/&;=%", repeated = TRUE)
}

urlify_doi <- function(x)
{
    ## According to
    ##   <https://www.doi.org/doi_handbook/2_Numbering.html#2.2>
    ## a DOI name can "incorporate any printable characters from the
    ## legal graphic characters of Unicode"; the encoding section of
    ## the handbook is a bit vague.  For now, percent encode all
    ## reserved characters but the slash.
    urlify(x, reserved = "^/", repeated = TRUE)
}

## Proper HTML URIs must have ampersands written as entities.
escapeAmpersand <- function(x) {
    gsub("&", "&amp;", x, fixed = TRUE)
}

## Code points that are not valid in HTML text: the C0 controls other
## than tab (U+0009), LF (U+000A) and CR (U+000D), plus DEL (U+007F)
## and the C1 control range (U+0080-U+009F).
invalid_HTML_chars_re <-
    "[\u0001-\u0008\u000b\u000c\u000e-\u001f\u007f-\u009f]"

## Topics can contain characters such as % / & !, so they need to be
## encoded.  URLs and filenames may use different encodings (although
## they are mostly the same for now).

## '&' -> '%26' is OK; '&' -> '&amp;' is NOT OK with dynamic help.
topic2url <- function(x)
{
    useURLencode <-
        config_val_to_logical(Sys.getenv("_R_HELP_USE_URLENCODE_",
                                         "FALSE"))
    if (useURLencode)
        utils::URLencode(x, reserved = TRUE)
    else
        ## vapply() vectorizes urlify() (needed by toHTML.R)
        vapply(x, urlify, "", reserved = TRUE)
}
topic2filename <- function(x) {
    ## Percent-encode everything reserved, then make '%' itself
    ## filename-safe by translating it to '+'.
    chartr("%", "+", utils::URLencode(x, reserved = TRUE))
}
## The next few helpers generate URL fragment ids.
string2id <- function(x) {
    ## Same encoding as topic2filename(): percent-encode, then map
    ## '%' to '+' so the result is a valid HTML id.
    chartr("%", "+", utils::URLencode(x, reserved = TRUE))
}
name2id <- function(x) string2id(x)
topic2id <- function(x) sprintf("topic+%s", string2id(x))
topic2href <- function(x, destpkg = NULL, hooks = list())
{
    ## No destination package: link within the current page.
    if (is.null(destpkg))
        return(sprintf("#%s", topic2id(x)))
    pkg_href <- hooks$pkg_href
    if (is.null(pkg_href))
        pkg_href <- function(pkg) sprintf("%s.html", pkg)
    target <- pkg_href(destpkg)
    ## A zero-length result or the special '#' from the hook means
    ## "do not link", e.g. for unavailable packages when building
    ## package HTML refmans.
    if (!length(target) || (target == "#"))
        "#"
    else
        sprintf("%s#%s", target, topic2id(x))
}

## We want to give an id to each top-level section tags, arguments,
## and _maybe_ other \item{} objects (eventually). The following
## function tries to implement a rule to construct such an id given
## relevant information.

## Ideally, the id-s should be predictable so we could reference them
## from other help pages using some standard markup, but this may or
## may not be possible. The standard sections should be unique, so can
## be standardized, but arbitrary \section{}-s and \item{}-s inside
## sections other than \arguments{}-s are potentially problematic. In
## addition, the HTML may be used in standalone help pages or in a
## combined per-package refman; in the second case, id-s must be
## distinguished for different pages. For this, we will use the
## \name{} of a page, which must uniquely define every help page
## within a package.

## The current rule is as follows:
## - If name != NULL, it will be used as a prefix
## - Standard sections will get standardized ids
## - remaining sections will require a string to be supplied

## Note that tagid can be a vector (for comma-separated items)

## Construct the HTML id for a top-level Rd section (or an explicitly
## supplied tagid, e.g. for \item-s inside \arguments).  NB: the
## default for 'tagid' refers to 'section2id', which is assigned
## inside the body below; this works because R evaluates default
## arguments lazily, after 'section2id' has been bound.
tag2id <- function(tag, name = NULL, tagid = section2id[tag])
{
    ## Fixed ids for the standard sections.
    section2id <- 
        c("\\description" = "_sec_description", "\\usage"    = "_sec_usage",
          "\\arguments"   = "_sec_arguments",   "\\format"   = "_sec_format",
          "\\details"     = "_sec_details",     "\\note"     = "_sec_note",
          "\\section"     = "_sec_section",     "\\author"   = "_sec_author",
          "\\references"  = "_sec_references",  "\\source"   = "_sec_source",
          "\\seealso"     = "_sec_seealso",     "\\examples" = "_sec_examples",
          "\\value"       = "_sec_value")
    ## Non-standard tags with no explicit tagid look up as NA: give up.
    if (anyNA(tagid)) return(NULL) # or "" ?
    ## Prefix with the page \name{} (if given) so ids stay unique in a
    ## combined per-package refman; "_:_" separates name and tag parts.
    id <- if (is.null(name)) tagid
          else paste(name2id(name), tagid, sep = "_:_")
    ## Whitespace is not valid in ids; collapse runs to a single "-".
    string2id(gsub("[[:space:]]+", "-", id))
}

## Render an Rd fragment to a single HTML or plain-text string.
rdfragment2text <- function(rd, html = TRUE)
{
    if (!html) {
        txt <- utils::capture.output(Rd2txt(rd, fragment = TRUE))
        return(trimws(paste(txt, collapse = "\n")))
    }
    ## utils::capture.output(Rd2HTML(rd, fragment = TRUE)) can leave
    ## <p> tags unclosed; close them the same way that
    ## tools:::.extract_news_from_Rd does.
    lines <- trimws(utils::capture.output(Rd2HTML(rd, fragment = TRUE)))
    open <- which(startsWith(lines, "<p>") & !endsWith(lines, "</p>"))
    if (length(open)) {
        cand <- lines[open]
        ## An odd number of <p>/</p> tags on the line means one is
        ## still open at the end.
        unbalanced <- (lengths(gregexpr("</?p>", cand)) %% 2L) > 0L
        lines[open[unbalanced]] <- paste0(cand[unbalanced], "</p>")
    }
    paste(lines, collapse = "\n")
}


## Create HTTP redirect files for aliases; called only during package
## installation if static help files are enabled. Files are named
## after aliases, which may contain 'undesirable' characters. These
## are escaped using topic2filename(). Analogous escaping needs to be
## done when creating links in HTML output as well, but ONLY for
## static HTML (dynamic help is already capable of handling such
## links)
createRedirects <- function(file, Rdobj)
{
    ## Redirects only make sense when links target topics; honour the
    ## same environment variable as the link-generation code.
    linksToTopics <-
        config_val_to_logical(Sys.getenv("_R_HELP_LINKS_TO_TOPICS_", "TRUE"))
    if (!linksToTopics) return(invisible()) # do nothing
    ## create a HTTP redirect for each 'alias' in .../pkg/help/
    ## (a minimal HTML page using a <meta http-equiv='refresh'> jump
    ## to the real page under .../pkg/html/)
    redirHTML <-
        sprintf("<!DOCTYPE html>\n<html><head><meta http-equiv='refresh' content='0; url=../html/%s'><title>HTTP redirect</title></head><body></body></html>\n",
                urlify(basename(file), reserved = TRUE))
    toProcess <- which(RdTags(Rdobj) == "\\alias")
    helpdir <- paste0(dirname(dirname(file)), "/help") # .../pkg/help/
    ## Helpers mapping an \alias index to its name / redirect filename.
    aliasName <- function(i) trimws(Rdobj[[i]][[1]])
    aliasFile <- function(i) file.path(helpdir, sprintf("%s.html", topic2filename(aliasName(i))))
    redirMsg <- function(type, src, dest, status) {
        ## change sprintf to gettextf to make translatable, but seems unnecessary
        msg <- sprintf("\nREDIRECT:%s\t %s -> %s [ %s ]", type, src, dest, status)
        message(msg, appendLF = FALSE)
    }
    ## remove duplicate aliases, if any
    aliasesToProcess <- vapply(toProcess, aliasName, "")
    toProcess <- toProcess[!duplicated(aliasesToProcess)]
    for (i in toProcess) {
        aname <- aliasName(i)
        afile <- aliasFile(i)
        ## Report (but still overwrite) clashes with earlier aliases or
        ## pre-existing files.
        if (file.exists(afile)) {
            ## warning("Previous alias or file overwritten by alias: ", aname)
            msg <- sprintf("\nREDIRECT:topic\t Previous alias or file overwritten by alias: %s",
                           afile)
            message(msg, appendLF = FALSE)
        }
        ## Best-effort write: some aliases produce invalid filenames.
        try(suppressWarnings(cat(redirHTML, file = afile)), silent = TRUE) # Fails for \alias{%/%}
        ## redirMsg("topic", aname, basename(file), if (file.exists(afile)) "SUCCESS" else "FAILURE")
        if (!file.exists(afile)) redirMsg("topic", aname, basename(file), "FAILURE")
    }
    ## Also add .../pkg/help/file.html -> ../pkg/html/file.html as fallback
    ## when topic is not found (but do not overwrite)
    file.fallback <- file.path(helpdir, basename(file))
    if (!file.exists(file.fallback)) {
        try(cat(redirHTML, file = file.fallback), silent = TRUE)
        ## redirMsg("file", basename(file), basename(file), if (file.exists(file.fallback)) "SUCCESS" else "FAILURE")
        if (!file.exists(file.fallback)) redirMsg("file", basename(file), basename(file),  "FAILURE")
    }
}


## This gets used two ways:

## 1) With dynamic = TRUE from tools:::httpd()
##    Here generated links are of the forms
##    ../../pkg/help/topic
##    file.html
##    ../../pkg/html/file.html
##    and links are never missing: topics are always linked as
##    ../../pkg/help/topic for the current packages, and this means
##    'search this package then all the others, and show all matches
##    if we need to go outside this package'

## 2) With dynamic = FALSE from .convertRdfiles (with Links[2], used for
##    prebuilt HTML pages) and .Rdconv (no link lookup)
##    Here generated links are of the forms
##    file.html
##    ../../pkg/html/file.html
##    and missing links (those without an explicit package, and
##    those topics not in Links[2]) don't get linked anywhere.

## There is a third use (from R 4.4.0), which is to generate
## single-page HTML refmans for an entire package via pkg2HTML(),
## which calls Rd2HTML(standalone = FALSE) for each help page.


Rd2HTML <-
    function(Rd, out = "", package = "", defines = .Platform$OS.type,
             Links = NULL, Links2 = NULL,
             stages = "render", outputEncoding = "UTF-8",
             dynamic = FALSE, no_links = FALSE, fragment = FALSE,
             stylesheet = if (dynamic) "/doc/html/R.css" else "R.css",
             texmath = getOption("help.htmlmath"),
             concordance = FALSE,
             standalone = TRUE,
             hooks = list(),
             toc = isTRUE(getOption("help.htmltoc")),
             Rhtml = FALSE, # TODO: guess from 'out' if non-missing
             ...)
{
    ## Is this package help, as opposed to from Rdconv or similar?
    ## Used to decide whether redirect files should be created when
    ## generating static HTML
    package_help <- inherits(Rd, "Rd") && (length(package) == 2L)
    if (missing(no_links) && is.null(Links) && !dynamic) no_links <- TRUE
    linksToTopics <-
        config_val_to_logical(Sys.getenv("_R_HELP_LINKS_TO_TOPICS_", "TRUE"))
    enhancedHTML <-
        config_val_to_logical(Sys.getenv("_R_HELP_ENABLE_ENHANCED_HTML_", "TRUE"))
    if (!no_links && !linksToTopics && !standalone) {
        warning("links not supported for 'standalone = FALSE' when _R_HELP_LINKS_TO_TOPICS_=false")
        no_links <- TRUE
    }
    version <- ""
    if(!identical(package, "")) {
        if(length(package) > 1L) {
            version <- package[2L]
            package <- package[1L]
        } else {
            dir <- dirname(package)
            if(nzchar(dir) &&
               file_test("-f", dfile <- file.path(package, "DESCRIPTION"))) {
                version <- .read_description(dfile)["Version"]
                package <- basename(package)
            } else {
                ## Should we really do this?
                ## Used when Rdconv is given a package argument.
                version <- utils::packageDescription(package,
                                                     fields = "Version")
            }
        }
        if(is.na(version)) version <- ""
    }

    ## writeLines by default re-encodes strings to the local encoding.
    ## Avoid that by useBytes=TRUE
    writeLinesUTF8 <-
        if (outputEncoding == "UTF-8" ||
           (outputEncoding == "" && l10n_info()[["UTF-8"]])) {
        function(x, con, outputEncoding, ...)
            writeLines(x, con, useBytes = TRUE, ...)
    } else {
        function(x, con, outputEncoding, ...) {
            x <- iconv(x, "UTF-8", outputEncoding, sub="byte", mark=FALSE)
            writeLines(x, con, useBytes = TRUE, ...)
        }
    }
    
    if (concordance)
    	conc <- activeConcordance()
    else
    	conc <- NULL
    if (toc) { # not meaningful unless standalone = TRUE
        if (!standalone) toc <- FALSE
        else toc_entries <- list()
    }

    skipNewline <- FALSE
    of0 <- function(...)
        of1(paste0(...))
    of1 <- function(text) {
        if (skipNewline) {
            skipNewline <<- FALSE
            if (text == "\n") return()
        }
    	if (concordance)
    	    conc$addToConcordance(text)
        writeLinesUTF8(text, con, outputEncoding, sep = "")
    }

    pendingClose <- pendingOpen <- character()  # Used for infix methods

    inEqn <- FALSE		# Should we do edits needed in an eqn?
    sectionLevel <- 0L		# How deeply nested within section/subsection
    inPara <- FALSE		# Are we in a <p> paragraph? If NA, we're not, but we're not allowed to be
    inAsIs <- FALSE             # Should we show characters "as is"?

### These correspond to HTML wrappers
    HTMLTags <- c("\\bold"="b",
    	          "\\cite"="cite",
                  "\\code"="code",
                  "\\command"="code",
                  "\\dfn"="dfn",
                  "\\emph"="em",
                  "\\kbd"="kbd",
                  "\\preformatted"="pre",
#                  "\\special"="pre",
                  "\\strong"="strong",
                  "\\var"="var")
    # These have simple substitutions
    HTMLEscapes <- c("\\R"='<span class="rlang"><b>R</b></span>',
    		     "\\cr"="<br>",
    		     "\\dots"="...",
    		     "\\ldots"="...")
    ## These correspond to idiosyncratic wrappers
    HTMLLeft <- c("\\abbr"='<abbr>',
                  "\\acronym"='<abbr><span class="acronym">',
    		  "\\donttest"="", "\\dontdiff"="",
    		  "\\env"='<span class="env">',
                  "\\file"='&lsquo;<span class="file">',
                  "\\option"='<span class="option">',
                  "\\pkg"='<span class="pkg">',
                  "\\samp"='&lsquo;<span class="samp">&#8288;',
                  "\\sQuote"="&lsquo;",
                  "\\dQuote"="&ldquo;",
                  "\\verb"='<code style="white-space: pre;">&#8288;')
    HTMLRight <- c("\\abbr"="</abbr>",
                   "\\acronym"='</span></abbr>',
    		   "\\donttest"="", "\\dontdiff"="",
    		   "\\env"="</span>",
                   "\\file"='</span>&rsquo;',
                   "\\option"="</span>",
                   "\\pkg"="</span>",
                   "\\samp"="&#8288;</span>&rsquo;",
                   "\\sQuote"="&rsquo;",
                   "\\dQuote"="&rdquo;",
                   "\\verb"="&#8288;</code>")

    addParaBreaks <- function(x) {
	if (isBlankLineRd(x) && isTRUE(inPara)) {
	    inPara <<- FALSE
	    return("</p>\n")
	}
	## TODO: can we get 'start col' if no srcref ?
	if (utils:::getSrcByte(x) == 1L) x <- psub("^\\s+", "", x)
	if (isFALSE(inPara) && !all(grepl("^[[:blank:]\n]*$", x, perl = TRUE))) {
	    x <- paste0("<p>", x)
	    inPara <<- TRUE
	}
        x
    }

    enterPara <- function(enter = TRUE) {
	if (enter && isFALSE(inPara)) {
            of1("<p>")
            inPara <<- TRUE
        }
    }

    leavePara <- function(newval) {
    	if (isTRUE(inPara)) of1("</p>\n")
    	inPara <<- newval
    }

    writeItemAsCode <- function(blocktag, block, addID = blocktag == "\\arguments") {
        ## Argh.  Quite a few packages put the items in their value
        ## section inside \code.
        for(i in which(RdTags(block) == "\\code"))
            attr(block[[i]], "Rd_tag") <- "Rd"

        ## Usually RdTags(block)[1L] == "TEXT", except when it is
        ## \\dots, \\ldots, etc. Potentially more complicated in cases
        ## like \item{foo, \dots, bar}, where block will have length >
        ## 1. We want to (a) split on comma and treat each part as a
        ## separate argument / value, and (b) for blocktag==arguments
        ## only, add an id tag so that we can have internal links.

        ## We do this by 'deparsing' block[[1L]], using as.character.Rd()
        s <- as.character.Rd(block)
        toEsc <- s %in% names(HTMLEscapes)
        if (any(toEsc)) s[toEsc] <- HTMLEscapes[s[toEsc]]

        ## Now just join, split on comma, wrap individually inside
        ## </code>, and unsplit. This will be problematic if any
        ## TeX-like macros remain, but that should not happen in
        ## practice for \item-s inside \arguments or \value.

        s <- trimws(strsplit(paste(s, collapse = ""), ",", fixed = TRUE)[[1]])
        s <- s[nzchar(s)] # unlikely to matter, but just to be safe
        item_value <- vhtmlify(s)
        s <- if (addID) {
                 item_id <- tag2id(name = if (standalone) NULL else name, tagid = s)
                 if (toc)
                     toc_entries <<-
                         c(toc_entries,
                           list(argitem =
                                    list(id = item_id,
                                         value = sprintf("<code>%s</code>",
                                                         item_value))))
                 sprintf('<code id="%s">%s</code>', item_id, item_value)
             }
             else sprintf('<code>%s</code>', item_value)
        s <- paste0(s, collapse = ", ")
        of1(s)
    }

    writeWrapped <- function(tag, block, doParas) {
    	if (!doParas || HTMLTags[tag] == "pre")
            leavePara(NA)
        else
            enterPara()
        saveAsIs <- inAsIs
        asis <- !is.na(match(tag, "\\command"))
        if(asis) inAsIs <<- TRUE
        if (!isBlankRd(block)) {
    	    of0("<", HTMLTags[tag], ">")
    	    writeContent(block, tag)
    	    of0("</",  HTMLTags[tag], ">")
    	}
        if(HTMLTags[tag] == "pre")
            inPara <<- FALSE
        if(asis) inAsIs <<- saveAsIs
    }

    writeLink <- function(tag, block, doParas) {
	parts <- get_link(block, tag, Rdfile)

        writeHref <- function() {
            enterPara(doParas)
            savePara <- inPara
            inPara <<- NA
            if (!no_links) of0('<a href="', htmlfile, '">')
            writeContent(block, tag)
            if (!no_links) of1('</a>')
            inPara <<- savePara
        }
    	if (is.null(parts$targetfile)) {
            ## ---------------- \link{topic} and \link[=topic]{foo}
            topic <- parts$dest
    	    if (dynamic) { # never called with package=""
                htmlfile <- paste0("../../", urlify(package), "/help/", topic2url(topic))
                writeHref()
                return()
            }
            else if (linksToTopics && !is.null(Links) && !is.na(Links[topic]) &&
                     startsWith(Links[topic], paste0("../../", urlify(package)))) {
                ## only if the topic exists in the package (else look
                ## harder below). 'Links' contains all topics in the
                ## package, but also those in base+recommended
                ## packages. We do this branch only if this is a
                ## within-package link
                htmlfile <-
                    if (standalone)
                        paste0("../../", urlify(package), "/help/", topic2filename(topic), ".html")
                    else
                        topic2href(topic) # htmlfile is actually a link target within current file
                writeHref()
                return()

            } else { # identify actual file containing topic
            	htmlfile  <- NA_character_
            	if (!is.null(Links)) {
            	    tmp <- Links[topic]
            	    if (!is.na(tmp)) htmlfile <- tmp
                    else {
                        tmp <- Links2[topic]
                        if (!is.na(tmp)) htmlfile <- tmp
                    }
            	}
            }
            if (is.na(htmlfile)) {
                ## Used to use the search engine, but we no longer have one,
                ## and we don't get here for dynamic help.
                if (!no_links)
                    warnRd(block, Rdfile, "missing link ", sQuote(topic))
                writeContent(block, tag)
            } else {
                if (!standalone) {
                    htmlfile <- topic2href(topic,
                                           destpkg = strsplit(htmlfile, "/", fixed = TRUE)[[1]][[3]],
                                           hooks = hooks)
                }
                else {
                    ## treat links in the same package specially -- was needed for CHM
                    pkg_regexp <- paste0("^../../", urlify(package), "/html/")
                    if (grepl(pkg_regexp, htmlfile)) {
                        htmlfile <- sub(pkg_regexp, "", htmlfile)
                    }
                }
                writeHref()
            }
    	} else {
            ## ----------------- \link[pkg]{file} and \link[pkg:file]{bar}
            if (!dynamic && !linksToTopics && !no_links &&
                nzchar(pkgpath <- system.file(package = parts$pkg))) {
                ## old-style static HTML: prefer filename over topic,
                ## so treat as filename and topic2url() instead of
                ## topic2filename()
                htmlfile <- paste0(topic2url(parts$targetfile), ".html")
                ## check the link, only if the package is found
                OK <- FALSE
                if (!file.exists(file.path(pkgpath, "html", htmlfile))) {
                    ## does not exist as static HTML, so look harder
                    f <- file.path(pkgpath, "help", "paths.rds")
                    if (file.exists(f)) {
                        paths <- sub("\\.[Rr]d$", "", basename(readRDS(f)))
                        OK <- parts$targetfile %in% paths
                    }
                } else OK <- TRUE
                if (!OK) {
                    ## so how about as a topic?
                    file <- utils:::index.search(parts$targetfile, pkgpath)
                    if (length(file)) {
                        ## warnRd(block, Rdfile,
                        ##        "file link ", sQuote(parts$targetfile),
                        ##        " in package ", sQuote(parts$pkg),
                        ##        " does not exist and so has been treated as a topic")
                        parts$targetfile <- basename(file)
                    } else {
                        warnRd(block, Rdfile, "missing file link ",
                               sQuote(parts$targetfile))
                    }
                }
            }
            if (parts$pkg == package) { # within same package
                if (linksToTopics)
                    htmlfile <-
                        if (dynamic) paste0("../help/", topic2url(parts$targetfile))
                        else if (standalone) paste0("../help/", topic2filename(parts$targetfile), ".html")
                        else topic2href(parts$targetfile)
                else # use href = "file.html"
                    htmlfile <- paste0(topic2url(parts$targetfile), ".html")
                writeHref()
            } else {  # link to different package
                ## href = "../../pkg/html/file.html"
                if (linksToTopics)
                    htmlfile <-
                        if (dynamic) paste0("../../", urlify(parts$pkg), "/help/",
                                            topic2url(parts$targetfile))
                        else if (standalone) paste0("../../", urlify(parts$pkg), "/help/",
                                                    topic2filename(parts$targetfile), ".html")
                        else topic2href(parts$targetfile,
                                        destpkg = urlify(parts$pkg),
                                        hooks = hooks)
                else
                    htmlfile <- paste0("../../", urlify(parts$pkg), "/html/",
                                       topic2url(parts$targetfile), ".html") # FIXME Is this always OK ??
                writeHref()
            }
        }
    }

    writeLR <- function(block, tag, doParas) {
    	enterPara(doParas)
        of1(HTMLLeft[tag])
        writeContent(block, tag)
        of1(HTMLRight[tag])
    }

    writeDR <- function(block, tag) {
        if (Rhtml && length(block) > 1L)
            of1("\nend.rcode-->\n\n<!--begin.rcode eval=FALSE\n")
        of1('## Not run: ')
        writeContent(block, tag)
        if (length(block) > 1L) {
            of1('\n## End(Not run)')
            if (Rhtml) of1("\nend.rcode-->\n\n<!--begin.rcode\n")
        }
    }

    writeBlock <- function(block, tag, blocktag) {
    	if (concordance)
    	    conc$saveSrcref(block)
        doParas <- (blocktag %notin% c("\\tabular"))
	switch(tag,
               UNKNOWN =,
               VERB = if (Rhtml && blocktag == "\\dontrun") of1(block)
                      else of1(vhtmlify(block, inEqn)),
               RCODE = if (Rhtml) of1(block) else of1(vhtmlify(block)),
               TEXT = of1(if(doParas && !inAsIs) addParaBreaks(htmlify(block)) else vhtmlify(block)),
               USERMACRO =,
               "\\newcommand" =,
               "\\renewcommand" = {},
               COMMENT = if (utils:::getSrcByte(block) == 1L)
                             skipNewline <<- TRUE,
               LIST = writeContent(block, tag),
               "\\describe"=,
               "\\enumerate"=,
               "\\itemize" = {
               	   leavePara(FALSE)
                   writeContent(block, tag)
               },
               "\\bold" =,
               "\\cite" =,
               "\\code" =,
               "\\command" =,
               "\\dfn" =,
               "\\emph" =,
               "\\kbd" =,
               "\\preformatted" =,
               "\\strong" =,
               "\\var" = writeWrapped(tag, block, doParas),
               "\\special" = writeContent(block, tag), ## FIXME, verbatim?
               "\\linkS4class" =,
               "\\link" = writeLink(tag, block, doParas),
               ## cwhmisc has an empty \\email
               "\\email" = if (length(block)) {
                   url <- lines2str(as.character(block))
                   enterPara(doParas)
                   ## FIXME: urlify
                   of0('<a href="mailto:', urlify_email_address(url), '">',
                       htmlify(url), '</a>')},
               ## watch out for empty URLs (TeachingDemos had one)
               "\\url" = if(length(block)) {
                   url <- lines2str(as.character(block))
                   enterPara(doParas)
                   of0('<a href="', urlify(url), '">', htmlify(url), '</a>')
               },
               "\\href" = {
                   closing <-
                       if(length(block[[1L]])) {
                           url <- lines2str(as.character(block[[1L]]))
                           enterPara(doParas)
                           of0('<a href="', urlify(url), '">')
                           "</a>"
                       }
                       else ""
               	   savePara <- inPara
               	   inPara <<- NA
               	   writeContent(block[[2L]], tag)
               	   of0(closing)
               	   inPara <<- savePara
               },
               "\\Sexpr"= of0(as.character.Rd(block, deparse=TRUE)),
               "\\cr" =,
               "\\dots" =,
               "\\ldots" =,
               "\\R" = {
                   enterPara(doParas)
               	   of1(HTMLEscapes[tag])
               },
               "\\abbr" =,
               "\\acronym" =,
               "\\donttest" =, "\\dontdiff" =,
               "\\env" =,
               "\\file" =,
               "\\option" =,
               "\\pkg" =,
               "\\samp" =,
               "\\sQuote" =,
               "\\dQuote" =,
               "\\verb" = writeLR(block, tag, doParas),
               "\\dontrun"= writeDR(block, tag),
               "\\enc" = writeContent(block[[1L]], tag),
               "\\eqn" = {
                   block <-
                       if (doTexMath) block[[1L]]
                       else block[[length(block)]]
                   if(length(block)) {
                       enterPara(doParas)
                       inEqn <<- !doTexMath
                       if (doTexMath) of1('<code class="reqn">') # safer than of1('\\(') etc.
                       else of1("<i>")
                       ## FIXME: space stripping needed: see Special.html
                       writeContent(block, tag)
                       if (doTexMath) of1('</code>') # of1('\\)')
                       else of1("</i>")
                       inEqn <<- FALSE
                   }
               },
               "\\deqn" = {
                   block <-
                       if (doTexMath) block[[1L]]
                       else block[[length(block)]]
                   if(length(block)) {
                       inEqn <<- !doTexMath
                       leavePara(TRUE)
                       if (doTexMath) of1('<p style="text-align: center;"><code class="reqn">')
                       else of0('<p style="',
                                if (length(block) <= 3) 'text-align: center'
                                else 'white-space: pre', # as in Rd2txt()
                                ';"><i>')
                       writeContent(block, tag)
                       if (doTexMath) of1('</code>\n')
                       else of1('</i>')
                       leavePara(FALSE)
                       inEqn <<- FALSE
                   }
               },
               "\\figure" = {
                   enterPara(doParas)
                   ## This is what is needed for static html pages
                   if(dynamic) of1('<img src="figures/')
                   else of1('<img src="../help/figures/')
                   writeContent(block[[1]], tag)
                   of1('" ')
               	   if (length(block) > 1L
               	       && length(imgoptions <- .Rd_get_latex(block[[2]]))
		       && startsWith(imgoptions[1L], "options: ")) {
		       ## There may be escaped percent signs within
		       imgoptions <- gsub("\\%", "%",
                                          paste(imgoptions,
                                                collapse = " "),
                                          fixed=TRUE)
                       of1(sub("^options: ", "", imgoptions))
                       ## Expert use may have forgotten alt ...
                       if(!grepl("\\balt *=", imgoptions)) {
                           of1(' alt="')
                           writeContent(block[[1L]], tag)
                           of1('"')
                       }
	           } else {
		       of1('alt="')
		       writeContent(block[[length(block)]], tag)
		       of1('"')
                   }
                   of1('>')
               },
               "\\dontshow" =,
               "\\testonly" = {}, # do nothing
               "\\method" =,
               "\\S3method" =,
               "\\S4method" = {
                   # Should not get here
               },
               "\\tabular" = writeTabular(block),
               "\\subsection" = writeSection(block, tag),
               "\\if" =,
               "\\ifelse" =
               	    if (testRdConditional("html", block, Rdfile))
			writeContent(block[[2L]], tag)
		    else if (tag == "\\ifelse")
		    	writeContent(block[[3L]], tag),
               "\\out" = for (i in seq_along(block))
		   of1(block[[i]]),
               stopRd(block, Rdfile, "Tag ", tag, " not recognized")
               )
    }

    writeTabular <- function(table) {
    	format <- table[[1L]]
    	content <- table[[2L]]
    	if (length(format) != 1 || RdTags(format) != "TEXT")
    	    stopRd(table, Rdfile, "\\tabular format must be simple text")
    	format <- strsplit(format[[1L]], "", fixed = TRUE)[[1L]]
    	if (!all(format %in% c("l", "c", "r")))
    	    stopRd(table, Rdfile,
                   "Unrecognized \\tabular format: ", table[[1L]][[1L]])
        format <- c(l="left", c="center", r="right")[format]

        tags <- RdTags(content)

	leavePara(NA)
	of1('\n<table>\n')
	if (concordance)
	    conc$saveSrcref(table)
        newrow <- TRUE
        newcol <- TRUE
        ## Argh.  As of 2025-08, about 3000 CRAN packages have \\tabular
        ## with a trailing \cr ending the last row, which is invalid as
        ## per R-exts (the \cr starts another row which has a different
        ## number of fields than the other rows), and when processed
        ## results in bad HTML (spotted by v.NU but not HTML Tidy).  We
        ## could have checkRd() complain, but given the number of
        ## offenders let's drop such trailing \cr before processing, at
        ## least for the time being. 
        if(any(ind <- (tags == "\\cr"))) {
            i <- max(which(ind))
            j <- seq.int(i + 1L, length.out = length(content) - i)
            if(all(grepl("^[[:space:]]*$",
                         vapply(content[j], .Rd_deparse, "")))) {
                content <- content[-i]
                tags <- tags[-i]
            }
        }
        for (i in seq_along(tags)) {
            if (concordance)
                conc$saveSrcref(content[[i]])
            if (newrow) {
            	of1("<tr>\n ")
            	newrow <- FALSE
            	col <- 0
            }
            if (newcol) {
                col <- col + 1L
                if (col > length(format))
                    stopRd(table, Rdfile,
                           "Only ", length(format),
                           " columns allowed in this table")
            	of0('<td style="text-align: ', format[col], ';">')
            	newcol <- FALSE
            }
            switch(tags[i],
            "\\tab" = {
            	of1('</td>')
            	newcol <- TRUE
            },
            "\\cr" = {
            	if (!newcol) of1('</td>')
            	of1('\n</tr>\n')
            	newrow <- TRUE
            	newcol <- TRUE
            },
            writeBlock(content[[i]], tags[i], "\\tabular"))
        }
        if (!newcol) of1('</td>')
        if (!newrow) of1('\n</tr>\n')
        of1('\n</table>\n')
        inPara <<- FALSE
    }

    writeContent <- function(blocks, blocktag) {
        ## Render a sequence of Rd blocks.  'blocktag' is the enclosing
        ## container tag and controls \item handling: \value and
        ## \arguments become two-column tables, \itemize/\enumerate
        ## become <ul>/<ol>, and \describe becomes <dl>.
        inlist <- FALSE
        itemskip <- FALSE

	tags <- RdTags(blocks)

        ## Manual index loop (not for): the \method branches below
        ## rewrite 'blocks'/'tags' in place and replay position i.
	i <- 0
	while (i < length(tags)) {
	    i <- i + 1
            tag <- tags[i]
            block <- blocks[[i]]
            if (concordance)
            	conc$saveSrcref(block)
            if (length(pendingOpen)) { # Handle $, [ or [[ methods
            	if (tag == "RCODE" && startsWith(block, "(")) {
                    ## Rewrite "(x, i...)" following e.g. "[" so that it
                    ## renders as "x[i..."; pendingClose supplies "]"/"]]".
            	    block <- sub("^\\(", "", block)
            	    arg1 <- sub("[,)[:space:]].*", "", block)
		    block <- sub(paste0(arg1, "[[:space:]]*,[[:space:]]*"),
				 "", block)
            	    of0(arg1, pendingOpen)
                    pendingClose <<-
                        if(pendingOpen == "$") ""
                        else chartr("[", "]", pendingOpen)
            	} else of0("`", pendingOpen, "`")
            	pendingOpen <<- character()
            }
            if (length(pendingClose) && tag == "RCODE"
                && grepl("\\)", block)) { # Finish it off...
            	of0(sub("\\).*", "", block), pendingClose)
            	block <- sub("[^)]*\\)", "", block)
            	pendingClose <<- character()
            }
            switch(tag,
            "\\method" =,
            "\\S3method" =,
            "\\S4method" = {
               	blocks <- transformMethod(i, blocks, Rdfile)
               	tags <- RdTags(blocks)
               	i <- i - 1
            },
            "\\item" = {
    	    	leavePara(FALSE)
    	    	if (!inlist) {
                    ## First \item: open the matching container element.
    	    	    switch(blocktag,
                           "\\value" =  of1('<table role = "presentation">\n'),
                           "\\arguments" = of1('<table role = "presentation">\n'),
                           "\\itemize" = of1("<ul>\n"),
                           "\\enumerate" = of1("<ol>\n"),
                           "\\describe" = of1("<dl>\n"))
    	    	    inlist <- TRUE
    		} else {
    		    if (blocktag %in% c("\\itemize", "\\enumerate")) {
    		    	of1("</li>\n")
                        ## We have \item ..., so need to skip the space.
                        itemskip <- TRUE
                    }
    		}
    		switch(blocktag,
   		"\\value"=,
     		"\\arguments"= {
    		    of1('<tr><td>')
    		    inPara <<- NA
                    writeItemAsCode(blocktag, block[[1L]])
    		    of1('</td>\n<td>\n')
    		    inPara <<- FALSE
    		    writeContent(block[[2L]], tag)
    		    leavePara(FALSE)
    		    of1('</td></tr>')
    		},
    		"\\describe"= {
    		    of1("<dt>")
    		    inPara <<- NA
    		    writeContent(block[[1L]], tag)
    		    of1("</dt><dd>")
    		    inPara <<- FALSE
    		    writeContent(block[[2L]], tag)
    		    leavePara(FALSE)
    		    of1("</dd>")
    		},
    		"\\enumerate" =,
    		"\\itemize"= {
    		    inPara <<- FALSE
    		    of1("<li>")
    		})
    	    },
    	    { # default
                ## Non-\item content closes an open \value/\arguments/
                ## \describe list; comments and blank TEXT do not.
    	    	if (inlist && (blocktag %notin% c("\\itemize", "\\enumerate"))
    	    	           && tag != "COMMENT"
    	    	           && !(tag == "TEXT" && isBlankRd(block))) {
    	    	    switch(blocktag,
    	    	    "\\arguments" =,
     	    	    "\\value" = of1("</table>\n"),
    	    	    "\\describe" = of1("</dl>\n"))
    		    inlist <- FALSE
    		    inPara <<- FALSE
    		}
                if (itemskip) {
                    ## The next item must be TEXT, and start with a space.
                    itemskip <- FALSE
                    if (tag == "TEXT") {
                        txt <- addParaBreaks(htmlify(block))
                        of1(txt)
                    } else writeBlock(block, tag, blocktag) # should not happen
                }
                else writeBlock(block, tag, blocktag) # "typical default"
    	    })
	}
        ## Close any container still open at the end of the blocks.
	if (inlist) {
	    leavePara(FALSE)
	    switch(blocktag,
		"\\value"=,
		"\\arguments" = of1("</table>\n"),
		"\\itemize"   = of1("</li></ul>\n"),
		"\\enumerate" = of1("</li></ol>\n"),
		# "\\value"=,
		"\\describe"  = of1("</dl>\n"))
	}
    }

    writeSection <- function(section, tag) {
        ## Emit one Rd section (or \subsection) as an HTML heading
        ## followed by its rendered content.  \usage and \examples
        ## bodies are wrapped in <pre><code> (or Rhtml rcode markers).
        if (tag %in% c("\\alias", "\\concept", "\\encoding", "\\keyword"))
            return() ## \alias only used on CHM header

        leavePara(NA)
        save <- sectionLevel
        sectionLevel <<- sectionLevel + 1L

        ## compute id and toc entries if required
        if (toc) {
            if (tag %in% c("\\section", "\\subsection")) {
                sec_value <- rdfragment2text(section[[1L]])
                sec_id <-
                    tag2id(name = if (standalone) NULL else name,
                           tagid = rdfragment2text(section[[1L]], html = FALSE))
            }
            else {
                sec_value <- paste0("<p>", sectionTitles[tag], "</p>")
                sec_id <- tag2id(tag = tag, name = if (standalone) NULL else name)
            }
            toc_entry <- list(id = trimws(sec_id), value = trimws(sec_value))
            toc_entries <<-
                c(toc_entries,
                  if (tag == "\\subsection") list(subsection = toc_entry)
                  else list(section = toc_entry))
            of1(paste0("\n\n<h", sectionLevel+2L, " id='", sec_id, "'>"))
        }
        else of1(paste0("\n\n<h", sectionLevel+2L, ">"))
        if (concordance)
            conc$saveSrcref(section)
        ## \section/\subsection carry their own title as the first
        ## argument; all other tags use the standard section title.
    	if (tag == "\\section" || tag == "\\subsection") {
    	    title <- section[[1L]]
    	    section <- section[[2L]]
            ## FIXME: this needs trimming of whitespace
    	    writeContent(title, tag)
    	} else
    	    of1(sectionTitles[tag])
        of1(paste0("</h", sectionLevel+2L, ">\n\n"))
        if (tag == "\\usage") {
            of1("<pre><code class='language-R'>")
            inPara <<- NA
            pre <- TRUE
        } else if (tag == "\\examples") {
            ## Link to the dynamic example runner when available.
            if (dynamic && enhancedHTML && !Rhtml && !is.null(firstAlias))
                of1(sprintf("<p><a href='../Example/%s'>Run examples</a></p>",
                            topic2url(firstAlias)))
            if (Rhtml) of1("\n\n<!--begin.rcode\n") else of1("<pre><code class='language-R'>")
            inPara <<- NA
            pre <- TRUE
        } else {
            inPara <<- FALSE
            pre <- FALSE
        }
    	if (length(section)) {
	    ## There may be an initial \n, so remove that
	    s1 <- section[[1L]][1L]
	    if (RdTags(section)[1] %in% c("TEXT", "RCODE") && s1 == "\n") section <- section[-1L]
	    writeContent(section, tag)
	}
	leavePara(FALSE)
        if (pre) # must be \usage or \examples
            if (Rhtml && tag == "\\examples") of1("\nend.rcode-->\n\n")
            else of1("</code></pre>\n")
    	sectionLevel <<- save
    }

    ## Write a navigation menu (if toc == TRUE) based on toc_entries
    writeNav <- function() {
        ## Emit the dropdown table-of-contents <nav> from 'toc_entries'.
        ## Entries of type "argitem"/"subsection" are nested one level
        ## below plain "section" entries.

        of0('<nav class="topic" aria-label="Section Navigation">\n',
            '<div class="dropdown-menu">\n',
            '<h1>Contents</h1>\n',
            '<ul class="menu">\n')

        currentLevel <- 1L # entry_types = argitem, subsection are level 2
        ## toc_entries <- list( section|subsection|argitem = list(id, value) )
        entry_types <- names(toc_entries)
        for (i in seq_along(toc_entries)) {
            newLevel <-
                if (entry_types[[i]] %in% c("argitem", "subsection")) 2L
                else 1L
            if (newLevel > currentLevel) of1("  <ul>")
            else if (newLevel < currentLevel) of1("  </ul>")
            currentLevel <- newLevel
            e <- toc_entries[[i]] # id, value can be vectors
            of0(sprintf("<li><a href='#%s'>%s</a></li>\n", e$id, e$value))
        }
        ## If the final entry was nested, the inner <ul> opened in the
        ## loop was never closed; close it here so the generated list
        ## markup stays balanced.
        if (currentLevel > 1L) of1("  </ul>")

        of0('</ul>\n',
            '</div>\n',
            '</nav>')
    }


    
    ## ----------------------- Continue in main function -----------------------
    info <- list() # attribute to be returned if standalone = FALSE
    create_redirects <- FALSE
    if (is.character(out)) {
        if (out == "") {
            con <- stdout()
        } else {
	    con <- file(out, "wt")
            create_redirects <- !dynamic && package_help
	    on.exit(close(con))
	}
    } else {
    	con <- out
    	out <- summary(con)$description
    }
    Rd <- prepare_Rd(Rd, defines = defines, stages = stages,
                     fragment = fragment, ...)
    ## Check if man page already uses mathjaxr package
    ## (then skip mathjax processing)
    uses_mathjaxr <- function(rd)
    {
        ## TRUE iff the man page pulls in mathjax itself via the
        ## mathjaxr package's \loadmathjax user macro inside its
        ## \description section (in which case our own texmath
        ## processing must be skipped).
        description <- NULL
        ## Scan top-level fragments for the \description section.
        for (frag in rd) {
            if (attr(frag, "Rd_tag") == "\\description") {
                description <- frag
                break
            }
        }
        if (is.null(description)) return(FALSE)
        ## Look for the \loadmathjax user macro inside it.
        for (piece in description) {
            if (identical(attr(piece, "Rd_tag"), "USERMACRO") &&
                identical(attr(piece, "macro"), "\\loadmathjax"))
                return(TRUE)
        }
        FALSE
    }
    ## Both katex and mathjax need custom config scripts. For dynamic
    ## HTML these are in /doc/html/*-config.js (as well as the main
    ## katex script and CSS), but for static HTML, the appropriate
    ## relative path is not computable in general. So, for static HTML
    ## we only support katex, using a CDN for the main files and
    ## embedding the config in the output file itself
    if (is.null(texmath)) texmath <- "katex"
    if (texmath == "mathjax" && !dynamic) texmath <- "katex"
    doTexMath <- enhancedHTML && !uses_mathjaxr(Rd) &&
        texmath %in% c("katex", "mathjax")

    ## KaTeX / Mathjax resources (if they are used)
    if (doTexMath && texmath == "katex") {
        KATEX_JS <-
            if (dynamic) "/doc/html/katex/katex.js"
            else "https://cdn.jsdelivr.net/npm/katex@0.15.3/dist/katex.min.js"
        KATEX_CSS <- if (dynamic) "/doc/html/katex/katex.css"
                     else "https://cdn.jsdelivr.net/npm/katex@0.15.3/dist/katex.min.css"
        KATEX_CONFIG <-
            if (dynamic) "/doc/html/katex-config.js"
            else c("const macros = { \"\\\\R\": \"\\\\textsf{R}\", \"\\\\code\": \"\\\\texttt\"};", 
                   "function processMathHTML() {",
                   "    var l = document.getElementsByClassName('reqn');", 
                   "    for (let e of l) { katex.render(e.textContent, e, { throwOnError: false, macros }); }", 
                   "    return;",
                   "}")
    }
    if (doTexMath && texmath == "mathjax") {
        MATHJAX_JS <-
            if (dynamic && requireNamespace("mathjaxr", quietly = TRUE))
                "/library/mathjaxr/doc/mathjax/es5/tex-chtml-full.js"
            else
                "https://cdn.jsdelivr.net/npm/mathjax@3/es5/tex-chtml-full.js"
        MATHJAX_CONFIG <-
            if (dynamic) "/doc/html/mathjax-config.js"
            else "../../../doc/html/mathjax-config.js"
    }
    if (enhancedHTML) {
        PRISM_JS <- 
            if (dynamic) "/doc/html/prism.js"
            else NULL # "../../../doc/html/prism.js"
        PRISM_CSS <- 
            if (dynamic) "/doc/html/prism.css"
            else NULL # "../../../doc/html/prism.css"
    }
    Rdfile <- attr(Rd, "Rdfile")
    sections <- RdTags(Rd)
    if (fragment) {
    	if (sections[1L] %in% names(sectionOrder))
    	    for (i in seq_along(sections))
    	    	writeSection(Rd[[i]], sections[i])
    	else
    	    for (i in seq_along(sections))
    	    	writeBlock(Rd[[i]], sections[i], "")
    } else {
        if (create_redirects) createRedirects(out, Rd)
	name <- htmlify(Rd[[2L]][[1L]])
        firstAlias <-
            trimws(Rd[[ which(sections == "\\alias")[1] ]][[1]])
	if (concordance)
            conc$saveSrcref(.Rd_get_section(Rd, "title"))
	headtitle <- strwrap(.Rd_format_title(.Rd_get_title(Rd)),
	                     width=65, initial="R: ")
	if (length(headtitle) > 1) headtitle <- paste0(headtitle[1], "...")

        ## Create HTML header and footer
        if (standalone) {
            hfcomps <- # should we be able to specify static URLs here?
                HTMLcomponents(title = headtitle, logo = FALSE,
                               up = NULL,
                               top = NULL,
                               css = stylesheet,
                               outputEncoding = outputEncoding,
                               dynamic = dynamic, prism = enhancedHTML,
                               doTexMath = doTexMath, texmath = texmath,
                               PRISM_CSS_STATIC = NULL, PRISM_JS_STATIC = NULL)
            of1(paste(hfcomps$header, collapse = "")) # write out header
            of1('<main>')
            of0('\n\n<table style="width: 100%;">',
                '<tr><td>',
                name)
            if (nchar(package))
                of0(' {', package, '}')
            of0('</td><td style="text-align: right;">R Documentation</td></tr></table>\n\n')
        }

        ## id can identify help page when combined with others, and
        ## also needed to form argument id-s programmatically
        if (!standalone) {
            of0("<h2 id='", name2id(name), "'>")
        }
        else
            of0("<h2>")
	inPara <- NA
	title <- Rd[[1L]]
        info$name <- name
        info$title <- rdfragment2text(title)
        info$htmltitle <- info$title # Rd2HTML(fragment = TRUE) gives unbalanced <p>
        info$mathjaxr <- uses_mathjaxr(Rd)
        info$pkgsummary <- FALSE # possibly updated below if alias ends with '-package'
	if (concordance)
	    conc$saveSrcref(title)
	writeContent(title, sections[1])
	of1("</h2>")
	inPara <- FALSE
        if (!standalone) {
            ## create empty spans with aliases as id, so that we can link
            for (a in unique(trimws(unlist(Rd[ which(sections == "\\alias") ])))) {
                if (endsWith(a, "-package")) info$pkgsummary <- TRUE
                of0("<span id='", topic2id(a), "'></span>")
            }
        }
	for (i in seq_along(sections)[-(1:2)])
	    writeSection(Rd[[i]], sections[i])

	if(nzchar(version))
	    version <- paste0('Package <em>', package, '</em> version ', version, ' ')
	of1('\n')
        if (standalone) {
            if(nzchar(version))
                of0('<hr><div style="text-align: center;">[', version,
                    if (!no_links) '<a href="00Index.html">Index</a>',
                    ']</div>')
            of1('</main>\n')
            if (toc) writeNav()
            of1(paste(hfcomps$footer, collapse = "")) # write out footer
        }
        else attr(out, "info") <- info
    }
    if (concordance) {
    	conc$srcFile <- Rdfile
    	concdata <- followConcordance(conc$finish(), attr(Rd, "concordance"))
    	# NB:  Prior to R 4.3.0, the srcFile could be
    	#      an absolute path, possibly containing a 
    	#      colon.  This strips the leading part up to
    	#      "/man/".
    	concdata$srcFile <- stripPathTo(concdata$srcFile, "man")
    	attr(out, "concordance") <- concdata
    	of0('<!-- ',
    	    as.character(concdata),
    	    ' -->\n')
    }
    invisible(out)
} ## Rd2HTML()


## The following functions return 'relative' links assuming that all
## packages are installed in the same virtual library tree.

findHTMLlinks <-
function(pkgDir, lib.loc = NULL, level = 0 : 3)
{
    ## Build a named vector mapping help topics to relative HTML paths,
    ## splitting levels for base and recommended packages:
    ##   Level 0: this package (installed in pkgDir)
    ##   Level 1: base packages
    ##   Level 2: recommended packages
    ##   Level 3: all packages installed in lib.loc
    ## Sources are assembled so that lower levels come first and win
    ## when the same topic occurs more than once.
    if (is.null(lib.loc))
        lib.loc <- .libPaths()

    link_sets <- list()
    if (3 %in% level)
        link_sets <- c(link_sets,
                       lapply(lib.loc, .find_HTML_links_in_library))
    if (2 %in% level) {
        dirs <- file.path(.Library,
                          .get_standard_package_names()$recommended)
        link_sets <- c(lapply(dirs, .find_HTML_links_in_package), link_sets)
    }
    if (1 %in% level) {
        dirs <- file.path(.Library,
                          .get_standard_package_names()$base)
        link_sets <- c(lapply(dirs, .find_HTML_links_in_package), link_sets)
    }
    if (0 %in% level && nzchar(pkgDir))
        link_sets <- c(list(.find_HTML_links_in_package(pkgDir)), link_sets)

    links <- unlist(link_sets)
    ## First occurrence of a topic wins.
    links <- links[!duplicated(names(links))]
    gsub("[Rr]d$", "html", links)
}

## These helper functions can optionally return the absolute path as
## well (in the local file system)

.find_HTML_links_in_package <-
function(dir, absolute = FALSE)
{
    ## Topic -> Rd/HTML file link map for one installed package,
    ## preferring the precomputed Meta/links.rds over rebuilding the
    ## index from Meta/Rd.rds.  With absolute = TRUE the map values are
    ## absolute paths into the package's html directory.
    meta <- file.path(dir, "Meta")
    links_rds <- file.path(meta, "links.rds")
    rd_rds <- file.path(meta, "Rd.rds")
    ans <-
        if (file_test("-f", links_rds))
            readRDS(links_rds)
        else if (file_test("-f", rd_rds))
            .build_links_index(readRDS(rd_rds), basename(dir))
        else
            character()
    if (!absolute)
        return(ans)
    structure(file.path(dir, "html", basename(ans), fsep = "/"),
              names = names(ans))
}

.find_HTML_links_in_library <-
function(dir, absolute = FALSE)
{
    ## Topic -> file link map for a whole library tree, using the cached
    ## .Meta/links.rds when present and scanning each package otherwise.
    cache <- file.path(dir, ".Meta", "links.rds")
    ans <-
        if (file_test("-f", cache))
            readRDS(cache)
        else
            .build_library_links_index(dir)
    if (!absolute)
        return(ans)
    ## Values start with "../../"; drop those 6 characters to rebase
    ## the paths onto 'dir'.
    structure(file.path(dir, substring(ans, first = 7), fsep = "/"),
              names = names(ans))
}

.build_library_links_index <-
function(dir)
{
    ## Concatenate the per-package link maps of every package installed
    ## in 'dir'.  Packages are scanned in reverse lexicographic order
    ## (NB: this affects which entry survives any later name-based
    ## de-duplication by callers).
    pkg_dirs <- rev(dir(dir, full.names = TRUE))
    unlist(lapply(pkg_dirs, .find_HTML_links_in_package))
}

.DESCRIPTION_to_HTML <- function(descfile, dynamic = FALSE) {

    ## Similar to .DESCRIPTION_to_latex().

    trfm <- .gsub_with_transformed_matches

    ## A variant of htmlify() which optionally adds hyperlinks and does
    ## not HTMLify dashes inside these.
    htmlify_text <- function(x, a = FALSE, d = FALSE) {
        ## Escape HTML specials and map TeX-style quotes/dashes to
        ## entities.  Use 'a' to hyperlink &lt;URL&gt; references.
        ## Use 'd' to indicate HTMLifying Description texts,
        ## transforming DOI and arXiv pseudo-URIs.
        ## NB: '&' must be escaped first, '<'/'>' before any markup is
        ## added, and dashes last (skipping the generated anchors).
        x <- fsub("&", "&amp;", x)
        x <- fsub("``", "&ldquo;", x)
        x <- fsub("''", "&rdquo;", x)
        x <- psub("`([^']+)'", "&lsquo;\\1&rsquo;", x)
        x <- fsub("`", "'", x)
        x <- fsub("<", "&lt;", x)
        x <- fsub(">", "&gt;", x)
        if(a) {
            ## URL regexp as in .DESCRIPTION_to_latex().  CRAN uses
            ##   &lt;(URL: *)?((https?|ftp)://[^[:space:]]+)[[:space:]]*&gt;
            ##   ([[:space:]])((https?|ftp)://[[:alnum:]/.:@+\\_~%#?=&;,-]+[[:alnum:]/])
            ## (also used in toRd.citation()).
            x <- trfm("&lt;(http://|ftp://|https://)([^[:space:],>]+)&gt;",
                      "<a href=\"\\1%s\">\\1\\2</a>",
                      x,
                      urlify,
                      2L)
        }
        if(d) {
            ## <doi:...> pseudo-URIs become links to doi.org.
            x <- trfm("&lt;(DOI|doi):[[:space:]]*([^<[:space:]]+[[:alnum:]])&gt;",
                      "&lt;<a href=\"https://doi.org/%s\">doi:\\2</a>&gt;",
                      x,
                      ## <FIXME>
                      ## Why not urlify?
                      function(u) utils::URLencode(u, TRUE),
                      ## </FIXME>
                      2L)
            ## <arXiv:...> pseudo-URIs go via the arXiv DOI prefix.
            x <- trfm("&lt;(arXiv|arxiv):(([[:alpha:].-]+/)?[[:digit:].]+)(v[[:digit:]]+)?([[:space:]]*\\[[^]]+\\])?&gt;",
                      "&lt;<a href=\"https://doi.org/10.48550/arXiv.%s\">doi:10.48550/arXiv.\\2</a>&gt;",
                      x,
                      urlify,
                      2L)
        }
        if(a || d) {
            ## Avoid mdash/ndash htmlification in the anchored parts.
            m <- gregexpr("<a href=\"[^>]*\">[^<>]*</a>", x)
            regmatches(x, m, invert = TRUE) <-
                lapply(regmatches(x, m, invert = TRUE),
                       function(x) {
                           x <- fsub("---", "&mdash;", x)
                           x <- fsub("--", "&ndash;", x)
                           x
                       })
        } else {
            x <- fsub("---", "&mdash;", x)
            x <- fsub("--", "&ndash;", x)
        }
        x
    }

    htmlify_compare_ops <- function(x) {
        ## Turn version-comparison operators into HTML entities; must
        ## run before '<' and '>' themselves get escaped.
        entities <- c("<=" = "&le;", ">=" = "&ge;", "!=" = "&ne;")
        for (op in names(entities))
            x <- fsub(op, entities[[op]], x)
        x
    }
    
    htmlify_license_spec <- function(x, p) {
        ## HTMLify a License field value.  Standardizable specs get
        ## their components hyperlinked against the R license db (using
        ## locally shipped license copies for dynamic help); 'p' is the
        ## package name, used to link "file LICENSE" pointers.
        do_one <- function(x) {
            x <- gsub("[[:space:]]*([+|])[[:space:]]*", " \\1 ", x)
            a <- analyze_license(x)
            ## Non-standardizable specs are just escaped verbatim.
            if(!a$is_standardizable) return(htmlify(x))

            ## Escape a license component label for use as link text.
            htmlify_component_texts <- function(x) {
                x <- fsub("&", "&amp;", x)
                x <- fsub("<=", "&le;", x)
                x <- fsub(">=", "&ge;", x)
                x <- fsub("!=", "&ne;", x)
                x <- fsub("<", "&lt;", x)
                x <- fsub(">", "&gt;", x)
                x
            }

            components <- a$components
            expansions <- unlist(a$expansions)
            expanded <- length(expansions) > length(components)
            ## y accumulates the HTML for each expanded component.
            y <- character(length(expansions))
        
            ## Unlimited.
            y[expansions == "Unlimited"] <- "Unlimited"
            
            ## License file pointers.
            ## <FIXME>
            ## For now only hyperlink for dynamic help.
            re <- "(.*[^[:space:]])?(([[:space:]]*\\+[[:space:]]*)?file )(LICEN[CS]E)"
            ind <- grepl(re, expansions)
            if(any(ind)) {
                y[ind] <-
                    sub(re,
                        if(dynamic) {
                            sprintf("\\2<a href=\"/library/%s/\\4\">\\4</a>",
                                    p)
                        } else "\\2\\4",
                        expansions[ind])
                ## Strip the file pointer so only the license name part
                ## is matched against the license db below.
                expansions[ind] <- sub(re, "\\1", expansions[ind])
            }
            ## </FIXME>

            ## Components with labels in the R license db.
            ## For dynamic help, use the common licenses shipped with R
            ## instead of the R-project.org license URLs.
            ldb <- R_license_db()
            pos <- match(expansions, ldb$Labels)
            ind <- !is.na(pos)
            if(any(ind)) {
                pos <- pos[ind]
                urls <- if(dynamic) {
                            paths <- ldb[pos, "File"]
                            ifelse(nzchar(paths),
                                   sprintf("/licenses/%s",
                                           basename(paths)),
                                   ldb[pos, "URL"])
                        } else
                            urls <- ldb[pos, "URL"] # NB: inner 'urls <-' is redundant; only the branch value is used
                texts <- if(expanded) {
                             expansions[ind]
                         } else {
                             sub("[[:space:]]*\\+.*", "", components[ind])
                         }
                y[ind] <-
                    sprintf("<a href=\"%s\">%s</a>%s",
                            vapply(urls, urlify, ""),
                            htmlify_component_texts(texts),
                            y[ind])
            }
            
            y <- paste(y, collapse = " | ")
            if(expanded) {
                ## Show the original spec alongside its expansion.
                y <- sprintf("%s [expanded from: %s]",
                             y,
                             paste(htmlify_component_texts(components),
                                   collapse = " | "))
            }
        
            y
        }

        ## Process each distinct spec only once, then map back onto x.
        v <- unique(x)
        s <- vapply(v, do_one, "")
        s[match(x, v)]
    }

    htmlify_depends_spec <- function(x) {
        ## Hyperlink package names in a Depends/Imports/... field when
        ## the package is found in one of the current library trees.
        chunks <- strsplit(x, ",")
        entries <- unlist(chunks, use.names = FALSE)
        ## Canonicalize whitespace around names and version constraints.
        entries <- sub("^[[:space:]]*(.*)[[:space:]]*$", "\\1", entries)
        entries <- sub("[[:space:]]*\\(", " (", entries)
        ## Split each entry at the first whitespace into the package
        ## name and the (possibly empty) version-constraint remainder.
        ws <- regexpr("[[:space:]]", entries)
        pkgs <- ifelse(ws == -1L, entries, substring(entries, 1L, ws - 1L))
        tails <- ifelse(ws == -1L, "", substring(entries, ws))
        ## Very basic test for installed package: any library tree with
        ## a DESCRIPTION file under the package name.
        installed <- Reduce(`|`,
                            lapply(.libPaths(),
                                   function(lib)
                                       file.exists(file.path(lib, pkgs,
                                                             "DESCRIPTION"))),
                            logical(length(pkgs)))
        pkgs[installed] <- sprintf("<a href=\"/library/%s\">%s</a>",
                                   pkgs[installed],
                                   pkgs[installed])
        ## Reassemble the original comma-separated structure.
        vapply(split(paste0(pkgs, tails),
                     rep.int(seq_along(chunks), lengths(chunks))),
               paste, "", collapse = ", ")
    }

    ## See <https://orcid.org/trademark-and-id-display-guidelines> for
    ## ORCID identifier display guidelines.
    ## We want the ORCID id transformed into a hyperlinked ORCID icon
    ## right after the family name (but before the roles).  We can
    ## achieve this by adding the canonicalized ORCID id (URL) to the
    ## 'family' element and simultaneously dropping the ORCID id from
    ## the 'comment' element, and then re-format.
    ## See <https://ror.readme.io/docs/display> for ROR display
    ## guidelines.
    .format_authors_at_R_field_with_expanded_identifiers <- function(a) {
        ## Re-format an Authors@R field, moving each person's ORCID/ROR
        ## identifier from the 'comment' element onto the 'family'
        ## element as a placeholder URL (replace.me.by.*), which the
        ## caller later rewrites into a hyperlinked icon.
        x <- utils:::.read_authors_at_R_field(a)
        format_person1 <- function(e) {
            cmt <- e$comment
            pos <- which((names(cmt) == "ORCID") &
                         grepl(.ORCID_iD_variants_regexp, cmt))
            if(length(pos) == 1L) {
                e$family <-
                    c(e$family,
                      sprintf("<https://replace.me.by.orcid.org/%s>",
                              .ORCID_iD_canonicalize(cmt[pos])))
                cmt <- cmt[-pos]
            }
            ## Of course, a person should not have both ORCID and ROR
            ## identifiers: could check for that.
            pos <- which((names(cmt) == "ROR") &
                         grepl(.ROR_ID_variants_regexp, cmt))
            if(length(pos) == 1L) {
                e$family <-
                    c(e$family,
                      sprintf("<https://replace.me.by.ror.org/%s>",
                              .ROR_ID_canonicalize(cmt[pos])))
                cmt <- cmt[-pos]
            }
            ## Drop the comment entirely when nothing remains of it.
            e$comment <- if(length(cmt)) cmt else NULL
            e
        }
        x <- lapply(unclass(x), format_person1)
        class(x) <- "person"
        utils:::.format_authors_at_R_field_for_author(x)
    }
    
    desc <- enc2utf8(.read_description(descfile))
    ## Drop empty fields: these are usually taken as missing.    
    desc <- desc[nzchar(desc)]
    pack <- desc["Package"]
    aatr <- desc["Authors@R"]
    ## <FIXME>
    ## .DESCRIPTION_to_latex() drops the
    ##    Package Packaged Built
    ## fields: why?  Should we do the same?
    ## Note that the package name will be used for the title in the HTML
    ## refman, so perhaps really drop.
    desc <- desc[names(desc) %w/o%
                 c("Package", "Authors@R")]
    ## </FIXME>

    ## <FIXME>
    ## What should we do with email addresses in the
    ##   Author Maintainer Contact
    ## fields?
    ## CRAN obfuscates, .DESCRIPTION_to_latex() uses \email which only
    ## adds markup but does not create mailto: URLs.
    ## </FIXME>

    if(!is.na(aatr))
        desc["Author"] <-
            .format_authors_at_R_field_with_expanded_identifiers(aatr)

    ## Take only Title and Description as *text* fields.
    desc["Title"] <- htmlify_text(desc["Title"])
    desc["Description"] <-
        htmlify_text(desc["Description"], a = TRUE, d = TRUE)
    ## Now the other fields.
    fields <- setdiff(names(desc),
                      c("Title", "Description", "License"))
    theops <- intersect(fields,
                        c("Depends", "Imports", "LinkingTo",
                          "Suggests", "Enhances"))
    desc[fields] <- fsub("&", "&amp;", desc[fields])
    ## Do this before turning '<' and '>' to HTML entities.
    desc[theops] <- htmlify_compare_ops(desc[theops])
    ## Do this before adding HTML markup ...
    desc[fields] <- fsub("<", "&lt;", desc[fields])
    desc[fields] <- fsub(">", "&gt;", desc[fields])
    ## HTMLify URLs and friends.
    for(f in intersect(fields,
                       c("URL", "BugReports",
                         "Additional_repositories",
                         ## BioC ...
                         "git_url"
                         ))) {
        ## The above already changed & to &amp; which urlify will
        ## do once more ...
        trafo <- function(s) urlify(gsub("&amp;", "&", s))
        desc[f] <- trfm("(^|[^>\"?])((https?|ftp)://[^[:space:],]*)",
                        "\\1<a href=\"%s\">\\2</a>",
                        desc[f],
                        trafo,
                        2L)
    }

    if(!is.na(aatr)) {
        desc["Author"] <-
            gsub(sprintf("&lt;https://replace.me.by.orcid.org/(%s)&gt;",
                         .ORCID_iD_regexp),
                 paste0("<a href=\"https://orcid.org/\\1\">",
                        "<img alt=\"ORCID iD\" ",
                        if(dynamic)
                            " src=\"/doc/html/orcid.svg\" "
                        else
                            " src=\"https://cloud.R-project.org/web/orcid.svg\" ",
                        "style=\"width:16px; height:16px; margin-left:4px; margin-right:4px; vertical-align:middle\"",
                        "></a>"),
                 desc["Author"])
        desc["Author"] <-
            gsub(sprintf("&lt;https://replace.me.by.ror.org/(%s)&gt;",
                         .ROR_ID_regexp),
                 paste0("<a href=\"https://ror.org/\\1\">",
                        "<img alt=\"ROR ID\" ",
                        if(dynamic)
                            " src=\"/doc/html/ror.svg\" "
                        else
                            " src=\"https://cloud.R-project.org/web/ror.svg\" ",
                        "style=\"width:20px; height:20px; margin-left:4px; margin-right:4px; vertical-align:middle\"",
                        "></a>"),
                 desc["Author"])
    }

    desc["License"] <- htmlify_license_spec(desc["License"], pack)

    if(dynamic)
        desc[theops] <- htmlify_depends_spec(desc[theops])

    ## <TODO>
    ## For dynamic help we should be able to further enhance by
    ## hyperlinking file pointers to
    ##   AUTHORS COPYRIGHTS
    ## </TODO>

    c("<table role='presentation'>",
      sprintf("<tr>\n<td>%s:</td>\n<td>%s</td>\n</tr>",
              names(desc), desc),
      "</table>")
}
#  File src/library/tools/R/Rd2latex.R
#  Part of the R package, https://www.R-project.org
#
#  Copyright (C) 1995-2025 The R Core Team
#
#  This program is free software; you can redistribute it and/or modify
#  it under the terms of the GNU General Public License as published by
#  the Free Software Foundation; either version 2 of the License, or
#  (at your option) any later version.
#
#  This program is distributed in the hope that it will be useful,
#  but WITHOUT ANY WARRANTY; without even the implied warranty of
#  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
#  GNU General Public License for more details.
#
#  A copy of the GNU General Public License is available at
#  https://www.R-project.org/Licenses/

## TODO: can we do something useful with cross-package links?


### * .Rd_get_latex

# Return latex form of text, encoded in UTF-8.
# Return latex form of text, encoded in UTF-8.
#
# Renders the Rd fragment 'x' to LaTeX via Rd2latex() into a temporary
# file and returns the resulting lines; on any conversion error it
# returns character(0).
#
# BUG FIX: previously the error handler's return(character()) only
# returned from the handler -- the tryCatch() *value* was discarded and
# readLines(tf) still ran.  If Rd2latex() failed before creating 'tf'
# (e.g. while parsing), readLines() then raised its own error, so the
# intended graceful fallback never happened.  Both the conversion and
# the read are now inside the tryCatch so an error really yields
# character(0).
.Rd_get_latex <-
function(x)
{
    tf <- tempfile()
    ## Disable fancy quotes while rendering; restore options and remove
    ## the temporary file on exit.
    save <- options(useFancyQuotes = FALSE)
    on.exit({options(save); unlink(tf)})
    tryCatch({
        Rd2latex(x, tf, fragment = TRUE, outputEncoding = "UTF-8")
        enc2utf8(readLines(tf, warn = FALSE, encoding = "UTF-8"))
    },
    error = function(e) character())
}

latex_canonical_encoding  <- function(encoding)
{
    ## Canonicalize an encoding name into the form expected by LaTeX's
    ## inputenc package (e.g. "ISO-8859-1" -> "latin1", "UTF-8" ->
    ## "utf8").  Names with no known LaTeX equivalent pass through
    ## unchanged.  An empty name means "use the current locale".
    if (encoding == "") encoding <- utils::localeToCharset()[1L]
    encoding <- tolower(encoding)
    ## Normalize the common alternative spellings of ISO 8859 first.
    encoding <- sub("iso_8859-([0-9]+)", "iso-8859-\\1", encoding)
    encoding <- sub("iso8859-([0-9]+)", "iso-8859-\\1", encoding)

    ## Lookup table from normalized names to inputenc option names.
    aliases <- c("iso-8859-1"  = "latin1",
                 "iso-8859-2"  = "latin2",
                 "iso-8859-3"  = "latin3",
                 "iso-8859-4"  = "latin4",
                 "iso-8859-5"  = "cyrillic",
                 "iso-8859-6"  = "arabic",
                 "iso-8859-7"  = "greek",
                 "iso-8859-8"  = "hebrew",
                 "iso-8859-9"  = "latin5",
                 "iso-8859-10" = "latin6",
                 "iso-8859-14" = "latin8",
                 "latin-9"     = "latin9",
                 "iso-8859-15" = "latin9",
                 "iso-8859-16" = "latin10",
                 "utf-8"       = "utf8")
    hits <- match(encoding, names(aliases))
    known <- !is.na(hits)
    encoding[known] <- unname(aliases[hits[known]])
    encoding
}

## 'encoding' is passed to parse_Rd, as the input encoding
Rd2latex <- function(Rd, out = "", defines = .Platform$OS.type,
                     stages = "render",
		     outputEncoding = "UTF-8", fragment = FALSE, ...,
                     writeEncoding = outputEncoding != "UTF-8",
		     concordance = FALSE)
{
    encode_warn <- FALSE
    WriteLines <-
        if(outputEncoding == "UTF-8" ||
           (outputEncoding == "" && l10n_info()[["UTF-8"]])) {
            function(x, con, outputEncoding, ...)
                writeLines(x, con, useBytes = TRUE, ...)
        } else {
            function(x, con, outputEncoding, ...) {
                y <- iconv(x, "UTF-8", outputEncoding,  mark = FALSE)
                if (anyNA(y)) {
                    y <- iconv(x, "UTF-8", outputEncoding,
                               sub = "byte", mark = FALSE)
                    encode_warn <<- TRUE
                }
                writeLines(y, con, useBytes = TRUE, ...)
            }
    }

    conc <- if(concordance) activeConcordance() # else NULL
    
    last_char <- ""
    skipNewline <- FALSE
    of0 <- function(...) of1(paste0(...))
    of1 <- function(text) {
        if (skipNewline) {
            skipNewline <<- FALSE
            if (text == "\n") return()
        }
    	if (concordance)
    	    conc$addToConcordance(text)
        nc <- nchar(text)
        last_char <<- substr(text, nc, nc)
        WriteLines(text, con, outputEncoding, sep = "")
    }

    trim <- function(x) {
        x <- psub1("^\\s*", "", as.character(x))
        psub1("\\s*$", "", x)
    }

    envTitles <- c("\\description"="Description", "\\usage"="Usage",
        "\\arguments"="Arguments",
        "\\format"="Format", "\\details"="Details", "\\note"="Note",
        "\\section"="", "\\author"="Author",
        "\\references"="References", "\\source"="Source",
        "\\seealso"="SeeAlso", "\\examples"="Examples",
        "\\value"="Value")

    sectionExtras <-
    c("\\usage"="verbatim",
      "\\examples"="ExampleCode")

    inCodeBlock <- FALSE ## used to indicate to texify where we are
    inCode <- FALSE
    inEqn <- FALSE
    inPre <- FALSE
    sectionLevel <- 0
    hasFigures <- FALSE

    startByte <- function(x) {
    	srcref <- attr(x, "srcref")
    	if (is.null(srcref)) -1L
    	else srcref[2L]
    }

    addParaBreaks <- function(x, tag) {
        if (isBlankLineRd(x)) "\n"
        else if (startByte(x) == 1L) psub("^\\s+", "", x)
        else x
    }

    texify <- function(x, code = inCodeBlock) {
        if(inEqn) return(x)
        if (!code) {
	    # Need to be careful to handle backslash, so do it in three steps.
	    # First, mark all the ones in the original text, but don't add
	    # any other special chars
	    x <- fsub("\\", "\\bsl", x)
	    # Second, escape other things, introducing more backslashes
	    x <- psub("([&$%_#])", "\\\\\\1", x)
	    ## pretty has braces in text.
	    x <- fsub("{", "\\{", x)
	    x <- fsub("}", "\\}", x)
	    x <- fsub("^", "\\textasciicircum{}", x)
	    x <- fsub("~", "\\textasciitilde{}", x)
	    # Third, add the terminal braces to the backslash
	    x <- fsub("\\bsl", "\\bsl{}", x)
	} else {
	    ## inCodeBlock/inPre is in alltt, where only \ { } have their usual meaning
	    if (inCodeBlock) {
		## We do want to escape { }, but unmatched braces had
		## to be escaped in earlier versions (e.g. Paren.Rd, body.tex).
		## So fix up for now
		x <- fsub1('"\\{"', '"{"', x)
	    } else if (inPre) {
		BSL <- '@BSL@'
		x <- fsub("\\", BSL, x)
		x <- psub("(?<!\\\\)\\{", "\\\\{", x)
		x <- psub("(?<!\\\\)}", "\\\\}", x)
		x <- fsub(BSL, "\\bsl{}", x)
		x <- psub("\\\\\\\\var\\\\\\{([^\\\\]*)\\\\}", "\\\\var{\\1}", x)
	    } else {
		## cat(sprintf("\ntexify in: '%s'\n", x))
		BSL <- '@BSL@'
		x <- fsub("\\", BSL, x)
		x <- psub("(?<!\\\\)\\{", "\\\\{", x)
		x <- psub("(?<!\\\\)}", "\\\\}", x)
		x <- psub("(?<!\\\\)([&$%_#])", "\\\\\\1", x)
		x <- fsub("^", "\\textasciicircum{}", x)
		x <- fsub("~", "\\textasciitilde{}", x)
		x <- fsub(BSL, "\\bsl{}", x)
		## avoid conversion to guillemets
		x <- fsub("<<", "<{}<", x)
		x <- fsub(">>", ">{}>", x)
		x <- fsub(",,", ",{},", x) # ,, is a ligature in the ec and lmodern fonts.
		## cat(sprintf("\ntexify out: '%s'\n", x))
	    }
	}
        x
    }

    # The quotes were Rd.sty macros, but Latex limitations (e.g. nesting \preformatted within)
    # mean we get better results expanding them here.

    wrappers <- list("\\dQuote" =c("``", "''"),
    		     "\\sQuote" =c("`", "'"),
    		     "\\cite"   =c("\\Cite{", "}"))

    writeWrapped <- function(block, tag) {
    	wrapper <- wrappers[[tag]]
    	if (is.null(wrapper))
    	    wrapper <- c(paste0(tag, "{"), "}")
    	if (concordance)
    	    conc$saveSrcref(block)
    	of1(wrapper[1L])
    	writeContent(block, tag)
    	of1(wrapper[2L])
    }

    writeURL <- function(block, tag) {
        ## really verbatim
        if (tag == "\\url")
            url <- as.character(block)
        else {
            url <- as.character(block[[1L]])
            tag <- "\\Rhref"
        }
        ## cleanup URL
        url <- lines2str(url)
        ## escape special characters for LaTeX
        url <- fsub("\\", "\\\\", url)
        ## support \href inside \tabular
        url <- fsub("%", "\\%",  url)
        url <- fsub("#", "\\#",  url)
        url <- fsub("&", "\\&",  url)  # needs hyperref >= 6.78n (2008-12-26)
        if (concordance)
            conc$saveSrcref(block)
    	of0(tag, "{", url, "}")
        if (tag == "\\Rhref") {
            if (concordance)
                conc$saveSrcref(block[[2L]])	
            of1("{")
            writeContent(block[[2L]], tag)
            of1("}")
        }
    }

    ## Currently ignores [option] except for [=dest] form
    ## (as documented)
    ## FIXME: so should not output cross-package links (unless for refman ...)
    writeLink <- function(tag, block) {
        parts <- get_link(block, tag, Rdfile)
        if (concordance)
            conc$saveSrcref(block)
        if (all(RdTags(block) == "TEXT")) {
            of0("\\LinkA{", latex_escape_name(parts$topic))
        } else { # don't \index link text containing markup etc
            of1("\\LinkB{")
            writeContent(block, tag)
        }
        of0("}{",
            latex_link_trans0(parts$dest), "}")
    }

    writeDR <- function(block, tag) {
    	if (concordance)
    	    conc$saveSrcref(block)
        if (length(block) > 1L) {
            of1('## Not run: ')
            writeContent(block, tag)
            of1('\n## End(Not run)')
        } else {
            of1('## Not run: ')
            writeContent(block, tag)
       }
    }

    ltxstriptitle <- function(x)
    {
        x <- fsub("\\R", "\\R{}", x)
        x <- psub("(?<!\\\\)([&$%_#])", "\\\\\\1", x)
        x <- fsub("^", "\\textasciicircum{}", x)
        x <- fsub("~", "\\textasciitilde{}", x)
        x
    }

    latex_escape_name <- function(x)
    {
        x <- fsub("\\", "\\textbackslash", x)
        x <- psub("([$#_&{}])", "\\\\\\1", x) #- escape them
        x <- fsub("\\textbackslash", "\\textbackslash{}", x)
        x <- fsub("^", "\\textasciicircum{}", x)
        x <- fsub("~", "\\textasciitilde{}", x)
        x <- fsub("%", "\\Rpercent{}", x)
        ## avoid conversion to guillemets
        x <- fsub("<<", "<{}<", x)
        x <- fsub(">>", ">{}>", x)
        x
    }

    latex_escape_name_for_index <- function(x) {
        ## In the index, ! @ | are escape characters, and one must quote
        ## these characters in the \index command by putting a double
        ## quotation mark (") in front of them, and one can only place a
        ## " in the index by quoting it.
        x <- latex_escape_name(x)
        ## <FIXME>
        ## Should really handle all of the above ...
        x <- fsub("@", "\"@", x)
        ## </FIXME>
        x
    }

    latex_link_trans0 <- function(x)
    {
        x <- fsub("\\Rdash", ".Rdash.", x)
        x <- fsub("-", ".Rdash.", x)
        x <- fsub("\\_", ".Rul.", x)
        x <- fsub("\\$", ".Rdol.", x)
        x <- fsub("\\^", ".Rcaret.", x)
        x <- fsub("^", ".Rcaret.", x)
        x <- fsub("_", ".Rul.", x)
        x <- fsub("$", ".Rdol.", x)
        x <- fsub("\\#", ".Rhash.", x) #
        x <- fsub("#", ".Rhash.", x)   #
        x <- fsub("\\&", ".Ramp.", x)
        x <- fsub("&", ".Ramp.", x)
        x <- fsub("\\~", ".Rtilde.", x)
        x <- fsub("~", ".Rtilde.", x)
        x <- fsub("\\%", ".Rpcent.", x)
        x <- fsub("%", ".Rpcent.", x)
        x <- fsub("\\\\", ".Rbl.", x)
        x <- fsub("{", ".Rlbrace.", x)
        x <- fsub("}", ".Rrbrace.", x)
        x
    }

    latex_code_alias <- function(x)
    {
        x <- fsub("{", "\\{", x)
        x <- fsub("}", "\\}", x)
        x <- psub("(?<!\\\\)([&$%_#])", "\\\\\\1", x)
        x <- fsub("^", "\\textasciicircum{}", x)
        x <- fsub("~", "\\textasciitilde{}", x)
        x <- fsub("<-", "<\\Rdash{}", x)
        x <- psub("([!|])", '"\\1', x)
        x
    }

    currentAlias <- NA_character_

    writeAlias <- function(block, tag) {
        alias <- as.character(block)
        if(length(alias) > 1L)
            stop("alias:\n",
                 sQuote(paste(alias, collapse = "\n")),
                 "\nis not one line")
        alias <- trim(alias)
        aa <- "\\aliasA{"
        ## Some versions of hyperref (from 6.79d) have trouble indexing these
        ## |, || in base, |.bit, %||% in ggplot2 ...
        ## And texindy used by some versions of texi2dvi chokes on {/(
        if(grepl("[|{(]", alias)) aa <- "\\aliasB{"
        if(is.na(currentAlias)) currentAlias <<- name
        if (pmatch(paste0(currentAlias, "."), alias, 0L)) {
            aa <- "\\methaliasA{"
        } else currentAlias <<- alias
        ## 'name' is linked from the header
        if (alias == name) return()
        alias2 <- latex_link_trans0(alias)
        if (concordance)
            conc$saveSrcref(block)
        of0(aa, latex_code_alias(alias), "}{",
            latex_escape_name(name), "}{", alias2, "}\n")
    }

    writeBlock <- function(block, tag, blocktag) {
    	if (concordance)
    	    conc$saveSrcref(block)
	switch(tag,
               UNKNOWN =,
               VERB = of1(texify(block, TRUE)),
               RCODE = of1(texify(block, TRUE)),
               TEXT = of1(addParaBreaks(texify(block), blocktag)),
               USERMACRO =,
               "\\newcommand" =,
               "\\renewcommand" = {},
               COMMENT = if (startByte(block) == 1L ||
                             (!inCodeBlock && last_char == "")) # indented comment line
                             skipNewline <<- TRUE,
               LIST = writeContent(block, tag),
               ## Avoid Rd.sty's \describe, \Enumerate and \Itemize:
               ## They don't support verbatim arguments, which we might need.
               "\\describe"= {
                   of1("\\begin{description}\n")
                   writeContent(block, tag)
                   of1("\n\\end{description}\n")
               },
               "\\enumerate"={
                   of1("\\begin{enumerate}\n")
                   writeContent(block, tag)
                   of1("\n\\end{enumerate}\n")
               },
               "\\itemize"= {
                   of1("\\begin{itemize}\n")
                   writeContent(block, tag)
                   of1("\n\\end{itemize}\n")
               },
               ## Verbatim-like
               "\\command"=,
               "\\env" =,
               "\\kbd"=,
               "\\option" =,
               "\\samp" = writeWrapped(block, tag),
               ## really verbatim
               "\\url"=,
               "\\href"= writeURL(block, tag),
               ## R-like
               "\\code"= {
                   inCode <<- TRUE
                   writeWrapped(block, tag)
                   inCode <<- FALSE
               },
               ## simple wrappers
               "\\abbr" =,
               "\\acronym" =,
               "\\bold"=,
               "\\dfn"=,
               "\\dQuote"=,
               "\\email"=,
               "\\emph"=,
               "\\file" =,
               "\\pkg" =,
               "\\sQuote" =,
               "\\strong"=,
               "\\var" =,
               "\\cite" =
                   if (inCodeBlock) writeContent(block, tag)
                   else writeWrapped(block, tag),
               "\\preformatted"= {
                   inPre <<- TRUE
                   of1("\\begin{alltt}")
                   writeContent(block, tag)
                   of1("\\end{alltt}\n")
                   inPre <<- FALSE
               },
               "\\Sexpr"= { of1("\\begin{verbatim}\n")  # This is only here if processing didn't get it...
	       	            of0(as.character.Rd(block, deparse=TRUE))
	       	            of1("\n\\end{verbatim}\n")
	       	          },

               "\\verb"= {
                   of0("\\AsIs{\\texttt{")
                   writeContent(block, tag)
                   of1("}}")
               },
               "\\special"= writeContent(block, tag), ## FIXME, verbatim?
               "\\linkS4class" =,
               "\\link" = writeLink(tag, block),
               "\\cr" = of1("\\\\{}"), ## might be followed by [
               "\\dots" =,
               "\\ldots" = of1(if(inCode || inCodeBlock) "..."  else tag),
               "\\R" = of0(tag, "{}"),
               "\\donttest" =, "\\dontdiff" = writeContent(block, tag),
               "\\dontrun"= writeDR(block, tag),
               "\\enc" = {
                   ## some people put more things in \enc than a word,
                   ## but Rd2txt does not cover that case ....
                   if (outputEncoding == "ASCII")
                       writeContent(block[[2L]], tag)
                   else
                       writeContent(block[[1L]], tag)
               } ,
               "\\eqn" =,
               "\\deqn" = {
               	   if (concordance)
               		conc$saveSrcref(block[[1L]])
                   of0(tag, "{")
                   inEqn <<- TRUE
                   writeContent(block[[1L]], tag)
                   inEqn <<- FALSE
                   of0('}{}')
               },
               "\\figure" = {
               	   if (concordance)
               		conc$saveSrcref(block[[1L]])
               	   of0('\\Figure{')
               	   writeContent(block[[1L]], tag)
               	   of0('}{')
               	   if (length(block) > 1L) {
               	       if (concordance)
               	   	   conc$saveSrcref(block[[2L]])	
		       includeoptions <- .Rd_get_latex(block[[2L]])
                       ## this was wrong if length(includeopptions) > 1
		       if (length(includeoptions))
                           for (z in includeoptions)
                               if(startsWith(z, "options: "))
                                   of0(sub("^options: ", "", z))
                   }
               	   of0('}')
               	   hasFigures <<- TRUE
               },
               "\\dontshow" =,
               "\\testonly" = {}, # do nothing
               "\\method" =,
               "\\S3method" =,
               "\\S4method" = {
                   ## should not get here
               },
               "\\tabular" = writeTabular(block),
               "\\subsection" = writeSection(block, tag),
               "\\if" =,
               "\\ifelse" =
		    if (testRdConditional("latex", block, Rdfile))
               		writeContent(block[[2L]], tag)
               	    else if (tag == "\\ifelse")
               	    	writeContent(block[[3L]], tag),
               "\\out" = for (i in seq_along(block)) {
               	   if (concordance)
               		conc$saveSrcref(block[[i]])
		   of1(block[[i]])
		   },
               stopRd(block, Rdfile, "Tag ", tag, " not recognized")
               )
    }

    writeTabular <- function(table) {
        ## FIXME does no check of correct format
    	format <- table[[1L]]
    	content <- table[[2L]]
    	if (length(format) != 1L || RdTags(format) != "TEXT")
    	    stopRd(table, Rdfile, "\\tabular format must be simple text")
        tags <- RdTags(content)
	of0('\n\\Tabular{', format, '}{')
        if (concordance)
            conc$saveSrcref(table[[1L]])
        for (i in seq_along(tags)) {
            if (concordance)
                conc$saveSrcref(content[[i]])	
            switch(tags[i],
                   "\\tab" = of1("&"),
                   "\\cr" = of1("\\\\{}"),
                   writeBlock(content[[i]], tags[i], "\\tabular"))
        }
        of1('}')
    }

    writeContent <- function(blocks, blocktag) {
        inList <- FALSE
        itemskip <- FALSE

	tags <- RdTags(blocks)

	i <- 0
	while (i < length(tags)) {
	    i <- i + 1
            block <- blocks[[i]]
            tag <- attr(block, "Rd_tag")
            ## this should not be null, but it might be in a erroneous Rd file
            if(!is.null(tag))
            switch(tag,
                   "\\method" =,
                   "\\S3method" =,
                   "\\S4method" = {
                   	blocks <- transformMethod(i, blocks, Rdfile)
                   	tags <- RdTags(blocks)
                   	i <- i - 1
                   },
                   "\\item" = {
                       if (blocktag %in% c("\\value", "\\arguments") && !inList) {
                           of1("\\begin{ldescription}\n")
                           inList <- TRUE
                       }
                       switch(blocktag,
                              "\\describe" = {
                              	  if (concordance)
                              	      conc$saveSrcref(block[[1L]])
                                  of1('\\item[')
                                  writeContent(block[[1L]], tag)
                                  of1('] ')
                                  if (concordance)
                                      conc$saveSrcref(block[[2L]])
                                  writeContent(block[[2L]], tag)
                              },
                              "\\value"=,
                              "\\arguments"={
                              	  if (concordance)
                              	      conc$saveSrcref(block[[1L]])
                                  of1('\\item[')
                                  inCode <<- TRUE
                                  writeItemAsCode(tag, block[[1L]])
                                  inCode <<- FALSE
                                  of1('] ')
                                  if (concordance)
                                      conc$saveSrcref(block[[2L]])
                                  writeContent(block[[2L]], tag)
                              },
                              "\\enumerate" =,
                              "\\itemize"= {
                                  of1("\\item{} ")
                                  itemskip <- TRUE
                              })
                       itemskip <- TRUE
                   },
                   "\\cr" = of1("\\\\{}"), ## might be followed by [
               { # default
                   if (inList && tag != "COMMENT"
                              && !(tag == "TEXT" && isBlankRd(block))) {
                       of1("\\end{ldescription}\n")
                       inList <- FALSE
                   }
                   if (itemskip) {
                       ## The next item must be TEXT, and start with a space.
                       itemskip <- FALSE
                       if (tag == "TEXT") {
                           txt <- psub("^ ", "", as.character(block))
                           of1(texify(txt))
                       } else writeBlock(block, tag, blocktag) # should not happen
                   } else writeBlock(block, tag, blocktag)
               })
	}
        if (inList) of1("\\end{ldescription}\n")
    }

    writeSectionInner <- function(section, tag)
    {
        if (length(section)) {
	    ## need \n unless one follows, so
	    nxt <- section[[1L]]
	    if (is.null(nxttag <- attr(nxt, "Rd_tag"))) # erroneous Rd file
		return()
	    if (nxttag %notin% c("TEXT", "RCODE") ||
		!startsWith(as.character(nxt), "\n")) of1("\n")
	    writeContent(section, tag)
	    if (last_char != "\n") of1("\n")
	}
    }

    writeSection <- function(section, tag) {
        if (tag == "\\encoding")
            return()
    	if (concordance)
    	    conc$saveSrcref(section)
        save <- sectionLevel
        sectionLevel <<- sectionLevel + 1
        if (tag == "\\alias")
            writeAlias(section, tag)
        else if (tag == "\\keyword") {
            key <- trim(section)
            if(any(key %in% .Rd_keywords_auto))
                return()
            of0("\\keyword{", latex_escape_name(key), "}{", ltxname, "}\n")
        }
        else if (tag == "\\concept") {
            key <- trim(section)
            of0("\\keyword{", latex_escape_name_for_index(key), "}{", ltxname, "}\n")
        }
        else if (tag == "\\section" || tag == "\\subsection") {
            macro <- c("Section", "SubSection", "SubSubSection")[min(sectionLevel, 3)]
    	    of0("%\n\\begin{", macro, "}{")
            writeContent(section[[1L]], tag)
            of1("}")
    	    writeSectionInner(section[[2L]], tag)
            of0("\\end{", macro, "}\n")
    	} else {
            title <- envTitles[tag]
            of0("%\n\\begin{", title, "}")
            extra <- sectionExtras[tag]
            if(!is.na(extra)) of0("\n\\begin{", extra, "}")
            if(tag %in% c("\\usage", "\\examples")) inCodeBlock <<- TRUE
            writeSectionInner(section, tag)
 	    inCodeBlock <<- FALSE
            if(!is.na(extra)) of0("\\end{", extra, "}\n")
            of0("\\end{", title, "}\n")
        }
        sectionLevel <<- save
    }

    writeItemAsCode <- function(blocktag, block) {
        ## Keep this in rsync with writeItemAsCode() in Rd2HTML.R!

        ## Argh.  Quite a few packages put the items in their value
        ## section inside \code.
        for(i in which(RdTags(block) == "\\code"))
            attr(block[[i]], "Rd_tag") <- "Rd"

        s <- as.character.Rd(block)
        s[s %in% c("\\dots", "\\ldots")] <- "..."
        s <- trimws(strsplit(paste(s, collapse = ""), ",", fixed = TRUE)[[1]])
        s <- s[nzchar(s)]
        s <- sprintf("\\code{%s}", texify(s))
        s <- paste0(s, collapse = ", ")
        of1(s)
    }

    Rd <- prepare_Rd(Rd, defines=defines, stages=stages, fragment=fragment, ...)
    Rdfile <- attr(Rd, "Rdfile")
    sections <- RdTags(Rd)

    if (is.character(out)) {
        if(out == "") {
            con <- stdout()
        } else {
	    con <- file(out, "wt")
	    on.exit(close(con))
	}
    } else {
    	con <- out
    	out <- summary(con)$description
    }

   if (outputEncoding != "ASCII" && !fragment) {
        latexEncoding <- latex_canonical_encoding(outputEncoding)
        if(writeEncoding) of0("\\inputencoding{", latexEncoding, "}\n")
    } else latexEncoding <- NA

    if (fragment) {
    	if (sections[1L] %in% names(sectionOrder))
    	    for (i in seq_along(sections))
    	    	writeSection(Rd[[i]], sections[i])
    	else
    	    for (i in seq_along(sections))
    	    	writeBlock(Rd[[i]], sections[i], "")
    } else {
	## we know this has been ordered by prepare2_Rd, but
	## need to sort the aliases (if any)
	nm <- character(length(Rd))
	isAlias <- sections == "\\alias"
	sortorder <- if (any(isAlias)) {
	    nm[isAlias] <- sapply(Rd[isAlias], as.character)
	    order(sectionOrder[sections], toupper(nm), nm)
	} else  order(sectionOrder[sections])
	Rd <- Rd[sortorder]
	sections <- sections[sortorder]

	title <- .Rd_get_latex(.Rd_get_section(Rd, "title"))
        ## This might have blank lines
        title <- paste(title[nzchar(title)], collapse = " ")

	name <- Rd[[2L]]
	if (concordance)
	    conc$saveSrcref(name)
	name <- trim(as.character(Rd[[2L]][[1L]]))
	ltxname <- latex_escape_name(name)

	of0('\\HeaderA{', ltxname, '}{',
	    ltxstriptitle(title), '}{',
	    latex_link_trans0(name), '}\n')

	for (i in seq_along(sections)[-(1:2)])
	    writeSection(Rd[[i]], sections[i])
    }
    if (encode_warn)
	warnRd(Rd, Rdfile, "Some input could not be re-encoded to ",
	       outputEncoding)
    if (concordance) {
    	conc$srcFile <- Rdfile
        concdata <- followConcordance(conc$finish(), attr(Rd, "concordance"))
        attr(out, "concordance") <- concdata
    }

    invisible(structure(out, latexEncoding = latexEncoding,
                        hasFigures = hasFigures))
}
#  File src/library/tools/R/Rd2pdf.R
#  Part of the R package, https://www.R-project.org
#
#  Copyright (C) 1995-2025 The R Core Team
#
#  This program is free software; you can redistribute it and/or modify
#  it under the terms of the GNU General Public License as published by
#  the Free Software Foundation; either version 2 of the License, or
#  (at your option) any later version.
#
#  This program is distributed in the hope that it will be useful,
#  but WITHOUT ANY WARRANTY; without even the implied warranty of
#  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
#  GNU General Public License for more details.
#
#  A copy of the GNU General Public License is available at
#  https://www.R-project.org/Licenses/

#### R based engine for  R CMD Rdconv|Rd2pdf
####

## base packages do not have versions and this is called on
## DESCRIPTION.in
## encodings are tricky: this may be done in a foreign encoding
## (e.g., Latin-1 in UTF-8)
.DESCRIPTION_to_latex <- function(descfile, outfile, version = "Unknown",
                                  writeEncoding = TRUE)
{
    mytrfm <- .gsub_with_transformed_matches
    mygsub <- function(pattern, replacement, x)
        .Internal(gsub(pattern, replacement, x, FALSE, FALSE, FALSE, FALSE))
    ## Unlike tools:::psub and tools:::fsub, don't use useBytes = TRUE:
    mypsub <- function(pattern, replacement, x)
        .Internal(gsub(pattern, replacement, x, FALSE, TRUE,  FALSE, FALSE))
    myfsub <- function(pattern, replacement, x)
        .Internal(gsub(pattern, replacement, x, FALSE, FALSE,  TRUE, FALSE))
    texify <- function(x, one = TRUE, two = FALSE) {
        ## Handle LaTeX special characters.
        ## one: handle # $ % & _ ^ ~
        ##      backslash escape the first five
        ##      replace ^ by \textasciicircum{}
        ##      replace ~ by \textasciitilde{}
        ## two: handle { } \
        ##      backslash escape the first two
        ##      replace \ by \textbackslash{}
        if(two)
            x <- myfsub("\\", "\\textbackslash", x)
        if(one) {
            x <- mypsub("([#$%&_])", "\\\\\\1", x)
            x <- myfsub("^", "\\textasciicircum", x)
            x <- myfsub("~", "\\textasciitilde", x)
        }
        if(two) {
            x <- mypsub("([{}])", "\\\\\\1", x)
            x <- myfsub("\\textbackslash", "\\textbackslash{}", x)
        }
        if(one) {
            x <- myfsub("\\textasciicircum", "\\textasciicircum{}", x)
            x <- myfsub("\\textasciitilde", "\\textasciitilde{}", x)
        }
        x
    }
    mytrim <- function(x) {
        y <- unlist(strsplit(x, "\n", fixed = TRUE))
        lines2trim <- setdiff(which(nzchar(y)), 1L)
        if(!length(lines2trim))
            x
        else
            paste(replace(y, lines2trim,
                          .trim_common_leading_whitespace(y[lines2trim])),
                  collapse = "\n")
    }

    desc <- enc2utf8(.read_description(descfile))
    ## Drop empty fields: these are usually taken as missing.    
    desc <- desc[nzchar(desc)]
    if (is.character(outfile)) {
        out <- file(outfile, "a")
        on.exit(close(out))
    } else out <- outfile
    fields <- names(desc)
    fields <- fields %w/o% c("Package", "Packaged", "Built")
    if(writeEncoding && !is.na(desc["Encoding"])) {
        cat("\\inputencoding{utf8}\n", file = out)
    }
    ## Also try adding PDF title and author metadata.
    tit <- desc["Title"]
    tit <- paste0(desc["Package"], ": ",
                  texify(mygsub("[[:space:]]+", " ", tit), two = TRUE))
    tit <- paste0("\\ifthenelse{\\boolean{Rd@use@hyper}}",
                  "{\\hypersetup{pdftitle = {", tit, "}}}{}")
    writeLines(tit, con = out, useBytes = TRUE)
    ## Only try author from Authors@R.
    if(!is.na(aar <- desc["Authors@R"])) {
        aar <- tryCatch(utils:::.read_authors_at_R_field(aar),
                        error = identity)
        if(!inherits(aar, "error")) {
            aar <- Filter(utils:::.person_has_author_role, aar)
            aut <- format(aar, include = c("given", "family"))
            aut <- paste(aut[nzchar(aut)], collapse = "; ")
            aut <- texify(mygsub("[[:space:]]+", " ", aut), two = TRUE)
            if(nzchar(aut)) {
                aut <- paste0("\\ifthenelse{\\boolean{Rd@use@hyper}}",
                              "{\\hypersetup{pdfauthor = {", aut, "}}}{}")
                writeLines(aut, con = out, useBytes = TRUE)
            }
        }
    }
    ## And now the actual content.
    cat("\\begin{description}", "\\raggedright{}", sep="\n", file=out)
    for (f in fields) {
        ## Drop 'Authors@R' for now: this is formatted badly by \AsIs,
        ## and ideally was used for auto-generating the Author and
        ## Maintainer fields anyways ...
        if(f == "Authors@R") next
        text <- desc[f]
        if(f %in% c("Author", "Description"))
            text <- mytrim(text)
        ## munge 'text' appropriately (\\, {, }, "...")
        ## not sure why just these: copied from Perl Rd2dvi, then added to.
        ## KH: the LaTeX special characters are
        ##   # $ % & _ ^ ~ { } \
        ## \Rd@AsIs@dospecials in Rd.sty handles the first seven, so
        ## braces and backslashes need explicit handling.
        text <- mygsub('"([^"]*)"', "\\`\\`\\1''", text)
        text <- texify(text, one = FALSE, two = TRUE)
        text <- myfsub("@VERSION@", version, text)
        if(f %in% c("Author", "Maintainer", "Contact"))
            text <- mytrfm("<([^@ ]+)@([^> ]+)>",
                           "}\\\\email{%s@%s}\\\\AsIs{",
                           text,
                           list(texify, texify),
                           c(1L, 2L))
        if(f %in% c("URL", "BugReports", "Additional_repositories"))
            text <- mygsub("(http://|ftp://|https://)([^[:space:],]+)",
                           "}\\\\url{\\1\\2}\\\\AsIs{",
                           text)
        if(f %in% c("Author",       # possibly with ORCID URLs inside <>
                    "Description")) {
            text <- mygsub("<(http://|ftp://|https://)([^[:space:],>]+)>",
                           "<}\\\\url{\\1\\2}\\\\AsIs{>",
                           text)
        }
        if(f == "Description") {   # DOI and arXiv identifiers inside <>
            text <- mytrfm("<(DOI:|doi:)([[:space:]]*)([^[:space:]]+)>",
                           "<}\\\\Rhref{https://doi.org/%s}{\\1%s}\\\\AsIs{>",
                           text,
                           list(identity, texify),
                           c(3L, 3L))
            ## Fancy escaping should not be needed for arXiv ids.
            text <- mygsub("<(arXiv|arxiv):([[:alnum:]/.-]+)([[:space:]]*\\[[^]]+\\])?>",
                           "<}\\\\Rhref{https://doi.org/10.48550/arXiv.\\2}{doi:10.48550/arXiv.\\2}\\\\AsIs{>",
                           text)
        }
        text <- paste0("\\AsIs{", text, "}")
        writeLines(paste0("\\item[", texify(f, TRUE, TRUE), "]",
                          text),
                   con = out, useBytes = TRUE)
    }
    cat("\\end{description}\n", file = out)
}

## workhorse of .Rd2pdf
## workhorse of .Rd2pdf
.Rdfiles2tex <-
    function(files, outfile, encoding = "unknown", outputEncoding = "UTF-8",
             append = FALSE, extraDirs = NULL, internals = FALSE,
             silent = FALSE, pkglist = NULL)
{
    ## A directory argument means "convert a whole package": delegate
    ## to .pkg2tex(), without wrapping the output in a \chapter.
    if (dir.exists(files))
        return(.pkg2tex(files, outfile, internals = internals,
                        asChapter = FALSE,
                        encoding = encoding,
                        outputEncoding = outputEncoding,
                        extraDirs = extraDirs, append = append,
                        silent = silent, pkglist = pkglist))

    ## Otherwise 'files' is a single whitespace-separated list of Rd paths.
    paths <- strsplit(files, "[[:space:]]+")[[1L]]
    if (!silent) message("Converting Rd files to LaTeX ...")
    if (is.character(outfile)) {
        outfile <- file(outfile, if (append) "at" else "wt")
        on.exit(close(outfile))
    }
    anyFigures <- FALSE
    macros <- initialRdMacros(pkglist = pkglist)
    for (p in paths) {
        if (!silent) cat("  ", basename(p), "\n", sep = "")
        rd <- parse_Rd(p, encoding = encoding, macros = macros)
        ## Skip pages marked \keyword{internal} unless asked to keep them.
        if (!internals &&
            any(.Rd_get_metadata(rd, "keyword") == "internal"))
            next
        ## Capture the LaTeX into 'lines' so a \graphicspath line can be
        ## prepended before anything is written out.
        lines <- character()
        con <- textConnection("lines", "w", local = TRUE)
        res <- Rd2latex(rd, con,
                        outputEncoding = outputEncoding,
                        writeEncoding = FALSE,
                        stages = c("build", "install", "render"))
        close(con) # ensure final line is output
        if (attr(res, "hasFigures")) {
            figdir <- file.path(dirname(p), "figures")
            lines <- c(.file_path_to_LaTeX_graphicspath(figdir), lines)
            anyFigures <- TRUE
        }
        writeLines(lines, outfile)
    }
    list(hasFigures = anyFigures)
}

## used for the refman (from doc/manual/Makefile*)
## and for directories from .Rdfiles2tex  (with asChapter = FALSE)
## used for the refman (from doc/manual/Makefile*)
## and for directories from .Rdfiles2tex  (with asChapter = FALSE)
##
## Convert all Rd content of one package (installed or source) into a
## single LaTeX file.  Returns (invisibly) list(hasFigures = <logical>),
## or character() when there is no Rd content at all.
.pkg2tex <-
    function(pkgdir, outfile, internals = FALSE, asChapter = TRUE,
             encoding = "unknown", outputEncoding = "UTF-8",
             extraDirs = NULL, append = FALSE, silent = FALSE,
             pkglist = NULL)
{
    ## For Rd \packageFOO macro expansion:
    path <- normalizePath(pkgdir)
    if(file.exists(file.path(path, "DESCRIPTION")))
        Sys.setenv("_R_RD_MACROS_PACKAGE_DIR_" = path)
    else if((basename(path) == "man") &&
            file.exists(file.path(dirname(path), "DESCRIPTION")))
        Sys.setenv("_R_RD_MACROS_PACKAGE_DIR_" = dirname(path))

    ## sort order for topics, a little tricky:
    ## case-insensitive primary key with case-sensitive tie-break.
    re <- function(x) x[order(toupper(x), x)]

    ## given an installed package with a latex dir or a source package
    ## with a man dir, make a single file for use in the refman.

    options(warn = 1)
    if (missing(outfile))
        outfile <- paste0(basename(pkgdir), "-pkg.tex")

    hasFigures <- FALSE
    graphicspath <- NULL

    ## First check for a latex dir (from R CMD INSTALL --latex).
    ## Second guess is this is a >= 2.10.0 package with stored .rds files.
    ## If it does not exist, guess this is a source package.
    latexdir <- file.path(pkgdir, "latex")
    if (!dir.exists(latexdir)) {
        if (dir.exists(file.path(pkgdir, "help"))) {
            ## Installed package with parsed Rd database ('help' dir):
            ## convert the stored Rd objects into per-topic .tex files
            ## in a temporary 'latex' directory.
            latexdir <- tempfile("ltx")
            dir.create(latexdir)
            if (!silent) message("Converting parsed Rd's to LaTeX ",
                                 appendLF = FALSE, domain = NA)
            Rd <- Rd_db(basename(pkgdir), lib.loc = dirname(pkgdir))
            if (!length(Rd)) {
                ## No Rd pages: still (re)create/truncate outfile so
                ## callers see an (empty) output file.
                if (is.character(outfile))
                    close(file(outfile, if (append) "at" else "wt"))
                return(invisible(character()))
            }
            cnt <- 0L
            for(f in names(Rd)) {
                cnt <- cnt + 1L
                ## Progress dot every 10 files.
                if (!silent && cnt %% 10L == 0L)
                    message(".", appendLF=FALSE, domain=NA)
                out <-  sub("[Rr]d$", "tex", basename(f))
                outfilename <- file.path(latexdir, out)
                res <- Rd2latex(Rd[[f]],
				  outfilename,
				  outputEncoding = outputEncoding,
				  defines = NULL, # already processed
				  writeEncoding = FALSE)
                hasFigures <- hasFigures || attr(res, "hasFigures")
            }
            if (hasFigures)
                graphicspath <- file.path(pkgdir, "help", "figures")
            if (!silent) message(domain = NA)
        } else {
            ## As from R 2.15.3, give priority to a man dir.
            mandir <- file.path(pkgdir, "man")
            if (dir.exists(mandir)) {
                files <- c(Sys.glob(file.path(mandir, "*.Rd")),
                           Sys.glob(file.path(mandir, "*.rd")))
                ## Also pick up OS-specific Rd files (man/unix, man/windows).
                if (is.null(extraDirs)) extraDirs <- .Platform$OS.type
                for(e in extraDirs)
                    files <- c(files,
                               Sys.glob(file.path(mandir, e, "*.Rd")),
                               Sys.glob(file.path(mandir, e, "*.rd")))
                if (!length(files))
                    stop("this package has a ", sQuote("man"), " directory but no .Rd files",
                         domain = NA)
                macros <- loadPkgRdMacros(pkgdir)
                macros <- initialRdMacros(pkglist, macros)
            } else {
                ## (Be nice and find Rd files & system macros also when 'pkgdir' is
                ## not a package root directory.)
                mandir <- pkgdir
                files <- c(Sys.glob(file.path(mandir, "*.Rd")),
                           Sys.glob(file.path(mandir, "*.rd")))
                if (!length(files))
                    stop("this package does not have either a ", sQuote("latex"),
                         " or a (source) ", sQuote("man"), " directory",
                         domain = NA)
                macros <- initialRdMacros(pkglist)
            }
            paths <- files
            ## Use a partial Rd db if there is one.
            ## In this case, files will become a list of paths or
            ## preprocessed Rd objects to be passed to Rd2latex(), and
            ## paths will contain the corresponding paths.
            built_file <- file.path(pkgdir, "build", "partial.rdb")
            if(file_test("-f", built_file)) {
                db <- readRDS(built_file)
                ## Replace matching entries by their preprocessed Rd objects.
                pos <- match(names(db), basename(paths), nomatch = 0L)
                files <- as.list(files)
                files[pos] <- db[pos > 0L]
            }
            ## Use a stage23 Rd db if there is one and we were asked to
            ## use it.
            built_file <- file.path(pkgdir, "build", "stage23.rdb")
            if(file_test("-f", built_file)) {
                use <- Sys.getenv("_RD2PDF_USE_BUILT_STAGE23_RD_DB_IF_AVAILABLE_",
                                  "FALSE")
                if(isTRUE(config_val_to_logical(use))) {
                    db <- readRDS(built_file)
                    pos <- match(names(db), basename(paths), nomatch = 0L)
                    files <- as.list(files)
                    files[pos] <- db[pos > 0L]
                }
            }
            ## Convert each source Rd (path or preprocessed object) into
            ## a per-topic .tex file in a temporary 'latex' directory.
            latexdir <- tempfile("ltx")
            dir.create(latexdir)
            if (!silent) message("Converting Rd files to LaTeX ",
                                 appendLF = FALSE, domain = NA)
            cnt <- 0L
            for(i in seq_along(paths)) {
                cnt <- cnt + 1L
                ## Progress dot every 10 files.
                if(!silent && cnt %% 10L == 0L)
                    message(".", appendLF = FALSE, domain = NA)
                out <-  sub("\\.[Rr]d$", ".tex", basename(paths[i]))
                outfilename <- file.path(latexdir, out)
                res <- Rd2latex(files[[i]], outfilename,
                                stages = c("build", "install", "render"),
                                encoding = encoding,
                                outputEncoding = outputEncoding,
                                writeEncoding = FALSE,
                                macros = macros)
                hasFigures <- hasFigures || attr(res, "hasFigures")
            }
            if (hasFigures)
                graphicspath <- file.path(mandir, "figures")
            if (!silent) message(domain = NA)
        }
    } else {
        ## Pre-converted latex dir (R CMD INSTALL --latex).
        graphicspath <- file.path(pkgdir, "help", "figures")
        hasFigures <- dir.exists(graphicspath)
    }

    ## There are some restrictions, but the former "[[:alnum:]]+\\.tex$" was
    ## too strict.
    files <- dir(latexdir, pattern = "\\.tex$", full.names = TRUE)
    if (!length(files))
        stop("no validly-named files in the ", sQuote("latex"), " directory",
             domain = NA)

    if (is.character(outfile)) {
        outcon <- file(outfile, if (append) "at" else "wt")
        on.exit(close(outcon))
    } else outcon <- outfile

    if (asChapter)
        cat("\n\\chapter{The \\texttt{", basename(pkgdir), "} package}\n",
            sep = "", file = outcon)

    if (hasFigures && !is.null(graphicspath))
        cat(.file_path_to_LaTeX_graphicspath(graphicspath), "\n",
            sep = "", file = outcon)

    ## Extract (LaTeX-escaped, ASCII) \name for sorting.
    topics <- rep("", length(files))
    names(topics) <- files
    for (f in files) {
        lines <- readLines(f, encoding = "bytes") # possibly latin1, still
        hd <- lines[startsWith(lines, "\\HeaderA")]
        if (!length(hd)) {
            warning("file ", sQuote(f), " lacks a header: skipping",
                    domain = NA)
            next
        }
        ## \HeaderA{<name>}... : take the first header's braced name.
        this <- sub("\\\\HeaderA\\{\\s*([^}]*)\\}.*", "\\1", hd[1L], perl = TRUE)
        ## Internal pages are dropped here when converting an installed
        ## package (source Rd files were filtered earlier).
        if (!internals &&
           any(startsWith(lines, "\\keyword{internal}")))
            next
        topics[f] <- this
    }

    topics <- topics[nzchar(topics)]
    ## <FIXME>
    ## these 'topics' come from Rd \name, not \alias entries, but we should
    ## (and WRE says) put the page aliased to the pkgname-package *topic* first
    ## which for >1500 CRAN packages is in a differently named file (90% pkg.Rd)
    ## </FIXME>
    summ <- which(endsWith(topics, "-package"))
    topics <- if (length(summ)) c(topics[summ], re(topics[-summ])) else re(topics)
    ## Concatenate the per-topic files in sorted order.
    for (f in names(topics)) writeLines(readLines(f), outcon)

    if (asChapter)
        cat("\\clearpage\n", file = outcon)

    invisible(list(hasFigures = hasFigures))
}


### * .Rdconv

## replacement R code for Perl-based R CMD Rdconv

## Command-line driver for 'R CMD Rdconv': convert a single Rd file to
## plain text, HTML, LaTeX, or extract the example code.
## 'args' defaults to the front end's command-line arguments (which are
## joined and re-split on the 'nextArg' separator, see below).
.Rdconv <- function(args = NULL)
{
    Usage <- function() {
        cat("Usage: R CMD Rdconv [options] FILE",
            "",
            "Convert R documentation in FILE to other formats such as plain text,",
            "HTML or LaTeX.",
            "",
            "Options:",
            "  -h, --help		print short help message and exit",
            "  -v, --version		print version info and exit",
            "  -t, --type=TYPE	convert to format TYPE",
            "  --encoding=enc        use 'enc' as the output encoding",
            "  --package=pkg         use 'pkg' as the package name",
            "  -o, --output=OUT	use 'OUT' as the output file",
            "      --os=NAME		assume OS 'NAME' (unix or windows)",
            "      --OS=NAME		the same as '--os'",
            "  --RdMacros=pkglist",
            "             		packages from which to get Rd macros",
            "",
            "Possible format specifications are 'txt' (plain text), 'html', 'latex',",
            "and 'example' (extract R code in the examples).",
            "",
            "The default is to send output to stdout, which is also given by '-o -'.",
            "Using '-o \"\"' will choose an output filename by removing a '.Rd'",
            "extension from FILE and adding a suitable extension.",
            "",
            "Report bugs at <https://bugs.R-project.org>.", sep = "\n")
    }

    options(showErrorCalls = FALSE, warn = 1)
    ## Option state, filled in while walking the argument list.
    files <- character(0L)
    type <- "unknown"
    enc <- ""
    pkg <- ""
    out <- NULL
    os <- ""
    pkglist <- NULL

    if (is.null(args)) {
        args <- commandArgs(TRUE)
        ## it seems that splits on spaces, so try harder.
        args <- paste(args, collapse=" ")
        args <- strsplit(args,'nextArg', fixed = TRUE)[[1L]][-1L]
    }

    while(length(args)) {
        a <- args[1L]
        if (a %in% c("-h", "--help")) {
            Usage()
            q("no", runLast = FALSE)
        }
        else if (a %in% c("-v", "--version")) {
            cat("Rdconv: ",
                R.version[["major"]], ".",  R.version[["minor"]],
                " (r", R.version[["svn rev"]], ")\n", sep = "")
            cat("",
                .R_copyright_msg(1997),
                "This is free software; see the GNU General Public License version 2",
                "or later for copying conditions.  There is NO warranty.",
                sep="\n")
            q("no", runLast = FALSE)
        } else if (a == "-t") {
            ## '-t' and '-o' take their value from the next argument.
            if (length(args) >= 2L) {type <- args[2L]; args <- args[-1L]}
            else stop("-t option without value", call. = FALSE)
        } else if (substr(a, 1, 7) == "--type=") {
            type <- substr(a, 8, 1000)
        } else if (substr(a, 1, 11) == "--encoding=") {
            enc <- substr(a, 12, 1000)
        } else if (substr(a, 1, 10) == "--package=") {
            pkg <- substr(a, 11, 1000)
        } else if (a == "-o") {
            if (length(args) >= 2L) {out <- args[2L]; args <- args[-1L]}
            else stop("-o option without value", call. = FALSE)
        } else if (substr(a, 1, 9) == "--output=") {
            out <- substr(a, 10, 1000)
        } else if (substr(a, 1, 5) %in% c("--os=", "--OS=")) {
            os <- substr(a, 6, 1000)
        } else if (substr(a, 1, 11) == "--RdMacros=") {
            pkglist <- substr(a, 12, 1000)
        } else if (startsWith(a, "-")) {
            message("Warning: unknown option ", sQuote(a))
        } else files <- c(files, a)
        args <- args[-1L]
    }
    if (length(files) != 1L)
        stop("exactly one Rd file must be specified", call. = FALSE)
    if (is.character(out) && !nzchar(out)) {
        ## choose 'out' from filename
        ## (was sub(..., file), i.e. the base function, which is an error;
        ## and the extension table had a typo 'exmaple')
        bf <- sub("\\.[Rr]d$", "", files)
        exts <- c(txt=".txt", html=".html", latex=".tex", example=".R")
        out <- paste0(bf,  exts[type])
    } else if (is.null(out)) out <- ""
    if (!nzchar(os)) os <- .Platform$OS.type
    macros <- initialRdMacros(pkglist = pkglist)
    ## Dispatch on the requested output format.
    switch(type,
           "txt" = {
               Rd2txt(files, out, package=pkg, defines=os,
                      outputEncoding = enc,
                      stages = c("build", "install", "render"),
                      macros = macros)
           },
           "html" = {
               if (!nzchar(enc)) enc <- "UTF-8"
               Rd2HTML(files, out, package = pkg, defines = os,
                       outputEncoding = enc, no_links = TRUE,
                       stages = c("build", "install", "render"),
                       macros = macros)
           },
           "latex" = {
               if (!nzchar(enc)) enc <- "UTF-8"
               Rd2latex(files, out, defines = os,
                        outputEncoding = enc,
                        stages = c("build", "install", "render"),
                        macros = macros)
           },
           "example" = {
               if (!nzchar(enc)) enc <- "UTF-8"
               Rd2ex(files, out, defines = os, outputEncoding = enc,
                     stages = c("build", "install", "render"),
                     macros = macros)
           },
           "unknown" = stop("no 'type' specified", call. = FALSE),
           stop("'type' must be one of 'txt', 'html', 'latex' or 'example'",
                call. = FALSE)
           )
    invisible()
}

### * .Rd2pdf

### * .Rd2pdf

## Write the complete LaTeX document ('Rd2.tex') for a set of Rd files
## or a package directory: header (documentclass, Rd.sty, encoding),
## title page, optional DESCRIPTION summary, the converted Rd bodies
## (via .Rdfiles2tex), and the footer.  'files_or_dir' is either a
## directory or a whitespace-separated list of Rd paths.
.Rd2pdf <-
function(pkgdir, outfile, title, silent = FALSE,
         description = TRUE, only_meta = FALSE,
         enc = "unknown", outputEncoding = "UTF-8", files_or_dir, OSdir,
         internals = FALSE, index = TRUE, pkglist = NULL)
{
    ## Write directly to the final location.  Encodings and figures
    ## may mean we need to make edits, but for most files one pass
    ## should be enough.
    out <- file(outfile, "wt")
    if (!nzchar(enc)) enc <- "unknown"

    desc <- NULL
    preconverted <- FALSE
    if (file.exists(f <- file.path(pkgdir, "DESCRIPTION"))) {
        desc <- read.dcf(f)[1,]
        ## Default the input encoding from the DESCRIPTION Encoding field.
        if (enc == "unknown") {
            pkg_enc <- desc["Encoding"]
            if (!is.na(pkg_enc)) {
            	enc <- pkg_enc
            }
        }
        ## 'outputEncoding' is irrelevant when pkgdir contains a package
        ## installed with --latex: tex files were written using pkg_enc
        ## and specify their \inputencoding, so we need inputenc.
        preconverted <- dir.exists(file.path(pkgdir, "latex"))
    }

    ## Rd2.tex part 1: header
    writeLines("\\nonstopmode{}", out)  # for texinfo < 6.7 and MikTeX's texify
    cat("\\documentclass[", Sys.getenv("R_PAPERSIZE"), "paper]{book}\n",
        "\\usepackage[", Sys.getenv("R_RD4PDF", "times,inconsolata,hyper"), "]{Rd}\n",
        sep = "", file = out)
    if (index) writeLines("\\usepackage{makeidx}", out)
    inputenc <- Sys.getenv("RD2PDF_INPUTENC", "inputenc")
    ## this needs to be canonical, e.g. 'utf8'
    ## trailer is for detection if we want to edit it later.
    latex_outputEncoding <- latex_canonical_encoding(outputEncoding)
    asUTF8 <- latex_outputEncoding == "utf8"
    setEncoding <-
        if (!preconverted && asUTF8 && inputenc == "inputenc") {
            ## LaTeX >= 2018-04-01 defaults to UTF-8; only load inputenc
            ## for older formats.
            paste0("\\makeatletter\\@ifl@t@r\\fmtversion{2018/04/01}{}{",
                   "\\usepackage[utf8]{inputenc}}",
                   "\\makeatother")
        } else
        paste0("\\usepackage[",
               if (asUTF8) "utf8"
               else paste0(c(if (description) "utf8", latex_outputEncoding), collapse=","),
               "]{", inputenc, "} % @SET ENCODING@")
    ## Commented out for now: enabled post hoc below iff figures are used.
    useGraphicx <- "% \\usepackage{graphicx} % @USE GRAPHICX@"
    writeLines(c(
        setEncoding,
        if (inputenc == "inputenx" && asUTF8) {
            "\\IfFileExists{ix-utf8enc.dfu}{\\input{ix-utf8enc.dfu}}{}"
        },
        if (nzchar(Sys.getenv("_R_CYRILLIC_TEX_")) && asUTF8) {
            "\\IfFileExists{t2aenc.def}{\\usepackage[T2A]{fontenc}}{}"
        },
        useGraphicx,
        if (index) "\\makeindex{}",
        "\\begin{document}"
    ), out)
    ## No --title given: derive one from DESCRIPTION(.in) or the inputs.
    if (!nzchar(title)) {
        if (is.character(desc))
            title <- paste0("Package `", desc["Package"], "'")
        else if (file.exists(f <- file.path(pkgdir, "DESCRIPTION.in"))) {
            desc <- read.dcf(f)[1,]
            title <- paste0("Package `", desc["Package"], "'")
        } else {
            if (dir.exists(pkgdir)) {
                subj <- paste0("all in \\file{", pkgdir, "}")
            } else {
                files <- strsplit(files_or_dir, "[[:space:]]+")[[1L]]
                subj1 <- if (length(files) > 1L) " etc." else ""
                subj <- paste0("\\file{", pkgdir, "}", subj1)
            }
            ## Escape LaTeX specials '_' and '$' in file names.
            subj <- gsub("([_$])", "\\\\\\1", subj)
            title <- paste("\\R{} documentation}} \\par\\bigskip{{\\Large of", subj)
        }
    }
    cat("\\chapter*{}\n",
        "\\begin{center}\n",
        "{\\textbf{\\huge ", title, "}}\n",
        "\\par\\bigskip{\\large \\today}\n",
        "\\end{center}\n", sep = "", file = out)
    if(description) {
        if(file.exists(f <- file.path(pkgdir, "DESCRIPTION")))
            .DESCRIPTION_to_latex(f, out, writeEncoding = !asUTF8)
        else if(file.exists(f <- file.path(pkgdir, "DESCRIPTION.in"))) {
            ## running on the sources of a base package will have
            ## DESCRIPTION.in, only.
            version <- readLines(file.path(pkgdir, "../../../VERSION"))
            .DESCRIPTION_to_latex(file.path(pkgdir, "DESCRIPTION.in"),
                                  out, version, writeEncoding = !asUTF8)
        }
    }

    ## Rd2.tex part 2: body
    hasFigures <- FALSE
    ## if this looks like a package with no man pages, skip body
    if (file.exists(file.path(pkgdir, "DESCRIPTION")) &&
        !(dir.exists(file.path(pkgdir, "man")) ||
          dir.exists(file.path(pkgdir, "help")) ||
          dir.exists(file.path(pkgdir, "latex")))) only_meta <- TRUE
    if (!only_meta) {
        if (dir.exists(files_or_dir))
            writeLines(c(
                "\\Rdcontents{Contents}",
                if (!asUTF8) paste0("\\inputencoding{", latex_outputEncoding, "}")
            ), out)
        res <- .Rdfiles2tex(files_or_dir, out, encoding = enc,
                            outputEncoding = outputEncoding,
                            append = TRUE, extraDirs = OSdir, 
                            internals = internals, silent = silent,
                            pkglist = pkglist)
        if(length(res)) {
            hasFigures <- res$hasFigures
        }
    }

    ## Rd2.tex part 3: footer
    if (index) writeLines("\\printindex{}", out)
    writeLines("\\end{document}", out)
    close(out)

    ## enable graphicx only if needed
    ## (second pass: rewrite the commented @USE GRAPHICX@ marker line)
    if (hasFigures) {
        lines <- readLines(outfile)
        lines[lines == useGraphicx] <-
            "\\usepackage{graphicx}\\setkeys{Gin}{width=0.7\\textwidth}"
	writeLines(lines, outfile)
    }

    invisible(NULL)
}

### * .Rdnewer

## replacement for tools/Rdnewer.pl,
## called from doc/manual/Makefile
## Command-line entry point: exits R with the status computed by
## ..Rdnewer() (0 if some Rd file is newer than 'file', 1 otherwise),
## so shell 'if' tests can use the exit status directly.
.Rdnewer <- function(dir, file)
    q("no", status = ..Rdnewer(dir, file), runLast = FALSE)

..Rdnewer <- function(dir, file, OS = .Platform$OS.type)
{
    ## Test whether any Rd file in the 'man' and 'man/$OS'
    ## subdirectories of directory DIR is newer than a given FILE.
    ## Return 0 if such a file is found (i.e., in the case of
    ## 'success'), and 1 otherwise, so that the return value can be used
    ## for shell 'if' tests.

    ## A non-existent reference FILE counts as out of date.
    if (!file.exists(file)) return(0L)
    age <- file.mtime(file)

    if (any(file.mtime(c(Sys.glob(file.path(dir, "man", "*.Rd")),
                         Sys.glob(file.path(dir, "man", "*.rd"))))
            > age))
        return(0L)

    ## OS-specific Rd files live in man/$OS: test that directory
    ## (the previous test of file.path(dir, OS) meant these files
    ## were never examined).
    if (dir.exists(file.path(dir, "man", OS))) {
        if (any(file.mtime(c(Sys.glob(file.path(dir, "man", OS, "*.Rd")),
                             Sys.glob(file.path(dir, "man", OS, "*.rd"))))
                > age))
            return(0L)
    }

    1L
}

### * ..Rd2pdf

## Driver called from R CMD Rd2pdf
## See the comments in install.R as to how this can be called directly.

## Command-line driver for 'R CMD Rd2pdf': parses the options, writes
## the LaTeX document via .Rd2pdf() into a temporary build directory,
## runs texi2pdf on it, and copies the resulting PDF to the output file.
## Exits R (status != 0 on failure) unless quit = FALSE.
..Rd2pdf <- function(args = NULL, quit = TRUE)
{
    ## Remove the build directory when '--no-clean' was not given;
    ## otherwise tell the user where it is.
    do_cleanup <- function(quiet = FALSE) {
        if(clean) {
            setwd(startdir)
            unlink(build_dir, recursive = TRUE)
        } else if (!quiet) {
            cat("You may want to clean up by 'rm -Rf ", build_dir, "'\n", sep="")
        }
    }

    Usage <- function() {
        cat("Usage: R CMD Rd2pdf [options] files",
            "",
            "Generate PDF output from the Rd sources specified by files, by",
            "either giving the paths to the files, or the path to a directory with",
            "the sources of a package, or an installed package.",
            "",
            "Unless specified via option '--output', the basename of the output file",
            "equals the basename of argument 'files' if this specifies a package",
            "or a single file, and 'Rd2' otherwise.",
            "",
            "The Rd sources are assumed to be ASCII unless they contain \\encoding",
            "declarations (which take priority) or --encoding is supplied or if using",
            "package sources, if the package DESCRIPTION file has an Encoding field.",
            "The output encoding defaults to 'UTF-8'.",
            "",
            "Files are listed in the order given: for a package they are in alphabetic",
            "order of the \\name sections.",
            "",
            "Options:",
            "  -h, --help		print short help message and exit",
            "  -v, --version		print version info and exit",
            "  -q, --quiet		no output unless errors",
            "      --no-clean	do not remove created temporary files",
            "      --no-preview	do not preview generated PDF file",
            "      --encoding=enc    use 'enc' as the default input encoding",
            "      --outputEncoding=outenc",
            "                        use 'outenc' as the default output encoding",
            "      --os=NAME		use OS subdir 'NAME' (unix or windows)",
            "      --OS=NAME		the same as '--os'",
            "  -o, --output=FILE	write output to FILE",
            "      --force		overwrite output file if it exists",
            "      --title=NAME	use NAME as the title of the document",
            "      --no-index	do not index output",
            "      --no-description	do not typeset the description of a package",
            "      --internals	typeset 'internal' documentation (usually skipped)",
            "      --build-dir=DIR	use DIR as the working directory",
            "      --RdMacros=pkglist",
            "             		packages from which to get Rd macros",
            "",
            "The output papersize is set by the environment variable R_PAPERSIZE.",
            "The PDF previewer is set by the environment variable R_PDFVIEWER.",
            "",
            "Report bugs at <https://bugs.R-project.org>.",
            sep = "\n")
    }

    options(showErrorCalls = FALSE, warn = 1)

    if (is.null(args)) {
        ## Re-split the front end's arguments on the 'nextArg' separator.
        args <- commandArgs(TRUE)
        args <- paste(args, collapse=" ")
        args <- strsplit(args,'nextArg', fixed = TRUE)[[1L]][-1L]
    }

    ## Option state (defaults), filled in while walking the arguments.
    startdir <- getwd()
    if (is.null(startdir))
        stop("current working directory cannot be ascertained")
    build_dir <- paste0(".Rd2pdf", Sys.getpid())
    title <- ""
    quiet <- FALSE
    clean <- TRUE
    only_meta <- FALSE
    out_ext <- "pdf"
    output <- ""
    enc <- "unknown"
    outenc <- "UTF-8"
    index <- TRUE
    description <- TRUE
    internals <- FALSE
    files <- character()
    dir <- ""
    force <- FALSE
    pkglist <- NULL

    WINDOWS <- .Platform$OS.type == "windows"

    preview <- Sys.getenv("R_PDFVIEWER", if(WINDOWS) "open" else "false")
    OSdir <- if (WINDOWS) "windows" else "unix"

    while(length(args)) {
        a <- args[1L]
        if (a %in% c("-h", "--help")) {
            Usage()
            q("no", runLast = FALSE)
        } else if (a %in% c("-v", "--version")) {
            cat("Rd2pdf: ",
                R.version[["major"]], ".",  R.version[["minor"]],
                " (r", R.version[["svn rev"]], ")\n", sep = "")
            cat("",
                .R_copyright_msg(2000),
                "This is free software; see the GNU General Public License version 2",
                "or later for copying conditions.  There is NO warranty.",
                sep="\n")
            q("no", runLast = FALSE)
        } else if (a == "--batch") {
            # ignore for back-compatibility (now always use batch)
        } else if (a %in% c("-q", "--quiet")) {
            quiet <- TRUE
        } else if (a == "--no-clean") {
            clean <- FALSE
        } else if (a == "--no-preview") {
            preview <- "false"
        } else if (a == "--pdf") {
            # ignore for back-compatibility
        } else if (substr(a, 1, 8) == "--title=") {
            title <- substr(a, 9, 1000)
        } else if (a == "-o") {
            ## '-o' takes its value from the next argument.
            if (length(args) >= 2L) {output <- args[2L]; args <- args[-1L]}
            else stop("-o option without value", call. = FALSE)
        } else if (substr(a, 1, 9) == "--output=") {
            output <- substr(a, 10, 1000)
        } else if (a == "--force") {
            force <- TRUE
        } else if (a == "--only-meta") {
            only_meta <- TRUE
        } else if (substr(a, 1, 5) %in% c("--os=", "--OS=")) {
            OSdir <- substr(a, 6, 1000)
        } else if (substr(a, 1, 11) == "--encoding=") {
            enc <- substr(a, 12, 1000)
        } else if (substr(a, 1, 17) == "--outputEncoding=") {
            outenc <- substr(a, 18, 1000)
        } else if (substr(a, 1, 12) == "--build-dir=") {
            build_dir <- substr(a, 13, 1000)
        } else if (a == "--no-index") {
            index <- FALSE
        } else if (a == "--no-description") {
            description <- FALSE
        } else if (a == "--internals") {
            internals <- TRUE
        } else if (substr(a, 1, 11) == "--RdMacros=") {
            pkglist <- substr(a, 12, 1000)
        } else if (startsWith(a, "-")) {
            message("Warning: unknown option ", sQuote(a))
        } else files <- c(files, a)
        args <- args[-1L]
    }

    if(!length(files)) {
        message("no inputs")
        q("no", status = 1L, runLast = FALSE)
    }

    ## Windows does not allow .../man/, say, for a directory
    if(WINDOWS) files[1L] <- sub("[\\/]$", "", files[1L])
    ## Classify the first input: installed/source package, base package
    ## sources (DESCRIPTION.in), plain directory, or individual files.
    if(dir.exists(files[1L])) {
        if(file.exists(file.path(files[1L], "DESCRIPTION"))) {
            if (!quiet) cat("Hmm ... looks like a package\n")
            dir <- files[1L]
            if(!nzchar(output)) output <- paste(basename(dir), out_ext, sep = ".")
        } else if (file.exists(f <- file.path(files[1L], "DESCRIPTION.in"))
                   && any(grepl("^Priority: *base", readLines(f)))) {
            if (!quiet) cat("Hmm ... looks like a package from the R distribution\n")
            dir <- files[1L]
            if(!nzchar(output)) output <- paste(basename(dir), out_ext, sep = ".")
            if(index && basename(dir) == "base") {
                index <- FALSE
                if (!quiet) cat("_not_ indexing 'base' package\n")
            }
        } else {
            dir <- if(dir.exists(d <- file.path(files[1L], "man"))) d else files[1L]
        }
    } else {
        description <- FALSE
        if(length(files) == 1L && !nzchar(output))
            output <- paste(sub("[.][Rr]d$", "", basename(files)), out_ext, sep = ".")
    }

    if(!nzchar(dir)) dir <- paste(files, collapse = " ")

    ## Prepare for building the documentation.
    if(!nzchar(output)) output <- paste0("Rd2.", out_ext)
    if(file.exists(output) && !force) {
        cat("file", sQuote(output), "exists; please remove it first\n")
        q("no", status = 1L, runLast = FALSE)
    }
    if(dir.exists(build_dir) && unlink(build_dir, recursive = TRUE)) {
        cat("cannot write to build dir\n")
        q("no", status = 2L, runLast = FALSE)
    }
    dir.create(build_dir, FALSE)

    ## Write the LaTeX sources to <build_dir>/Rd2.tex.
    res <-
        try(.Rd2pdf(files[1L], file.path(build_dir, "Rd2.tex"),
                    title, quiet, description, only_meta,
                    enc, outenc, dir, OSdir, internals, index,
                    pkglist))
    if (inherits(res, "try-error"))
        q("no", status = 11L, runLast = FALSE)

    if (!quiet)  cat("Creating", out_ext, "output from LaTeX ...\n")
    setwd(build_dir)

    ## R CMD _appends_ R's texmf tree to environmental TEXINPUTS, which could
    ## list another R version, so ensure Rd2pdf finds _this_ R's Rd.sty
    texinputs <- file.path(R.home("share"), "texmf", "tex", "latex")
    res <- try(texi2pdf('Rd2.tex', quiet = quiet, index = index, texinputs = texinputs))
    if(inherits(res, "try-error")) {
        ## Retry once before giving up (LaTeX runs can fail transiently).
        res <- try(texi2pdf('Rd2.tex', quiet = quiet, index = index, texinputs = texinputs))
        if(inherits(res, "try-error")) {
            message("Error in running tools::texi2pdf()")
            do_cleanup()
            q("no", status = 1L, runLast = FALSE)
        }
    }

    setwd(startdir)
    if (!quiet)  cat("Saving output to", sQuote(output), "...\n")
    file.copy(file.path(build_dir, paste0("Rd2.", out_ext)), output,
              overwrite = force)
    if (!quiet)  cat("Done\n")

    do_cleanup(quiet)
    if(preview != "false") system(paste(preview, output))
    if (quit)
    	q("no", runLast = FALSE)
}


### Local variables: ***
### mode: outline-minor ***
### outline-regexp: "### [*]+" ***
### End: ***
#  File src/library/tools/R/Rd2txt.R
#  Part of the R package, https://www.R-project.org
#
#  Copyright (C) 1995-2024 The R Core Team
#
#  This program is free software; you can redistribute it and/or modify
#  it under the terms of the GNU General Public License as published by
#  the Free Software Foundation; either version 2 of the License, or
#  (at your option) any later version.
#
#  This program is distributed in the hope that it will be useful,
#  but WITHOUT ANY WARRANTY; without even the implied warranty of
#  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
#  GNU General Public License for more details.
#
#  A copy of the GNU General Public License is available at
#  https://www.R-project.org/Licenses/

## This stops on
##  unrecognized tag
##  \\tabular format must be simple text
##  too many columns for format
##  invalid markup in \[S3]method
##  "Tag ", tag, " not expected in code block"

tabExpand <- function(x) {
    ## Expand tabs in the text of 'x' via the C helper, using an offset
    ## derived from the srcref (when present) so that tab stops line up
    ## with the element's original column in the Rd source.
    sr <- attr(x, "srcref")
    offset <- if (is.null(sr)) 0L else sr[5L] - 1L
    .Call(C_doTabExpand, x, offset)
}

Rd2txt_options <- local({
    ## Rendering options for Rd2txt, kept in a closure.  Calling with no
    ## arguments returns the full option list; calling with name=value
    ## pairs (or a single list of them) updates the options and invisibly
    ## returns the previous values of the ones that were set.
    opts <- list(width = 80L,
                 minIndent = 10L,
                 extraIndent = 4L,
                 sectionIndent = 5L,
                 sectionExtra = 2L,
                 itemBullet = "* ",
                 enumFormat = function(n) sprintf("%d. ", n),
                 showURLs = FALSE,
                 code_quote = TRUE,
                 underline_titles = TRUE)
    function(...) {
        new <- list(...)
        if (length(new) == 0L)
            return(opts)
        ## Allow a single list argument in place of name=value pairs.
        if (is.list(new[[1L]]))
            new <- new[[1L]]
        previous <- opts[names(new)]
        opts[names(new)] <<- new
        invisible(previous)
    }
})

transformMethod <- function(i, blocks, Rdfile) {
    ## Rewrite the \method / \S3method / \S4method element at position 'i'
    ## of 'blocks' (a list of tagged Rd elements) into plain displayable R
    ## code, e.g. turning \method{[}{data.frame}(x, i) markup plus the
    ## following RCODE into "x[i]" preceded by an explanatory "## S3
    ## method for ..." comment line.  Returns the modified block list.

    ## Wrap 'newtext' in a one-element list carrying the same Rd tag and
    ## srcref as 'block', suitable for splicing back into 'blocks'.
    editblock <- function(block, newtext)
        list(tagged(newtext,
                    attr(block, "Rd_tag"),
                    attr(block, "srcref")))

    # Most of the internal functions below are more like macros
    # than functions; they mess around with these variables:

    chars <- NULL   # characters of the RCODE element currently being edited
    char <- NULL    # index within 'chars' of the character of interest
    j <- NULL       # index within 'blocks' of that RCODE element

    ## Scan forward from block i for the opening parenthesis of the call;
    ## sets chars, char and j (or stops with an Rd error).
    findOpen <- function(i) {
        j <- i
        char <- NULL
        while (j < length(blocks)) {
            j <- j + 1L
            tag <- attr(blocks[[j]], "Rd_tag")
            if (tag == "RCODE") {

                # FIXME:  This search and the ones below will be fooled
                # by "#" comments

                chars <- strsplit(blocks[[j]], "")[[1]]
                parens <- cumsum( (chars == "(") - (chars == ")") )
                if (any(parens > 0)) {
                    char <- which.max(parens > 0)
                    break
                }
            }
        }
        if (is.null(char))
            stopRd(block, Rdfile, sprintf("no parenthesis following %s", blocktag))
        chars <<- chars
        char <<- char
        j <<- j
    }

    ## Scan forward from block i for the first comma at the top level of
    ## the argument list (paren depth 1); sets chars, char and j.
    findComma <- function(i) {
        j <- i
        level <- 1L
        char <- NULL
        while (j < length(blocks)) {
            j <- j + 1L
            tag <- attr(blocks[[j]], "Rd_tag")
            if (tag == "RCODE") {
                chars <- strsplit(blocks[[j]], "")[[1]]
                parens <- level + cumsum( (chars == "(") - (chars == ")") )
                if (any(parens == 1 & chars == ",")) {
                    char <- which.max(parens == 1 & chars == ",")
                    break
                }
                ## Argument list closed before any top-level comma.
                if (any(parens == 0))
                    break
                level <- parens[length(parens)]
            }
        }
        if (is.null(char))
            stopRd(block, Rdfile, sprintf("no comma in argument list following %s", blocktag))
        chars <<- chars
        char <<- char
        j <<- j
    }

    ## Scan forward from block i for the matching closing parenthesis
    ## (paren depth back to 0); sets chars, char and j.
    findClose <- function(i) {
        j <- i
        level <- 1L
        char <- NULL
        while (j < length(blocks)) {
            j <- j + 1L
            tag <- attr(blocks[[j]], "Rd_tag")
            if (tag == "RCODE") {
                chars <- strsplit(blocks[[j]], "")[[1]]
                parens <- level + cumsum( (chars == "(") - (chars == ")") )
                if (any(parens == 0)) {
                    char <- which(parens == 0)[1]
                    break
                }
                level <- parens[length(parens)]
            }
        }
        if (is.null(char))
            stopRd(block, Rdfile, sprintf("no closing parenthesis following %s", blocktag))
        chars <<- chars
        char <<- char
        j <<- j
    }

    ## Rebuild 'blocks', splitting element j at position 'char' so the
    ## edited first part and the untouched remainder are separate elements.
    rewriteBlocks <- function()
        c(blocks[seq_len(j-1L)],
          editblock(blocks[[j]],
                    paste(chars[seq_len(char)], collapse="")),
          if (char < length(chars))
              editblock(blocks[[j]],
                        paste(chars[-seq_len(char)], collapse="")),
          if (j < length(blocks)) blocks[-seq_len(j)])

    ## Blank out any spaces in 'chars' immediately after position 'char',
    ## advancing 'char' past them.
    deleteBlanks <- function() {
        while (char < length(chars)) {
            if (chars[char + 1] == " ") {
                char <- char + 1
                chars[char] <- ""
            } else
                break
        }
        char <<- char
        chars <<- chars
    }

    block <- blocks[[i]]
    blocktag <- attr(block, "Rd_tag")
    srcref <- attr(block, "srcref")
    class <- block[[2L]] # or signature
    generic <- as.character(block[[1L]])
    default <- as.character(class) == "default"

    if(generic %in% c("[", "[[", "$")) {
        ## need to assemble the call by matching parens in RCODE
        findOpen(i) # Sets chars, char and j
        chars[char] <- ""
        ## (was an inline copy of rewriteBlocks(); use the helper for
        ## consistency with the other branches)
        blocks <- rewriteBlocks()

        findComma(j) # Sets chars, char and j
        chars[char] <- generic
        # Delete blanks after the comma
        deleteBlanks()
        blocks <- rewriteBlocks()

        findClose(j)
        # Edit the closing paren
        chars[char] <- switch(generic,
                "[" = "]",
                "[[" = "]]",
                "$" = "")
        blocks[j] <- editblock(blocks[[j]],
                               paste(chars, collapse=""))

        methodtype <- if (grepl("<-", blocks[[j]])) "replacement " else ""
    } else if(grepl(sprintf("^%s$",
                           paste(c("\\+", "\\-", "\\*",
                                   "\\/", "\\^", "<=?",
                                   ">=?", "!=?", "==",
                                   "\\&", "\\|", "!",
                                   "\\%[[:alnum:][:punct:]]*\\%"),
                                 collapse = "|")),
                   generic)) {
        ## Binary operators and unary '!'.
        findOpen(i)

        if (generic != "!") {
            ## Rewrite op(a, b) as a op b.
            chars[char] <- ""
            blocks <- rewriteBlocks()
            findComma(j)
            chars[char] <- paste0(" ", generic, " ")
            # Delete blanks after the comma
            deleteBlanks()
            blocks <- rewriteBlocks()
        } else {
            ## Rewrite !(a) as !a.
            chars[char] <- "!"
            blocks <- rewriteBlocks()
        }

        findClose(j)
        chars[char] <- ""
        blocks[j] <- editblock(blocks[[j]],
                               paste(chars, collapse=""))

        methodtype <- ""
    } else {
        ## Ordinary generic: just substitute the generic's name for the
        ## \method markup and keep the call's parentheses.
        findOpen(i)
        chars[char] <- paste0(generic, "(")
        blocks <- rewriteBlocks()
        findClose(j)
        methodtype <- if (grepl("<-", blocks[[j]])) "replacement " else ""
    }

    ## Prepend an explanatory comment line describing the method.
    if (blocktag == "\\S4method") {
        ## some signatures are very long.
        blocks <- if(nchar(class) > 50L) {
            cl <- paste0("'", as.character(class), "'")
            if(nchar(cl) > 70L) {
                ## Break the signature after each comma and indent the
                ## continuation lines.
                cl <- strsplit(cl, ",")[[1L]]
                ncl <- length(cl)
                cl[-ncl] <- paste0(cl[-ncl], ",")
                cl[-1L] <- paste0("  ", cl[-1L])
            }
            cl <- paste("##", cl, collapse="\n")
            c( blocks[seq_len(i-1L)],
              list(tagged(paste0("## S4 ", methodtype,
                                 "method for signature \n"),
                          "RCODE", srcref)),
              list(tagged(cl, "TEXT", srcref)),
              list(tagged("\n", "RCODE", srcref)),
              blocks[-seq_len(i)] )
        } else
            c( blocks[seq_len(i-1L)],
              list(tagged(paste0("## S4 ", methodtype,
                                 "method for signature '"),
                          "RCODE", srcref)),
              class,
              list(tagged("'\n", "RCODE", srcref)),
              blocks[-seq_len(i)] )
    } else if (default)
        blocks <- c( blocks[seq_len(i-1)],
                     list(tagged(paste0("## Default S3 ", methodtype,
                                        "method:\n"),
                                 "RCODE", srcref)),
                     blocks[-seq_len(i)] )
    else
        blocks <- c( blocks[seq_len(i-1)],
                     list(tagged(paste0("## S3 ", methodtype,
                                        "method for class '"),
                                 "RCODE", srcref)),
                     class,
                     list(tagged("'\n", "RCODE", srcref)),
                     blocks[-seq_len(i)] )
    blocks
}# transformMethod()

Rd2txt <-
    function(Rd, out="", package = "", defines=.Platform$OS.type,
             stages = "render", outputEncoding = "",
             fragment = FALSE, options, ...)
{

    ## we need to keep track of where we are.
    ## These variables form the mutable rendering state shared (via <<-)
    ## by the helper closures defined below.
    buffer <- character()	# Buffer not yet written to con
    				# Newlines have been processed, each line in buffer is
    				# treated as a separate input line (but may be wrapped before output)
    linestart <- TRUE		# At start of line?
    indent <- 0L		# Default indent
    wrapping <- TRUE		# Do word wrap?
    keepFirstIndent <- FALSE	# Keep first line indent?
    dropBlank <- FALSE		# Drop initial blank lines?
    haveBlanks <- 0L		# How many blank lines have just been written?
    enumItem <- 0L		# Last enumeration item number
    inEqn <- FALSE		# Should we do edits needed in an eqn?
    sectionLevel <- 0		# How deeply nested within sections/subsections

    saveOpts <- Rd2txt_options()
    on.exit(Rd2txt_options(saveOpts))# Rd files may change these, so restore them
    				     # whether or not the caller set them.
    if (!missing(options)) Rd2txt_options(options)

## these attempt to mimic pre-2.10.0 layout
    WIDTH <- 0.9 * Rd2txt_options()$width   # usable body width (90% of option)
    HDR_WIDTH <- WIDTH - 2L                 # width available for the header line

    startCapture <- function() {
        ## Snapshot the current rendering state and reset it, so output
        ## produced next is collected into a fresh, empty buffer.  The
        ## returned snapshot is passed back to endCapture() to restore.
        state <- list(buffer = buffer, linestart = linestart, indent = indent,
                      wrapping = wrapping, keepFirstIndent = keepFirstIndent,
                      dropBlank = dropBlank, haveBlanks = haveBlanks,
                      enumItem = enumItem, inEqn = inEqn)
        buffer <<- character()
        linestart <<- TRUE
        indent <<- 0L
        wrapping <<- TRUE
        keepFirstIndent <<- FALSE
        dropBlank <<- FALSE
        haveBlanks <<- 0L
        enumItem <<- 0L
        inEqn <<- FALSE
        state
    }

    endCapture <- function(saved) {
        ## Finish a capture started by startCapture(): return the captured
        ## buffer lines and restore the saved rendering state.
        captured <- buffer
        buffer <<- saved$buffer
        linestart <<- saved$linestart
        indent <<- saved$indent
        wrapping <<- saved$wrapping
        keepFirstIndent <<- saved$keepFirstIndent
        dropBlank <<- saved$dropBlank
        haveBlanks <<- saved$haveBlanks
        enumItem <<- saved$enumItem
        inEqn <<- saved$inEqn
        captured
    }

    ## for efficiency
    WriteLines <-
        if(outputEncoding == "UTF-8" ||
           (outputEncoding == "" && l10n_info()[["UTF-8"]])) {
        function(x, con, outputEncoding, ...)
            writeLines(x, con, useBytes = TRUE, ...)
    } else {
        function(x, con, outputEncoding, ...) {
            x <- iconv(x, "UTF-8", outputEncoding, sub="byte", mark=FALSE)
            writeLines(x, con, useBytes = TRUE, ...)
        }
    }

    ## Use display widths as used by cat not print.
    ## This may receive length(x) > 1 lines.
    ## Optionally format only if the input can be collapsed into a single line.
    ## Justify lines using display widths (as cat() would see them, not
    ## print()).  May receive several lines at once; with collapsed = TRUE
    ## the lines are first joined (and trimmed) into one line, but only if
    ## the result still fits within 'width'.
    frmt <- function(x, justify = "left", width = 0L, collapsed = FALSE) {
        justify <- match.arg(justify, c("left", "right", "centre", "none"))
        if(justify == "none" || !length(x))
            return(x)
        if(collapsed) { # also trims single-line input
            joined <- paste0(trim(x), collapse = " ")
            w <- nchar(joined, "width")
            if(w >= width)
                return(x)
            x <- joined
        } else
            w <- nchar(x, "width")
        if(!any(w < width))
            return(x)
        pad <- pmax(0, width - w)
        half <- pad %/% 2
        padding <- switch(justify,
                          left   = list(0L, pad),
                          right  = list(pad, 0L),
                          centre = list(half, pad - half))
        paste0(strrep(" ", padding[[1L]]), x, strrep(" ", padding[[2L]]))
    }

    ## Switch the word-wrapping mode; flushes first, since buffered lines
    ## were accumulated under the previous mode.
    wrap <- function(doWrap = TRUE)
	if (doWrap != wrapping) { flushBuffer(); wrapping <<- doWrap }

    ## put() with word-wrapping enabled.
    putw <- function(...)  { wrap(TRUE); put(...) }

    ## put() with word-wrapping disabled (verbatim/preformatted output).
    putf <- function(...)  { wrap(FALSE); put(...) }

    ## Append text to the output buffer.  The arguments are concatenated,
    ## split on newlines, and appended as buffer lines; 'linestart' tracks
    ## whether the last buffered line is complete, and 'dropBlank' causes
    ## leading blank lines to be discarded.
    put <- function(...) {
        txt <- paste0(..., collapse="")
        trail <- endsWith(txt, "\n")   # did the text end in a newline?
        # Convert newlines
        txt <- strsplit(txt, "\n", fixed = TRUE)[[1L]]
        if (dropBlank) {
            while(length(txt) && grepl("^[[:space:]]*$", txt[1L]))
            	txt <- txt[-1L]
            if (length(txt)) dropBlank <<- FALSE
        }
        if(!length(txt)) return()
        haveBlanks <<- 0   # output no longer ends with blank lines

        if (linestart) buffer <<- c(buffer, txt)
        else if (length(buffer)) {
            ## Continue the last (unterminated) buffer line.
            buffer[length(buffer)] <<-
                paste0(buffer[length(buffer)], txt[1L])
            buffer <<- c(buffer, txt[-1L])
        }
        else buffer <<- txt
        linestart <<- trail
    }

    ## Write the buffered lines to 'con' and reset the buffer.  When
    ## 'wrapping' is TRUE, paragraphs (runs of non-blank lines) are
    ## re-wrapped to WIDTH; otherwise lines are written as-is, indented.
    ## 'keepFirstIndent' makes the first line keep its existing leading
    ## blanks instead of the current 'indent'.
    flushBuffer <- function() {
    	if (!length(buffer)) return()

    	if (wrapping) {
	    if (keepFirstIndent) {
                ## Use the first line's own leading blanks as its indent.
		first <- nchar(psub1("[^ ].*", "", buffer[1L]))
		keepFirstIndent <<- FALSE
	    } else
		first <- indent

	    buffer <<- c(buffer, "")  # Add an extra blank sentinel
	    blankLines <- grep("^[[:space:]]*$", buffer)
	    result <- character()
	    start <- 1L
	    for (i in seq_along(blankLines)) {
		if (blankLines[i] > start) {
		    result <- c(result,
                                strwrap(paste(buffer[start:(blankLines[i]-1L)],
                                              collapse = " "),
                                        WIDTH, indent = first, exdent = indent))
		    first <- indent
                }
                result <- c(result, "")
		start <- blankLines[i]+1L
	    }
            ## we want to collapse multiple blank lines when wrapping
            ## and to remove the sentinel (which we need to do first or
            ## we will drop a single blank line)
            buffer <<- result[-length(result)]
            empty <- !nzchar(buffer)
            drop <- empty & c(FALSE, empty[-length(empty)])
            buffer <<- buffer[!drop]
	} else {  # Not wrapping
	    if (keepFirstIndent) {
		if (length(buffer) > 1L)
		    buffer[-1L] <<- paste0(strrep(" ", indent), buffer[-1L])
		## Reset the closure flag: this was previously a plain
		## local assignment ('<-'), which had no effect, so the
		## flag could leak into a later flush.  Now matches the
		## wrapping branch above.
		keepFirstIndent <<- FALSE
	    } else
		buffer <<- paste0(strrep(" ", indent), buffer)
	}

    	if (length(buffer)) WriteLines(buffer, con, outputEncoding)
    	buffer <<- character()
    	linestart <<- TRUE
    }

    encoding <- "unknown"  # NOTE(review): presumably updated later when the
                           # Rd file declares an encoding -- used beyond this chunk

    li <- l10n_info()
    ## See the comment in ?Rd2txt as to why we do not attempt fancy quotes
    ## in Windows CJK locales -- and in any case they would need more work
    ## This covers the common single-byte locales and Thai (874)
    use_fancy_quotes <-
        (.Platform$OS.type == "windows" &&
         ((li$codepage >= 1250 && li$codepage <= 1258) || li$codepage == 874)) ||
        li[["UTF-8"]]

    ## Directional Unicode quotes when supported, ASCII fallbacks otherwise.
    if(!isFALSE(getOption("useFancyQuotes")) && use_fancy_quotes) {
    	LSQM <- "\u2018"                # Left single quote
    	RSQM <- "\u2019"                # Right single quote
    	LDQM <- "\u201c"                # Left double quote
    	RDQM <- "\u201d"                # Right double quote
    } else {
        LSQM <- RSQM <- "'"
        LDQM <- RDQM <- '"'
    }

    ## Strip leading and trailing whitespace from each element of x.
    trim <- function(x)
        psub1("\\s*$", "", psub1("^\\s*", "", x))

    ## underline via backspacing
    ## underline via backspacing: each alphanumeric character is prefixed
    ## with "_\b" so terminals/pagers render the header underlined.
    txt_header <- function(header) {
        header <- paste(strwrap(header, WIDTH), collapse = "\n")
        if (!Rd2txt_options()$underline_titles)
            return(header)
        chars <- strsplit(header, "", fixed = TRUE)[[1L]]
        target <- grep("[[:alnum:]]", chars)
        chars[target] <- paste0("_\b", chars[target])
        paste(chars, collapse = "")
    }

    ## Collapse Rd en-dash/em-dash markup ("--", "---") to a plain hyphen.
    unescape <- function(x)
        psub("(---|--)", "-", x)

    ## Emit a code fragment: apply eqn edits when inside an equation, and
    ## undo the '"\{"' escaping before buffering the text.
    writeCode <- function(x) {
        txt <- as.character(x)
        if (inEqn)
            txt <- txt_eqn(txt)
        put(fsub('"\\{"', '"{"', txt))
    }

    ## Strip a pending blank line
    stripBlankLine <- function() {
        ## Remove a trailing blank line from the buffer, if present;
        ## returns TRUE when one was removed.
        n <- length(buffer)
        if (n > 0L && grepl("^[[:blank:]]*$", buffer[n])) {
            buffer <<- buffer[-n]
            TRUE
        } else
            FALSE
    }
    ## Strip pending blank lines, then add n new ones.
    blankLine <- function(n = 1L) {
        ## Discard any blank lines pending at the end of the buffer ...
    	while (stripBlankLine()) NULL
	flushBuffer()
	## ... then write only as many blank lines as needed so that n
	## blank lines in total have just been emitted.
	if (n > haveBlanks) {
	    buffer <<- rep_len("", n - haveBlanks)
	    flushBuffer()
	    haveBlanks <<- n
	}
	dropBlank <<- TRUE  # swallow blank lines at the start of what follows
    }

    ## Crude plain-text rendering of LaTeX-like markup inside \eqn/\deqn:
    ## spell out Greek letters and a few symbols by name, and strip simple
    ## font/quote markup.
    txt_eqn <- function(x) {
        ## Greek letters and \sum/\prod/\sqrt lose their backslash.
        x <- psub("\\\\(Alpha|Beta|Gamma|Delta|Epsilon|Zeta|Eta|Theta|Iota|Kappa|Lambda|Mu|Nu|Xi|Omicron|Pi|Rho|Sigma|Tau|Upsilon|Phi|Chi|Psi|Omega|alpha|beta|gamma|delta|epsilon|zeta|eta|theta|iota|kappa|lambda|mu|nu|xi|omicron|pi|rho|sigma|tau|upsilon|phi|chi|psi|omega|sum|prod|sqrt)", "\\1", x)
        x <- psub("\\\\(dots|ldots)", "...", x)
        x <- fsub("\\le", "<=", x)
        x <- fsub("\\ge", ">=", x)
        x <- fsub("\\infty", "Inf", x)
        ## FIXME: are these needed?
        x <- psub("\\\\(bold|strong|emph|var)\\{([^}]*)\\}", "\\2", x)
        x <- psub("\\\\(code|samp)\\{([^}]*)\\}", "'\\2'", x)
        x
    }

    ## Render a \dontrun block: multi-element blocks get bracketing
    ## comment lines, single-element ones a one-line prefix.
    writeDR <- function(block, tag) {
        multiline <- length(block) > 1L
        if (multiline) {
            putf('## Not run:\n')
            writeCodeBlock(block, tag)
            blankLine(0L)
            putf('## End(Not run)\n')
        } else {
            putf('## Not run: ')
            writeCodeBlock(block, tag)
        }
    }

    ## Render quoted content (\sQuote/\dQuote, also used for \code etc.
    ## via the 'quote' argument), with fancy Unicode quotes when available.
    writeQ <- function(block, tag, quote=tag)
    {
        single <- quote == "\\sQuote"
        if (use_fancy_quotes) {
            open  <- if (single) LSQM else LDQM
            close <- if (single) RSQM else RDQM
        } else {
            open <- close <- if (single) "'" else "\""
        }
        put(open)
        writeContent(block, tag)
        put(close)
    }

    ## Render a single non-code Rd element onto the output buffer.
    ## 'tag' is the Rd tag of 'block'; 'blocktag' is the tag of the
    ## enclosing element (used e.g. to skip unescaping inside \command).
    writeBlock <- function(block, tag, blocktag) {
        switch(tag,
               UNKNOWN =,
               VERB =,
               RCODE = writeCode(tabExpand(block)),
               TEXT = if(blocktag == "\\command") putw(block) else putw(unescape(tabExpand(block))),
               USERMACRO =,
               "\\newcommand" =,
               "\\renewcommand" = {},
               COMMENT = {
                   stripBlankLine()     # drop indentation
                   linestart <<- FALSE  # eat subsequent \n also for non-indented comments
               },
               LIST = writeContent(block, tag),
               "\\describe" = {
               	   blankLine(0L)
                   writeContent(block, tag)
                   blankLine()
               },
               "\\itemize"=,
               "\\enumerate"= {
                   ## Indented list; save/restore indent and item counter so
                   ## nested lists number independently.
               	   blankLine(0L)
                   enumItem0 <- enumItem
                   enumItem <<- 0L
                   indent0 <- indent
                   opts <- Rd2txt_options()
                   indent <<- max(opts$minIndent,
                              indent + opts$extraIndent)
                   dropBlank <<- TRUE
                   writeContent(block, tag)
                   blankLine()
                   indent <<- indent0
                   enumItem <<- enumItem0
               },
               ## Code-like inline markup: single-quoted when code_quote is on.
               "\\code"=,
               "\\command"=,
               "\\env"=,
               "\\file"=,
               "\\kbd"=,
               "\\option"=,
               "\\pkg"=,
               "\\samp" = {
                   opts <- Rd2txt_options()
                   if(opts$code_quote)
                       writeQ(block, tag, quote="\\sQuote")
                   else writeContent(block,tag)
               },
               "\\email" = {
                   # for legibility, do not URLencode: some use ". at ." etc
                   put("<mailto:", lines2str(as.character(block)), ">")
               },
               "\\url" = {
                   put("<", utils::URLencode(lines2str(as.character(block))), ">")
               },
               "\\href" = {
                   opts <- Rd2txt_options()
                   writeContent(block[[2L]], tag)
                   if (opts$showURLs)
  			put(" <", utils::URLencode(lines2str(as.character(block[[1L]]))), ">")
               },
               "\\Sexpr"= put(as.character.Rd(block, deparse=TRUE)),
               "\\abbr" =,
               "\\acronym" =,
               "\\cite"=,
               "\\dfn"= ,
               "\\special" = writeContent(block, tag),
               "\\var" = {
                   put("<")
                   writeContent(block, tag)
                   put(">")
               },
               "\\bold"=,
               "\\strong"= {
                   put("*")
                   writeContent(block, tag)
                   put("*")
               },
               "\\emph"= {
                   put("_")
                   writeContent(block, tag)
                   put("_")
               },
               "\\sQuote" =,
               "\\dQuote"= writeQ(block, tag) ,
               "\\preformatted"= {
                   blankLine()
                   wrap(FALSE)
                   writeCodeBlock(block, tag)
                   blankLine()
               },
               "\\verb"= {
                   writeContent(block[1L], tag)
                   if (length(block) > 1L) {
                       wrap(FALSE) # flush and keep subsequent linebreaks/formatting
                       writeContent(block[-1L], tag)
                   }
               },
               ## Links render as their text only in plain-text output.
               "\\linkS4class" =,
               "\\link" = writeContent(block, tag),
               "\\cr" = {
                   if (!length(buffer)) { # \cr\cr
                       dropBlank <<- FALSE
                       put("\n")
                   }
                   ## we want to print out what we have, and if
                   ## followed immediately by \n (as it usually is)
                   ## discard that.  This is not entirely correct,
                   ## but it is better than before ....
                   flushBuffer()
                   dropBlank <<- TRUE
                   },
               "\\dots" =,
               "\\ldots" = put("..."),
               "\\R" = put("R"),
               "\\enc" = {
                   ## Test to see if we can convert the encoded version
                   txt <- as.character(block[[1L]])
                   test <- iconv(txt, "UTF-8", outputEncoding, mark = FALSE)
                   txt <- if(!anyNA(test)) txt else as.character(block[[2L]])
                   put(txt)
               } ,
               "\\eqn" = {
                   block <- block[[length(block)]]
                   ## FIXME: treat 2 of 2 differently?
                   inEqn0 <- inEqn
                   inEqn <<- TRUE
                   dropBlank <<- TRUE
                   writeContent(block, tag)
                   inEqn <<- inEqn0
               },
               "\\deqn" = {
                   ## Display equation: capture the rendering, then emit it
                   ## (centred on one line when short enough).
                   blankLine()
                   block <- block[[length(block)]]
                   save <- startCapture()
                   inEqn <<- TRUE
                   writeContent(block, tag)
                   eqn <- endCapture(save)
                   ## try collapsing into a single centred line (as in R < 4.4.0)
                   ## but only if the source block spans at most 3 lines
                   if(length(eqn) <= 3L)
                       eqn <- frmt(eqn, justify = "centre",
                                   width = WIDTH - indent, collapsed = TRUE)
                   putf(paste(eqn, collapse="\n"))
    		   blankLine()
               },
               "\\figure" = {
                   ## Figures render as their centred alt-text, if any.
                   blankLine()
                   save <- startCapture()
                   writeContent(block[[length(block)]], tag)
                   alt <- endCapture(save)
                   if (length(alt)) {
                       alt <- frmt(alt, justify = "centre",
                                   width = WIDTH - indent)
                       putf(paste(alt, collapse = "\n"))
                       blankLine()
                   }
               },
               "\\tabular" = writeTabular(block),
               "\\subsection" = writeSection(block, tag),
               "\\if"=,
               "\\ifelse" =
                   if (testRdConditional("text", block, Rdfile))
               		writeContent(block[[2L]], tag)
               	   else if (tag == "\\ifelse")
               	   	writeContent(block[[3L]], tag),
               "\\out" = for (i in seq_along(block))
		   put(block[[i]]),
               stopRd(block, Rdfile, "Tag ", tag, " not recognized")
               )
    }

    ## Render a \tabular block as aligned plain-text columns.
    ## table[[1]] is the format string (e.g. "lcr"), table[[2]] the content,
    ## where \tab separates columns and \cr separates rows.
    writeTabular <- function(table) {
    	formats <- table[[1L]]
    	content <- table[[2L]]
    	if (length(formats) != 1L || RdTags(formats) != "TEXT")
    	    stopRd(table, Rdfile, "\\tabular format must be simple text")
    	formats <- strsplit(formats[[1L]], "", fixed = TRUE)[[1L]]
        tags <- RdTags(content)
        entries <- list()
        row <- 1L
        col <- 1L
        ## Each cell's text is produced by rendering into a captured buffer.
        save <- startCapture()
        dropBlank <<- TRUE
        newEntry <- function() {
            entries <<- c(entries, list(list(text=trim(endCapture(save)),
	                   	             row=row, col=col)))
            save <<- startCapture()
            dropBlank <<- TRUE
        }
        for (i in seq_along(tags)) {
            switch(tags[i],
                  "\\tab" = {
                  	newEntry()
                   	col <- col + 1L
                   	if (col > length(formats))
                   	    stopRd(content[[i]], Rdfile,
                                   sprintf("too many columns for format '%s'",
                                           table[[1L]]))
                   },
                   "\\cr" = {
                   	newEntry()
                   	row <- row + 1L
			col <- 1L
                    },
                   writeBlock(content[[i]], tags[i], "\\tabular")
                   )
        }
        newEntry()
        endCapture(save)
        ## Drop a trailing empty entry produced by a final \cr.
        entries <- with(entries[[length(entries)]],
        	    {
                        if (!length(text) && col == 1L)
                            entries[-length(entries)]
                        else
                            entries
                    })
        if(!length(entries)) return()
        rows <- entries[[length(entries)]]$row
        cols <- max(vapply(entries, function(e) e$col, 1L))
        ## Compute per-column display widths and per-row line counts.
        widths <- rep_len(0L, cols)
        lines <- rep_len(1L, rows)
        for (i in seq_along(entries)) {
            e <- entries[[i]]
            while(length(e$text) && !nzchar(e$text[length(e$text)])) {
            	e$text <- e$text[-length(e$text)]
            	entries[[i]] <- e
            }
            if (any(nzchar(e$text)))
            	widths[e$col] <- max(widths[e$col], max(nchar(e$text, "w")))
            ## NOTE: if an entry spanned multiple Rd lines, length(e$text) > 1.
            ## Whereas Rd lines are collapsed in both HTML (which auto-wraps)
            ## and PDF output, line breaks are preserved here (even though
            ## this is unusual for a LaTeX-like context) and the width
            ## is determined by the longest (trimmed) line of the column.
            lines[e$row] <- max(lines[e$row], length(e$text))
        }
        ## Fill a character matrix of output lines, one matrix row per
        ## physical output line, then print it row by row.
        result <- matrix("", sum(lines), cols)
        for (i in seq_len(cols))
            result[, i] <- strrep(" ", widths[i])
        firstline <- c(1L, 1L+cumsum(lines))
        for (i in seq_along(entries)) {
            e <- entries[[i]]
            if(!length(e$text)) next
            ## FIXME: this is not right: it justifies strings as if
            ## they are escaped, so in particular \ takes two columns.
            text <- frmt(e$text, justify=formats[e$col], width=widths[e$col])
            for (j in seq_along(text))
            	result[firstline[e$row] + j - 1L, e$col] <- text[j]
        }
        blankLine()
        indent0 <- indent
        indent <<- indent + 1L
        for (i in seq_len(nrow(result))) {
            putf(paste0(" ", result[i,], " ", collapse=""))
# This version stripped leading blanks on the first line
#            for (j in seq_len(cols))
#            	putf(" ", result[i,j], " ")
            putf("\n")
        }
        blankLine()
        indent <<- indent0
    }

    ## Render Rd content inside a code context (e.g. \usage, \examples).
    ## Only code-ish tags are allowed here; \method and friends are
    ## rewritten in place by transformMethod(), after which the loop
    ## re-examines the same position (hence the while loop and i <- i - 1).
    writeCodeBlock <- function(blocks, blocktag)
    {
    	tags <- RdTags(blocks)
	i <- 0
	while (i < length(tags)) {
	    i <- i + 1
            block <- blocks[[i]]
            tag <- tags[i]
            switch(tag,
                   "\\method" =,
                   "\\S3method" =,
                   "\\S4method" = {
                   	blocks <- transformMethod(i, blocks, Rdfile)
                   	tags <- RdTags(blocks)
                   	i <- i - 1  # re-process the rewritten element
                   },
                   UNKNOWN =,
                   VERB =,
                   RCODE =,
                   TEXT = writeCode(tabExpand(block)),
                   "\\donttest" =, "\\dontdiff" =,
                   "\\special" =,
                   "\\var" = writeCodeBlock(block, tag),
                   "\\dots" =, # \ldots is not really allowed
                   "\\ldots" = put("..."),
                   "\\dontrun"= writeDR(block, tag),
                   COMMENT = # skip over whole comment lines, only (as in Rd2ex)
                       if (attr(block, "srcref")[2L] == 1L) linestart <<- FALSE,
		   USERMACRO =,
		   "\\newcommand" =,
		   "\\renewcommand" =,
                   "\\dontshow" =,
                   "\\testonly" = {}, # do nothing
                   ## All the markup such as \emph
                   stopRd(block, Rdfile, "Tag ", tag,
                          " not expected in code block")
                   )
        }
    }

    ## Emit the contents of a text section.  \item is handled specially
    ## according to the list style of the enclosing block (\describe,
    ## \value/\arguments, \itemize/\enumerate); everything else goes to
    ## writeBlock().  Labels for \describe/\value/\arguments items are
    ## rendered into a capture buffer first so they can be measured and
    ## laid out in front of the (re-indented) item body.
    writeContent <- function(blocks, blocktag) {
        itemskip <- FALSE
	tags <- RdTags(blocks)

	for (i in seq_along(tags)) {
            tag <- tags[i]
            block <- blocks[[i]]
            switch(tag,
                   "\\item" = {
                       switch(blocktag,
                              "\\describe"= {
                                  blankLine()
                                  save <- startCapture()
                                  dropBlank <<- TRUE
                                  writeContent(block[[1L]], tag)
                                  DLlab <- trim(endCapture(save))
                                  indent0 <- indent
                                  opts <- Rd2txt_options()
                                  indent <<- max(opts$minIndent,
                                                 indent + opts$extraIndent)
                                  keepFirstIndent <<- TRUE
                                  putw(strrep(" ", indent0),
                                       DLlab,
                                       " ")
                                  writeContent(block[[2L]], tag)
			  	  blankLine(0L)
                                  indent <<- indent0
                              },
                              "\\value"=,
                              "\\arguments"= {
                                  blankLine()
                                  save <- startCapture()
                                  dropBlank <<- TRUE
                                  ## argument names are rendered as code
                                  writeItemAsCode(tag, block[[1L]])
                                  DLlab <- trim(endCapture(save))
                                  indent0 <- indent
                                  opts <- Rd2txt_options()
                                  indent <<- max(opts$minIndent, indent + opts$extraIndent)
                                  keepFirstIndent <<- TRUE
                                  DLlab <- paste0(DLlab[nzchar(DLlab)], collapse = " ")
                                  putw(frmt(paste0(DLlab, ": "),
                                              justify="right", width=indent))
                                  writeContent(block[[2L]], tag)
			  	  blankLine(0L)
                                  indent <<- indent0
                              },
                              "\\itemize" =,
                              "\\enumerate" = {
                              	  blankLine()
                              	  keepFirstIndent <<- TRUE
                              	  opts <- Rd2txt_options()
                              	  if (blocktag == "\\itemize")
                              	      label <- opts$itemBullet
                              	  else {
                              	      enumItem <<- enumItem + 1L
                              	      label <- opts$enumFormat(enumItem)
                              	  }
                              	  putw(frmt(label, justify="right",
                                            width=indent))
                              })
                       ## suppress the leading space of the following TEXT
                       itemskip <- TRUE
                   },
               { # default
                   if (itemskip) {
                       ## The next item must be TEXT, and start with a space.
                       itemskip <- FALSE
                       if (tag == "TEXT") {
                           txt <- psub("^ ", "", as.character(tabExpand(block)))
                           put(txt)
                           if (!haveBlanks &&
                               blocktag %in% c("\\describe", "\\value", "\\arguments"))
                           dropBlank <<- FALSE  # keep blank line for following text
                       } else writeBlock(block, tag, blocktag) # should not happen
                   } else writeBlock(block, tag, blocktag)
               })
	}
    }

    ## Emit one top-level section: header line, then the body rendered
    ## either as code (\usage, \examples) or as wrapped text.  Metadata
    ## sections are not rendered.  All layout state that gets modified is
    ## saved up front and restored at the end so sections can nest
    ## (\section/\subsection bump sectionLevel).
    writeSection <- function(section, tag) {
        if (tag %in% c("\\alias", "\\concept", "\\encoding", "\\keyword"))
            return()
    	save <- c(indent, sectionLevel, keepFirstIndent, dropBlank, wrapping)
    	blankLine(min(sectionLevel, 1L))
    	titlePrefix <- strrep("  ", sectionLevel)
    	opts <- Rd2txt_options()
        indent <<- opts$sectionIndent + opts$sectionExtra*sectionLevel
        sectionLevel <<- sectionLevel + 1
        keepFirstIndent <<- TRUE
        if (tag == "\\section" || tag == "\\subsection") {
            ## section header could have markup
            title <- .Rd_format_title(.Rd_get_text(section[[1L]]))
            putf(titlePrefix, txt_header(title), ":")
            blankLine()
            dropBlank <<- TRUE
            wrapping <<- TRUE
            keepFirstIndent <<- FALSE
    	    writeContent(section[[2L]], tag)
    	} else if (tag %in% c("\\usage", "\\examples")) {
            ## verbatim sections: no re-wrapping of the code
            putf(txt_header(sectionTitles[tag]), ":")
            blankLine()
            dropBlank <<- TRUE
            wrapping <<- FALSE
            keepFirstIndent <<- FALSE
            writeCodeBlock(section, tag)
    	} else {
            putf(txt_header(sectionTitles[tag]), ":")
            blankLine()
            dropBlank <<- TRUE
            wrapping <<- TRUE
            keepFirstIndent <<- FALSE
            writeContent(section, tag)
        }
        blankLine()

        indent <<- save[1L]
        sectionLevel <<- save[2L]
        keepFirstIndent <<- save[3L]
        dropBlank <<- save[4L]
        wrapping <<- save[5L]
    }

    ## Resolve 'out' to an output connection: "" means stdout, any other
    ## string is a file path (opened here, closed on exit).  If a
    ## connection was supplied, 'out' is reset to its description so the
    ## invisible return value below is always a character string.
    if (is.character(out)) {
        if(out == "") {
            con <- stdout()
        } else {
	    con <- file(out, "wt")
	    on.exit(close(con), add=TRUE)
	}
    } else {
    	con <- out
    	out <- summary(con)$description
    }

    ## Render an \item label from \value/\arguments as plain code text:
    ## strip \code wrappers, substitute "..." for \dots/\ldots, and
    ## normalize comma-separated argument lists ("a ,b" -> "a, b").
    writeItemAsCode <- function(blocktag, block) {
        ## Keep this in sync with writeItemAsCode() in Rd2HTML.R!
        
        ## Argh.  Quite a few packages put the items in their value
        ## section inside \code.
        for(i in which(RdTags(block) == "\\code"))
            attr(block[[i]], "Rd_tag") <- "Rd"

        s <- as.character.Rd(block)
        s[s %in% c("\\dots", "\\ldots")] <- "..."
        s <- trimws(strsplit(paste(s, collapse = ""), ",", fixed = TRUE)[[1]])
        s <- s[nzchar(s)]
        s <- paste0(s, collapse = ", ")
        putf(s)
    }
        
    ## Main driver: preprocess the Rd object, then render either a bare
    ## fragment or a complete help page (header line, title, sections).
    Rd <- prepare_Rd(Rd, defines=defines, stages=stages, fragment=fragment, ...)
    Rdfile <- attr(Rd, "Rdfile")
    sections <- RdTags(Rd)
    if (fragment) {
        ## a fragment either starts with a known section (render each as a
        ## section) or is free-form content (render each as a block)
    	if (sections[1L] %in% names(sectionOrder))
    	    for (i in seq_along(sections))
    	    	writeSection(Rd[[i]], sections[i])
    	else
    	    for (i in seq_along(sections))
    	    	writeBlock(Rd[[i]], sections[i], "")
    } else {
	title <- .Rd_format_title(.Rd_get_title(Rd))

	name <- .Rd_topic_for_display(.Rd_get_name(Rd),
                                      .Rd_get_metadata(Rd, "alias"))

	if(nzchar(package)) {
	    ## header line: "name   package:pkg   R Documentation",
	    ## padded to HDR_WIDTH with the middle part roughly centered
	    left <- name
	    mid <- if(nzchar(package)) paste0("package:", package) else ""
	    right <- "R Documentation"
	    if(encoding != "unknown")
		right <- paste0(right, "(", encoding, ")")
	    pad <- max(HDR_WIDTH - nchar(left, "w") - nchar(mid, "w") - nchar(right, "w"), 0)
	    pad0 <- pad %/% 2L
	    pad1 <- strrep(" ", pad0)
	    pad2 <- strrep(" ", pad - pad0)
	    putf(paste0(left, pad1, mid, pad2, right, "\n\n"))
	}

	putf(txt_header(title))
	blankLine()

	## prepare_Rd() sorted the sections: 1 = \title, 2 = \name (skipped)
	for (i in seq_along(sections)[-(1:2)])
	    writeSection(Rd[[i]], sections[i])
    }
    blankLine(0L)
    invisible(out)
}
#  File src/library/tools/R/RdConv2.R
#  Part of the R package, https://www.R-project.org
#
#  Copyright (C) 1995-2024 The R Core Team
#
#  This program is free software; you can redistribute it and/or modify
#  it under the terms of the GNU General Public License as published by
#  the Free Software Foundation; either version 2 of the License, or
#  (at your option) any later version.
#
#  This program is distributed in the hope that it will be useful,
#  but WITHOUT ANY WARRANTY; without even the implied warranty of
#  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
#  GNU General Public License for more details.
#
#  A copy of the GNU General Public License is available at
#  https://www.R-project.org/Licenses/


## Collect the "Rd_tag" attribute of each element of an Rd list; an
## empty input yields character(0) rather than an empty list.
RdTags <- function(Rd) {
    tags <- lapply(Rd, attr, "Rd_tag")
    if (!length(tags))
        return(character())
    simplify2array(tags, higher = FALSE)
}

## TRUE iff every element of 'x' is blank (only [:blank:] characters,
## with an optional trailing newline).  Vacuously TRUE for empty input.
isBlankRd <- function(x) {
    blank <- grep("^[[:blank:]]*\n?$", x, perl = TRUE)  # newline optional
    length(blank) == length(x)
}

## TRUE iff 'x' is a whole blank source line: the text must start at the
## first byte of its line (per its srcref) and consist only of blanks
## followed by a required newline.
isBlankLineRd <- function(x) {
    utils:::getSrcByte(x) == 1L &&
    length(grep("^[[:blank:]]*\n", x, perl = TRUE)) == length(x)   # newline required
}

## Build a diagnostic message for an Rd block, prefixed with
## "<file>:<lines>:" when source-reference information is available.
## When 'Rdfile' is missing it is recovered from the block's srcref.
## With showSource = TRUE the first source line of the block plus a "^"
## column marker is appended.
.makeMessageRd <- function(block, Rdfile, ..., showSource = FALSE)
{
    srcref <- attr(block, "srcref")
    if (missing(Rdfile) && !is.null(srcref)) {
    	srcfile <- attr(srcref, "srcfile")
    	if (is.environment(srcfile))
    	    Rdfile <- srcfile$filename
    }
    Rdfile <-
        if(missing(Rdfile) || is.null(Rdfile))
            ""
        else { # Rdfile could be an absolute path (Rbuild tempdir)
            ## keep only a trailing unix/ or windows/ directory component
            OS_subdir <- intersect(basename(dirname(Rdfile)), c("unix", "windows"))
            paste0(paste0(OS_subdir, "/", recycle0 = TRUE),
                   basename(Rdfile), ":", recycle0 = FALSE)
        }
    if (is.null(srcref))
        paste0(Rdfile, " ", ...)
    else {
        from <- srcref[1L]
        ## "file:from" or "file:from-to" when the block spans lines
        loc <- paste0(Rdfile, from,
                      if (from != srcref[3L]) paste0("-", srcref[3L]))
        src <- if (showSource) tryCatch(error = function (e) NULL, {
            ## show first source line and column marker for the block
            line <- getSrcLines(attr(srcref, "srcfile"), from, from) # Enc=UTF-8
            ## FIXME: marker may be misplaced for
            ##        expanded USERMACRO (seen srcref[5L] > srcref[6L])
            sprintf("\n  %4s | %s", c(from, ""),
                    c(tabExpand(line), paste0(strrep(" ", srcref[5L] - 1L), "^")))
        })
        paste0(loc, ": ", ...,
               paste0(src, collapse = ""))
    }
}

## Raise an error located at the given Rd block (no call in the message,
## no translation).
stopRd <- function(block, Rdfile, ...)
    stop(.makeMessageRd(block, Rdfile, ...), call. = FALSE, domain = NA)

## Emit an immediate warning located at the given Rd block (no call in
## the message, no translation).
warnRd <- function(block, Rdfile, ...)
    warning(.makeMessageRd(block, Rdfile, ...),
            call. = FALSE, domain = NA, immediate. = TRUE)

## Default options for \Sexpr{} chunks / \RdOpts, analogous to Sweave's
## chunk defaults.  Overridden per chunk via Rd_option attributes and
## parsed by RweaveRdOptions().
RweaveRdDefaults <- list(
    width = 6,
    height = 6,
    eval = TRUE,
    fig = FALSE,
    echo = FALSE,
    keep.source = TRUE, # was ignored, effectively interactive(), thus often FALSE, in R < 4.4.0
    results = "text",
    strip.white = "true",
    stage = "install")

## Validate and coerce a list of \Sexpr{}/RdOpts options: unknown options
## become logical, width/height become numeric, and the enumerated
## options (results, stage) are matched against their allowed values.
RweaveRdOptions <- function(options)
{

    ## convert a character string to logical
    c2l <- function(x) {
        if (is.null(x)) FALSE
        else as.logical(toupper(as.character(x)))
    }

    NUMOPTS <- c("width", "height")
    NOLOGOPTS <- c(NUMOPTS, "results", "stage", "strip.white")

    for (nm in names(options)) {
        if (nm %notin% NOLOGOPTS) {
            oldval <- options[[nm]]
            if (!is.logical(oldval))
                options[[nm]] <- c2l(oldval)
            if (is.na(options[[nm]]))
                stop(gettextf("invalid value for '%s' : %s", nm, oldval),
                     domain = NA)
        } else if (nm %in% NUMOPTS)
            options[[nm]] <- as.numeric(options[[nm]])
    }

    if (!is.null(options$results))
        options$results <- match.arg(tolower(options$results),
                                     c("text", "verbatim", "rd", "hide"))
    if (!is.null(options$stage))
        options$stage <- match.arg(tolower(options$stage),
                                   c("build", "install", "render"))
    if (!is.null(options$strip.white))
        options$strip.white <- tolower(options$strip.white)

    options
}

## Attach an Rd_tag (and optionally a srcref) to an object; a NULL
## srcref removes any existing srcref attribute.
tagged <- function(x, tag, srcref = NULL)
    structure(x, Rd_tag = tag, srcref = srcref)

## Evaluate one expression of a \Sexpr chunk in 'env', honouring the
## chunk options.  Returns the error object on failure; otherwise the
## value depends on options$results: a character string ("text"), the
## raw value ("rd"), or an empty COMMENT ("verbatim" prints instead,
## "hide" keeps the default).
evalWithOpt <- function(expr, options, env)
{
    res <- tagged("", "COMMENT")
    if (options$eval) {
        ans <- tryCatch(withVisible(eval(expr, env)),
                        error = function(e) e)
        if (inherits(ans, "error"))
            return(ans)
        switch(options$results,
               "text" = if (ans$visible)
                   res <- paste(as.character(ans$value), collapse = " "),
               "verbatim" = if (ans$visible) print(ans$value),
               "rd" = res <- ans$value)
    }
    res
}

# The parser doesn't distinguish between types of Sexprs, we do
expandDynamicFlags <- function(block, options = RweaveRdDefaults) {
    recurse <- function(block) {
	flags <- getDynamicFlags(block)
	if (flags["\\Sexpr"]) {
	    if (identical(tag <- attr(block, "Rd_tag"), "\\Sexpr")) {
		if (is.null(opts <- attr(block, "Rd_option"))) opts <- ""
		# modify locally
                options <- utils:::SweaveParseOptions(opts, options, RweaveRdOptions)
                flags[options$stage] <- TRUE
	    } else if (identical(tag, "\\RdOpts")) {
	        # modify globally
	    	options <<- utils:::SweaveParseOptions(block, options, RweaveRdOptions)
	    } else { # Has \Sexpr flag, so must be a list
		for (i in seq_along(block)) {
		    block[[i]] <- recurse(block[[i]])
		    flags <- flags | getDynamicFlags(block[[i]])
		}
	    }
	    block <- setDynamicFlags(block, flags)
	}
	block
    }
    recurse(block)
}

## Unpack the integer "dynamicFlag" attribute into a named logical
## vector (bit 1 = #ifdef, 2 = \Sexpr, 4 = build, 8 = install,
## 16 = render).  A missing attribute means all flags are off.
getDynamicFlags <- function(block) {
    flag <- attr(block, "dynamicFlag")
    if (is.null(flag))
        return(c("#ifdef" = FALSE, "\\Sexpr" = FALSE, build = FALSE,
                 install = FALSE, render = FALSE))
    bits <- (flag %/% c(1L, 2L, 4L, 8L, 16L)) %% 2L > 0L
    names(bits) <- c("#ifdef", "\\Sexpr", "build", "install", "render")
    bits
}

## Pack a named logical flag vector (format of getDynamicFlags) back
## into the integer "dynamicFlag" attribute; all-FALSE removes the
## attribute entirely.
setDynamicFlags <- function(block, flags) {
    total <- sum(flags * c(1L, 2L, 4L, 8L, 16L))
    attr(block, "dynamicFlag") <- if (total == 0L) NULL else total
    block
}

## Recursively overwrite every existing "srcref" attribute in an Rd tree
## with 'srcref'; nodes without a srcref are left untouched.
replaceRdSrcrefs <- function(Rd, srcref) {
    if (!is.null(attr(Rd, "srcref")))
        attr(Rd, "srcref") <- srcref
    if (is.list(Rd))  # recurse, keeping the list's own attributes
        Rd[] <- lapply(Rd, replaceRdSrcrefs, srcref)
    Rd
}

## Evaluate one \Sexpr{} chunk when its declared stage matches 'stage'.
## Mirrors RweaveLatexRuncode in a simplified form: parses the chunk,
## evaluates each expression (capturing printed output via sink()), then
## converts the accumulated result according to options$results
## ("text", "verbatim", "rd" or "hide").  Chunks for other stages are
## returned unchanged.
processRdChunk <- function(code, stage, options, env, macros)
{
    if (is.null(opts <- attr(code, "Rd_option"))) opts <- ""
    codesrcref <- attr(code, "srcref")
    Rdfile <- attr(codesrcref, "srcfile")$filename
    options <- utils:::SweaveParseOptions(opts, options, RweaveRdOptions)
    if (stage == options$stage) {
        #  The code below is very similar to RWeaveLatexRuncode, but simplified

        # Results as a character vector for now; convert to list later
        res <- character(0)

        tags <- RdTags(code)
        if (length(bad <- setdiff(tags, c("RCODE", "TEXT", "COMMENT"))))
            ## also USERMACROs are currently not supported inside \Sexpr{}
            warnRd(code, Rdfile, "\\Sexpr expects R code; found ",
                   paste0(sQuote(bad), collapse = ", "))
	code <- structure(code[tags != "COMMENT"],
	                  srcref = codesrcref) # retain for error locations
	chunkexps <- tryCatch(
	    parse(text = as.character(code), keep.source = options$keep.source),
	    error = function (e) stopRd(code, Rdfile, conditionMessage(e))
	)

	if(length(chunkexps) == 0L)
	    return(tagged(code, "LIST"))

	srcrefs <- attr(chunkexps, "srcref")
	lastshown <- 0L
	err <- NULL
	for(nce in seq_along(chunkexps))
	{
	    ce <- chunkexps[[nce]]

	    ## echo the source of this expression (verbatim results only)
	    if (options$echo && options$results == "verbatim") {

	    if (nce <= length(srcrefs) && !is.null(srcref <- srcrefs[[nce]])) {
		srcfile <- attr(srcref, "srcfile")
		showfrom <- srcref[1L]
		showto <- srcref[3L]
		dce <- getSrcLines(srcfile, lastshown+1, showto)
		leading <- showfrom-lastshown
		lastshown <- showto
		## drop leading blank lines from the echoed source
		while (length(dce) && grepl("^[[:blank:]]*$", dce[1L])) {
		    dce <- dce[-1L]
		    leading <- leading - 1L
		}
	    } else {
		dce <- deparse(ce, width.cutoff=0.75*getOption("width"))
		leading <- 1L
	    }
	    if (length(dce)) {
		## prompt on the first 'leading' lines, continue on the rest
		res <- c(res,"\n",
                         paste0(getOption("prompt"), dce[1L:leading],
                                collapse="\n"))
		if (length(dce) > leading)
		    res <- c(res, "\n",
                             paste0(getOption("continue"), dce[-(1L:leading)],
                                    collapse="\n"))
	    }

	    }

	    ## evaluate with printed output diverted to a temporary file()
	    tmpcon <- file()
	    sink(file = tmpcon)
	    if(options$eval) err <- evalWithOpt(ce, options, env)
	    res <- c(res, "\n") # attempt to  make sure final line is complete
	    sink()
	    output <- readLines(tmpcon, warn = FALSE) # sometimes attempt fails.
	    close(tmpcon)
	    ## delete empty output
	    if(length(output) == 1L && output[1L] == "") output <- NULL

	    if (inherits(err, "error"))
	    	stopRd(code, Rdfile, conditionMessage(err))

	    if(length(output) && (options$results != "hide")) {
		output <- paste(output, collapse="\n")
		if(options$strip.white %in% c("all", "true")) {
		    output <- sub("^[[:space:]]*\n", "", output)
		    output <- sub("\n[[:space:]]*$", "", output)
		    if(options$strip.white == "all")
		      output <- sub("\n[[:space:]]*\n", "\n", output)
		}
		res <- c(res, output, "\n")
		remove(output)
	    }
	}
	if (options$results == "rd") {
	    ## re-parse the last value of the chunk as Rd markup
	    res <- enc2utf8(as.character(err))   # The last value of the chunk
	    tmpcon <- file()
	    writeLines(res, tmpcon, useBytes = TRUE)
	    ## fall back to fragment parsing if parsing as a full file fails
	    parseFragment <- function(cond) {
	    	               seek(tmpcon, 0)
	    	               parse_Rd(tmpcon, encoding="UTF-8", fragment=TRUE, macros = macros)
	    	            }
	    res <- tryCatch(parse_Rd(tmpcon, encoding="UTF-8", fragment=FALSE, macros = macros),
	    	            warning = parseFragment, error = parseFragment,
	    	            finally = close(tmpcon))
	    # Now remove that extra newline added by the writeLines
	    last <- res[[length(res)]]
	    if (attr(last, "Rd_tag") == "TEXT" && (len <- length(last)))
	        res[[length(res)]][len] <- gsub("\\n$", "", last[len])

	    flag <- getDynamicFlags(res)
            if (any(flag)) { # needs a later stage (#ifdef is processed below)
                bad <- flag[c(stage, switch(stage, install = "build",
                                            render = c("build", "install")))]
                if (any(bad))
                    warnRd(code, Rdfile,
                           "unprocessed ",
                           paste0(sQuote(names(bad)[bad]), collapse = "/"),
                           " macro from ", stage, "-stage \\Sexpr")
            }

	    # We may have multiple chunks now.  If they are in
	    # a section, we can wrap them in LIST, but at top
	    # level we can't, so we disallow multiple sections.

	    # First clear out the junk.
	    tags <- RdTags(res)
	    keep <- rep.int(TRUE, length(tags))
	    for (i in seq_along(tags)) {
	        if (tags[i] == "TEXT" && res[[i]] == "")
	            keep[i] <- FALSE
	    }
	    res <- res[keep]
	    tags <- tags[keep]
	    if (length(res) > 1) {
	    	is_section <- !is.na(sectionOrder[tags])
	    	if (!any(is_section))
	    	    res <- tagged(res, "LIST")
	    	else {
	    	    if (sum(is_section) > 1)
	    		stop(gettextf("Only one Rd section per %s is supported.",
                                      "\\Sexpr"),
                             domain = NA)
	    	    res <- res[[which(is_section)]]
	    	}
	    } else if (length(res) == 1) res <- res[[1]]
	    else res <- tagged("", "TEXT")

	    if (is.list(res)) {
	    	res <- setDynamicFlags(res, flag)
	    	res <- prepare_Rd(res, defines = .Platform$OS.type, options=options,
	                           stage2 = FALSE, stage3 = FALSE)
	    }
	} else if (options$results == "text")
	    res <- tagged(enc2utf8(as.character(err)), "TEXT")
	else if (options$results == "hide" || !length(res))
	    res <- tagged("", "COMMENT")
	else { ## if (length(res)) 
	    res <- lapply(as.list(res), function(x) tagged(x, "VERB"))
	    res <- tagged(res, "\\verb")
	}
    } else res <- code
    ## return :
    replaceRdSrcrefs(res, codesrcref)
}

## Resolve #ifdef/#ifndef blocks against the platform 'defines'.  A
## matching block is replaced by its contents (tagged "#expanded" so the
## parent can splice them inline); a non-matching one collapses to a
## COMMENT recording that it was inactive.  Subtrees without the
## "#ifdef" dynamic flag are skipped entirely.
processRdIfdefs <- function(blocks, defines)
{
    recurse <- function(block) {
    	if (!(getDynamicFlags(block)["#ifdef"])) return(block)

        if (!is.null(tag <- attr(block, "Rd_tag"))) {
	    if (tag %in% c("#ifdef", "#ifndef")) {
		target <- block[[1L]][[1L]]
		# The target will have picked up some whitespace and a newline
		target <- psub("[[:blank:][:cntrl:]]*", "", target)
		block <-
                    if((target %in% defines) == (tag == "#ifdef")) {
                        ## condition holds: expand to the guarded contents,
                        ## preserving their remaining dynamic flags
                        flag <- getDynamicFlags(block[[2L]])
                        block <- tagged(block[[2L]], "#expanded")
                        setDynamicFlags(block, flag)
                    } else
                        tagged(list(
                            tagged(paste(tag, target, "not active"),
                                   "COMMENT", attr(block, "srcref")),
                            ## converters expect (and drop) newline from COMMENT
                            tagged("\n",
                                   "TEXT", attr(block, "srcref"))
                        ), "#expanded")
	    }
	}
	if (is.list(block)) {
	    i <- 1L
	    ## save possible outer \Sexpr flags and options
	    flags <- getDynamicFlags(block); flags["#ifdef"] <- FALSE
	    opts <- attr(block, "Rd_option")
	    while (i <= length(block)) {
	    	newval <- recurse(block[[i]])
	    	newtag <- attr(newval, "Rd_tag")
	    	if (!is.null(newtag) && newtag == "#expanded") { # ifdef has expanded.
	    	    ## splice the expanded contents in place of element i;
	    	    ## i is NOT advanced, so the spliced elements get processed
	    	    all <- seq_along(block)
	    	    before <- all[all < i]
	    	    after  <- all[all > i]
	    	    block <- tagged(c(block[before], newval, block[after]),
                                    tag, attr(block, "srcref"))
	    	} else {
	    	    flags <- flags | getDynamicFlags(newval)
		    block[[i]] <- newval
		    i <- i+1L
		}
	    }
	    attr(block, "Rd_option") <- opts
	    setDynamicFlags(block, flags)
	} else
	    block
    } # end{recurse}

    recurse(blocks)
}

## Evaluate all \Sexpr chunks for the given 'stage' throughout an Rd
## tree.  \RdOpts blocks update the options for the remainder of the
## walk.  Subtrees whose dynamic flags show no \Sexpr/stage work are
## skipped; flags are re-derived at the end via expandDynamicFlags().
processRdSexprs <-
    function(block, stage, options = RweaveRdDefaults,
             env = new.env(hash = TRUE, parent = globalenv()), macros)
{
    recurse <- function(block) {
    	if (!any(getDynamicFlags(block)[c("\\Sexpr",stage)])) return(block)

        if (is.list(block)) {
            if (!is.null(tag <- attr(block, "Rd_tag"))) {
        	if (tag == "\\Sexpr")
            	    block <- processRdChunk(block, stage, options, env, macros=macros)
            	else if (tag == "\\RdOpts")
    	    	    options <<-
                        utils:::SweaveParseOptions(block, options, RweaveRdOptions)
    	    }
    	    ## a \Sexpr may have produced a list: recurse into the result too
    	    if (is.list(block)) {
		for (i in seq_along(block))
		    block[[i]] <- recurse(block[[i]])
	    }
	}
	block
    }

    if (!any(getDynamicFlags(block)[c("\\Sexpr",stage)])) return(block)
    expandDynamicFlags(recurse(block), options)
}

# Get rid of parts of the path up to first, if any
stripPathTo <- function(path, first) {
    pattern <- paste0("^.*[/\\]", first, "[/\\]")
    sub(pattern, "", path)
}

## Parse (if necessary) and preprocess an Rd object: evaluate \Sexpr
## chunks for the requested 'stages', expand #ifdef/#ifndef against
## 'defines', and (unless 'fragment') optionally run the structural
## checks of prepare2_Rd() and the empty-section cleanup of
## prepare3_Rd().  'Rd' may be a file path, a connection, or a parsed
## Rd object.  Returns the Rd object with "Rdfile", "meta", "srcref"
## and "concordance" attributes attached.
prepare_Rd <-
    function(Rd, encoding = "unknown", defines = NULL, stages = NULL,
             fragment = FALSE, options = RweaveRdDefaults,
             stage2 = TRUE, stage3 = TRUE, ..., msglevel = 0)
{
    concordance <- NULL
    if (is.character(Rd)) {
        Rdfile <- Rd
        ## do it this way to get info in internal warnings
        Rd <- eval(substitute(parse_Rd(f, encoding = enc, fragment = frag, ...),
                              list(f = Rd, enc = encoding, frag = fragment)))
    } else if(inherits(Rd, "connection")) {
        Rdfile <- summary(Rd)$description
        Rd <- parse_Rd(Rd, encoding = encoding, fragment = fragment, ...)
    } else {
    	Rdfile <- attr(Rd, "Rdfile")
    	concordance <- attr(Rd, "concordance")
    }
    srcref <- attr(Rd, "srcref")
    if (is.null(Rdfile) && !is.null(srcref))
    	Rdfile <- attr(srcref, "srcfile")$filename
    if (fragment) meta <- NULL
    else {
	pratt <- attr(Rd, "prepared")
	if (is.null(pratt)) pratt <- 0L
	if ("build" %in% stages)
	    Rd <- processRdSexprs(Rd, "build", options, macros=attr(Rd, "macros"))
	if (!is.null(defines))
	    Rd <- processRdIfdefs(Rd, defines)
	for (stage in c("install", "render"))
	    if (stage %in% stages)
		Rd <- processRdSexprs(Rd, stage, options, macros=attr(Rd, "macros"))
	if (is.null(concordance)) {
	    ## best-effort: 'silent' belongs to try(), not to
	    ## as.Rconcordance() -- failures here are expected and ignored
	    concordance <- try(as.Rconcordance(unlist(Rd[RdTags(Rd) == "COMMENT"])),
	                       silent = TRUE)
	    if (inherits(concordance, "try-error"))
	    	concordance <- NULL
	}
	if (pratt < 2L && stage2)
	    Rd <- prepare2_Rd(Rd, Rdfile, stages)
	meta <- attr(Rd, "meta")
	if (pratt < 3L && stage3)
	    Rd <- prepare3_Rd(Rd, Rdfile, msglevel = msglevel)

	# Restore flags from any sections that are left
	Rd <- setDynamicFlags(Rd, apply(sapply(Rd, getDynamicFlags), 1, any))
    }
    structure(Rd, Rdfile = Rdfile, class = "Rd", meta = meta,
              srcref = srcref, concordance = concordance)
}

## auxiliary, currently called only from prepare_Rd(*, stage2 = TRUE)
## Validate the top-level structure of a parsed Rd file and normalize
## it for rendering: warn on duplicated unique sections and stray text,
## check \encoding and \docType, drop non-rendered parts (comments,
## metadata-only sections, unknown sections), and sort the remainder
## into canonical sectionOrder.  Requires unique \title and \name as
## the first two sections of the result.  Attaches "meta" (docType,
## generator) for later use.
prepare2_Rd <- function(Rd, Rdfile, stages)
{
    sections <- RdTags(Rd)

    ## FIXME: we no longer make any use of \Rdversion
    version <- which(sections == "\\Rdversion")
    if (length(version) > 1L)
    	stopRd(Rd[[version[2L]]], Rdfile,
               "Only one \\Rdversion declaration is allowed")

    ## Give warning (pro tem) for nonblank text outside a section
    if (length(bad <- grep("[^[:blank:][:cntrl:]]",
                           unlist(Rd[sections == "TEXT"]),
                           perl = TRUE, useBytes = TRUE )))
        for(s in bad)
            warnRd(Rd[sections == "TEXT"][[s]], Rdfile,
                   "All text must be in a section")

    drop <- rep.int(FALSE, length(sections))

    ## Check specific sections are unique (\title and \name are checked below,
    ## others can be repeated: \alias, \concept, \keyword, \section, \note)
    unique_tags <-
        paste0("\\",
               c("description", "usage", "arguments",
                 "format", "details", "value", "references", "source",
                 "seealso", "examples", "author", "encoding"))
    for (tag in unique_tags) {
        where <- which(sections == tag)
        if(length(where) > 1L) {
            warnRd(NULL, Rdfile,
                   sprintf("Only one %s section is allowed: the first will be used", tag))
            drop[where[-1L]] <- TRUE
        }
    }

    enc <- which(sections == "\\encoding")
    if (length(enc)) {
    	encoding <- Rd[[enc[1L]]]
    	if (!identical(RdTags(encoding), "TEXT"))
            stopRd(encoding, Rdfile,
                   "\\encoding must be plain text on a line by itself")
    }

    dt <- which(sections == "\\docType")
    docTypes <- character(length(dt))
    if(length(dt)) {
        if(length(dt) > 1L)
            warnRd(NULL, Rdfile,
                   "Multiple \\docType sections are not supported")
        for(i in seq_along(dt)) {
            docType <- Rd[[dt[i]]]
            if(!identical(RdTags(docType), "TEXT"))
        	stopRd(docType, Rdfile, "'docType' must be plain text")
            ## Some people have \docType{ package } and similar.
            docTypes[i] <- sub("^ *", "", sub(" *$", "", docType[[1L]]))
            if (docTypes[i] %notin%
                c("data", "package", "methods", "class", "import"))
                warnRd(docType, Rdfile, "docType ", sQuote(docTypes[i]),
                       " is unrecognized")
         }
    }

    ## record a "% Generated by ..." first-line comment (e.g. roxygen2)
    generator <- if((sections[1L] == "COMMENT") &&
                    startsWith(Rd[[1L]], "% Generated by"))
                     c(Rd[[1L]])
                 else ""

    ## Drop all the parts that are not rendered
    extras <- c("COMMENT", "TEXT", "\\docType", "\\Rdversion", "\\RdOpts",
                "USERMACRO", "\\newcommand", "\\renewcommand")
    drop <- drop | (sections %in% extras)
    bad <- sections %notin% c(names(sectionOrder), extras)
    ## \Sexpr[stage=render] is OK, if we are not at the render stage yet
    if ("render" %notin% stages) {
      render <- vapply(Rd, function(r) getDynamicFlags(r)[["render"]], TRUE)
      bad <- bad & (sections != "\\Sexpr" | !render)
    }
    if (any(bad)) {
        for(s in which(bad))
            warnRd(Rd[[s]], Rdfile, "Section ",
                   sections[s], " is unrecognized and will be dropped")
        drop <- drop | bad
    }
    Rd <- Rd[!drop]
    sections <- sections[!drop]
    sortorder <- order(sectionOrder[sections])
    Rd <- Rd[sortorder]
    sections <- sections[sortorder]
    if (!identical(sections[1:2], c("\\title", "\\name"))
        || identical(sections[3L], "\\name"))
    	stopRd(NULL, Rdfile,
               "Sections \\title, and \\name must exist and be unique in Rd files")

    ## \name (parsed verbatim) must not contain any markup
    if (length(Rd[[2L]]) != 1L ||
        grepl("\\", Rd[[2L]][[1L]], fixed = TRUE))
        stopRd(Rd[[2L]], Rdfile, "\\name must only contain simple text")

    ## is this really what we want?  docTypes is a vector.
    structure(Rd, meta = list(docType = docTypes, generator = generator))
}

## auxiliary, currently called only from prepare_Rd(*, stage3 = TRUE)
## Drop sections containing no renderable (non-whitespace) content,
## warning about each dropped section when msglevel > 0.
prepare3_Rd <- function(Rd, Rdfile, msglevel = 0)
{
    ## Drop 'empty' sections: less rigorous than checkRd test
    keep <- rep.int(TRUE, length(Rd))
    ## Recursively decide whether any leaf under 'x' carries content;
    ## 'this' short-circuits the recursion once content has been found.
    checkEmpty <- function(x, this)
    {
        if(this) return(TRUE)
        if(is.list(x))
            for(xx in x) this <- checkEmpty(xx, this)
        else {
            tag <- attr(x, "Rd_tag")
            if(!is.null(tag)) # guard against incompletely parsed Rd files
            switch(tag,
		   USERMACRO =, "\\newcommand" =, "\\renewcommand" =, COMMENT =
                                                                          {},
                   ## test the current leaf 'x' itself for non-whitespace
                   ## content (not the outer loop variable!)
                   VERB =, RCODE =, TEXT =
                                        if(any(grepl("[^[:space:]]", x,
                                                     perl=TRUE, useBytes=TRUE)))
                                            return(TRUE),
                   return(TRUE))
        }
        this
    }
    for (i in seq_along(Rd)) {
        this <- FALSE
        s0 <- section <- Rd[[i]]
        tag <- attr(section, "Rd_tag")
        if(tag == "\\section") {
            ## a \section's content is its second argument; use the title
            ## (first argument) in any diagnostic
            tagtitle <- sQuote(trimws(.Rd_deparse(section[[1L]])))
            section <- section[[2L]]
        } else tagtitle <- tag
        for(s in section) this <- checkEmpty(s, this)
        keep[i] <- this
        if(!this && msglevel > 0)
            warnRd(s0, Rdfile, "Dropping empty section ", tagtitle)
    }
    Rd[keep]
}

## Canonical rendering order of Rd sections; prepare2_Rd() sorts the
## top-level sections by these values (fractional values slot the
## metadata sections between \name and \description).
sectionOrder <- c("\\title"=1, "\\name"=2, "\\alias"=2.1, "\\concept"=2.2,
                  "\\keyword"=2.3, "\\encoding"=2.4,
    "\\description"=3, "\\usage"=4, "\\arguments"=5,
    "\\format"=6, "\\details"=7, "\\value"=8, "\\section"=9,
    "\\note"=10, "\\author" = 11, "\\source"=12, "\\references"=13,
    "\\seealso"=14, "\\examples"=15)

## Display titles used for the fixed Rd sections by the converters
## (\section supplies its own title, hence the lowercase placeholder).
sectionTitles <-
    c("\\description"="Description", "\\usage"="Usage",
      "\\arguments"="Arguments", "\\format"="Format", "\\details"="Details",
      "\\note"="Note", "\\section"="section", "\\author"="Author(s)",
      "\\references"="References", "\\source"="Source",
      "\\seealso"="See Also", "\\examples"="Examples", "\\value"="Value",
      "\\title"="Title", "\\name"="Name")

## Global substitution with Perl-compatible regexps.
psub <- function(pattern, replacement, x) {
    gsub(pattern, replacement, x, perl = TRUE)
}

## First-occurrence substitution with Perl-compatible regexps.
psub1 <- function(pattern, replacement, x) {
    sub(pattern, replacement, x, perl = TRUE)
}

## Global substitution with the pattern taken literally (no regexp).
fsub <- function(pattern, replacement, x) {
    gsub(pattern, replacement, x, fixed = TRUE)
}

## First-occurrence substitution with the pattern taken literally.
fsub1 <- function(pattern, replacement, x) {
    sub(pattern, replacement, x, fixed = TRUE)
}


## for lists of messages, see ../man/checkRd.Rd
checkRd <- function(Rd, defines = .Platform$OS.type, stages = "render",
                    unknownOK = TRUE, listOK = TRUE, ..., def_enc = FALSE)
{
    allow_empty_item_in_describe <- config_val_to_logical(
        Sys.getenv("_R_CHECK_RD_ALLOW_EMPTY_ITEM_IN_DESCRIBE_", "FALSE"))
    note_lost_braces <- config_val_to_logical(
        Sys.getenv("_R_CHECK_RD_NOTE_LOST_BRACES_", "TRUE"))

    ## Local override of warnRd(): instead of signalling, accumulate the
    ## formatted "checkRd: (level) ..." message in checkRd's .messages.
    warnRd <- function(block, Rdfile, ..., level = 0L)
    {
        .messages <<- c(.messages,
                        sprintf("checkRd: (%d) %s", level,
                                .makeMessageRd(block, Rdfile, ...)))
    }

    ## Check a \link-like block: its Rd_option (e.g. [=dest]) and its
    ## contents are content-checked, then the link target is resolved
    ## exactly as Rd2HTML would do it.
    checkLink <- function(tag, block) {
        option <- attr(block, "Rd_option")
        if (!is.null(option))
            checkContent(option, tag)
        checkContent(block, tag)
        get_link(block, tag, Rdfile) ## to do the same as Rd2HTML
    }

    checkEmail <- function(block) {
        pattern <- .make_RFC_2822_email_address_regexp()
        if(length(block)) {
            address <- lines2str(.Rd_deparse(block, tag = FALSE))
            if(!grepl(re_anchor(pattern), address))
                warnRd(block, Rdfile, level = 7,
                       "Invalid email address: ", address)
        }
    }

    checkURL <- function(block, tag) {
        pattern <- .make_RFC_2822_email_address_regexp()        
        if(tag == "\\url")
            u <- .Rd_deparse(block, tag = FALSE)
        else
            u <- .Rd_deparse(block[[1L]], tag = FALSE)
        u <- lines2str(u)
        parts <- parse_URI_reference(u)
        if(nzchar(s <- parts[, "scheme"])) {
            if(is.na(match(s, c(IANA_URI_scheme_db$URI_Scheme,
                                "javascript"))) ||
               ((s == "mailto") &&
                !grepl(re_anchor(pattern), parts[, "path"])))
                warnRd(block, Rdfile, level = 7,
                       "Invalid URL: ", u)
        }
    }

    checkLIST <- function(block, tag, blocktag, preblocks = NULL)
    {
        ## skip empty block or wrapped \Sexpr Rd result
        if (!length(block) || inherits(block, "Rd"))
            return()
        if (!listOK)
            stopRd(block, Rdfile, "Lost braces", showSource = TRUE)
        level <- -3
        msg2 <- NULL
        showSource <- TRUE
        if (note_lost_braces) {
            ## try to raise real issues like "code{.}" or "{1,2}",
            ## ignoring bib-braces, \tab *{}, \itemize{\item *{}}, {\sspace}
            npre <- length(preblocks)
            pretags <- vapply(preblocks, function (block) {
                tag <- attr(block, "Rd_tag")
                if (tag == "TEXT" && grepl("^[[:space:]]*$", block)) "BLANK"
                else tag
            }, "")
            pretagsNB <- pretags[pretags != "BLANK"]
            if (npreNB <- length(pretagsNB)) { # skip '{{...}}'
                pretag <- pretagsNB[npreNB]
                tags <- RdTags(block)
                inItemize <- blocktag %in% c("\\itemize", "\\enumerate")
                separated <- npre == 0L || pretags[npre] == "BLANK" ||
                    (pretags[npre] == "TEXT" && # catch 'emph{Journal}', '\"{o}',
                     ## '"[...]{...}', but ignore {P}oisson-{G}amma or ({EM})
                     !grepl("([[:alnum:]]|\\\\[[:punct:]]|[])])$", preblocks[[npre]]))
                ignore <-
                    (length(tags) == 1L && startsWith(tags, "\\") &&
                     separated) || # ignore ' {\code{...}}' but not ' code{\link{}}'
                    (length(tags) == 2L && tags[1L] == "USERMACRO") || # '{\sspace}'
                    (inItemize && pretag == "\\item") || # '\item {}'
                    pretag == "\\tab" || # '\tab {}'
                    (!is.null(srcref <- attr(block, "srcref")) &&
                     srcref[1L] == srcref[3L] && srcref[5L] > srcref[6L]) || # kludge for Rdpack
                    (sectiontag %in% c("\\source", "\\references") && (
                        separated || pretag == "\\cr" # '\cr\cr{ref}' relicts
                    ))
                if (!ignore) {
                    level <- -1
                    ## extra message for frequent misuse of \item *{label} *{desc}
                    if (inItemize && npreNB > 1L && pretag == "LIST" &&
                        pretagsNB[npreNB - 1L] == "\\item") {
                        msg2 <- paste0(" in ", blocktag, "; ",
                                       if (sectiontag == "\\value")
                                           "\\value handles \\item{}{} directly"
                                       else "meant \\describe ?")
                        showSource <- FALSE # misleading marker, often many \items
                    } else if (separated && identical(tags, "TEXT")) {
                        ## simple braced text: 'X_{i-1}' w/o \eqn, '{pkg}'
                        msg2 <- "; missing escapes or markup?"
                    }
                }
            }
        }
        warnRd(block, Rdfile, level = level,
               "Lost braces", msg2, showSource = showSource)
    }

    checkBlock <- function(block, tag, blocktag, preblocks = NULL)
    {
	switch(tag,
               ## parser already warned here
               UNKNOWN = if (!unknownOK)
               stopRd(block, Rdfile, "Unrecognized macro ", block[[1L]]),
               VERB = ,
               RCODE = ,
               TEXT = if(!grepl("^[[:space:]]*$", block)) {
                   has_text <<- TRUE
                   if(inEnc2 || !def_enc) {
                       ## check for encoding; parse_Rd converts to UTF-8,
                       ## unless encoding="ASCII", when non-ASCII content fails
                       ## (thus used by .check_package_parseRd if !def_enc);
                       ## so this check is only useful for the 'inEnc2' part or
                       ## to checkRd() individual Rd files outside of packages.
                       msg2 <- if(inEnc2) "in second part of \\enc"
                               else "without declared encoding"
                       if(Encoding(block) == "UTF-8")
                           warnRd(block, Rdfile, level = -1,
                                  "Non-ASCII contents ", msg2,
                                  ":\n  ", sQuote(trimws(block)))
                       ## the following gives mostly false positives nowadays,
                       ## from using such notation in the source file, as in
                       ## iconv.Rd, showNonASCII.Rd, UTF8filepaths.Rd:
                       ## if(grepl("<[0123456789abcdef][0123456789abcdef]>", block))
                       ##     warnRd(block, Rdfile, level = -3,
                       ##            "Apparent non-ASCII contents ", msg2)
                   }
                   if(tag == "TEXT") {
                       pat <- "([^\\]|^)\\\\[#$&_^~]"
                       if(grepl(pat, block)) {
                           txt <- sub("^[^\\]*", "",
                                      unlist(regmatches(block,
                                                        gregexpr(pat,
                                                                 block))))
                           warnRd(block, Rdfile, level = -1,
                                  "Escaped LaTeX specials: ",
                                  paste(txt, collapse = " "))
                       }
                   }
               },
               USERMACRO =,
               "\\newcommand" =,
               "\\renewcommand" =,
               COMMENT = {},
               LIST = {
                   checkLIST(block, tag, blocktag, preblocks)
                   checkContent(block, tag)
               },
               "\\describe"=,
               "\\enumerate"=,
               "\\itemize"=,
               "\\bold"=,
               "\\cite"=,
               "\\command"=,
               "\\dfn"=,
               "\\emph"=,
               "\\kbd"= checkContent(block, tag),
               "\\code"=,
               "\\preformatted"= checkCodeBlock(block, tag),
               "\\Sexpr"=,
               "\\special"=,
               "\\strong"=,
               "\\var" =,
               "\\verb"= checkContent(block, tag),
               "\\linkS4class" =,
               "\\link" = checkLink(tag, block),
               "\\email" = {
                   checkEmail(block)
                   has_text <<- TRUE
               },
               "\\url" = {
                   checkURL(block, tag)
                   has_text <<- TRUE
               },
               "\\cr" ={},
               "\\dots" =,
               "\\ldots" =,
               "\\R" = has_text <<- TRUE,
               "\\abbr" =,
               "\\acronym" =,
               "\\env" =,
               "\\file" =,
               "\\option" =,
               "\\pkg" =,
               "\\samp" =,
               "\\sQuote" =,
               "\\dQuote" = checkContent(block, tag),
               "\\method" =,
               "\\S3method" =,
               "\\S4method" =
                   warnRd(block, Rdfile, level = 7, "Tag ", tag,
                          " not valid outside a code block"),
               "\\enc" = {
                   checkContent(block[[1L]], tag)
                   ## second arg should always be ASCII
                   save_enc <- def_enc
                   def_enc <<- FALSE
                   inEnc2 <<- TRUE
                   checkContent(block[[2L]], tag)
                   def_enc <<- save_enc
                   inEnc2 <<- FALSE
               },
               "\\eqn" =,
               "\\deqn" =,
               "\\figure" = {
                   checkContent(block[[1L]], tag)
                   if (length(block) > 1L) checkContent(block[[2L]], tag)
               },
               "\\tabular" = checkTabular(block),
               "\\subsection" = {
                   checkSection(block, tag)
                   has_text <<- TRUE
               },
               "\\if" =,
               "\\ifelse" = {
    		   condition <- block[[1L]]
    		   tags <- RdTags(condition)
    		   if (!all(tags %in% c("TEXT", "\\Sexpr")))
    		       stopRd(block, Rdfile, "Condition must be \\Sexpr or plain text")
    		   condition <- condition[tags == "TEXT"]
    		   allow <- trimws(strsplit(paste(condition, collapse=""), ",")[[1L]])
    		   unknown <- allow %w/o% c("", "latex", "example", "text",
                                            "html", "TRUE", "FALSE")
    		   if (length(unknown))
    		       warnRd(block, Rdfile, level = 7, "Unrecognized format: ", unknown)
                   checkContent(block[[2L]], tag)
                   if (tag == "\\ifelse")
                       checkContent(block[[3L]], tag)
               },
               "\\href" = {
                   if (!identical(RdTags(block[[1L]]), "VERB"))
                   	stopRd(block, Rdfile,
                               "First argument to \\href must be verbatim URL")
                   checkURL(block, tag)
               	   checkContent(block[[2L]], tag)
               },
               "\\out" = {
               	   tags <- RdTags(block)
               	   if (!all(tags == "VERB"))
               	       stopRd(block, Rdfile, "Must contain verbatim text")
               },
               warnRd(block, Rdfile, level = 7, "Tag ", tag, " not recognized"))
    }

    checkCodeBlock <- function(blocks, blocktag)
    {
	for (block in blocks) {
            tag <- attr(block, "Rd_tag")
            switch(tag,
                   ## parser already warned here
                   UNKNOWN = if (!unknownOK)
                   stopRd(block, Rdfile, "Unrecognized macro ", block[[1L]]),
                   VERB = ,
                   RCODE = ,
                   TEXT = if(!grepl("^[[:space:]]*$", block)) {
                       has_text <<- TRUE
                       if((inEnc2 || !def_enc) && Encoding(block) == "UTF-8") {
                           ## same as in checkBlock
                           msg2 <- if(inEnc2) "in second part of \\enc"
                                   else "without declared encoding"
                           warnRd(block, Rdfile, level = -1,
                                  "Non-ASCII contents ", msg2,
                                  ":\n  ", sQuote(trimws(block)))
                       }
                   },
		   USERMACRO =,
		   "\\newcommand" =,
		   "\\renewcommand" =,
                   COMMENT = {},
                   "\\var" = checkCodeBlock(block, blocktag), # not preformatted, but the parser checks that
                   "\\special" = checkCodeBlock(block, blocktag),
                   "\\dots" = has_text <<- TRUE,
                   "\\ldots" = {
                       ## but it is rendered as ... in all converters
                       warnRd(block, Rdfile, level = -3,
                              "Tag ", tag, " is invalid in a code block")
                       has_text <<- TRUE
                   },
                   ## these are valid in \code, at least
                   "\\linkS4class" =,
                   "\\link" = checkLink(tag, block),
                   "\\method" =,
                   "\\S3method" =,
                   "\\S4method" = if(blocktag == "\\usage") {
                       checkContent(block[[1L]], tag) # generic
                       checkContent(block[[2L]], tag) # class
                   } else warnRd(block, Rdfile, level = 7,
                                 "Tag ", tag, " is only valid in \\usage"),
                   "\\dontrun" =,
                   "\\donttest" =, "\\dontdiff" =,
                   "\\dontshow" =,
                   "\\testonly" = if(blocktag == "\\examples")
                   checkCodeBlock(block, blocktag)
                   else warnRd(block, Rdfile, level = 7,
                               "Tag ", tag, " is only valid in \\examples"),
                   {
                       warnRd(block, Rdfile, level = 7,
                              "Tag ", tag, " is invalid in a ",
                              blocktag, " block")
                       has_text <<- TRUE  # likely, e.g. \url
                   })
        }
    }

    checkTabular <- function(table) {
        has_text <<- TRUE
    	format <- table[[1L]]
    	content <- table[[2L]]
    	if (length(format) != 1 || RdTags(format) != "TEXT")
    	    warnRd(table, Rdfile, level = 7,
                   "\\tabular format must be simple text")
    	format <- strsplit(format[[1L]], "", fixed=TRUE)[[1L]]
    	if (!all(format %in% c("l", "c", "r")))
    	    warnRd(table, Rdfile, level = 7,
                   "Unrecognized \\tabular format: ", table[[1L]][[1L]])
        tags <- RdTags(content)

        newrow <- TRUE
        for (i in seq_along(tags)) {
            if (newrow) {
            	newrow <- FALSE
            	col <- 0
            	newcol <- TRUE
            }
            if (newcol) {
                col <- col + 1
                if (col > length(format))
                    warnRd(table, Rdfile, level = 7,
                           "Only ", length(format),
                           if (length(format) == 1) " column " else " columns ",
                           "allowed in this table")
            	newcol <- FALSE
            }
            switch(tags[i],
            "\\tab" = {
            	newcol <- TRUE
            },
            "\\cr" = {
            	newrow <- TRUE
            },
            checkBlock(content[[i]], tags[i], "\\tabular",
                       content[seq_len(i-1L)]))
        }
    }

    checkContent <- function(blocks, blocktag) {
        inlist <- FALSE

	tags <- RdTags(blocks)

	for (i in seq_along(tags)) {
            tag <- tags[i]
            block <- blocks[[i]]
            switch(tag,
            "\\item" = {
    	    	if (!inlist) inlist <- TRUE
                CHECK_BLOCKS <- c(if (!allow_empty_item_in_describe) "\\describe",
                                  "\\arguments", "\\value")
                if((blocktag %in% CHECK_BLOCKS) &&
                    isBlankRd(block[[1L]]))
                    warnRd(block, Rdfile, level = 5,
                           "\\item in ", blocktag,
                           " must have non-empty label")
    		switch(blocktag,
    		"\\arguments"= {
    		    checkContent(block[[1L]], tag)
    		    checkContent(block[[2L]], tag)
    		},
    		"\\value"=,
    		"\\describe"= {
    		    checkContent(block[[1L]], tag)
    		    checkContent(block[[2L]], tag)
    		},
    		"\\enumerate"=,
    		"\\itemize"= {})
    	    },
    	    { # default
    	    	if (inlist && (blocktag %notin% c("\\itemize", "\\enumerate"))
    	    	           && !(tag == "TEXT" && isBlankRd(block))) {
    		    inlist <- FALSE
    		}
                checkBlock(block, tag, blocktag,
                           blocks[seq_len(i-1L)])
    	    })
	}
    }

    has_text <- FALSE
    checkSection <- function(section, tag) {
    	s0 <- section
    	if (tag == "\\section" || tag == "\\subsection") {
    	    title <- section[[1L]]
    	    checkContent(title, tag)
    	    section <- section[[2L]]
            ## replace 'tag' in message below
            tagtitle <- sQuote(trimws(.Rd_deparse(title)))
    	} else tagtitle <- tag
        has_text <<- FALSE
        ## if (tag == "\\synopsis")  # already removed via prepare_Rd
        ##     stopRd(section, Rdfile, "\\synopsis was removed in R 3.1.0")
        if (tag %in% c("\\usage", "\\examples"))
            checkCodeBlock(section, tag)
    	else checkContent(section, tag)
        if(!has_text) warnRd(s0, Rdfile, level = 3,
                             "Empty section ", tagtitle)

        if (tag %in% c("\\title", "\\section", "\\subsection")) {
            rd <- .Rd_deparse(if (tag == "\\title") section else title,
                              tag = FALSE)
            if (grepl("[^.]\\.[[:space:]]*$", rd) &&
                !grepl("(etc|et[[:space:]]+al)\\.[[:space:]]*$", rd))
                warnRd(s0, Rdfile, level = -5,
                       tag, if (tag != "\\title") " name",
                       " should not end in a period")
        }
    }

    .messages <- character()
    .whandler <-     function(e) {
        .messages <<- c(.messages, paste("prepare_Rd:", conditionMessage(e)))
        tryInvokeRestart("muffleWarning")
    }

    Rd <- withCallingHandlers({
        prepare_Rd(Rd, defines=defines, stages=stages,
                   warningCalls = FALSE, ..., msglevel = 1)
    }, warning = .whandler)
    Rdfile <- attr(Rd, "Rdfile")

    ## check \name
    name <- as.character(Rd[[2L]])
    if(grepl("[!|@]", name)) # these cause LaTeX indexing problems
        warnRd(Rd[[2L]], Rdfile, level = 5,
               "\\name should not contain !, | or @")
    if (Encoding(name) == "UTF-8" ||
        !all(utils::charClass(trimws(name), "print")))
        warnRd(Rd[[2L]], Rdfile, level = -1,
               "\\name should only contain printable ASCII characters")

    ## check all sections
    sections <- RdTags(Rd)
    if (any(sections == "\\encoding")) def_enc <- TRUE
    inEnc2 <- FALSE
    for (i in seq_along(sections)) {
        sectiontag <- sections[i] # also used in checkLIST()
        checkSection(Rd[[i]], sectiontag)
    }

    structure(.messages, class = "checkRd")
}

## Print checkRd() results, optionally suppressing messages below
## 'minlevel'.  Messages forwarded from prepare_Rd() carry no level;
## they are shown unless a positive threshold was requested.
print.checkRd <- function(x, minlevel = -Inf, ...)
{
    is_parse_msg <- startsWith(x, "prepare_Rd")
    parse_msgs <- x[is_parse_msg]
    check_msgs <- x[!is_parse_msg]
    ## levels are encoded in the messages as "checkRd: (<level>) ..."
    lev <- as.numeric(sub("^checkRd: \\(([-0123456789]+)(.*)", "\\1",
                          check_msgs))
    keep <- check_msgs[lev >= minlevel]
    if (minlevel <= 0)
        keep <- c(parse_msgs, keep)
    writeLines(unique(keep))
    invisible(x)
}

## Evaluate an \if / \ifelse condition for the given output 'format':
## TRUE when 'format' (or the literal "TRUE") appears among the
## comma-separated formats listed in the condition, which must consist
## of plain text only.
testRdConditional <- function(format, conditional, Rdfile) {
    cond <- conditional[[1L]]
    if (!all(RdTags(cond) == "TEXT"))
        stopRd(conditional, Rdfile, "condition must be plain text")

    listed <- trimws(strsplit(paste(cond, collapse = ""), ",")[[1L]])
    any(c("TRUE", format) %in% listed)
}

## S3 generic: convert an R object into a fragment of Rd markup.
toRd <- function(obj, ...)
{
    UseMethod("toRd")
}

## Default method: coerce to character and backslash-escape the Rd
## specials.  The backslash pass must run first so the escapes inserted
## for {, } and % are not themselves escaped.
toRd.default <- function(obj, ...) {
    out <- as.character(obj)
    out <- gsub("\\", "\\\\", out, fixed = TRUE)  # useBytes=FALSE  {really?}
    out <- gsub("{", "\\{", out, fixed = TRUE)
    out <- gsub("}", "\\}", out, fixed = TRUE)
    gsub("%", "\\%", out, fixed = TRUE)
}
#  File src/library/tools/R/RdHelpers.R
#  Part of the R package, https://www.R-project.org
#
#  Copyright (C) 2019-2023 The R Core Team
#
#  This program is free software; you can redistribute it and/or modify
#  it under the terms of the GNU General Public License as published by
#  the Free Software Foundation; either version 2 of the License, or
#  (at your option) any later version.
#
#  This program is distributed in the hope that it will be useful,
#  but WITHOUT ANY WARRANTY; without even the implied warranty of
#  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
#  GNU General Public License for more details.
#
#  A copy of the GNU General Public License is available at
#  https://www.R-project.org/Licenses/

Rd_escape_specials <-
function(x)
{
    ## Backslash-escape the Rd special characters %, {, } and \ in x.
    ## The backslash substitution must come first so that the escapes
    ## added for the other specials are not escaped again.
    x <- gsub("\\", "\\\\", x, fixed = TRUE)
    x <- gsub("%", "\\%", x, fixed = TRUE)
    x <- gsub("{", "\\{", x, fixed = TRUE)
    gsub("}", "\\}", x, fixed = TRUE)
}

Rd_expr_PR <-
function(x)
{
    ## Rd \PR{} support: a hyperlink to bug report 'x' on R's Bugzilla,
    ## rendered as "PR#<id>".
    paste0("\\href{https://bugs.R-project.org/show_bug.cgi?id=",
           x, "}{PR#", x, "}")
}

## These following functions are to take information from the package
## DESCRIPTION file at build time.  During a build, the current
## directory holds the DESCRIPTION file; set dir to something else if
## used in a different context.

Rd_macros_package_dir <-
function()
{
    ## Directory holding the DESCRIPTION file while Rd build-time
    ## macros run; defaults to the current directory during R CMD build.
    Sys.getenv("_R_RD_MACROS_PACKAGE_DIR_", unset = ".")
}

Rd_package_title <-
function(pkg, dir = Rd_macros_package_dir())
{
    ## Rd \packageTitle{} support: the Title field of the DESCRIPTION
    ## file in 'dir', escaped for use in Rd markup.
    desc <- .read_description(file.path(dir, "DESCRIPTION"))
    if (pkg != desc["Package"])
        ## gettextf() already translated; domain = NA stops stop() from
        ## trying to translate the composed message again.
        stop(gettextf("DESCRIPTION file is for package '%s', not '%s'",
                      desc["Package"], pkg),
             domain = NA)
    Rd_escape_specials(desc["Title"])
}

Rd_package_description <-
function(pkg, dir = Rd_macros_package_dir())
{
    ## Rd \packageDescription{} support: the Description field of the
    ## DESCRIPTION file in 'dir', escaped for use in Rd markup.
    desc <- .read_description(file.path(dir, "DESCRIPTION"))
    if (pkg != desc["Package"])
        ## gettextf() already translated; domain = NA stops stop() from
        ## trying to translate the composed message again.
        stop(gettextf("DESCRIPTION file is for package '%s', not '%s'",
                      desc["Package"], pkg),
             domain = NA)
    Rd_escape_specials(desc["Description"])
}

Rd_package_author <-
function(pkg, dir = Rd_macros_package_dir())
{
    ## Rd \packageAuthor{} support: the Author field of the DESCRIPTION
    ## file in 'dir' (computed from Authors@R if necessary), escaped
    ## for use in Rd markup.
    desc <- .read_description(file.path(dir, "DESCRIPTION"))
    if (pkg != desc["Package"])
        ## gettextf() already translated; domain = NA stops stop() from
        ## trying to translate the composed message again.
        stop(gettextf("DESCRIPTION file is for package '%s', not '%s'",
                      desc["Package"], pkg),
             domain = NA)
    ## derive Author/Maintainer from Authors@R when only that is given
    desc <- c(desc, .expand_package_description_db_R_fields(desc))
    Rd_escape_specials(desc["Author"])
}

Rd_package_maintainer <-
function(pkg, dir = Rd_macros_package_dir())
{
    ## Rd \packageMaintainer{} support: the Maintainer field of the
    ## DESCRIPTION file in 'dir' (computed from Authors@R if
    ## necessary), escaped for use in Rd markup.
    desc <- .read_description(file.path(dir, "DESCRIPTION"))
    if (pkg != desc["Package"])
        ## gettextf() already translated; domain = NA stops stop() from
        ## trying to translate the composed message again.
        stop(gettextf("DESCRIPTION file is for package '%s', not '%s'",
                      desc["Package"], pkg),
             domain = NA)
    ## derive Author/Maintainer from Authors@R when only that is given
    desc <- c(desc, .expand_package_description_db_R_fields(desc))
    Rd_escape_specials(desc["Maintainer"])
}

Rd_package_DESCRIPTION <-
function(pkg, lib.loc = Sys.getenv("R_BUILD_TEMPLIB"))
{
    ## Rd \packageDESCRIPTION{} support: render the installed package's
    ## DESCRIPTION metadata as a two-column \tabular, or a placeholder
    ## when the package has not yet been installed at build time.
    if (!length(find.package(pkg, lib.loc = lib.loc, quiet=TRUE)))
        "This package was not yet installed at build time.\\cr"
    else {
	tabular <- function(col1, col2)
	    c("\\tabular{ll}{", paste0(col1, " \\tab ", col2, "\\cr"), "}")

	desc <- utils::packageDescription(pkg, lib.loc = lib.loc)
	if (pkg != desc[["Package"]])
	    ## desc is a list here, so use [[ (not [) in the message;
	    ## domain = NA as gettextf() already translated.
	    stop(gettextf("DESCRIPTION file is for package '%s', not '%s'",
	                  desc[["Package"]], pkg),
	         domain = NA)
	desc <- desc[names(desc) != "Built"] # Probably a stale value
	tabular(paste0(names(desc), ":"), Rd_escape_specials(unlist(desc)))
    }
}

Rd_package_indices <-
function(pkg, lib.loc = Sys.getenv("R_BUILD_TEMPLIB"))
{
    ## Rd \packageIndices{} support: the installed package's help-topic
    ## index and vignette listing, or a placeholder when the package has
    ## not yet been installed at build time.
    if (!length(find.package(pkg, lib.loc = lib.loc, quiet=TRUE)))
        return(c("",
                 "Index:  This package was not yet installed at build time.\\cr"))

    tabular <- function(col1, col2)
        c("\\tabular{ll}{", paste0(col1, " \\tab ", col2, "\\cr"), "}")

    info <- library(help = pkg, lib.loc = lib.loc, character.only = TRUE)

    result <- NULL
    # FIXME:  these indices should contain links...
    if (!is.null(info$info[[2L]]))
        ## this is readLines(system.file("INDEX", package = pkg, lib.loc = lib.loc))
        result <- c("", "Index of help topics:", "\\preformatted{",
                    info$info[[2L]], "}")
    if (!is.null(info$info[[3L]]))
        ## FIXME: unreachable in build stage as vignettes get only built
        ## after partial.rdb
        result <- c(result, "",
                    "Further information is available in the following vignettes:\\cr\\cr",
                    tabular(paste0("\\code{", info$info[[3L]][, 1L], "}"),
                            info$info[[3L]][, 2L]))
    result
}

Rd_expr_doi <-
function(x)
{
    ## Rd \doi{} support: emit markup rendering the DOI as a hyperlink
    ## in HTML and LaTeX, and as "doi:<x> <url>" in plain text.

    ## Be nice ...
    x <- .canonicalize_doi(x)

    u <- Rd_escape_specials(urlify_doi(x))
    x <- Rd_escape_specials(x)
    ## Poor person's way to allow LaTeX to break lines at slashes and
    ## dashes:
    breakable <- gsub("-", "\\out{\\-}", x, fixed = TRUE)
    breakable <- gsub("/", "\\out{\\slash{}}", breakable, fixed = TRUE)

    text  <- sprintf("doi:%s <https://doi.org/%s>",  # same format as showURLs=TRUE
                     x, u)
    latex <- sprintf("\\href{https://doi.org/%s}{doi:%s}", u, breakable)
    other <- sprintf("\\href{https://doi.org/%s}{doi:%s}", u, x)
    sprintf("\\ifelse{text}{%s}{\\ifelse{latex}{%s}{%s}}",
            text, latex, other)
}
#  File src/library/tools/R/Rdtools.R
#  Part of the R package, https://www.R-project.org
#
#  Copyright (C) 1995-2014 The R Core Team
#
#  This program is free software; you can redistribute it and/or modify
#  it under the terms of the GNU General Public License as published by
#  the Free Software Foundation; either version 2 of the License, or
#  (at your option) any later version.
#
#  This program is distributed in the hope that it will be useful,
#  but WITHOUT ANY WARRANTY; without even the implied warranty of
#  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
#  GNU General Public License for more details.
#
#  A copy of the GNU General Public License is available at
#  https://www.R-project.org/Licenses/


## Filter an Rd file (or already-parsed Rd object) down to its textual
## content, e.g. for spell checking with aspell(): markup tags are
## dropped while -- by default (keepSpacing = TRUE) -- the original
## line/column layout is preserved, so that checker positions map back
## to the Rd source.  'drop' names additional tags whose content is
## discarded; 'keep' names tags whose (text and list) content is
## retained.  Returns a character vector of output lines, converted
## back to latin1 for files originally in that encoding (see below).
RdTextFilter <-
function(ifile, encoding = "unknown", keepSpacing = TRUE,
         drop = character(), keep = character(),
         macros = file.path(R.home("share"), "Rd", "macros", "system.Rd"))
{
    if(inherits(ifile, "srcfile"))
        ifile <- ifile$filename
    if (inherits(ifile, "Rd")) {
	# Undo sorting done in prepare2_Rd
	srcrefs <- sapply(ifile, function(s) attr(s, "srcref"))
	p <- ifile[ order(srcrefs[1,], srcrefs[2,]) ]
	class(p) <- class(ifile)
    } else
    	p <- parse_Rd(ifile, encoding = encoding, macros = macros)

    tags <- RdTags(p)

    ## If the file declares its own encoding, re-parse with it (only
    ## possible when we were given a file rather than a parsed object).
    if ("\\encoding" %in% tags) {
	encoding <- p[[which.max(tags == "\\encoding")]][[1L]]
	if (encoding %in% c("UTF-8", "utf-8", "utf8")) encoding <- "UTF-8"
	if (!inherits(ifile, "Rd"))
	    p <- parse_Rd(ifile, encoding=encoding, macros = macros)
    } else
	encoding <- ""

    ## Directly using a text connection to accumulate the filtered
    ## output seems to be faster than using .eval_with_capture(): to use
    ## the latter, change mycat to cat, or use mycat <- cat, and create
    ## out via
    ## out <- .eval_with_capture({
    ##     show(p)
    ##     mycat("\n")
    ## })$output

##    myval <- character()
    mycon <- textConnection("myval", open = "w", local = TRUE,
                            encoding = "UTF-8")
    on.exit(close(mycon))
    mycat <- function(...) cat(..., file = mycon)

    ## Current output position, updated by show() as text is emitted.
    prevline <- 1L
    prevcol <- 0L

    ## Merge a one-word markup span with the following text so that
    ## constructs like \bold{pre}fix are emitted as the single word
    ## "prefix"; returns TRUE when both nodes were consumed.
    doPartialMarkup <- function(x, tags, i) { # handle things like \bold{pre}fix
        result <- FALSE
    	if (i < length(tags)
            && tags[i+1L] == "TEXT"
            && length(x[[i]]) == 1L
            && tags[i] %in% c("\\bold", "\\emph", "\\strong")
            && (tags[i] %notin% drop)
            && RdTags(x[[i]]) == "TEXT") {
    	    text1 <- x[[i]][[1L]]
    	    if (length(grep("[[:alpha:]]$", text1))) { # Ends in alpha
    	    	text2 <- x[[i+1L]]
    	    	if (length(grep("^[[:alpha:]]", text2))) { # Starts with alpha
    	    	    show(text1)
    	    	    prevcol <<- prevcol+1L # Shift text2 left by one column
    	    	    ## NOTE(review): 'saveline' appears to be unused -- kept as-is
    	    	    saveline <- prevline
    	    	    show(text2, extraSpace = keepSpacing)
    		    result <- TRUE
    		}
    	    }
	}
	result
    }

    ## Recursively emit the text of node 'x', padding with newlines and
    ## spaces (when keepSpacing) so output positions match the srcref of
    ## the input.  extraSpace compensates for a dropped markup character.
    show <- function(x, extraSpace = FALSE) {
	srcref <- attr(x, "srcref")
	firstline <- srcref[1L]
	lastline <- srcref[3L]
	firstcol <- srcref[5L]
	lastcol <- srcref[6L]
	tag <- attr(x, "Rd_tag")
	if (is.null(tag)) tag <- "NULL"
	if (tag %in% drop) tag <- "DROP"
	else if (tag %in% keep) tag <- "KEEPLIST"  # Include both text and lists
	switch(tag,
	KEEP =,
	TEXT = {
	    ## pad up to the node's original line ...
	    if (prevline < firstline) {
		prevcol <<- 0L
		mycat(rep.int("\n",
                              if(keepSpacing) firstline - prevline else 1L))
	    }
	    ## ... and column
	    if (keepSpacing)
                mycat(rep.int(" ", max(0, firstcol - prevcol - 1L)), sep = "")
	    x <- as.character(srcref) # go back to original form
	    if (extraSpace) {
	    	if (grepl("[^\n]* ", x))
	    	    x <- sub(" ", "  ", x)
	    	else if (firstline == lastline)
	    	    x <- paste0(x, " ")
	    }
	    mycat(x, sep = "")
	    prevcol <<- lastcol
	    prevline <<- lastline
	},
	## tags whose content is never useful for spell checking:
	"\\S3method"=,
	"\\S4method"=,
        "\\command"=,
	"\\docType"=,
	"\\email"=,
	"\\encoding"=,
	"\\file"=,
	"\\keyword"=,
	"\\link"=,
        "\\linkS4class"=,
	"\\method"=,
	"\\pkg"=,
	"\\var"=,
	DROP = {},  # do nothing

	"\\tabular"=,
	"#ifdef"=,
	"#ifndef"={  # Ignore the first arg, process the second
	    show(x[[2L]])
	},
	"\\item"={   # Ignore the first arg of a two-arg item
	    if (length(x) == 2L) show(x[[2L]])
	},
	{	# default
	    if (is.list(x)) {
             	tags <- RdTags(x)
             	i <- 0L
             	while (i < length(x)) {
             	    i <- i + 1L
             	    if (doPartialMarkup(x, tags, i))
             	    	i <- i + 1L
             	    else
             		show(x[[i]])
             	}
	    } else if (tag == "KEEPLIST") {
	    	attr(x, "Rd_tag") <- "KEEP"
	    	show(x)
	    }
	})# {switch}
    }# end show()

    show(p)
    mycat("\n")

    out <- textConnectionValue(mycon)

    ## Ideally, we would always canonicalize to UTF-8.
    ## However, when using RdTextFilter() for aspell(), it is not clear
    ## whether this is a good idea: the aspell program does not need to
    ## have full UTF-8 support (and what precisely holds is not clear:
    ## the manuals says that aspell
    ##   can easily check documents in UTF-8 without having to use a
    ##   special dictionary.
    ## but also
    ##   If Aspell is compiled with a version of the curses library that
    ##   support wide characters then Aspell can also check UTF-8 text.
    ## So at least until this can be resolved, turn filter results for
    ## Rd files originally in latin1 back to latin1.
    if(encoding == "latin1")
        out <- iconv(out, "UTF-8", "latin1")

    out
}
#  File src/library/tools/R/read.00Index.R
#  Part of the R package, https://www.R-project.org
#
#  Copyright (C) 1995-2012 The R Core Team
#
#  This program is free software; you can redistribute it and/or modify
#  it under the terms of the GNU General Public License as published by
#  the Free Software Foundation; either version 2 of the License, or
#  (at your option) any later version.
#
#  This program is distributed in the hope that it will be useful,
#  but WITHOUT ANY WARRANTY; without even the implied warranty of
#  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
#  GNU General Public License for more details.
#
#  A copy of the GNU General Public License is available at
#  https://www.R-project.org/Licenses/

## Read a 00Index-style file (as written by Rdindex() or by
## formatDL(style = "table")) and return a two-column character matrix
## with columns "Item" and "Description".  'file' may be a path, "" for
## stdin, or a connection.  Chunks which cannot be interpreted as a
## description-list table are silently skipped.
read.00Index <-
function(file)
{
    if(is.character(file)) {
        if(file == "") file <- stdin()
        else {
            file <- file(file, "r")
            on.exit(close(file))
        }
    }
    if(!inherits(file, "connection"))
        stop(gettextf("argument '%s' must be a character string or connection",
                      file),
             domain = NA)

    x <- paste(readLines(file), collapse = "\n")

    ## <FIXME>
    ## We cannot necessarily assume that the 00Index-style file to be
    ## read in was generated by @code{Rdindex()} or by R using
    ## formatDL(style = "table").  In particular, some packages have
    ## 00Index files with (section) headers and footers in addition to
    ## the data base chunks which are description lists rendered in
    ## tabular form.  Hence, we need some heuristic for identifying the
    ## db chunks.  Easy to the human eye (is there a column for aligning
    ## entries?) but far from trivial ... as a first approximation we
    ## try to consider chunks containing at least one tab or three
    ## spaces a db chunk.  (A better heuristic would be the following:
    ## entries rendered in one line have item and description separated
    ## by at least 3 spaces or tabs; entries with a line break have
    ## continuation lines starting with whitespace (no test whether for
    ## alignment).  If a chunk is made of such entries only it is
    ## considered a db chunk.  But not all current packages follow this
    ## scheme.  Argh.)
    ## Clearly we need to move to something better in future versions.
    ## </FIXME>

    ## First split into paragraph chunks separated by whitespace-only
    ## lines.
    chunks <- unlist(strsplit(x, "\n[ \t\n]*\n"))
    ## Collect the per-chunk matrices in a list and bind once at the
    ## end, rather than growing a matrix with rbind() in the loop.
    pieces <- vector("list", length(chunks))
    for(i in seq_along(chunks)) {
        chunk <- chunks[[i]]
        entries <- tryCatch({
            if(!grepl("(   |\t)", chunk))
                NULL
            else {
                ## Combine entries with continuation lines.
                chunk <- gsub("\n[ \t]+", "\t", chunk)
                ## Split into lines and then according to whitespace.
                fields <- strsplit(unlist(strsplit(chunk, "\n")), "[ \t]")
                cbind(vapply(fields, `[[`, "", 1L),
                      vapply(fields, function(t) {
                          paste(t[-c(1L, which(!nzchar(t)))],
                                collapse = " ")
                      }, ""))
            }
        },
                            error = identity)
        if(!inherits(entries, "error") && NCOL(entries) == 2L)
            pieces[[i]] <- entries
    }
    ## The leading empty matrix keeps the 0 x 2 shape when no chunk
    ## yielded entries (NULL list elements are ignored by rbind).
    y <- do.call(rbind, c(list(matrix("", nrow = 0L, ncol = 2L)), pieces))
    colnames(y) <- c("Item", "Description")
    y
}
#  File src/library/tools/R/recode.R
#  Part of the R package, https://www.R-project.org
#
#  Copyright (C) 1995-2023 The R Core Team
#
#  This program is free software; you can redistribute it and/or modify
#  it under the terms of the GNU General Public License as published by
#  the Free Software Foundation; either version 2 of the License, or
#  (at your option) any later version.
#
#  This program is distributed in the hope that it will be useful,
#  but WITHOUT ANY WARRANTY; without even the implied warranty of
#  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
#  GNU General Public License for more details.
#
#  A copy of the GNU General Public License is available at
#  https://www.R-project.org/Licenses/

### Remap a character string from encoded text to LaTeX escapes
encoded_text_to_latex <-
    function(x, encoding = c("latin1", "latin2", "latin9", "UTF-8", "utf8"))
{
    ## Translate each string in 'x', given in the specified 8-bit or
    ## UTF-8 encoding, into pure-ASCII LaTeX escape sequences using the
    ## latin*table / utf8table lookup tables defined in this file.
    ## Characters with no known escape are replaced by "?".
    encoding <- match.arg(encoding)
    do_latin1 <- function(x) {
        xx <- charToRaw(x)
        paste(latin1table[as.integer(xx)], collapse="")}
    do_latin2 <- function(x) {
        xx <- charToRaw(x)
        paste(latin2table[as.integer(xx)], collapse="")}
    do_latin9 <- function(x) {
        xx <- charToRaw(x)
        paste(latin9table[as.integer(xx)], collapse="")}
    do_utf8 <- function(x) {
        xx <- utf8ToInt(x)
        y <- rep.int("?", length(xx))
        ## Code points below 512 are table-driven; selected common
        ## higher code points are handled explicitly below.
        y[xx < 512] <- utf8table[xx[xx < 512]]
        y[xx == 0x02C6] <- "{\\textasciicircum}"
        y[xx == 0x02C7] <- "{\\textasciicaron}"
        ## U+02DC SMALL TILDE (this slot was previously keyed by
        ## mistake to 0x02CA, MODIFIER LETTER ACUTE ACCENT).
        y[xx == 0x02DC] <- "{\\textasciitilde}"
        y[xx == 0x02D8] <- "{\\textasciibreve}"
        y[xx == 0x02D9] <- "{\\textperiodcentered}"
        y[xx == 0x02DD] <- "{\\textacutedbl}"
        y[xx == 0x200C] <- "{\\textcompwordmark}"
        y[xx == 0x2018] <- "{\\textquoteleft}"
        y[xx == 0x2019] <- "{\\textquoteright}"
        y[xx == 0x201C] <- "{\\textquotedblleft}"
        y[xx == 0x201D] <- "{\\textquotedblright}"
        y[xx == 0x2020] <- "{\\textdagger}"
        y[xx == 0x2022] <- "{\\textbullet}"
        y[xx == 0x2026] <- "{\\textellipsis}"
        y[xx == 0x20AC] <- "{\\texteuro}"
        paste(y, collapse="")
    }
    as.vector(switch(encoding,
                     "latin1" = sapply(x, do_latin1),
                     "latin2" = sapply(x, do_latin2),
                     "latin9" = sapply(x, do_latin9),
                     "UTF-8"  = sapply(x, do_utf8),
                     "utf8"   = sapply(x, do_utf8),
                     stop("unimplemented encoding")
                        ))
}

## LaTeX escapes for Latin-1 (ISO 8859-1) text, indexed by byte value:
## latin1table[as.integer(charToRaw(ch))] gives the escape for 'ch'.
## Byte 0x00 cannot occur in an R string, so the table starts at 0x01;
## bytes with no usable escape map to "?".
latin1table <- c(
     rep.int("?", 31), ## omit 0x0
     ## 0x20 to 0x7E (printable ASCII passes through); 0x7F -> "?"
     rawToChar(as.raw(seq.int(32, 126)), multiple=TRUE), "?",
     ## 0x80 to 0x9F (C1 controls)
     rep.int("?", 32),
     ## 0xA0 = 160 on
     "{\\nobreakspace}", "{\\textexclamdown}", "{\\textcent}", "{\\textsterling}", "{\\textcurrency}", "{\\textyen}", "{\\textbrokenbar}", "{\\S}",
     '\\"{}', "{\\textcopyright}", "{\\textordfeminine}", "{\\guillemotleft}", "{\\textlnot}", "\\-", "{\\textregistered}", "{\\a={}}",
     "{\\textdegree}", "{\\textpm}", "{\\mathtwosuperior}", "{\\maththreesuperior}", "{\\a'{}}", "{\\textmu}", "{\\P}", "{\\textperiodcentered}",
     "{\\c\\ }", "{\\mathonesuperior}", "{\\textordmasculine}", "{\\guillemotright}", "{\\textonequarter}", "{\\textonehalf}", "{\\textthreequarters}", "{\\textquestiondown}",
     "{\\a`A}", "{\\a'A}", "{\\^A}", "{\\~A}", '{\\"A}', "{\\r A}", "{\\AE}", "{\\c C}",
     "{\\a`E}", "{\\a'E}", "{\\^E}", '{\\"E}', "{\\a`I}", "{\\a'I}", "{\\^I}", '{\\"I}',
     "{\\DH}", "{\\~N}", "{\\a`O}", "{\\a'O}", "{\\^O}", "{\\~O}", '{\\"O}', "{\\texttimes}",
     "{\\O}", "{\\a`U}", "{\\a'U}", "{\\^U}", '{\\"U}', "{\\a'Y}", "{\\TH}", "{\\ss}",
     "{\\a`a}", "{\\a'a}", "{\\^a}", "{\\~a}", '{\\"a}', "{\\r a}", "{\\ae}", "{\\c c}",
     "{\\a`e}", "{\\a'e}", "{\\^e}", '{\\"e}',"{\\a`\\i}", "{\\a'\\i}", "{\\^\\i}", '{\\"\\i}',
     "{\\dh}", "{\\~n}", "{\\a`o}", "{\\a'o}", "{\\^o}", "{\\~o}", '{\\"o}', "{\\textdiv}",
     "{\\o}", "{\\a`u}", "{\\a'u}", "{\\^u}", '{\\"u}', "{\\a'y}", "{\\th}", '{\\"y}'
     )

## LaTeX escapes for Latin-2 (ISO 8859-2) text, indexed by byte value
## exactly as latin1table above.
latin2table <- c(
     rep.int("?", 31), ## omit 0x0
     ## 0x20 to 0x7E (printable ASCII passes through); 0x7F -> "?"
     rawToChar(as.raw(seq.int(32, 126)), multiple=TRUE), "?",
     ## 0x80 to 0x9F (C1 controls)
     rep.int("?", 32),
     ## 0xA0 = 160 on
     "{\\nobreakspace}", "{\\k A}", "{\\u{}}", "{\\L}", "{\\textcurrency}", "{\\v L}", "{\\a'S}", "{\\S}",
     '\\"{}', "{\\v S}", "{\\c S}", "{\\v T}", "{\\\'Z}", "\\-", "{\\v Z}", "{\\.Z}",
     "{\\textdegree}", "{\\k A}", "{\\k\\ }", "{\\l}", "{\\a'{}}", "{\\v l}", "{\\a's}", "{\\v{}}",
     "{\\c\\ }", "{\\v s}", "{\\c s}", "{\\v t}", "{\\'z}", "{\\H{}}", "{\\v z}", "{\\.z}",
     "{\\a'R}", "{\\a'A}", "{\\^A}", "{\\u A}", '{\\"A}', "{\\'L}", "{\\a'C}", "{\\c C}",
     "{\\v C}", "{\\a'E}", "{\\k E}", '{\\"E}', "{\\v E}", "{\\'I}", "{\\^I}", '{\\v D}',
     "{\\DJ}", "{\\a'N}", "{\\v N}", "{\\a'O}", "{\\^O}", "{\\H O}", '{\\"O}', "{\\texttimes}",
     "{\\v R}", "{\\r U}", "{\\a'U}", "{\\H U}", '{\\"U}', "{\\a'Y}", "{\\c I}", "{\\ss}",
     "{\\a'r}", "{\\a'a}", "{\\^a}", "{\\u a}", '{\\"a}', "{\\'l}", "{\\a'c}", "{\\c c}",
     "{\\v c}", "{\\a'e}", "{\\k e}", '{\\"e}', "{\\v e}", "{\\'\\i}", "{\\^\\i}", '{\\v d}',
     "{\\dj}", "{\\a'n}", "{\\v n}", "{\\a'o}", "{\\^o}", "{\\H o}", '{\\"o}', "{\\textdiv}",
     "{\\v r}", "{\\r u}", "{\\a'u}", "{\\H u}", '{\\"u}', "{\\a'y}", "{\\c t}", '{\\.{}}'
     )

## "latin9" table: Latin-1 with six slots remapped to the accented
## letters G-breve, dotted-I and S-cedilla (upper/lower case).
## NOTE(review): those slots (0xD0 etc.) match ISO 8859-9 (Turkish),
## not ISO 8859-15 which is also commonly called "latin9" -- confirm
## which encoding callers pass under this name.
latin9table <- latin1table
latin9table[c(0xD0, 0xDD, 0xDE,
              0xF0, 0xFD, 0xFE)] <-
    c("{\\u G}", "{\\.I}", "{\\c S}",
      "{\\u g}", "{\\i}",  "{\\c s}")


## LaTeX escapes for Unicode code points 1..511: points up to 0xFF
## coincide with Latin-1, the rest (mostly Latin Extended-A) default to
## "?" with selected letters filled in below.  Used by do_utf8() in
## encoded_text_to_latex() for code points < 512.
utf8table <- c(latin1table, rep.int("?", 256))

## Latin Extended-A: breve (\u), ogonek (\k), acute (\a'), caron (\v),
## double acute (\H), ring (\r), cedilla (\c) and dot (\.) accents.
utf8table[0x0102:0x107] <-
    c("{\\u A}","{\\u a}", "{\\k A}", "{\\k a}", "{\\a'C}", "{\\a'c}")
utf8table[0x010C:0x111] <-
    c( "{\\v C}","{\\v c}","{\\v D}","{\\v d}","{\\DJ}","{\\dj}")

utf8table[0x0118:0x11B] <- c("{\\k E}","{\\k e}", "{\\v E}","{\\v e}")
utf8table[0x011E:0x11F] <- c("{\\u G}","{\\u g}")
utf8table[0x0130:0x131] <- c("{\\.I}","{\\i}")
utf8table[0x0139:0x13A] <- c("{\\a'L}","{\\a'l}")
utf8table[0x013D:0x13E] <- c("{\\v L}","{\\v l}")
utf8table[0x0141L:0x144] <- c("{\\L}","{\\l}","{\\a'N}","{\\a'n}")
## U+0149 (deprecated n-apostrophe) is left as "?"
utf8table[0x0147:0x14B] <- c("{\\v N}","{\\v n}","?","{\\NG}","{\\ng}")
utf8table[0x0150:0x155] <- c("{\\H O}","{\\H o}","{\\OE}","{\\oe}","{\\a'R}","{\\a'r}")
utf8table[0x0158:0x15B] <- c("{\\v R}","{\\v r}","{\\a'S}","{\\a's}")
utf8table[0x015E:0x165] <- c("{\\c S}","{\\c s}","{\\v S}","{\\v s}",
                             "{\\c T}","{\\c t}","{\\v T}","{\\v t}")
utf8table[0x016E:0x171] <- c("{\\r U}","{\\r u}","{\\H U}","{\\H u}")
utf8table[0x0178:0x17E] <- c('{\\"Y}',"{\\a'Z}","{\\a'z}","{\\.Z}", "{\\.z}","{\\v Z}","{\\v z}")
## U+0192 LATIN SMALL LETTER F WITH HOOK
utf8table[0x0192] <- "{\\textflorin}"
#  File src/library/tools/R/rortools.R
#  Part of the R package, https://www.R-project.org
#
#  Copyright (C) 2025 The R Core Team
#
#  This program is free software; you can redistribute it and/or modify
#  it under the terms of the GNU General Public License as published by
#  the Free Software Foundation; either version 2 of the License, or
#  (at your option) any later version.
#
#  This program is distributed in the hope that it will be useful,
#  but WITHOUT ANY WARRANTY; without even the implied warranty of
#  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
#  GNU General Public License for more details.
#
#  A copy of the GNU General Public License is available at
#  https://www.R-project.org/Licenses/

## See <https://ror.org/about/faqs/#what-is-a-ror-identifier>:
##   A ROR ID consists of a unique 9-character string appended to the
##   ROR domain. The preferred form of a ROR identifier is the entire
##   URL: https://ror.org/02mhbdp94 rather than ror.org/02mhbdp94 or
##   02mhbdp94, although the ROR API will recognize all three of these
##   forms as ROR IDs. 
## So apparently the unique 9-character string is not the ROR ID and has 
## no name to refer to ... argh.  For now, name things as we do for
## ORCID iDs.

### ** .ROR_ID_regexp

## The unique string appended to the ROR domain: 9 characters, namely a
## leading '0', then 6 characters from the Crockford base32 alphabet
## (digits and lower-case letters excluding i, l, o and u), and a
## 2-digit checksum.  (The previous ".{9}" accepted any 9 characters.)
.ROR_ID_regexp <-
    "0[0-9a-hj-km-np-tv-z]{6}[0-9]{2}"

### ** .ROR_ID_variants_regexp

## Accept the preferred full-URL form as well as the ror.org/<id> and
## bare-id variants, optionally wrapped in angle brackets; the id
## itself is capture group 3 (used by .ROR_ID_canonicalize()).  The
## dots in the domain are escaped so that only a literal "ror.org"
## matches.
.ROR_ID_variants_regexp <-
    sprintf("^<?((https://|)ror\\.org/)?(%s)>?$", .ROR_ID_regexp)

### ** .ROR_ID_canonicalize

## Reduce any accepted ROR ID variant (full URL, ror.org/<id> or bare
## id, optionally in angle brackets) to the bare 9-character id.
## Strings not matching .ROR_ID_variants_regexp are returned unchanged
## (sub() leaves non-matching elements as-is).
.ROR_ID_canonicalize <- function(x)
    sub(.ROR_ID_variants_regexp, "\\3", x)

### ** .ROR_ID_is_valid

## Syntactic validity check: does each element of x match one of the
## accepted ROR ID variants?  Returns a logical vector along x.
.ROR_ID_is_valid <- function(x)
    grepl(.ROR_ID_variants_regexp, x)

### ** .ROR_ID_is_alive

.ROR_ID_is_alive <- function(x) {
    ## Liveness check: send an HTTP HEAD request for each full ROR URL
    ## and report whether it answered with status 200.  Returns a
    ## logical vector along x.
    ## For now use HEAD requests for the full ROR ID.
    ## See <https://ror.readme.io/v2/docs/rest-api> for getting more
    ## information.
    ## Assume all given ids are canonical.
    urls <- sprintf("https://ror.org/%s", x)
    ## nobody = TRUE makes curl issue HEAD requests (no response body).
    resp <- .curl_multi_run_worker(urls, nobody = TRUE)
    vapply(resp, .curl_response_status_code, 0L) == 200L
}

### ** ROR_ID_from_person

## Extract the "ROR" entry of the comment field from each person in a
## person object, giving NA_character_ for persons without one.
.ROR_ID_from_person <- function(x)
    vapply(unclass(x),
           function(p) {
               ror <- p$comment["ROR"]
               if(is.null(ror)) NA_character_ else ror
           },
           NA_character_)

### ** .ROR_ID_db_from_package_sources

.ROR_ID_db_from_package_sources <-
function(dir, add = FALSE)
{
    ## Collect the ROR IDs found in the DESCRIPTION and inst/CITATION
    ## files of the package sources in 'dir', as a data frame with
    ## columns ID and Parent (the file each id came from).  With
    ## add = TRUE, Parent is prefixed with the package directory name.
    ids_for <- function(persons) {
        ids <- .ROR_ID_from_person(persons)
        ids[!is.na(ids)]
    }
    ids <- list("DESCRIPTION" = ids_for(.persons_from_metadata(dir)),
                "inst/CITATION" = ids_for(.persons_from_citation(dir)))
    db <- data.frame(ID = as.character(unlist(ids, use.names = FALSE)),
                     Parent = rep(names(ids), lengths(ids)))
    if(add)
        db$Parent <- file.path(basename(dir), db$Parent)
    db
}

### Local variables: ***
### mode: outline-minor ***
### outline-regexp: "### [*]+" ***
### End: ***
#  File src/library/tools/R/Rprof.R
#  Part of the R package, https://www.R-project.org
#
#  Copyright (C) 1995-2023 The R Core Team
#
#  This program is free software; you can redistribute it and/or modify
#  it under the terms of the GNU General Public License as published by
#  the Free Software Foundation; either version 2 of the License, or
#  (at your option) any later version.
#
#  This program is distributed in the hope that it will be useful,
#  but WITHOUT ANY WARRANTY; without even the implied warranty of
#  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
#  GNU General Public License for more details.
#
#  A copy of the GNU General Public License is available at
#  https://www.R-project.org/Licenses/

.Rprof <- function(args = NULL, no.q = FALSE)
{
    ## Driver for 'R CMD Rprof': post-process profiling output written
    ## by Rprof() and print timing tables by total and/or self time,
    ## optionally by source line.
    ##
    ## args : character vector of command-line arguments, or NULL to
    ##        take them from commandArgs(TRUE).
    ## no.q : if TRUE, report the exit status via message()/stop()
    ##        instead of terminating the session with q() (for use from
    ##        a running R session).
    do_exit <-
        if(no.q)
            function(status) (if(status) stop else message)(
                ".Rprof() exit status ", status)
        else
            function(status) q("no", status = status, runLast = FALSE)

    Usage <- function() {
        cat("Usage: R CMD Rprof [options] [file]",
            "",
            "Post-process profiling information in file generated by Rprof().",
            "",
            "Options:",
            "  -h, --help       print short help message and exit",
            "  -v, --version    print version info and exit",
            "  --lines          print line information",
            "  --total          print only by total",
            "  --self           print only by self",
            "  --linesonly      print only by line (implies --lines)",
            "  --min%total=     minimum % to print for 'by total'",
            "  --min%self=      minimum % to print for 'by self'",
            "",
            "If 'file' is omitted 'Rprof.out' is used",
            "",
            "Report bugs at <https://bugs.R-project.org>.", sep = "\n")
    }

    if (is.null(args)) {
        args <- commandArgs(TRUE)
        ## commandArgs() splits on spaces, so reassemble and re-split
        ## on the 'nextArg' separators inserted by the front end.
        args <- paste(args, collapse=" ")
        args <- strsplit(args,'nextArg', fixed = TRUE)[[1L]][-1L]
    }

    files <- character()
    bytotal <- byself <- bylines <- TRUE
    lines <- FALSE
    mintotal <- minself <- -1L
    while(length(args)) {
        a <- args[1L]
        if (a %in% c("-h", "--help")) {
            Usage()
            do_exit(0L)
        }
        else if (a %in% c("-v", "--version")) {
            cat("R profiling post-processor: ",
                R.version[["major"]], ".",  R.version[["minor"]],
                " (r", R.version[["svn rev"]], ")\n", sep = "")
            cat("",
                .R_copyright_msg(1997),
                "This is free software; see the GNU General Public License version 2",
                "or later for copying conditions.  There is NO warranty.",
                sep = "\n")
            do_exit(0L)
        } else if (a == "--total") {
            bytotal <- TRUE
            byself <- FALSE
            bylines <- FALSE
        } else if (a == "--self") {
            bytotal <- FALSE
            byself <- TRUE
            bylines <- FALSE
        } else if (a == "--linesonly") {
            lines <- TRUE
            byself <- FALSE
            bytotal <- FALSE
            bylines <- TRUE
        } else if (a == "--lines") {
            lines <- TRUE
        } else if (substr(a, 1, 12)  == "--min%total=") {
            mintotal <- as.integer(substr(a, 13, 1000))
        } else if (substr(a, 1, 11)  == "--min%self=") {
            minself <- as.integer(substr(a, 12, 1000))
        } else files <- c(files, a)  # anything else is the input file
        args <- args[-1L]
    }
    file <- if (!length(files)) "Rprof.out" else files[1L]

    res <- utils::summaryRprof(file, lines = if (lines) "show" else "hide")

    cat("\nEach sample represents", format(res$sample.interval), "seconds.\n")
    cat("Total run time:", format(res$sampling.time), "seconds.\n")
    cat("\nTotal seconds: time spent in function and callees.\n")
    cat("Self seconds: time spent in function alone.\n\n")

    printed <- FALSE
    if (bytotal) {
        ## rearrange to total%, total sec, self%, self sec + name column
        m <- data.frame(res$by.total[c(2,1,4,3)], row.names(res$by.total))
        if(mintotal > 0) m <- m[m[,1L] >= mintotal,,drop = FALSE]
        writeLines(c("   %       total       %        self",
                     " total    seconds     self    seconds    name",
                     sprintf("%6.1f%10.2f%10.1f%10.2f     %s",
                             m[,1L], m[,2L], m[,3L], m[,4L], m[,5L])))
        printed <- TRUE
    }
    if(byself) {
        if (printed) cat("\n\n")
        ## rearrange to self%, self sec, total%, total sec + name column
        m <- data.frame(res$by.self[c(2,1,4,3)], row.names(res$by.self))
        if(minself > 0) m <- m[m[,1L] >= minself,,drop = FALSE]
        writeLines(c("   %        self       %      total",
                     "  self    seconds    total   seconds    name",
                     sprintf("%6.1f%10.2f%10.1f%10.2f     %s",
                             m[,1L], m[,2L], m[,3L], m[,4L], m[,5L])))
        printed <- TRUE
    }
    if(lines && bylines) {
        if (printed) cat("\n\n")
        m <- data.frame(res$by.line[c(2,1,4,3)], row.names(res$by.line))
        ## Columns are now self%, self sec, total%, total sec, so apply
        ## --min%self to column 1 and --min%total to column 3.  (The
        ## latter previously filtered on column 1, i.e. on self%.)
        if(minself > 0) m <- m[m[,1L] >= minself,,drop = FALSE]
        if(mintotal > 0) m <- m[m[,3L] >= mintotal,,drop = FALSE]
        writeLines(c("   %        self       %      total",
                     "  self    seconds    total   seconds    name",
                     sprintf("%6.1f%10.2f%10.1f%10.2f     %s",
                             m[,1L], m[,2L], m[,3L], m[,4L], m[,5L])))
    }
    do_exit(0L)
}
#  File src/library/tools/R/sha256.R
#  Part of the R package, https://www.R-project.org
#
#  Copyright (C) 1995-2024 The R Core Team
#
#  This program is free software; you can redistribute it and/or modify
#  it under the terms of the GNU General Public License as published by
#  the Free Software Foundation; either version 2 of the License, or
#  (at your option) any later version.
#
#  This program is distributed in the hope that it will be useful,
#  but WITHOUT ANY WARRANTY; without even the implied warranty of
#  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
#  GNU General Public License for more details.
#
#  A copy of the GNU General Public License is available at
#  https://www.R-project.org/Licenses/

## Compute SHA-256 checksums, either of files (returning a named
## character vector of hex digests along 'files') or of a single raw
## vector given as 'bytes' (returning one hex digest).  Exactly one of
## the two arguments must be supplied.
sha256sum <- function(files, bytes) {
    if (!missing(files) && !missing(bytes))
        stop("'files' and 'bytes' are mutually exclusive")
    if (!missing(bytes)) {
        if (!is.raw(bytes)) stop("'bytes' must be a raw vector")
        .Call(C_Rsha256, bytes)
    } else {
        ## give a clear error instead of the "argument ... is missing"
        ## one that path.expand(files) would otherwise trigger
        if (missing(files))
            stop("one of 'files' and 'bytes' must be supplied")
        files <- path.expand(files)
        structure(.Call(C_Rsha256, files), names=files)
    }
}

# The following fns are neither used nor exported - for now.

.installSHA256sums <- function(pkgDir, outDir = pkgDir)
{
    ## Compute SHA-256 checksums of all files under pkgDir (excluding
    ## any existing SHA256 file) and write them, one "<digest> *<path>"
    ## line per file, to an 'SHA256' file in outDir.
    dot <- getwd()
    if (is.null(dot))
        stop("current working directory cannot be ascertained")
    setwd(pkgDir)
    ## restore the working directory even if sha256sum() fails
    on.exit(setwd(dot), add = TRUE)
    x <- sha256sum(dir(".", recursive=TRUE))
    ## restore immediately: outDir may be a relative path
    setwd(dot)
    x <- x[names(x) != "SHA256"]
    cat(paste(x, names(x), sep=" *"), sep="\n",
        file=file.path(outDir, "SHA256"))
}

checkSHA256sums <- function(package, dir)
{
    ## Verify the files of an installed package against its 'SHA256'
    ## reference file.  Returns TRUE if all checksums match, FALSE if
    ## files are missing or differ (with messages on stdout), and NA
    ## when the package directory or its SHA256 file cannot be found.
    if(missing(dir)) dir <- find.package(package, quiet = TRUE)
    if(length(dir) != 1L) return(NA)
    sha256file <- file.path(dir, "SHA256")
    if(!file.exists(sha256file)) return(NA)
    inlines <- readLines(sha256file)
    ## split each line on the first space into checksum and path; the
    ## path is preceded by a ' ' (text mode) or '*' (binary) marker
    xx <- sub("^([0-9a-fA-F]*)(.*)", "\\1", inlines)
    nmxx <- names(xx) <- sub("^[0-9a-fA-F]* [ |*](.*)", "\\1", inlines)
    dot <- getwd()
    if (is.null(dot))
        stop("current working directory cannot be ascertained")
    setwd(dir)
    ## restore the working directory even if sha256sum() fails
    on.exit(setwd(dot), add = TRUE)
    x <- sha256sum(dir(dir, recursive = TRUE))
    setwd(dot)
    x <- x[names(x) != "SHA256"]
    nmx <- names(x)
    res <- TRUE
    not.here <- (nmxx %notin% nmx)
    if(any(not.here)) {
        res <- FALSE
        if (sum(not.here) > 1L)
            cat("files", paste(sQuote(nmxx[not.here]), collapse = ", "),
                "are missing\n", sep = " ")
        else
            cat("file", sQuote(nmxx[not.here]), "is missing\n", sep = " ")
    }
    nmxx <- nmxx[!not.here]
    diff <- xx[nmxx] != x[nmxx]
    if(any(diff)) {
        res <- FALSE
        files <- nmxx[diff]
        if(length(files) > 1L)
            cat("files", paste(sQuote(files), collapse = ", "),
                "have the wrong SHA256 checksums\n", sep = " ")
        else cat("file", sQuote(files), "has the wrong SHA256 checksum\n")
    }
    res
}

## The hexadecimal digits, in value order (index - 1 == digit value).
.hex.chars <- c("0", "1", "2", "3", "4", "5", "6", "7", "8", "9", "a", "b", "c", "d", "e", "f")

## Convert a single hexadecimal string to a raw vector.  An odd number
## of digits is padded with a leading 0; "" yields a single zero byte.
hex2raw <- function(x) {
  if (length(x) != 1L) stop("x must be a single string")
  if (!nzchar(x)) return(raw(1L))
  digits <- match(strsplit(tolower(x), "")[[1L]], .hex.chars)
  if (any(is.na(digits))) stop("invalid hex string")
  if (length(digits) %% 2) digits <- c(1L, digits) # pad to whole bytes
  vals <- digits - 1L
  ## pair up (high nibble, low nibble)
  as.raw(vals[c(TRUE, FALSE)] * 16L + vals[c(FALSE, TRUE)])
}

## Right-pad a raw vector with zero bytes up to length n (no-op when x
## is already at least n long).
.pad <- function(x, n) {
  short <- n - length(x)
  if (short > 0) c(x, raw(short)) else x
}

## Generic HMAC (RFC 2104).  'hash' is a function mapping a raw vector
## to a hex digest string; 'block' is that hash's block size in bytes.
hmac <- function(key, x, hash, block) {
  ## keys longer than one block are first hashed, then zero-padded
  k <- .pad(if (length(key) > block) hex2raw(hash(key)) else key, block)
  ## HMAC := HASH( (K ^ opad) || HASH( (K ^ ipad) || x ) )
  ## with ipad = 0x36... and opad = 0x5c...
  inner <- hex2raw(hash(c(xor(k, as.raw(0x36)), x)))
  hash(c(xor(k, as.raw(0x5c)), inner))
}

## HMAC instantiated with SHA-256 and MD5; both hashes use a 64-byte
## block size.
hmac.sha256 <- function(key, x) hmac(key, x, function(x) sha256sum(bytes=x), 64L)
hmac.md5 <- function(key, x) hmac(key, x, function(x) md5sum(bytes=x), 64L)
#  File src/library/tools/R/sotools.R
#  Part of the R package, https://www.R-project.org
#
#  Copyright (C) 2011-2025 The R Core Team
#
#  This program is free software; you can redistribute it and/or modify
#  it under the terms of the GNU General Public License as published by
#  the Free Software Foundation; either version 2 of the License, or
#  (at your option) any later version.
#
#  This program is distributed in the hope that it will be useful,
#  but WITHOUT ANY WARRANTY; without even the implied warranty of
#  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
#  GNU General Public License for more details.
#
#  A copy of the GNU General Public License is available at
#  https://www.R-project.org/Licenses/

if(.Platform$OS.type == "windows") {
    ## cache for the resolved objdump path, looked up once per session
    read_symbols_from_dll_state <- new.env(hash = FALSE) # small
    ## Return the names of the symbols a DLL imports, by parsing the
    ## import tables in 'objdump -x' output; returns NULL (invisibly,
    ## via return()) when objdump is unavailable or parsing fails.
    ## NOTE(review): 'rarch' is currently unused -- confirm with callers.
    read_symbols_from_dll <- function(f, rarch)
    {
	DLL_nm <- read_symbols_from_dll_state$DLL_nm
	if (is.null(DLL_nm)) {
	    ## R CMD config will fail when 'sh' (from Rtools) is not on PATH
	    DLL_nm <- tryCatch(
	        Rcmd(c("config", "OBJDUMP"), stdout = TRUE, stderr = FALSE),
		error = function(x) NULL,
		warning = function(x) NULL)

	    if (is.null(DLL_nm) || !nzchar(DLL_nm) ||
	        !file.exists(paste0(DLL_nm, ".exe"))) {

		## fall back to the old behavior: take OBJDUMP from PATH
		DLL_nm <- "objdump.exe"
		if(!nzchar(Sys.which(DLL_nm))) {
		    warning("this requires 'objdump.exe' to be on the PATH")
		    return()
		}
	    }
	    read_symbols_from_dll_state$DLL_nm <- DLL_nm
	}
        f <- file_path_as_absolute(f)
        s0 <- suppressWarnings(system2(DLL_nm, c("-x", shQuote(f)),
                                       stdout = TRUE, stderr = TRUE))
        status <- attr(s0, "status")
        ## non-zero exit status: give up silently
        if (length(status) && status != 0) return()
        ## the import table lies between the "DLL Name:" lines and the
        ## "The Export Tables" section of the dump
        l1 <- grep("^\tDLL Name:", s0)
        l2 <- grep("^The Export Tables", s0)
        if (!length(l1) || !length(l2)) return()
        s1 <- s0[(l1[1L] + 2L):(l2 - 4L)]

        # The format of the dump of import tables changed in Rtools45
        # (binutils 2.43.1).  Previously, there was a joint column
        # "Hint/Ord".  Newly these are split ("Ordinal" and "Hint").  The
        # regex below has been relaxed to match both.
        s2 <- grep("\t[0-9a-f]+[\t ]+", s1, value = TRUE)
        ## the symbol name is the last space-separated field
        sub(".* ([_A-Za-z0-9]+)$", "\\1", s2)
    }
}

## Return the global symbols of an object file as a character matrix
## with columns name/type/value/size, by parsing POSIX-format output of
## 'nm -Pg'.  Returns NULL (via return()) when nm is unavailable or the
## file is empty.
read_symbols_from_object_file <- function(f)
{
    ## For GCC & LTO, we need a different command, possibly with args
    ## On macOS, the system nm works with LTO objects.
    ## Do not use NM as make sets it.
    nm <- Sys.getenv("UserNM")
    if(!nzchar(nm)) {
        ## reasonable to assume nm is on the path
        nm <- Sys.which("nm")
        if(nzchar(nm)) nm <- shQuote(nm)
    }
    if(!nzchar(nm)) {
        warning("this requires 'nm' to be on the PATH")
        return()
    }
    f <- file_path_as_absolute(f)
    ## an empty file has no symbols (file.size() == 0)
    if(!(file.size(f))) return()
    s <- strsplit(system(sprintf("%s -Pg %s", nm, shQuote(f)),
                         intern = TRUE),
                  " +")
    ## Cannot simply rbind() this because elements may have 2-4 entries.
    n <- length(s)
    tab <- matrix("", nrow = n, ncol = 4L)
    colnames(tab) <- c("name", "type", "value", "size")
    ## Compute desired i and j positions in tab.
    i <- rep.int(seq_len(n), lengths(s))
    j <- unlist(lapply(s, seq_along))
    ## fill via linear (column-major) indexing: row i, column j
    tab[n * (j - 1L) + i] <- unlist(s)
    tab
}

## env variable formerly in etc/Renviron, now in ../Makefile
## R_SYSTEM_ABI is a comma-separated description of the platform ABI;
## unset or placeholder values ("@..."/"?...") leave system_ABI empty.
## NOTE(review): exactly five components are assumed when naming --
## confirm against the Makefile that sets R_SYSTEM_ABI.
system_ABI <- Sys.getenv("R_SYSTEM_ABI")
if((system_ABI == "") || (substr(system_ABI, 1L, 1L) %in% c("@", "?"))) {
    system_ABI <- character()
} else {
    system_ABI <- unlist(strsplit(system_ABI, ",", fixed = TRUE))
    names(system_ABI) <- c("system", "CC", "CXX", "F77", "FC")
}

## entry points for std::terminate are commented out as almost all
## come from system headers.
so_symbol_names_table <-
    ## 'linux' == glibc, principally but checked with Alpine Linux's musl
    c("linux, C, gcc, abort, abort",
      ## https://refspecs.linuxbase.org/LSB_5.0.0/LSB-Core-generic/LSB-Core-generic/baselib---assert-fail-1.html
      "Linux, C, gcc, assert, __assert_fail",
      "linux, C, gcc, assert, __assert_fail_base",
      "linux, C, gcc, exit, exit",
      "linux, C, gcc, _exit, _exit", ## may not be seen
      "linux, C, gcc, _Exit, _Exit", ## _Exit is C99 and may not be a fn call
      "linux, C, gcc, printf, printf",
      "linux, C, gcc, printf, __printf_chk",
      "linux, C, gcc, printf, puts",
      "linux, C, gcc, puts, puts",
      "linux, C, gcc, putchar, putchar",
      "linux, C, gcc, stderr, stderr",
      "linux, C, gcc, stdout, stdout",
      "linux, C, gcc, sprintf, sprintf",
      "linux, C, gcc, sprintf, __sprintf_chk",
      "linux, C, gcc, vprintf, vprintf",
      "linux, C, gcc, vprintf, __vprintf_chk",
      "linux, C, gcc, vsprintf, vsprintf",
#      "linux, C, gcc, vprintf, vfprintf", # but also from REvprintf
      "linux, C, gcc, vsprintf, __vsprintf_chk",
      "linux, C, gcc, rand, rand",
      "linux, C, gcc, random, random",
      "linux, C, gcc, rand_r, rand_r",
      "linux, C, gcc, srand, srand",
      "linux, C, gcc, srandom, srandom",
      "linux, C, gcc, srandom_r, srandom_r",
      "linux, C, gcc, srand48, srand48",
      "linux, C++, gxx, std::cout, _ZSt4cout",
      "linux, C++, gxx, std::cerr, _ZSt4cerr",
      #"linux, C++, gxx, std::terminate, _ZSt9terminatev",
      ## libc++ variants
      "linux, C++, gxx, std::cout, _ZNSt3__14coutE", # std::__1::cout
      "linux, C++, gxx, std::cerr, _ZNSt3__14cerrE",
      "linux, Fortran, gfortran, open, _gfortran_st_open",
      "linux, Fortran, gfortran, close, _gfortran_st_close",
      "linux, Fortran, gfortran, rewind, _gfortran_st_rewind",
      "linux, Fortran, gfortran, read, _gfortran_st_read",
      "linux, Fortran, gfortran, write, _gfortran_st_write",
      "linux, Fortran, gfortran, print, _gfortran_st_write",
      "linux, Fortran, gfortran, stop, _gfortran_stop_numeric_f08",
      "linux, Fortran, gfortran, stop, _gfortran_stop_string",
      "linux, Fortran, gfortran, rand, _gfortran_rand",
      "linux, Fortran, gfortran, random_init, _gfortran_random_init",
      "linux, Fortran, gfortran, random_number, _gfortran_arandom_r4",
      "linux, Fortran, gfortran, random_number, _gfortran_arandom_r8",
      "linux, Fortran, gfortran, random_number, _gfortran_arandom_r16",
      "linux, Fortran, gfortran, random_number, _gfortran_random_r4",
      "linux, Fortran, gfortran, random_number, _gfortran_random_r8",
      "linux, Fortran, gfortran, random_number, _gfortran_random_r16",
      "linux, Fortran, gfortran, random_number, _gfortran_rand",
      "linux, Fortran, gfortran, random_seed, _gfortran_random_seed_i4",
      "linux, Fortran, gfortran, random_seed, _gfortran_random_seed_i8",
      "linux, Fortran, gfortran, exit, _gfortran_exit_i4",
      "linux, Fortran, gfortran, exit, _gfortran_exit_i8",

      ## Classic flang from Dec 2017 (and untested since)
      "linux, Fortran, ClassicFlang, open, f90io_open03",
      "linux, Fortran, ClassicFlang, open, f90io_open2003",
      "linux, Fortran, ClassicFlang, close, f90io_close",
      "linux, Fortran, ClassicFlang, rewind, f90io_rewind",
      "linux, Fortran, ClassicFlang, write, f90io_print_init",
      "linux, Fortran, ClassicFlang, print, f90io_print_init",
      "linux, Fortran, ClassicFlang, read, f90io_fmt_read",
      "linux, Fortran, ClassicFlang, write, f90io_fmt_write",
      "linux, Fortran, ClassicFlang, stop, f90_stop",
      "linux, Fortran, ClassicFlang, stop, f90_stop08",
      "linux, Fortran, ClassicFlang, rand, rand",

      ## and for for the 'flang' in the LLVM tree, currently
      ## with executable 'flang-new'.
      ## This currently has static libs for its runtimes.
      "linux, Fortran, flang-new, stop, _FortranAStopStatement",
      "linux, Fortran, flang-new, stop, _FortranAStopStatementText",
      "linux, Fortran, flang-new, open, _FortranAioBeginOpenUnit",
      "linux, Fortran, flang-new, close, _FortranAioBeginClose",
      "linux, Fortran, flang-new, rewind, _FortranAioBeginRewind",
      "linux, Fortran, flang-new, read, _FortranAioInputAscii",
      "linux, Fortran, flang-new, read, _FortranAioInputCharacter",
      "linux, Fortran, flang-new, read, _FortranAioInputComplex32",
      "linux, Fortran, flang-new, read, _FortranAioInputComplex64",
      "linux, Fortran, flang-new, read, _FortranAioOutputExternalListInput",
      "linux, Fortran, flang-new, read, _FortranAioInputInteger",
      "linux, Fortran, flang-new, read, _FortranAioInputLogical",
      "linux, Fortran, flang-new, read, _FortranAioInputNamelist",
      "linux, Fortran, flang-new, read, _FortranAioInputReal32",
      "linux, Fortran, flang-new, read, _FortranAioInputReal64",
      "linux, Fortran, flang-new, read, _FortranAioInputUnformattedBlock",
      "linux, Fortran, flang-new, print, _FortranAioOutputAscii",
      "linux, Fortran, flang-new, print, _FortranAioOutputCharacter",
      "linux, Fortran, flang-new, print, _FortranAioOutputComplex32",
      "linux, Fortran, flang-new, print, _FortranAioOutputComplex64",
      "linux, Fortran, flang-new, print, _FortranAioOutputExternalListOutput",
      "linux, Fortran, flang-new, print, _FortranAioOutputInteger128",
      "linux, Fortran, flang-new, print, _FortranAioOutputInteger16",
      "linux, Fortran, flang-new, print, _FortranAioOutputInteger32",
      "linux, Fortran, flang-new, print, _FortranAioOutputInteger64",
      "linux, Fortran, flang-new, print, _FortranAioOutputInteger8",
      "linux, Fortran, flang-new, print, _FortranAioOutputLogical",
      "linux, Fortran, flang-new, print, _FortranAioOutputNamelist",
      "linux, Fortran, flang-new, print, _FortranAioOutputReal32",
      "linux, Fortran, flang-new, print, _FortranAioOutputReal64",
      "linux, Fortran, flang-new, write, _FortranAioOutputAscii",
      "linux, Fortran, flang-new, write, _FortranAioOutputCharacter",
      "linux, Fortran, flang-new, write, _FortranAioOutputComplex32",
      "linux, Fortran, flang-new, write, _FortranAioOutputComplex64",
      "linux, Fortran, flang-new, write, _FortranAioOutputExternalListOutput",
      "linux, Fortran, flang-new, write, _FortranAioOutputInteger128",
      "linux, Fortran, flang-new, write, _FortranAioOutputInteger16",
      "linux, Fortran, flang-new, write, _FortranAioOutputInteger32",
      "linux, Fortran, flang-new, write, _FortranAioOutputInteger64",
      "linux, Fortran, flang-new, write, _FortranAioOutputInteger8",
      "linux, Fortran, flang-new, write, _FortranAioOutputLogical",
      "linux, Fortran, flang-new, write, _FortranAioOutputNamelist",
      "linux, Fortran, flang-new, write, _FortranAioOutputReal32",
      "linux, Fortran, flang-new, write, _FortranAioOutputReal64",
      "linux, Fortran, flang-new, write, _FortranAioOutputUnformatedBlock",
      ## does not support rand()
      ## https://discourse.llvm.org/t/support-for-gnu-fortran-extensions/69630
      "linux, Fortran, flang-new, random_init, _FortranARandomInit",
      "linux, Fortran, flang-new, random_number, _FortranARandomNumber",
      "linux, Fortran, flang-new, random_seed, _FortranARandomSeed",
      "linux, Fortran, flang-new, random_seed, _FortranARandomSeedGet",
      "linux, Fortran, flang-new, random_seed, _FortranARandomSeedSize",

      ## Intel 'Clasic' and 202x
      "linux, Fortran, intel, stop, for_stop",
      "linux, Fortran, intel, stop, for_stop_core",
      "linux, Fortran, intel, stop, for_stop_core8",
      "linux, Fortran, intel, stop, for_stop_core_impl",
      "linux, Fortran, intel, stop, for_stop_core_int",
      "linux, Fortran, intel, stop, for_stop_core_int8",
      "linux, Fortran, intel, stop, for_stop_core_quiet",
      "linux, Fortran, intel, stop, for_stop_core_quiet_int8",
      "linux, Fortran, intel, print, for_write_seq_lis",
      "linux, Fortran, intel, open, for_open",
      "linux, Fortran, intel, open, for_open_args",
      "linux, Fortran, intel, open, for_open_default",
      "linux, Fortran, intel, open, for_open_key",
      "linux, Fortran, intel, close, for_close",
      "linux, Fortran, intel, rewind, for_rewind",
      "linux, Fortran, intel, read, for_read_seq_lis",
      "linux, Fortran, intel, read, for_read_seq_fmt",
      "linux, Fortran, intel, write, for_write_seq_lis",
      "linux, Fortran, intel, write, for_write_seq_fmt",
      "linux, Fortran, intel, write, for_write_seq_nml",
      ## does not support rand() except in module ifport
      "linux, Fortran, intel, rand, rand_",
      "linux, Fortran, intel, random_number, for_random_number",
      "linux, Fortran, intel, random_number, for_random_number_single",
      "linux, Fortran, intel, random_seed, for_random_seed_bit_size",
      "linux, Fortran, intel, random_seed, for_random_seed_get",

      ## Apple clang identifies itself as gcc, so configure has used that
      "macos, C, gcc, abort, _abort", # not currently seen
      "macos, C, gcc, assert, ___assert_rtn", # not currently seen
      "macos, C, gcc, exit, _exit",
      "macos, C, gcc, _exit, __exit",
      "macos, C, gcc, _Exit, __Exit",
      "macos, C, gcc, _Exit, __exit",
      "macos, C, gcc, printf, _printf",
      "macos, C, gcc, printf, _puts",
      "macos, C, gcc, puts, _puts",
      "macos, C, gcc, putchar, _putchar",
      "macos, C, gcc, stderr, ___stderrp",
      "macos, C, gcc, stdout, ___stdoutp",
      "macos, C, gcc, sprintf, _sprintf", # old
      "macos, C, gcc, sprintf, ___sprintf_chk",
      "macos, C, gcc, vprintf, _vprintf",
      "macos, C, gcc, vsprintf, _vsprintf", # old
      "macos, C, gcc, vsprintf, ___vsprintf_chk",
      "macos, C, gcc, rand, _rand",
      "macos, C, gcc, random, _random",
      "macos, C, gcc, rand_r, _rand_r",
      "macos, C, gcc, srand, _srand",
      "macos, C, gcc, srandom, _srandom",
      "macos, C, gcc, srand48, _srand48",
      #"macos, C++, gxx, std::cout, __ZSt4cout", # not with clang
      #"macos, C++, gxx, std::cerr, __ZSt4cerr",
      "macos, C++, gxx, std::cout, __ZNSt3__14coutE", # std::__1::cout
      "macos, C++, gxx, std::cerr, __ZNSt3__14cerrE",
      #"macos, C++, gxx, std::terminate, __ZSt9terminatev",
      "macos, Fortran, gfortran, open, __gfortran_st_open",
      "macos, Fortran, gfortran, close, __gfortran_st_close",
      "macos, Fortran, gfortran, rewind, __gfortran_st_rewind",
      "macos, Fortran, gfortran, read, __gfortran_st_read",
      "macos, Fortran, gfortran, write, __gfortran_st_write",
      "macos, Fortran, gfortran, print, __gfortran_st_write",
      "macos, Fortran, gfortran, stop, __gfortran_stop_numeric",
      "macos, Fortran, gfortran, stop, __gfortran_stop_string",
      "macos, Fortran, gfortran, rand, __gfortran_rand",
      "macos, Fortran, gfortran, random_init, __gfortran_random_init",
      "macos, Fortran, gfortran, random_number, __gfortran_arandom_r4",
      "macos, Fortran, gfortran, random_number, __gfortran_arandom_r8",
      "macos, Fortran, gfortran, random_number, __gfortran_arandom_r16",
      "macos, Fortran, gfortran, random_number, __gfortran_random_r4",
      "macos, Fortran, gfortran, random_number, __gfortran_random_r8",
      "macos, Fortran, gfortran, random_number, __gfortran_random_r16",
      "macos, Fortran, gfortran, random_number, __gfortran_rand",
      "macos, Fortran, gfortran, random_seed, __gfortran_random_seed_i4",
      "macos, Fortran, gfortran, random_seed, __gfortran_random_seed_i8",
      "macos, Fortran, gfortran, exit, __gfortran_exit_i4",
      "macos, Fortran, gfortran, exit, __gfortran_exit_i8",

      ## This is old: freebsd defaults to clang these days, and
      ## gfortran and (classic) flang are available (and 'f18' will be)
      "freebsd, C, gcc, abort, abort",
      "freebsd, C, gcc, assert, __assert",
      "freebsd, C, gcc, exit, exit",
      "freebsd, C, gcc, _exit, _exit",
      "freebsd, C, gcc, _Exit, _Exit",
      "freebsd, C, gcc, printf, printf",
      "freebsd, C, gcc, printf, puts",
      "freebsd, C, gcc, puts, puts",
      "freebsd, C, gcc, putchar, putchar",
      "freebsd, C, gcc, stderr, __stderrp",
      "freebsd, C, gcc, stdout, __stdoutp",
      "freebsd, C, gcc, sprintf, sprintf",
      "freebsd, C, gcc, vprintf, vprintf",
      "freebsd, C, gcc, vsprintf, vsprintf",
      "freebsd, C++, gxx, std::cout, _ZSt4cout",
      "freebsd, C++, gxx, std::cerr, _ZSt4cerr",
      ## libc++ variants
      "freebsd, C++, gxx, std::cout, _ZNSt3__14coutE",
      "freebsd, C++, gxx, std::cerr, _ZNSt3__14cerrE",
      "freebsd, C, gcc, rand, rand",
      "freebsd, C, gcc, random, random",
      "freebsd, C, gcc, srand, srand",
      "freebsd, C, gcc, srandom, srandom",
      "freebsd, C, gcc, srand48, srand48",
      "freebsd, Fortran, gfortran, open, _gfortran_st_open",
      "freebsd, Fortran, gfortran, close, _gfortran_st_close",
      "freebsd, Fortran, gfortran, rewind, _gfortran_st_rewind",
      "freebsd, Fortran, gfortran, read, _gfortran_st_read",
      "freebsd, Fortran, gfortran, write, _gfortran_st_write",
      "freebsd, Fortran, gfortran, print, _gfortran_st_write",
      "freebsd, Fortran, gfortran, stop, _gfortran_stop_numeric_f08",
      "freebsd, Fortran, gfortran, stop, _gfortran_stop_string",
      "freebsd, Fortran, gfortran, rand, _gfortran_rand",

      ## stdout, stderr do not show up on Solaris
      "solaris, C, solcc, abort, abort",
      "solaris, C, solcc, assert, __assert_c99",
      "solaris, C, solcc, exit, exit",
      "solaris, C, solcc, _exit, _exit",
      "solaris, C, solcc, _Exit, _Exit",
      "solaris, C, solcc, printf, printf",
      "solaris, C, solcc, putchar, putchar",
      "solaris, C, solcc, puts, puts",
      "solaris, C, solcc, sprintf, sprintf",
      "solaris, C, solcc, vprintf, vprintf",
      "solaris, C, solcc, vsprintf, vsprintf",
      "solaris, C++, solCC, std::cout, __1cDstdEcout_",
      "solaris, C++, solCC, std::cerr, __1cDstdEcerr_",
      #"solaris, C++, solCC, std::terminate, _ZSt9terminatev",
      "solaris, C, solcc, random, random",
      "solaris, C, solcc, rand, rand",
      "solaris, C, solcc, rand_r, rand_r",
      "solaris, C, solcc, srand, srand",
      "solaris, C, solcc, srandom, srandom",
      "solaris, C, solcc, srand48, srand48",
      "solaris, Fortran, solf95, open, __f90_open",
      "solaris, Fortran, solf95, close, __f90_close",
      "solaris, Fortran, solf95, rewind, __f90_rewind",
      "solaris, Fortran, solf95, read, __f90_eifr",
      "solaris, Fortran, solf95, read, __f90_esfr",
      "solaris, Fortran, solf95, print, __f90_eslw",
      "solaris, Fortran, solf95, write, __f90_eslw",
      "solaris, Fortran, solf95, print, __f90_esfw",
      "solaris, Fortran, solf95, write, __f90_esfw",
      "solaris, Fortran, solf95, write, __f90_esuw",
      "solaris, Fortran, solf95, stop, __f90_stop",
      "solaris, Fortran, solf95, stop, __f90_stop_int",
      "solaris, Fortran, solf95, stop, __f90_stop_char",
      "solaris, Fortran, solf95, runtime, abort",
      "solaris, Fortran, solf95, rand, rand_",

      "solaris, C, gcc, abort, abort",
      "solaris, C, gcc, assert, __assert_c99",
      "solaris, C, gcc, exit, exit",
      "solaris, C, gcc, _exit, _exit",
      "solaris, C, gcc, _Exit, _Exit",
      "solaris, C, gcc, printf, printf",
      "solaris, C, gcc, printf, puts",
      "solaris, C, gcc, puts, puts",
      "solaris, C, gcc, putchar, putchar",
      "solaris, C, gcc, sprintf, sprintf",
      "solaris, C, gcc, vprintf, vprintf",
      "solaris, C, gcc, vsprintf, vsprintf",
      "solaris, C, gcc, rand, rand",
      "solaris, C, gcc, random, random",
      "solaris, C, gcc, rand_r, rand_r",
      "solaris, C, gcc, srand, srand",
      "solaris, C, gcc, srandom, srandom",
      "solaris, C, gcc, srand48, srand48",
      "solaris, C++, gxx, std::cout, _ZSt4cout",
      "solaris, C++, gxx, std::cerr, _ZSt4cerr",
      "solaris, C++, gxx, std::cerr, _ZSt4cerr",
      #"solaris, C++, gxx, std::terminate, _ZSt9terminatev",
      "solaris, Fortran, gfortran, open, _gfortran_st_open",
      "solaris, Fortran, gfortran, close, _gfortran_st_close",
      "solaris, Fortran, gfortran, rewind, _gfortran_st_rewind",
      "solaris, Fortran, gfortran, read, _gfortran_st_read",
      "solaris, Fortran, gfortran, write, _gfortran_st_write",
      "solaris, Fortran, gfortran, print, _gfortran_st_write",
      "solaris, Fortran, gfortran, stop, _gfortran_stop_numeric_f08",
      "solaris, Fortran, gfortran, stop, _gfortran_stop_string",
      "solaris, Fortran, gfortran, rand, _gfortran_rand",

      ## Windows statically links libstdc++, libgfortran
      ## only in .o, positions hard-coded in check_so_symbols
      "windows, C++, g++, std::cout, _ZSt4cout",
      "windows, C++, g++, std::cerr, _ZSt4cerr",
      #"windows, C++, gxx, std::terminate, _ZSt9terminatev",
      "windows, Fortran, gfortran, open, _gfortran_st_open",
      "windows, Fortran, gfortran, close, _gfortran_st_close",
      "windows, Fortran, gfortran, rewind, _gfortran_st_rewind",
      "windows, Fortran, gfortran, write, _gfortran_st_write",
      "windows, Fortran, gfortran, print, _gfortran_st_write",
      ## in DLL
      "windows, C, gcc, abort, abort",
      "windows, C++, gxx, runtime, abort",
      "windows, Fortran, gfortran, runtime, abort",
      "windows, C, gcc, assert, _assert",
      "windows, C, gcc, exit, exit",
      "windows, C, gcc, _exit, _exit",
      "windows, C, gcc, _Exit, _Exit",
      "windows, C, gcc, printf, printf",
      "windows, C, gcc, printf, puts",
      "windows, C, gcc, puts, puts",
      "windows, C, gcc, putchar, putchar",
      "windows, C, gcc, sprintf, sprintf",
      "windows, C, gcc, vprintf, vprintf",
      "windows, C, gcc, vsprintf, vsprintf",
      ## Windows does not have (s)random
      "windows, C, gcc, rand, rand",
      "windows, C, gcc, rand_r, rand_r",
      "windows, C, gcc, srand, srand",
      "windows, C, gcc, srand48, srand48",
      "windows, Fortran, gfortran, stop, exit",
      ## next will not show up with static libgfortran
      "windows, Fortran, gfortran, rand, _gfortran_rand",
      "windows, Fortran, gfortran, random_init, _gfortran_random_init",
      "windows, Fortran, gfortran, random_number, _gfortran_arandom_r4",
      "windows, Fortran, gfortran, random_number, _gfortran_arandom_r8",
      "windows, Fortran, gfortran, random_number, _gfortran_arandom_r16",
      "windows, Fortran, gfortran, random_number, _gfortran_random_r4",
      "windows, Fortran, gfortran, random_number, _gfortran_random_r8",
      "windows, Fortran, gfortran, random_number, _gfortran_random_r16",
      "windows, Fortran, gfortran, random_seed, _gfortran_random_seed_i4",
      "windows, Fortran, gfortran, random_seed, _gfortran_random_seed_i8",
      "windows, Fortran, gfortran, exit, _gfortran_exit_i4",
      "windows, Fortran, gfortran, exit, _gfortran_exit_i8",

      ## currently copy from Linux
      ## flang-new but executable already named 'flang'
      "windows, Fortran, flang, stop, _FortranAStopStatement",
      "windows, Fortran, flang, stop, _FortranAStopStatementText",
      "windows, Fortran, flang, open, _FortranAioBeginOpenUnit",
      "windows, Fortran, flang, close, _FortranAioBeginClose",
      "windows, Fortran, flang, rewind, _FortranAioBeginRewind",
      "windows, Fortran, flang, read, _FortranAioInputAscii",
      "windows, Fortran, flang, read, _FortranAioInputCharacter",
      "windows, Fortran, flang, read, _FortranAioInputComplex32",
      "windows, Fortran, flang, read, _FortranAioInputComplex64",
      "windows, Fortran, flang, read, _FortranAioOutputExternalListInput",
      "windows, Fortran, flang, read, _FortranAioInputInteger",
      "windows, Fortran, flang, read, _FortranAioInputLogical",
      "windows, Fortran, flang, read, _FortranAioInputNamelist",
      "windows, Fortran, flang, read, _FortranAioInputReal32",
      "windows, Fortran, flang, read, _FortranAioInputReal64",
      "windows, Fortran, flang, read, _FortranAioInputUnformattedBlock",
      "windows, Fortran, flang, print, _FortranAioOutputAscii",
      "windows, Fortran, flang, print, _FortranAioOutputCharacter",
      "windows, Fortran, flang, print, _FortranAioOutputComplex32",
      "windows, Fortran, flang, print, _FortranAioOutputComplex64",
      "windows, Fortran, flang, print, _FortranAioOutputExternalListOutput",
      "windows, Fortran, flang, print, _FortranAioOutputInteger128",
      "windows, Fortran, flang, print, _FortranAioOutputInteger16",
      "windows, Fortran, flang, print, _FortranAioOutputInteger32",
      "windows, Fortran, flang, print, _FortranAioOutputInteger64",
      "windows, Fortran, flang, print, _FortranAioOutputInteger8",
      "windows, Fortran, flang, print, _FortranAioOutputLogical",
      "windows, Fortran, flang, print, _FortranAioOutputNamelist",
      "windows, Fortran, flang, print, _FortranAioOutputReal32",
      "windows, Fortran, flang, print, _FortranAioOutputReal64",
      "windows, Fortran, flang, write, _FortranAioOutputAscii",
      "windows, Fortran, flang, write, _FortranAioOutputCharacter",
      "windows, Fortran, flang, write, _FortranAioOutputComplex32",
      "windows, Fortran, flang, write, _FortranAioOutputComplex64",
      "windows, Fortran, flang, write, _FortranAioOutputExternalListOutput",
      "windows, Fortran, flang, write, _FortranAioOutputInteger128",
      "windows, Fortran, flang, write, _FortranAioOutputInteger16",
      "windows, Fortran, flang, write, _FortranAioOutputInteger32",
      "windows, Fortran, flang, write, _FortranAioOutputInteger64",
      "windows, Fortran, flang, write, _FortranAioOutputInteger8",
      "windows, Fortran, flang, write, _FortranAioOutputLogical",
      "windows, Fortran, flang, write, _FortranAioOutputNamelist",
      "windows, Fortran, flang, write, _FortranAioOutputReal32",
      "windows, Fortran, flang, write, _FortranAioOutputReal64",
      "windows, Fortran, flang, write, _FortranAioOutputUnformatedBlock",
      ## Next is a guess.
      "windows, Fortran, flang, rand, rand_",
      "windows, Fortran, flang, random_init, _FortranARandomInit",
      "windows, Fortran, flang, random_number, _FortranARandomNumber",
      "windows, Fortran, flang, random_seed, _FortranARandomSeed",
      "windows, Fortran, flang, random_seed, _FortranARandomSeedGet",
      "windows, Fortran, flang, random_seed, _FortranARandomSeedSize",

      "windows, C++, clang++, std::cout, _ZNSt3__14coutE",
      "windows, C++, clang++, std::cerr, _ZNSt3__14cerrE",
      "windows, C++, clang++, std::terminate, _ZSt9terminatev",
      "windows, C, clang, exit, exit",
      "windows, C, clang, printf, printf",
      "windows, C, clang, printf, puts",
      "windows, C, clang, puts, puts",
      "windows, C, clang, putchar, putchar",
      "windows, C, clang, sprintf, sprintf",
      "windows, C, clang, vprintf, vprintf",
      "windows, C, clang, vsprintf, vsprintf",
      "windows, C, clang, rand, rand",
      "windows, C, clang, srand, srand"
      )
## Reshape the "system, language, compiler, ssname, osname" strings into
## a five-column character matrix, one row per table entry.
so_symbol_names_table <-
    matrix(unlist(strsplit(so_symbol_names_table,
                           split = ", ", fixed = TRUE)),
           ncol = 5L, byrow = TRUE,
           dimnames = list(NULL,
                           c("system", "language", "compiler",
                             "ssname", "osname")))

## Subscript according to system and compiler types here, rather than
## repeatedly doing this at run time.
so_symbol_names_table <- local({
    tab <- so_symbol_names_table
    ## keep entries for this OS whose compiler matches one of the
    ## configured C/C++/Fortran compilers
    relevant <- (tab[, "system"] == system_ABI["system"]) &
        (tab[, "compiler"] %in% system_ABI[c("CC", "CXX", "F77", "FC")])
    tab[relevant, c("language", "ssname", "osname"), drop = FALSE]
})

## Per-system handlers which normalize raw symbol names read from an
## object file before they are matched against the table.
## <NOTE>
## As we record the low-level (possibly mangled) symbol names for
## each system/compiler combination, there is no need for handlers to
## demangle into user-level names (e.g., using c++filt).
## </NOTE>
so_symbol_names_handlers_db <- list(
    linux =
        function(x) {
            ## Linux ELF symbol versioning, see
            ##  http://lists.debian.org/lsb-spec/1999/12/msg00017.html:
            ## name@version for alternatives, name@@version for the default.
            sub("@.*", "", x)
        },
    freebsd =
        function(x) {
            ## same as Linux, most likely, lots of name@@VERSION
            sub("@.*", "", x)
        })

## Entry points exported by R's own binaries which packages must not
## call.  Used (after stripping any leading underscore from the object's
## symbols) for membership tests in check_so_symbols() below.
## Obsolete ones first,
nonAPI <- c("chol_", "chol2inv_", "cg_", "ch_", "rg_",
            "fft_factor", "fft_work", "Brent_fmin", "optif0",

## then entry points which are not attribute-hidden
## and in a non-API header or no header at all or marked as non-API in a header

            "OutDec", "PRIMOFFSET", "RC_fopen", "R_CollectFromIndex",
            "R_CompiledFileName", "R_FileExists",
            "R_FreeStringBuffer", "R_FunTab", "R_GE_setVFontRoutines",
            "R_GetVarLocMISSING",
            "R_MethodsNamespace", "R_NewHashedEnv",
            "R_OpenCompiledFile", "R_PV", "R_ParseContext",
            "R_ParseContextLast", "R_ParseContextLine",
            "R_ParseError", "R_ParseErrorMsg", "R_SrcfileSymbol",
            "R_SrcrefSymbol", "R_Visible", "R_addTaskCallback",
            "R_cairoCdynload", "R_data_class",
            "R_deferred_default_method", "R_execMethod",
            "R_findVarLocInFrame","R_fopen", "R_gc_torture",
            "R_getTaskCallbackNames", "R_get_arith_function",
            "R_gzclose", "R_gzgets", "R_gzopen", "R_ignore_SIGPIPE",
            "R_isForkedChild", "R_isMethodsDispatchOn",
            "R_moduleCdynload", "R_primitive_generic",
            "R_primitive_methods", "R_print", "R_removeTaskCallback",
            "R_running_as_main_program", "R_setInternetRoutines",
            "R_setLapackRoutines", "R_setX11Routines",
            "R_set_prim_method", "R_set_quick_method_check",
            "R_set_standardGeneric_ptr", "R_strtod4",
            "R_subassign3_dflt", "R_taskCallbackRoutine",
            "Rconn_fgetc", "Rconn_printf", "Rdownload",
            "Rf_EncodeComplex", "Rf_EncodeElement",
            "Rf_EncodeEnvironment", "Rf_EncodeInteger",
            "Rf_EncodeLogical", "Rf_EncodeReal", "Rf_GPretty",
            "Rf_NewEnvironment", "Rf_PrintDefaults",
            "Rf_ReplIteration", "Rf_Seql", "Rf_addTaskCallback",
            "Rf_begincontext", "Rf_callToplevelHandlers",
            "Rf_checkArityCall", "Rf_con_pushback",
            "Rf_copyMostAttribNoTs", "Rf_deparse1", "Rf_deparse1line",
            "Rf_dpptr", "Rf_endcontext", "Rf_envlength",
            "Rf_formatComplex", "Rf_formatInteger",
            "Rf_formatLogical", "Rf_formatReal", "Rf_init_con",
            "Rf_isProtected", "Rf_mbrtowc", "Rf_mkFalse",
            "Rf_printNamedVector", "Rf_printRealVector",
            "Rf_printVector", "Rf_removeTaskCallbackByIndex",
            "Rf_removeTaskCallbackByName", "Rf_set_iconv",
            "Rf_sortVector", "Rf_strIsASCII", "Rf_strchr",
            "Rf_strrchr", "Rf_ucstomb", "Rf_utf8towcs",
            "Rf_wcstoutf8", "Rg_PolledEvents", "Rg_set_col_ptrs",
            "Rf_wait_usec", "Ri18n_iswctype", "Ri18n_wcswidth",
            "Ri18n_wctype", "Ri18n_wcwidth", "Rsockclose",
            "Rsockconnect", "Rsocklisten", "Rsockopen", "Rsockread",
            "Rsockwrite", "Runzip", "UNIMPLEMENTED_TYPE",
            "baseRegisterIndex", "Rf_csduplicated", "Rf_currentTime",
            "dcar", "dcdr", "dchdc_", "do_Rprof", "do_Rprofmem", "do_X11",
            "do_contourLines", "do_edit", "do_getGraphicsEventEnv",
            "do_getSnapshot", "do_playSnapshot", "do_saveplot",
            "do_set_prim_method", "dqrrsd_","dqrxb_", "dtype",
            "dummy_fgetc", "dummy_ii", "dummy_vfprintf", "epslon_",
            "extR_HTTPDCreate", "extR_HTTPDStop", "fdhess",
            "getConnection", "getPRIMNAME", "known_to_be_latin1",
            "locale2charset", "match5", "matherr",
            "max_contour_segments", "Rf_mbcsToUcs2", "Rf_memtrace_report",
            "parseError", "pythag_", "rs_", "rwarnc_",
            "tql2_", "tqlrat_", "tred1_", "tred2_", "utf8locale", "yylloc",
            "R_opendir", "R_readdir", "R_closedir",
            # "signrank_free", "wilcox_free" are API only from 4.2.0
            "ENSURE_NAMEDMAX", "IS_ASCII", "IS_UTF8", "SET_PRSEEN",
            "ddfind",

## Rinterface.h, Rembedded.h, R_ext/{RStartup,eventloop}.h
            "AllDevicesKilled", "R_CStackLimit", "R_CStackStart",
            "R_ClearerrConsole", "R_CleanTempDir", "R_Consolefile",
            "R_DefCallbacks", "R_DefParams", "R_DefParamsEx",
            "R_DirtyImage", "R_GUIType", "R_GlobalContext",
            "R_HistoryFile", "R_HistorySize", "R_Home", "R_HomeDir",
            "R_Interactive", "R_Outputfile",
            "R_PolledEvents", "R_ReplDLLdo1", "R_ReplDLLinit",
            "R_RestoreGlobalEnv", "R_RestoreGlobalEnvFromFile",
            "R_RestoreHistory", "R_RunExitFinalizers", "R_SaveGlobalEnv",
            "R_SaveGlobalEnvToFile", "R_SelectEx", "R_SetParams",
            "R_SetWin32", "R_SignalHandlers", "R_SizeFromEnv", "R_NoEcho",
            "R_Suicide", "R_TempDir", "R_checkActivity",
            "R_checkActivityEx", "R_runHandlers",
            "R_setStartTime", "R_set_command_line_arguments",
            "R_setupHistory", "R_timeout_handler", "R_timeout_val",
            "R_wait_usec", "RestoreAction", "Rf_CleanEd",
            "Rf_KillAllDevices", "Rf_endEmbeddedR", "Rf_initEmbeddedR",
            "Rf_initialize_R", "Rf_jump_to_toplevel", "Rf_mainloop",
            "SaveAction", "editorcleanall", "fpu_setup",
            "freeRUser", "free_R_HOME",
            "getDLLVersion", "getRUser", "get_R_HOME",
            "getSelectedHandler", "initStdinHandler",
            "process_site_Renviron", "process_system_Renviron",
            "process_user_Renviron", "ptr_R_Busy", "ptr_R_ChooseFile",
            "ptr_R_CleanUp", "ptr_R_ClearerrConsole", "ptr_R_EditFile",
            "ptr_R_EditFiles", "ptr_R_FlushConsole", "ptr_R_ProcessEvents",
            "ptr_R_ReadConsole", "ptr_R_ResetConsole", "ptr_R_ShowFiles",
            "ptr_R_ShowMessage", "ptr_R_Suicide", "ptr_R_WriteConsole",
            "ptr_R_WriteConsoleEx", "ptr_R_addhistory", "ptr_R_loadhistory",
            "ptr_R_savehistory", "ptr_do_dataentry", "ptr_do_dataviewer",
            "ptr_do_selectlist", "readconsolecfg",
            "run_Rmainloop", "setup_Rmainloop",

## non-API, removed in R 4.5.0 and long deprecated in R_ext/RS.h (and as call_S in S.h)
            "call_R",
## non-API, declared in Defn.h
            "Rf_setSVector",
## non-API, declared in Rinternals.h
            ## not yet, in Rcpp headers "SET_TYPEOF",
            ## not yet, used in an example in R-exts "SET_OBJECT",
            "SET_S4_OBJECT", "UNSET_S4_OBJECT",
            "R_curErrorBuf",
            "SETLENGTH", "SET_TRUELENGTH", "SETLEVELS",
            "SET_ENVFLAGS", "SET_FRAME", "SET_ENCLOS", "SET_HASHTAB",
            "SET_PRENV", "SET_PRVALUE", "SET_PRCODE", "STDVEC_DATAPTR",
            "IS_GROWABLE", "SET_GROWABLE_BIT", "SET_NAMED",
            "R_PromiseExpr",
            "R_tryWrap",
            "DDVAL", "NAMED", "INTERNAL", "SYMVALUE", "PRSEEN",
            "REAL0", "COMPLEX0", "LEVELS", "FRAME", "HASHTAB",
            "ENVFLAGS", "RDEBUG", "SET_RDEBUG",
            "STRING_PTR", "VECTOR_PTR",
            "SET_FORMALS", "SET_BODY", "SET_CLOENV", "Rf_findVarInFrame3",
            "PRCODE", "PRENV", "PRVALUE", "R_nchar",
            "Rf_NonNullStringMatch",
            "SET_TYPEOF", "TRUELENGTH", "XLENGTH_EX",
            "XTRUELENGTH", "Rf_gsetVar",
            "Rf_isValidString", "Rf_isValidStringF",
            "R_shallow_duplicate_attr",
            ## Documented in WRE in section "Some API replacements for
            ## non-API entry points":
            ## (some entries repeat ones listed above; duplicates are
            ## harmless as this vector is only used for membership tests)
            "EXTPTR_PROT", "EXTPTR_TAG", "EXTPTR_PTR",
            "OBJECT", "IS_S4_OBJECT",
            "Rf_GetOption", "R_lsInternal",
            "REAL0", "COMPLEX0",
            "STRING_PTR", "DATAPTR", "STDVEC_DATAPTR",
            "Rf_allocSExp",
            "Rf_isFrame",
            "BODY", "FORMALS", "CLOENV", "ENCLOS",
            "IS_ASCII", "IS_UTF8",
## in the non-API header R_ext/Connections.h
            "R_new_custom_connection", "R_ReadConnection",
            "R_WriteConnection", "R_GetConnection",

## in ../../../include/R_ext/Applic.h -- these are API now:
## 	"dqrcf_", "dqrqty_", "dqrqy_", "dqrrsd_", "dqrxb_",
##	"dqrdc2_", "dqrls_",
## "d1mach_" and "i1mach_" are API now in R-exts.
            "R_Pretty") ## hidden, so unlikely to be usable
##          "optif9")   ## used by pcaPP

## grDevices uses R_Home R_InputHandlers R_TempDir R_Visible R_cairoCdynload R_fopen R_gzclose R_gzgets R_gzopen R_isForkedChild Rf_envlength Rf_strIsASCII Rf_utf8towcs Rg_set_col_ptrs Ri18n_wcwidth addInputHandler do_X11 do_contourLines do_getGraphicsEventEnv do_getSnapshot do_playSnapshot do_saveplot locale2charset mbcsToUcs2 ptr_R_ProcessEvents

## graphics uses OutDec R_print Rf_EncodeComplex Rf_EncodeInteger Rf_EncodeLogical Rf_EncodeReal Rf_GPretty Rf_PrintDefaults Rf_envlength Rf_formatComplex Rf_formatReal baseRegisterIndex known_to_be_latin1 max_contour_segments

## methods uses R_GetVarLocMISSING R_MakeExternalPtrFn R_MethodsNamespace R_data_class R_deferred_default_method R_execMethod R_findVarLocInFrame R_primitive_generic R_primitive_methods R_set_prim_method R_set_quick_method_check R_set_standardGeneric_ptr R_subassign3_dflt Rf_NewEnvironment Rf_envlength do_set_prim_method getPRIMNAME

## parallel uses R_isForkedChild

## stats uses Rf_PrintDefaults Rf_Seql Rf_copyMostAttribNoTs Rf_deparse1 Rf_deparse1line Rf_envlength Rf_mkFalse fdhess memtrace_report signrank_free wilcox_free

## tcltk uses R_Consolefile R_GUIType R_InputHandlers R_Outputfile R_PolledEvents R_checkActivity R_runHandlers R_timeout_handler R_timeout_val R_wait_usec ptr_R_ClearerrConsole ptr_R_FlushConsole ptr_R_ReadConsole ptr_R_ResetConsole ptr_R_WriteConsole

## tools uses RC_fopen R_FileExists R_NewHashedEnv R_ParseContext R_ParseContextLast R_ParseContextLine R_ParseError R_ParseErrorMsg R_SrcfileSymbol R_SrcrefSymbol Rconn_fgetc Rf_begincontext Rf_endcontext Rf_envlength Rf_mbrtowc Rf_strchr extR_HTTPDCreate extR_HTTPDStop getConnection parseError R_opendir R_readdir R_closedir

## utils uses R_ClearerrConsole R_FreeStringBuffer R_GUIType R_moduleCdynload R_print R_strtod4 Rconn_fgetc Rconn_printf Rdownload Rf_EncodeElement Rf_PrintDefaults Rf_begincontext Rf_con_pushback Rf_endcontext Rf_envlength Rf_sortVector Rsockclose Rsockconnect Rsocklisten Rsockopen Rsockread Rsockwrite Runzip UNIMPLEMENTED_TYPE csduplicated do_Rprof do_Rprofmem do_edit getConnection known_to_be_latin1 ptr_R_addhistory ptr_R_loadhistory ptr_R_savehistory ptr_do_dataentry ptr_do_dataviewer ptr_do_selectlist

## modules use PRIMOFFSET R_GE_setVFontRoutines R_setInternetRoutines R_setLapackRoutines R_setX11Routines Rf_set_iconv currentTime dummy_fgetc dummy_vfprintf ucstomb utf8locale


## Inspect a shared object / DLL for entry points packages should not
## use.  Returns a "check_so_symbols" object: the matching rows of
## so_symbol_names_table, with attributes "file" (the inspected path),
## "nonAPI" (non-API entry points referenced) and "RegSym" (the
## registration symbols NOT found).
check_so_symbols <- if(.Platform$OS.type == "windows") {
    function(so, rarch, have_tables = FALSE)
    {
        if(!length(system_ABI)) return()
        dll_syms <- read_symbols_from_dll(so, rarch)
        hits <- so_symbol_names_table[, "osname"] %in% dll_syms
        ## positions hard-coded (see the table construction above):
        ## these runtimes are statically linked on Windows
        if(have_tables) hits[1:4] <- TRUE
        res <- so_symbol_names_table[hits, , drop = FALSE]
        attr(res, "file") <- so
        stripped <- sub("^_", "", dll_syms)
        bad <- intersect(stripped, nonAPI)
        ## removeInputHandler implies deliberate input-handler use, so
        ## do not report the related entry points
        if("removeInputHandler" %in% bad)
            bad <- setdiff(bad, c("R_InputHandlers", "addInputHandler",
                                  "removeInputHandler"))
        if(length(bad)) attr(res, "nonAPI") <- bad
        missing_reg <- setdiff(c("R_registerRoutines",
                                 "R_useDynamicSymbols"),
                               stripped)
        if(length(missing_reg)) attr(res, "RegSym") <- missing_reg
        class(res) <- "check_so_symbols"
        res
    }
} else {
    function(so)
    {
        if(!length(system_ABI)) return()
        symtab <- read_symbols_from_object_file(so)
        ## undefined ("U") symbols are what the object pulls in from R
        undefined <- symtab[symtab[, "type"] == "U", "name"]
        nms <- symtab[, "name"]
        ## normalize names (e.g. strip ELF version suffixes) if a
        ## handler is registered for this system
        snh <- so_symbol_names_handlers_db[[system_ABI["system"]]]
        if(!is.null(snh)) nms <- snh(nms)
        hits <- so_symbol_names_table[, "osname"] %in% nms
        res <- so_symbol_names_table[hits, , drop = FALSE]
        attr(res, "file") <- so
        undefined <- sub("^_", "", undefined)

        bad <- intersect(undefined, nonAPI)
        ## removeInputHandler implies deliberate input-handler use, so
        ## do not report the related entry points
        if("removeInputHandler" %in% bad)
            bad <- setdiff(bad, c("R_InputHandlers", "addInputHandler",
                                  "removeInputHandler"))
        if(length(bad)) attr(res, "nonAPI") <- bad

        missing_reg <- setdiff(c("R_registerRoutines",
                                 "R_useDynamicSymbols"),
                               undefined)
        if(length(missing_reg)) attr(res, "RegSym") <- missing_reg

        class(res) <- "check_so_symbols"
        res
    }
}

format.check_so_symbols <-
function(x, ...)
{
    ## Render a check_so_symbols result as lines of text: a header
    ## naming the inspected file, then for each matched low-level
    ## symbol the user-level names (with languages) it could come from,
    ## plus the object files it was found in (when recorded in the
    ## "objects" attribute by check_compiled_code()).
    if(!length(x)) return(character())
    ## group matched rows by the low-level (OS-level) symbol name
    ## <FIXME split.matrix>
    entries <- split.data.frame(x, x[, "osname"])
    objects <- vector("list", length(entries))
    names(objects) <- names(entries)
    objs <- attr(x, "objects")
    if(length(objs))
        objects[names(objs)] <- objs
    ## format one group: the "Found ..." line plus optional object list
    fmt1 <- function(rows, osname, obj) {
        sources <- paste(sprintf("%s (%s)",
                                 sQuote(rows[, "ssname"]),
                                 rows[, "language"]),
                         collapse = ", ")
        out <- strwrap(gettextf("Found %s, possibly from %s",
                                sQuote(osname), sources),
                       indent = 2L, exdent = 4L)
        if(length(obj) > 1L)
            out <- c(out,
                     strwrap(sprintf("Objects: %s",
                                     paste(sQuote(obj), collapse = ", ")),
                             indent = 4L, exdent = 6L))
        else if(length(obj))
            out <- c(out,
                     strwrap(sprintf("Object: %s", sQuote(obj)),
                             indent = 4L, exdent = 6L))
        out
    }
    c(gettextf("File %s:", sQuote(attr(x, "file"))),
      unlist(Map(fmt1, entries, names(entries), objects)))
}

## Check the compiled code (DLLs / shared objects) of an installed
## package for symbols flagged by check_so_symbols(), for non-API
## calls into R, and (optionally) for missing native routine
## registration.  Two variants are defined: one for Windows (which may
## have 'i386' and 'x64' sub-architectures) and one for Unix-alikes.
## Returns invisible(NULL) when nothing is found, otherwise an object
## of class "check_compiled_code".
check_compiled_code <-
if(.Platform$OS.type == "windows") {
    function(dir)
    {
        ## Check compiled code in the DLL(s) of an installed package.

        r_arch <- .Platform$r_arch
        ## Were per-object symbol tables ('symbols.rds') recorded at
        ## install time?
        useST <- config_val_to_logical(Sys.getenv("_R_SHLIB_BUILD_OBJECTS_SYMBOL_TABLES_", "FALSE"))
        ## Also check for missing native routine registration?
        useSR <- config_val_to_logical(Sys.getenv("_R_CHECK_NATIVE_ROUTINE_REGISTRATION_", "FALSE"))

        ## Restrict the flagged DLL symbols in 'x' to those actually
        ## present in the package's own object files, and record for
        ## each symbol the object file(s) it came from.  Note that
        ## 'tables' is found via lexical scoping: it is assigned in
        ## this function's frame below, before compare() is called.
        compare <- function(x, strip_ = FALSE) {
            ## Compare symbols in the DLL and in objects:
            symbols <-
                Filter(length,
                       lapply(tables,
                              function(tab) {
                                  nm <- tab[, "name"]
                                  ## strip_ drops a leading underscore
                                  ## from object symbol names.
                                  if (strip_) nm <- sub("^_", "", nm)
                                  nm <- sub("_gfortran_stop.*", "exit", nm)
                                  intersect(x[, "osname"], nm)
                              }))
            ## Drop the DLL symbols not in any object.
            so <- attr(x, "file")
            osnames_in_objects <- unique(as.character(unlist(symbols)))
            x <- x[!is.na(match(x[, "osname"], osnames_in_objects)), , drop = FALSE]
            attr(x, "file") <- .file_path_relative_to_dir(so, dir, TRUE)

            ## Map each remaining symbol to the object file(s)
            ## defining/using it.
            attr(x, "objects") <-
                split(rep.int(names(symbols), lengths(symbols)),
                      unlist(symbols))
            class(x) <- "check_so_symbols"
            x
        }

        ## First the i386 sub-architecture (if installed).
        so_files <-
            Sys.glob(file.path(dir, "libs/i386",
                               sprintf("*%s", .Platform$dynlib.ext)))
        bad <- if(length(so_files)) {
            objects_symbol_tables_file <-
                file.path(dir, "libs/i386", "symbols.rds")
            if(file_test("-f", objects_symbol_tables_file)) {
                bad <- Filter(length, lapply(so_files, check_so_symbols,
                                             rarch = "i386", have_tables = TRUE))
                tables <- readRDS(objects_symbol_tables_file)
                Filter(length, lapply(bad, compare, strip_ = TRUE))
            } else {
                if(useST)
                    cat("Note: information on .o files for i386 is not available\n")
                Filter(length, lapply(so_files, check_so_symbols, rarch="i386"))
            }
        } else NULL
        ## Collect non-API call findings for the i386 DLLs.
        nAPIs <- lapply(lapply(so_files, check_so_symbols, rarch = "i386"),
                        function(x) if(length(z <- attr(x, "nonAPI")))
                        structure(z,
                                  file =
                                  .file_path_relative_to_dir(attr(x, "file"),
                                                             dir, TRUE),
                                  class = "check_nonAPI_calls"))
        bad <- c(bad, Filter(length, nAPIs))

        if (useSR) {
            ## DLLs for which the expected registration entry points
            ## were not found (attribute "RegSym").
            nRS <- lapply(lapply(so_files, check_so_symbols, rarch = "i386"),
                          function(x) if(length(z <- attr(x, "RegSym")))
                          structure(z,
                                    file =
                                    .file_path_relative_to_dir(attr(x, "file"),
                                                               dir, TRUE),
                                    class = "check_RegSym_calls"))
            bad <- c(bad, Filter(length, nRS))
        }

        ## Now the same for the x64 sub-architecture.
        so_files <-
            Sys.glob(file.path(dir, "libs/x64",
                               sprintf("*%s", .Platform$dynlib.ext)))
        bad2 <- if(length(so_files)) {
            objects_symbol_tables_file <- file.path(dir, "libs/x64", "symbols.rds")
            if(file_test("-f", objects_symbol_tables_file)) {
                bad2 <- Filter(length, lapply(so_files, check_so_symbols,
                                              rarch = "x64", have_tables = TRUE))
                tables <- readRDS(objects_symbol_tables_file)
                Filter(length, lapply(bad2, compare))
            } else {
                if(useST)
                    cat("Note: information on .o files for x64 is not available\n")
                Filter(length, lapply(so_files, check_so_symbols, rarch="x64"))
            }
        } else NULL
        nAPIs <- lapply(lapply(so_files, check_so_symbols, rarch = "x64"),
                        function(x) if(length(z <- attr(x, "nonAPI")))
                        structure(z,
                                  file =
                                  .file_path_relative_to_dir(attr(x, "file"),
                                                             dir, TRUE),
                                  class = "check_nonAPI_calls"))
        bad2 <- c(bad2, Filter(length, nAPIs))

        if (useSR) {
            nRS <- lapply(lapply(so_files, check_so_symbols, rarch = "x64"),
                          function(x) if(length(z <- attr(x, "RegSym")))
                          structure(z,
                                    file =
                                    .file_path_relative_to_dir(attr(x, "file"),
                                                               dir, TRUE),
                                    class = "check_RegSym_calls"))
            bad2 <- c(bad2, Filter(length, nRS))
        }

        if(!length(bad) && !length(bad2)) return(invisible(NULL))

        ## NOTE(review): 'bad' and 'bad2' are lists here, so when both
        ## sub-archs have findings rbind() yields a list matrix --
        ## confirm this is what format.check_compiled_code() expects.
        bad <- if(length(bad) && length(bad2)) rbind(bad, bad2)
        else if(length(bad2)) bad2 else bad
        class(bad) <- "check_compiled_code"
        bad
    }
} else {
    function(dir)
    {
        ## Check compiled code in the shared objects of an installed package.

        r_arch <- .Platform$r_arch
        ## Were per-object symbol tables ('symbols.rds') recorded at
        ## install time?
        useST <- config_val_to_logical(Sys.getenv("_R_SHLIB_BUILD_OBJECTS_SYMBOL_TABLES_", "FALSE"))
        ## Also check for missing native routine registration?
        useSR <- config_val_to_logical(Sys.getenv("_R_CHECK_NATIVE_ROUTINE_REGISTRATION_", "FALSE"))

        ## Restrict the flagged so symbols in 'x' to those actually
        ## present in the package's own object files; 'tables' is found
        ## via lexical scoping (assigned below, before compare() runs).
        compare <- function(x) {
            ## Compare symbols in the so and in objects:
            symbols <-
                Filter(length,
                       lapply(tables,
                              function(tab) {
                                  nm <- tab[, "name"]
                                  intersect(x[, "osname"], nm)
                              }))
            ## Drop the so symbols not in any object.
            so <- attr(x, "file")
            ## (Alternatively, provide a subscript method
            ## for class "check_so_symbols".)
            osnames_in_objects <- unique(as.character(unlist(symbols)))
            x <- x[!is.na(match(x[, "osname"], osnames_in_objects)), , drop = FALSE]
            attr(x, "file") <- .file_path_relative_to_dir(so, dir, TRUE)
            attr(x, "objects") <-
                split(rep.int(names(symbols), lengths(symbols)),
                      unlist(symbols))
            class(x) <- "check_so_symbols"
            x
        }

        so_files <- if(nzchar(r_arch))
            Sys.glob(file.path(dir, "libs", r_arch,
                               sprintf("*%s", .Platform$dynlib.ext)))
        else
            Sys.glob(file.path(dir, "libs",
                               sprintf("*%s", .Platform$dynlib.ext)))
        if(!length(so_files)) return(invisible(NULL)) # typically a fake install

        bad <- Filter(length, lapply(so_files, check_so_symbols))
        ## Allow experimenting with finding bad symbols not in
        ## symbols.rds, likely from following the "best approach" from
        ## section "Compiling in sub-directories" of WRE and compiling
        ## code in subdirs into static libraries instead of adding to
        ## OBJECTS.
        ## See PR#18789 <https://bugs.r-project.org/show_bug.cgi?id=18789>,
        ## "R CMD check does not check symbol tables of linked static
        ## libraries".
        if(config_val_to_logical(Sys.getenv("_R_CHECK_COMPILED_CODE_USE_OBJECTS_SYMBOL_TABLES_",
                                    "TRUE"))) {
        objects_symbol_tables_file <- if(nzchar(r_arch))
            file.path(dir, "libs", r_arch, "symbols.rds")
        else file.path(dir, "libs", "symbols.rds")
        if(file_test("-f", objects_symbol_tables_file)) {
            tables <- readRDS(objects_symbol_tables_file)
            bad <- Filter(length, lapply(bad, compare))
        } else if(useST)
            cat("Note: information on .o files is not available\n")
        }
        ## Collect non-API call findings for the shared objects.
        nAPIs <- lapply(lapply(so_files, check_so_symbols),
                        function(x) if(length(z <- attr(x, "nonAPI")))
                        structure(z,
                                  file =
                                  .file_path_relative_to_dir(attr(x, "file"),
                                                             dir, TRUE),
                                  class = "check_nonAPI_calls"))
        bad <- c(bad, Filter(length, nAPIs))

        if (useSR) {
            ## Shared objects for which the expected registration entry
            ## points were not found (attribute "RegSym").
            nRS <- lapply(lapply(so_files, check_so_symbols),
                          function(x) if(length(z <- attr(x, "RegSym")))
                          structure(z,
                                    file =
                                    .file_path_relative_to_dir(attr(x, "file"),
                                                               dir, TRUE),
                                    class = "check_RegSym_calls"))
            bad <- c(bad, Filter(length, nRS))
        }
        class(bad) <- "check_compiled_code"
        bad
    }
}

format.check_compiled_code <-
function(x, ...)
{
    ## Format method for "check_compiled_code" results: format each
    ## finding in turn and join everything with newlines.  (sapply()
    ## is avoided since it does not always simplify as wanted when
    ## there is more than one DLL.)
    if(length(x) == 0L)
        return(character())
    formatted <- lapply(x, format)
    paste(unlist(formatted), collapse = "\n")
}

format.check_nonAPI_calls <-
function(x, ...)
{
    ## Format method for the non-API entry points found in one DLL/so:
    ## a "File ...:" header followed by the offending symbol(s).
    if(!length(x))
        return(character())
    header <- gettextf("File %s:", sQuote(attr(x, "file")))
    body <- if(length(x) > 1L)
        strwrap(paste("Found non-API calls to R:",
                      paste(sQuote(x), collapse = ", ")),
                indent = 2L, exdent = 4L)
    else
        paste("  Found non-API call to R:", sQuote(x))
    c(header, body)
}

format.check_RegSym_calls <-
function(x, ...)
{
    ## Format method for missing-registration findings in one DLL/so:
    ## a "File ...:" header followed by the entry point(s) for which no
    ## call was found.
    if(!length(x))
        return(character())
    header <- gettextf("File %s:", sQuote(attr(x, "file")))
    body <- if(length(x) > 1L)
        strwrap(paste("Found no calls to:",
                      paste(sQuote(x), collapse = ", ")),
                indent = 2L, exdent = 4L)
    else
        paste("  Found no call to:", sQuote(x))
    c(header, body)
}

.shlib_objects_symbol_tables <-
function(file = "symbols.rds")
{
    ## Record the symbol tables of a package's object files in 'file'
    ## (serialized with version-2 RDS).  The trailing command line
    ## arguments name the object files up to a '--pkglibs' separator;
    ## everything after it gives the PKG_LIBS entries.
    args <- commandArgs(trailingOnly = TRUE)
    sep <- which(args == "--pkglibs")[1L]
    objects <- args[seq_len(sep - 1L)]
    pkglibs <- args[-seq_len(sep)]
    ## Also determine the local static libraries linked against by
    ## following the approach suggested in section "Compiling in
    ## sub-directories" of WRE.
    if(length(pkglibs)) {
        archives <- list.files("..", recursive = TRUE, pattern = "[.]a$",
                               all.files = TRUE, full.names = TRUE)
        in_src <- startsWith(archives, "../src/")
        if(any(in_src))
            archives[in_src] <- substring(archives[in_src], 8L)
        ## Case A: local static libs given via their path.
        given_paths <- pkglibs[file.exists(pkglibs)]
        ## Case B: local static libs given as '-lfoo'.
        given_names <- pkglibs[startsWith(pkglibs, "-l")]
        given_names <- sprintf("lib%s.a", substring(given_names, 3L))
        objects <-
            unique(c(objects,
                     archives[normalizePath(archives) %in%
                              normalizePath(given_paths)],
                     archives[basename(archives) %in% given_names]))
    }
    tables <- lapply(objects, read_symbols_from_object_file)
    names(tables) <- objects
    saveRDS(tables, file = file, version = 2)
}


### --- Helpers for registering native routines added in R 3.4.0 ---

package_ff_call_db <-
function(dir)
{
    ## Collect the foreign function calls (.C/.Call/.Fortran/.External
    ## and internal variants) occurring in the R code of the package
    ## with root directory 'dir'.  Returns NULL when there are none,
    ## otherwise the calls with 'dir' recorded as an attribute.

    ## A few packages such as CDM use base::.Call
    ff_call_names <- c(".C", ".Call", ".Fortran", ".External",
                       "base::.C", "base::.Call",
                       "base::.Fortran", "base::.External",
                       ## internal ones
                       ".Call.graphics", ".External.graphics",
                       ".External2")

    ## TRUE for calls whose function position deparses to one of the
    ## known foreign function interface names.
    is_ff_call <- function(e)
        (length(e) > 1L) &&
            !is.na(match(deparse(e[[1L]])[1L], ff_call_names))

    calls <- .find_calls_in_package_code(dir,
                                         predicate = is_ff_call,
                                         recursive = TRUE)
    calls <- unlist(Filter(length, calls))

    if(!length(calls))
        NULL
    else {
        attr(calls, "dir") <- dir
        calls
    }
}

## Turn a db of foreign function calls (as from package_ff_call_db())
## into a native routine registration db: a data frame with columns
## 'cname' (the interface used), 's' (the routine name), 'n' (the
## number of arguments, -1 if indeterminable) and 't' (the type of the
## .NAME argument), with the package name, duplicated usages and
## omitted symbols/expressions recorded as attributes.  With
## character_only = TRUE, only calls giving .NAME as a character
## string are kept.
native_routine_registration_db_from_ff_call_db <-
function(calls, dir = NULL, character_only = TRUE)
{
    if(!length(calls)) return(NULL)

    ff_call_names <-
        c(".C", ".Call", ".Fortran", ".External",
          ".Call.graphics", ".External.graphics", ".External2")
    ff_call_args <- lapply(ff_call_names,
                           function(e) args(get(e, baseenv())))
    names(ff_call_args) <- ff_call_names
    ff_call_args_names <-
        lapply(lapply(ff_call_args,
                      function(e) names(formals(e))), setdiff,
               "...")

    if(is.null(dir))
        dir <- attr(calls, "dir")

    package <- # drop name
        as.vector(.get_package_metadata(dir)["Package"])

    ## Symbols/expressions used as .NAME which could not be resolved to
    ## a character string (collected as a side effect below).
    symbols <- character()
    nrdb <-
        lapply(calls,
               function(e) {
                   if (startsWith(deparse(e[[1L]]), "base::"))
                       e[[1L]] <- e[[1L]][3L]
                   ## First figure out whether ff calls had '...'.
                   pos <- which(unlist(Map(identical,
                                           lapply(e, as.character),
                                           "...")))
                   ## Then match the call with '...' dropped.
                   ## Note that only .NAME could be given by name or
                   ## positionally (the other ff interface named
                   ## arguments come after '...').
                   if(length(pos)) e <- e[-pos]
                   ## drop calls with only ...
                   if(length(e) < 2L) return(NULL)
                   cname <- as.character(e[[1L]])
                   ## The help says
                   ##
                   ## '.NAME' is always matched to the first argument
                   ## supplied (which should not be named).
                   ##
                   ## But some people do (Geneland ...).
                   nm <- names(e); nm[2L] <- ""; names(e) <- nm
                   e <- match.call(ff_call_args[[cname]], e)
                   ## Only keep ff calls where .NAME is character
                   ## or (optionally) a name.
                   s <- e[[".NAME"]]
                   t <- typeof(s)
                   if(is.name(s)) {
                       s <- deparse(s)[1L]
                       if(character_only) {
                           symbols <<- c(symbols, s)
                           return(NULL)
                       }
                   } else if(is.character(s)) {
                       s <- s[1L]
                   } else { ## expressions
                       symbols <<- c(symbols, deparse(s))
                       return(NULL)
                   }
                   ## Drop the ones where PACKAGE gives a different
                   ## package. Ignore those which are not char strings.
                   if(!is.null(p <- e[["PACKAGE"]]) &&
                      is.character(p) && !identical(p, package))
                       return(NULL)
                   n <- if(length(pos)) {
                            ## Cannot determine the number of args: use
                            ## -1 which might be ok for .External().
                            -1L
                        } else {
                            sum(is.na(match(names(e),
                                            ff_call_args_names[[cname]]))) - 1L
                        }
                   ## Could perhaps also record whether 's' was a symbol
                   ## or a character string ...
                   cbind(cname, s, n, t)
               })
    nrdb <- do.call(rbind, nrdb)
    nrdb <- as.data.frame(unique(nrdb), stringsAsFactors = FALSE)

    if(NROW(nrdb) == 0L || length(nrdb) != 4L) {
        message("no native symbols were extracted")
        return(NULL)
    }
    nrdb[, 3L] <- as.numeric(nrdb[, 3L])
    nrdb <- nrdb[order(nrdb[, 1L], nrdb[, 2L], nrdb[, 3L]), ]
    nms <- nrdb[, "s"]
    dups <- unique(nms[duplicated(nms)])

    ## Now get the namespace info for the package.
    info <- parseNamespaceFile(basename(dir), dirname(dir))
    ## Could have ff calls with symbols imported from other packages:
    ## try dropping these eventually.
    imports <- info$imports
    imports <- imports[lengths(imports) == 2L]
    imports <- unlist(lapply(imports, `[[`, 2L))

    info <- info$nativeRoutines[[package]]
    ## Adjust native routine names for explicit remapping or
    ## namespace .fixes.
    if(length(symnames <- info$symbolNames)) {
        ind <- match(nrdb[, 2L], names(symnames), nomatch = 0L)
        nrdb[ind > 0L, 2L] <- symnames[ind]
    } else if(!character_only &&
              any((fixes <- info$registrationFixes) != "")) {
        ## There are packages which have not used the fixes, e.g. utf8latex
        ## fixes[1L] is a prefix, fixes[2L] is an undocumented suffix
        nrdb[, 2L] <- sub(paste0("^", fixes[1L]), "", nrdb[, 2L])
        if(nzchar(fixes[2L]))
            ## Strip the suffix anchored at the end of the name.
            ## (BUG FIX: the pattern is fixes[2L] followed by "$"; the
            ## arguments had been misplaced so that "$" was used as the
            ## replacement and "" as the input.)
            nrdb[, 2L] <- sub(paste0(fixes[2L], "$"), "", nrdb[, 2L])
    }
    ## See above.
    if(any(ind <- !is.na(match(nrdb[, 2L], imports))))
        nrdb <- nrdb[!ind, , drop = FALSE]

    ## Fortran entry points are mapped to l/case
    dotF <- nrdb$cname == ".Fortran"
    nrdb[dotF, "s"] <- tolower(nrdb[dotF, "s"])

    attr(nrdb, "package") <- package
    attr(nrdb, "duplicates") <- dups
    attr(nrdb, "symbols") <- unique(symbols)
    nrdb
}

## Format a native routine registration db (as from
## package_native_routine_registration_db()) as the text of a C source
## file which registers the routines: method def tables for .C/.Call/
## .Fortran/.External, optionally 'extern' declarations for each
## routine, and an R_init_<package>() function.  Returns a character
## vector of C source lines.
format_native_routine_registration_db_for_skeleton <-
function(nrdb, align = TRUE, include_declarations = FALSE)
{
    if(!length(nrdb))
        return(character())

    ## Format one method def table body: one "{name, fptr, nargs}"
    ## entry per routine (column-aligned when 'align' is true) plus the
    ## terminating {NULL, NULL, 0} sentinel.  'n' is the interface name
    ## without the leading dot ("C", "Call", "Fortran", "External").
    fmt1 <- function(x, n) {
        c(if(align) {
              paste(format(sprintf("    {\"%s\",", x[, 1L])),
                    format(sprintf(if(n == "Fortran")
                                       "(DL_FUNC) &F77_NAME(%s),"
                                   else
                                       "(DL_FUNC) &%s,",
                                   x[, 1L])),
                    format(sprintf("%d},", x[, 2L]),
                           justify = "right"))
          } else {
              sprintf(if(n == "Fortran")
                          "    {\"%s\", (DL_FUNC) &F77_NAME(%s), %d},"
                      else
                          "    {\"%s\", (DL_FUNC) &%s, %d},",
                      x[, 1L],
                      x[, 1L],
                      x[, 2L])
          },
          "    {NULL, NULL, 0}")
    }

    package <- attr(nrdb, "package")
    dups <- attr(nrdb, "duplicates")
    symbols <- attr(nrdb, "symbols")

    ## Split the routines by interface.
    nrdb <- split(nrdb[, -1L, drop = FALSE],
                  factor(nrdb[, 1L],
                         levels =
                             c(".C", ".Call", ".Fortran", ".External")))

    has <- vapply(nrdb, NROW, 0L) > 0L
    nms <- names(nrdb)
    entries <- substring(nms, 2L)
    ## One "static const R_*MethodDef *Entries[] = {...};" block per
    ## non-empty interface.
    blocks <- Map(function(x, n) {
                      c(sprintf("static const R_%sMethodDef %sEntries[] = {",
                                n, n),
                        fmt1(x, n),
                        "};",
                        "")
                  },
                  nrdb[has],
                  entries[has])

    decls <- c(
        "/* FIXME: ",
        "   Add declarations for the native routines registered below.",
        "*/")

    if(include_declarations) {
        ## Argument list for a routine with 'nargs' arguments of the
        ## given C type.
        prepare <- function(nargs, type = "void *")
            if(nargs > 0) paste(rep.int(type, nargs), collapse=", ")
            else "void"
        decls <- c(
            "/* FIXME: ",
            "   Check these declarations against the C/Fortran source code.",
            "*/",
            if(NROW(y <- nrdb$.C)) {
                 args <- sapply(y$n, function(n) if(n >= 0) prepare(n)
                                else "/* FIXME */")
                c("", "/* .C calls */",
                  paste0("extern void ", y$s, "(", args, ");"))
           },
            if(NROW(y <- nrdb$.Call)) {
                args <- sapply(y$n, function(n) if(n >= 0) prepare(n, "SEXP")
                               else "/* FIXME */")
               c("", "/* .Call calls */",
                  paste0("extern SEXP ", y$s, "(", args, ");"))
            },
            if(NROW(y <- nrdb$.Fortran)) {
                 args <- sapply(y$n, function(n) if(n >= 0) prepare(n)
                                else "/* FIXME */")
                c("", "/* .Fortran calls */",
                  paste0("extern void F77_NAME(", y$s, ")(", args, ");"))
            },
            if(NROW(y <- nrdb$.External))
                c("", "/* .External calls */",
                  paste0("extern SEXP ", y$s, "(SEXP);"))
            )
    }

    headers <- if(NROW(nrdb$.Call) || NROW(nrdb$.External))
        c("#include <R.h>", "#include <Rinternals.h>")
    else if(NROW(nrdb$.Fortran)) "#include <R_ext/RS.h>"
    else character()

    c(headers,
      "#include <stdlib.h> // for NULL",
      "#include <R_ext/Rdynload.h>",
      "",
      if(length(symbols)) {
          c("/*",
            "  The following symbols/expressions for .NAME have been omitted",
            "", strwrap(symbols, indent = 4, exdent = 4), "",
            "  Most likely possible values need to be added below.",
            "*/", "")
      },
      if(length(dups)) {
          c("/*",
            "  The following name(s) appear with different usages",
            "  e.g., with different numbers of arguments:",
            "", strwrap(dups, indent = 4, exdent = 4), "",
            "  This needs to be resolved in the tables and any declarations.",
            "*/", "")
      },
      decls,
      "",
      unlist(blocks, use.names = FALSE),
      ## We cannot use names with '.' in: WRE mentions replacing with "_"
      sprintf("void R_init_%s(DllInfo *dll)",
              gsub(".", "_", package, fixed = TRUE)),
      "{",
      sprintf("    R_registerRoutines(dll, %s);",
              paste0(ifelse(has,
                            paste0(entries, "Entries"),
                            "NULL"),
                     collapse = ", ")),
      "    R_useDynamicSymbols(dll, FALSE);",
      "}")
}

package_native_routine_registration_db <-
function(dir, character_only = TRUE)
{
    ## Convenience wrapper: extract the foreign function call db of the
    ## package with root 'dir' and turn it into a native routine
    ## registration db.
    ff_calls <- package_ff_call_db(dir)
    native_routine_registration_db_from_ff_call_db(ff_calls, dir,
                                                   character_only)
}

package_native_routine_registration_skeleton <-
function(dir, con = stdout(), align = TRUE, character_only = TRUE,
         include_declarations = TRUE)
{
    ## Write a C skeleton registering the native routines used by the
    ## package with root 'dir' to connection 'con'.
    nrdb <- package_native_routine_registration_db(dir, character_only)
    skeleton <-
        format_native_routine_registration_db_for_skeleton(nrdb, align,
                                                           include_declarations)
    writeLines(skeleton, con)
}
#  File src/library/tools/R/Sweavetools.R
#  Part of the R package, https://www.R-project.org
#
#  Copyright (C) 1995-2015 The R Core Team
#
#  This program is free software; you can redistribute it and/or modify
#  it under the terms of the GNU General Public License as published by
#  the Free Software Foundation; either version 2 of the License, or
#  (at your option) any later version.
#
#  This program is distributed in the hope that it will be useful,
#  but WITHOUT ANY WARRANTY; without even the implied warranty of
#  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
#  GNU General Public License for more details.
#
#  A copy of the GNU General Public License is available at
#  https://www.R-project.org/Licenses/


SweaveTeXFilter <-
function(ifile, encoding = "unknown")
{
    ## Strip the code parts out of a Sweave file, giving plain TeX with
    ## the same number of lines: code chunks (and the chunk delimiter
    ## lines themselves) are replaced by empty lines, so line numbers
    ## in diagnostics still refer to the original file.
    if(inherits(ifile, "srcfile"))
        ifile <- ifile$filename

    syntax <- utils:::SweaveGetSyntax(ifile)

    ## Read in and re-encode as needed.
    ## Alternatively, could use utils:::SweaveReadFile() ...
    lines <- readLines(ifile, warn = FALSE)
    if(encoding != "unknown") {
        if(encoding == "UTF-8")
            Encoding(lines) <- "UTF-8"
        else
            lines <- iconv(lines, encoding, "", sub = "byte")
    }

    TEXT <- 1L
    CODE <- 0L

    ## Delimiter positions: lines starting a documentation part and
    ## lines starting a code chunk.
    doc_starts <- grep(syntax$doc, lines)
    code_starts <- grep(syntax$code, lines)

    markers <- list2DF(list(line = c(doc_starts, code_starts),
                            type = c(rep.int(TEXT, length(doc_starts)),
                                     rep.int(CODE, length(code_starts)))))
    markers <- markers[order(markers$line), ]

    prev <- 0L
    state <- TEXT
    for (i in seq_len(nrow(markers))) {
        at <- markers$line[i]
        if (state == CODE)
            lines[(prev + 1L):at] <- ""  # blank the whole code chunk
        else
            lines[at] <- ""              # blank just the delimiter line
        state <- markers$type[i]
        prev <- at
    }
    lines
}
make_sysdata_rda <-
function()
{
    ## Rebuild the package's sysdata.rda (in the current directory)
    ## from the current IANA registries of URI schemes and HTTP status
    ## codes.
    IANA_URI_scheme_db <- get_IANA_URI_scheme_db()
    IANA_HTTP_status_code_db <- get_IANA_HTTP_status_code_db()

    ## See <https://en.wikipedia.org/wiki/List_of_HTTP_status_codes>.
    table_of_HTTP_status_codes <-
        structure(IANA_HTTP_status_code_db$Description,
                  names = IANA_HTTP_status_code_db$Value)

    save(IANA_URI_scheme_db,
         IANA_HTTP_status_code_db,
         table_of_HTTP_status_codes,
         file = "sysdata.rda",
         compress = TRUE)
}
#  File src/library/tools/R/testing.R
#  Part of the R package, https://www.R-project.org
#
#  Copyright (C) 1995-2024 The R Core Team
#
# NB: also copyright date in Usage.
#
#  This program is free software; you can redistribute it and/or modify
#  it under the terms of the GNU General Public License as published by
#  the Free Software Foundation; either version 2 of the License, or
#  (at your option) any later version.
#
#  This program is distributed in the hope that it will be useful,
#  but WITHOUT ANY WARRANTY; without even the implied warranty of
#  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
#  GNU General Public License for more details.
#
#  A copy of the GNU General Public License is available at
#  https://www.R-project.org/Licenses/

## functions principally for testing R and packages

## Concatenate the example files of package 'pkg' into a single R
## script suitable for 'R CMD check' / 'R CMD BATCH': a header sourcing
## share/R/examples-header.R, per-file nameEx()/cleanEx() bookkeeping
## (and optional per-example timings), and the examples-footer.R
## trailer.  'files' is either a directory of *.R example files or a
## vector of file paths; 'outFile' a path or connection.  With
## commentDonttest = TRUE, \donttest sections (marked "## No test:" ...
## "## End(No test)" in the extracted code) are omitted, and the number
## of such sections is written to "<outFile>-cnt".
massageExamples <-
    function(pkg, files, outFile = stdout(), use_gct = FALSE,
             addTiming = FALSE, ..., commentDonttest = TRUE)
{
    ## A directory argument means: all *.R files therein, sorted in
    ## the C collation order for reproducibility.
    if(dir.exists(files[1L])) {
        old <- Sys.getlocale("LC_COLLATE")
        Sys.setlocale("LC_COLLATE", "C")
        files <- sort(Sys.glob(file.path(files, "*.R")))
        Sys.setlocale("LC_COLLATE", old)
    }

    if(is.character(outFile)) {
        out <- file(outFile, "wt")
        on.exit(close(out))
        cntFile <- paste0(outFile, "-cnt")
    } else {
        out <- outFile
        cntFile <- NULL # cannot derive a -cnt path from a connection
    }

    count <- 0L # number of "## No test:" (\donttest) sections seen

    ## Header: package name, the shared examples header, optional
    ## gctorture, and warn = 1 so warnings appear in context.
    lines <- c(paste0('pkgname <- "', pkg, '"'),
               'source(file.path(R.home("share"), "R", "examples-header.R"))',
               if (use_gct) {
                   gct_n <- as.integer(Sys.getenv("_R_CHECK_GCT_N_", "0"))
                   if(!is.na(gct_n) && gct_n > 0L)
                       sprintf("gctorture2(%s)", gct_n)
                   else "gctorture(TRUE)"
               },
               "options(warn = 1)")
    cat(lines, sep = "\n", file = out)
    if(.Platform$OS.type == "windows")
        cat("options(pager = \"console\")\n", file = out)
    if(addTiming) {
        ## adding timings
        cat("base::assign(\".ExTimings\", \"", pkg,
            "-Ex.timings\", pos = 'CheckExEnv')\n", sep="", file = out)
        cat("base::cat(\"name\\tuser\\tsystem\\telapsed\\n\", file=base::get(\".ExTimings\", pos = 'CheckExEnv'))\n", file = out)
        ## a package left OutDec = "," at the end of an example
        cat("base::assign(\".format_ptime\",",
            "function(x) {",
            "  if(!is.na(x[4L])) x[1L] <- x[1L] + x[4L]",
            "  if(!is.na(x[5L])) x[2L] <- x[2L] + x[5L]",
            "  options(OutDec = '.')",
            "  format(x[1L:3L], digits = 7L)",
            "},",
            "pos = 'CheckExEnv')\n", sep = "\n", file = out)
        ## Marker recognized by Rdiff()'s clean() to skip the header.
        cat("### * </HEADER>\n", file = out)
    }

    ## tcltk examples can only run when Tcl/Tk is available; base is
    ## always attached.
    if(pkg == "tcltk") {
        if(capabilities("tcltk")) cat("require('tcltk')\n\n", file = out)
        else cat("q()\n\n", file = out)
    } else if(pkg != "base")
        cat("library('", pkg, "')\n\n", sep = "", file = out)

    cat("base::assign(\".oldSearch\", base::search(), pos = 'CheckExEnv')\n", file = out)
    ## cat("assign(\".oldNS\", loadedNamespaces(), pos = 'CheckExEnv')\n", file = out)
    cat("base::assign(\".old_wd\", base::getwd(), pos = 'CheckExEnv')\n",
        file = out)
    for(file in files) {
        nm <- sub("\\.R$", "", basename(file))
        ## make a syntactic name out of the filename
        nm <- gsub("[^- .a-zA-Z0-9_]", ".", nm, perl = TRUE, useBytes = TRUE)
        ## Examples needing special treatment in base packages:
        if (pkg == "grDevices" && nm == "postscript") next
        ## Latin-1 examples are treated separately
        if (pkg == "graphics" && nm == "text") next
        if(!file.exists(file))
            stop("file ", file, " cannot be opened", domain = NA)
        lines <- readLines(file)
        have_examples <- any(grepl("_ Examples _|### \\*+ Examples",
                                   lines, perl = TRUE, useBytes = TRUE))
        ## skip comment lines
        com <- grep("^#", lines, perl = TRUE, useBytes = TRUE)
        lines1 <- if(length(com)) lines[-com] else lines
        ## Does this example call par() or options(contrasts)?  If so,
        ## the settings are restored after the example is run.
        have_par <- any(grepl("[^a-zA-Z0-9.]par\\(|^par\\(",
                                lines1, perl = TRUE, useBytes = TRUE))
        have_contrasts <- any(grepl("options\\(contrasts",
                                   lines1, perl = TRUE, useBytes = TRUE))

        if(have_examples)
            cat("cleanEx()\nnameEx(\"", nm, "\")\n", sep = "", file = out)

        cat("### * ", nm, "\n\n", sep = "", file = out)
        cat("flush(stderr()); flush(stdout())\n\n", file = out)
        if(addTiming)
            cat("base::assign(\".ptime\", proc.time(), pos = \"CheckExEnv\")\n",
                file = out)
        if (commentDonttest) {
            ## Copy the example, dropping everything between the
            ## "## No test:" and "## End(No test)" markers.
            dont_test <- FALSE
            for (line in lines) {
                if(any(grepl("^[[:space:]]*## No test:", line,
                             perl = TRUE, useBytes = TRUE))) {
                    dont_test <- TRUE
                    count <- count + 1L
                }
                if(!dont_test) cat(line, "\n", sep = "", file = out)
                if(any(grepl("^[[:space:]]*## End\\(No test\\)", line,
                             perl = TRUE, useBytes = TRUE)))
                    dont_test <- FALSE
            }
        } else
            for (line in lines) cat(line, "\n", sep = "", file = out)

        if(addTiming) {
            cat("base::assign(\".dptime\", (proc.time() - get(\".ptime\", pos = \"CheckExEnv\")), pos = \"CheckExEnv\")\n", file = out)
            cat("base::cat(\"", nm, "\", base::get(\".format_ptime\", pos = 'CheckExEnv')(get(\".dptime\", pos = \"CheckExEnv\")), \"\\n\", file=base::get(\".ExTimings\", pos = 'CheckExEnv'), append=TRUE, sep=\"\\t\")\n", sep = "", file = out)
        }
        if(have_par)
            cat("graphics::par(get(\"par.postscript\", pos = 'CheckExEnv'))\n", file = out)
        if(have_contrasts)
            cat("base::options(contrasts = c(unordered = \"contr.treatment\",",
                "ordered = \"contr.poly\"))\n", sep="", file = out)
    }

    cat(readLines(file.path(R.home("share"), "R", "examples-footer.R")),
        sep = "\n", file = out)

    if(count && !is.null(cntFile)) writeLines(as.character(count), cntFile)
}

## compares 2 files
## 2022-07: it is reasonable to assume that almost all users will
## have diff (it is part of Rtools), and currently only GNU diff
## (from 2022 on macOS) and FreeBSD versions seem to be in use.
## So the support without diff is minimal.
Rdiff <- function(from, to, useDiff = FALSE, forEx = FALSE,
                  nullPointers = TRUE, Log = FALSE)
{
    ## Compare two R output files (or two PDF files), ignoring volatile
    ## content such as the R startup banner, timings, pointer addresses
    ## and fancy quotes.  Returns 0 when the files are equivalent, and a
    ## non-zero status otherwise; with Log = TRUE a list(status, out) is
    ## returned instead of (only) printing the differences.
    ## forEx = TRUE applies extra cleaning appropriate for *-Ex.Rout files.

    ## Strip volatile parts from captured R output.
    clean <- function(txt)
    {
        if(!length(txt)) return(txt)
        ## remove R header
        if(length(top <- grep("^(R version|R : Copyright|R Under development)",
                              txt, perl = TRUE, useBytes = TRUE)) &&
           length(bot <- grep("quit R.$", txt, perl = TRUE, useBytes = TRUE)))
            txt <- txt[-(top[1L]:bot[1L])]
        ## for massageExamples(), used for timings
        ll <- grep("</HEADER>", txt, fixed = TRUE, useBytes = TRUE)
        if(length(ll)) txt <- txt[-seq_len(max(ll))]
        ll <- grep("<FOOTER>", txt, fixed = TRUE, useBytes = TRUE)
        if(length(ll)) txt <- txt[seq_len(max(ll) - 1L)]
        ## remove header change in R 3.5.0
        if(forEx) {
            ll <- grep('".old_wd"', txt, fixed = TRUE, useBytes = TRUE)
            if(length(ll)) txt <- txt[-ll]
        }
        ## remove BATCH footer
        nl <- length(txt)
        if(nl > 3L && startsWith(txt[nl-2L], "> proc.time()"))
            txt <- txt[1:(nl-3L)]
        ## remove text between IGNORE_RDIFF markers.
        ## documented in ?Rdiff, i.e., not only  if(forEx)
        txt <- txt[(cumsum(txt == "> ## IGNORE_RDIFF_BEGIN") <=
                    cumsum(txt == "> ## IGNORE_RDIFF_END"))]
        ## (Keeps the end markers, but that's ok.)
        if (nullPointers) {
            ## remove pointer addresses from listings
            ## useBytes=TRUE as some tests intentionally use invalid strings
            txt <- gsub("<(environment|bytecode|pointer|promise): [x[:xdigit:]]+>", "<\\1: 0>", txt,
                        useBytes = TRUE)
            ## standardize hashtable, pro tem
            ## useBytes=TRUE as some tests intentionally use invalid strings
            txt <- sub("<hashtable.*>", "<hashtable output>", txt,
                       useBytes = TRUE)
        }
        ## regularize fancy quotes.  First UTF-8 ones:
        txt <- .canonicalize_quotes(txt)
        if(.Platform$OS.type == "windows") {
            ## not entirely safe ...
            ## map CP1252 curly single/double quotes to ASCII equivalents
            txt <- gsub(paste0("(",rawToChar(as.raw(0x91)),"|",rawToChar(as.raw(0x92)),")"),
                        "'", txt, perl = TRUE, useBytes = TRUE)
            txt <- gsub(paste0("(",rawToChar(as.raw(0x93)),"|",rawToChar(as.raw(0x94)),")"),
                        '"', txt, perl = TRUE, useBytes = TRUE)
        }
        ## massageExamples() adds options(pager = "console") only for
        ## Windows, but we should ignore a corresponding diff on all
        ## platforms.
        txt <- txt[!grepl('options(pager = "console")', txt,
                          fixed = TRUE, useBytes = TRUE)]
        ## finally drop lines which are expected to differ between runs
        pat <- '(^Time |^Loading required package|^Package [A-Za-z][A-Za-z0-9]+ loaded|^<(environment|promise|pointer|bytecode):|^End.Don\'t show)'
        txt[!grepl(pat, txt, perl = TRUE, useBytes = TRUE)]
    }
    ## For example output: drop everything up to the options(warn = 1)
    ## line emitted by the examples header.
    clean2 <- function(txt)
    {
        ## useBytes=TRUE as some tests intentionally use invalid strings
        eoh <- grep("^> options\\(warn = 1\\)$", txt, useBytes = TRUE)
        if(length(eoh)) txt[-(1L:eoh[1L])] else txt
    }
    ## Strip the variable parts (header comment, dates, producer) from a
    ## PDF file read as lines.
    trimPDF <- function(txt)
    {
        ## drop the PDF header
        if (length(txt) < 2L || !startsWith(txt[1L], "%PDF"))
            stop("not a PDF file")
        ## drop second line if comment, often non-ASCII
        ## BUG FIX: this previously tested txt[1L], which the guard above
        ## guarantees starts with "%PDF", so the else branch was dead and
        ## the second line was dropped unconditionally.
        txt <- if(startsWith(txt[2L], "%")) txt[-(1:2)] else txt[-1L]
        ## Remove variable parts of the header
        pat <- '(^/CreationDate |^/ModDate |^/Producer)'
        txt[!grepl(pat, txt, perl = TRUE, useBytes = TRUE)]
    }

    useDiff0 <- useDiff

    left <- readLines(from)
    right <- readLines(to)
    asPDF <- length(left) >= 1L && startsWith(left[1L], "%PDF")
    if (useDiff && !nzchar(Sys.which("diff"))) {
        if(!asPDF)
            warning("'diff' is not available so useDiff = FALSE will be used")
        useDiff <- FALSE
    }

    if(asPDF) {
        ## PDFs can only sensibly be compared via an external diff
        if(!useDiff) {
            out <- if(!useDiff0) "comparing PDF files requires useDiff = TRUE"
                   else "comparing PDF files requires 'diff'"
            if (Log) return(list(status = 0L, out = out))
            else {message(out); return(invisible(0L))}
        }
        left <- trimPDF(left); right <- trimPDF(right)
    } else {
        left <- clean(left); right <- clean(right)
        if (forEx) {
            left <- clean2(left)
            ## remove lines from R CMD check --timings
            left <- filtergrep("[.](format_|)ptime", left, useBytes = TRUE)
            right <- clean2(right)
        }
    }

    if (!useDiff) {
        if(length(left) == length(right)) {
            ## The idea is to emulate diff -b, as documented by POSIX:
            ## https://pubs.opengroup.org/onlinepubs/9699919799/utilities/diff.html

            ## useBytes=TRUE as some tests intentionally use invalid strings
            bleft  <- gsub("[[:space:]]*$", "", left, useBytes=TRUE)
            bright <- gsub("[[:space:]]*$", "", right, useBytes=TRUE)
            bleft  <- gsub("[[:space:]]+", " ", bleft, useBytes=TRUE)
            bright <- gsub("[[:space:]]+", " ", bright, useBytes=TRUE)
            if(all(bleft == bright))
                return(if(Log) list(status = 0L, out = character()) else 0L)
            cat("\n")
            diff <- bleft != bright
            ## FIXME do run lengths here
            for(i in which(diff))
                cat(i,"c", i, "\n< ", left[i], "\n", "---\n> ", right[i], "\n",
                    sep = "")
            if (Log) {
                i <- which(diff)
                out <- paste0(i,"c", i, "\n< ", left[i], "\n", "---\n> ", right[i])
                list(status = 1L, out = out)
            } else invisible(1L)
        } else { ## no diff, different lengths
            out <- "files differ in number of lines"
            if (Log) list(status = 2L, out = out)
            else {message(out); invisible(2L)}
        }
    } else {
        ## use the external diff on the cleaned-up copies
        out <- character()
        a <- tempfile("Rdiffa")
        writeLines(left, a)
        b <- tempfile("Rdiffb")
        writeLines(right, b)
        if (Log) {
            tf <- tempfile()
            status <- system2("diff", c("-bw", shQuote(a), shQuote(b)),
                              stdout = tf, stderr = tf)
            list(status = status, out = c(out, readLines(tf)))
        } else system(paste("diff -bw", shQuote(a), shQuote(b)))
    }
} ## {Rdiff}

.is.writeable <- function(dir)
{
    ## Probe whether 'dir' is writeable by actually creating (and then
    ## removing) a scratch directory and a scratch file in it.
    ## (file.access() is unreliable on some platforms -- see packages2.R.)
    pid <- Sys.getpid()

    ## first probe: can we create a subdirectory?
    probe <- file.path(dir, paste0("_test_dir_", pid))
    created <- try(dir.create(probe, showWarnings = FALSE))
    if (inherits(created, "try-error") || !created)
        return(FALSE)
    unlink(probe, recursive = TRUE)

    ## second probe: can we create a plain file?
    probe <- file.path(dir, paste0("_test_file_", pid))
    created <- try(file.create(probe, showWarnings = FALSE))
    if (inherits(created, "try-error") || !created)
        return(FALSE)
    unlink(probe)

    TRUE
}

testInstalledPackages <-
    function(outDir = ".", errorsAreFatal = TRUE,
             scope = c("both", "base", "recommended"),
             types = c("examples", "tests", "vignettes"),
             srcdir = NULL, Ropts = "", ...)
{
    ## Run the examples/tests/vignettes of all installed base and/or
    ## recommended packages, writing the results under 'outDir'.
    ## With TEST_MC_CORES > 1 (non-Windows) the packages are tested in
    ## parallel.  Returns (invisibly) 0L on success, 1L otherwise --
    ## unless errorsAreFatal, in which case failure stops.
    if (!.is.writeable(outDir))
        stop("directory ", sQuote(outDir), " is not writeable ", domain = NA)
    ow <- options(warn = 1)
    ## BUG FIX: this was on.exit(ow), which merely evaluates the saved
    ## list on exit and never restores the 'warn' option; options() must
    ## be called on it.
    on.exit(options(ow))
    scope <- match.arg(scope)
    status <- 0L
    pkgs <- character()
    known_packages <- .get_standard_package_names()
    if (scope %in% c("both", "base"))
        pkgs <- known_packages$base
    if (scope %in% c("both", "recommended"))
        pkgs <- c(pkgs, known_packages$recommended)
    mc.cores <- as.integer(Sys.getenv("TEST_MC_CORES", "1"))
    if (.Platform$OS.type != "windows" &&
        !is.na(mc.cores) && mc.cores > 1L) {
        ## parallel testing: one mclapply job per package
        do_one <- function(pkg) {
            ## base packages keep their reference outputs in the R sources
            if(is.null(srcdir) && pkg %in% known_packages$base)
                srcdir <- R.home("tests/Examples")
            testInstalledPackage(pkg, .Library, outDir, types, srcdir, Ropts, ...)
        }
        res <- parallel::mclapply(pkgs, do_one, mc.cores = mc.cores,
                                  mc.preschedule = FALSE)
        res <- unlist(res) != 0L
        if (any(res)) {
            for(i in which(res))
                warning(gettextf("testing '%s' failed", pkgs[i]),
                        domain = NA, call. = FALSE, immediate. = TRUE)
            if (errorsAreFatal)
                stop(sprintf(ngettext(sum(res), "%d of the package tests failed",
                                      "%d of the package tests failed",
                                       domain = "R-tools"), sum(res)),
                     domain = NA, call. = FALSE)
        }
    } else {
        ## serial testing
        for (pkg in pkgs) {
            if(is.null(srcdir) && pkg %in% known_packages$base)
                srcdir <- R.home("tests/Examples")
            res <- testInstalledPackage(pkg, .Library, outDir, types, srcdir, Ropts, ...)
            if (res) {
                status <- 1L
                msg <- gettextf("testing '%s' failed", pkg)
                if (errorsAreFatal) stop(msg, domain = NA, call. = FALSE)
                else warning(msg, domain = NA, call. = FALSE, immediate. = TRUE)
            }
        }
    }
    invisible(status)
}

testInstalledPackage <-
    function(pkg, lib.loc = NULL, outDir = ".",
             types = c("examples", "tests", "vignettes"),
             srcdir = NULL, Ropts = "", ...)
{
    ## Run the examples, tests and/or vignettes of one installed package,
    ## comparing outputs against reference results where available.
    ## Returns (invisibly) 0L on success and 1L on failure.
    types <- match.arg(types, c("examples", "tests", "vignettes"), several.ok=TRUE)
    pkgdir <- find.package(pkg, lib.loc)
    owd <- setwd(outDir)
    on.exit(setwd(owd))
    ## R_STRICT_PACKAGE_CHECK escalates output differences to errors
    strict <- as.logical(Sys.getenv("R_STRICT_PACKAGE_CHECK", "FALSE"))
    useDiff <- nzchar(Sys.which("diff"))

    if ("examples" %in% types) {
        message(gettextf("Testing examples for package %s", sQuote(pkg)),
                domain = NA)
        Rfile <- .createExdotR(pkg, pkgdir, silent = TRUE, ...)
        if (length(Rfile)) {
            outfile <- paste0(pkg, "-Ex.Rout")
            failfile <- paste0(outfile, ".fail")
            savefile <- paste0(outfile, ".prev")
            ## keep any previous run's output for comparison
            if (file.exists(outfile)) file.rename(outfile, savefile)
            unlink(failfile)
            ## Create as .fail in case this R session gets killed
            cmd <- paste(shQuote(file.path(R.home("bin"), "R")),
                         "CMD BATCH --vanilla --no-timing", Ropts,
                         shQuote(Rfile), shQuote(failfile))
            if (.Platform$OS.type == "windows") {
                Sys.setenv(R_LIBS="")
                cmd <- paste(cmd, "LANGUAGE=C")
            } else
                cmd <- paste("R_LIBS= LANGUAGE=C", cmd)
            res <- system(cmd)
            if (res) {
                message(gettextf("Error: running examples in %s failed", sQuote(Rfile)),
                        domain = NA)
                return(invisible(1L))
            } else
                file.rename(failfile, outfile)

            ## reference output: from srcdir if given, else from the
            ## package's installed tests/Examples directory
            savefile <- paste0(outfile, ".save")
            if (!is.null(srcdir)) savefile <- file.path(srcdir, savefile)
            else {
                tfile <- file.path(pkgdir, "tests", "Examples" , savefile)
                if(!file.exists(savefile) && file.exists(tfile))
                    savefile <- tfile
            }
            if (file.exists(savefile)) {
                ## FIX: removed a redundant duplicated
                ## if (file.exists(savefile)) nested directly inside the
                ## identical test.
                message(gettextf("  comparing %s to %s ...",
                                 sQuote(outfile), sQuote(basename(savefile))),
                        appendLF = FALSE, domain = NA)
                ## compare in a separate R process via tools::Rdiff()
                cmd <-
                    sprintf("invisible(tools::Rdiff('%s','%s',%s,TRUE))",
                            outfile, savefile, as.character(useDiff))
                out <- R_runR(cmd, "--vanilla --no-echo")
                if(length(out)) {
                    if(strict)
                        message(" ERROR")
                    else
                        message(" NOTE")
                    writeLines(paste0("  ", out))
                    if(strict)
                        stop("  ",
                             "results differ from reference results")
                } else {
                    message(" OK")
                }
            } else {
                ## no reference output: compare to the previous run, if any
                prevfile <- paste0(outfile, ".prev")
                if (file.exists(prevfile)) {
                    message(gettextf("  comparing %s to %s ...",
                            sQuote(outfile), sQuote(basename(prevfile))),
                            appendLF = FALSE, domain = NA)
                    cmd <-
                        sprintf("invisible(tools::Rdiff('%s','%s',%s,TRUE))",
                                outfile, prevfile, as.character(useDiff))
                    out <- R_runR(cmd, "--vanilla --no-echo")
                    if(length(out)) {
                        message(" NOTE")
                        writeLines(paste0("  ", out))
                    } else {
                        message(" OK")
                    }
                }
            }
        } else
            warning(gettextf("no examples found for package %s", sQuote(pkg)),
                    call. = FALSE, domain = NA)
    }

    ## FIXME merge with code in .runPackageTests
    if ("tests" %in% types && dir.exists(d <- file.path(pkgdir, "tests"))) {
        ## run the package's own tests in a fresh <pkg>-tests directory
        this <- paste0(pkg, "-tests")
        unlink(this, recursive = TRUE)
        dir.create(this)
        ## system(paste("cp -pR", file.path(d, "*"), this))
        file.copy(Sys.glob(file.path(d, "*")), this, recursive = TRUE)
        setwd(this)
        message(gettextf("Running specific tests for package %s",
                         sQuote(pkg)), domain = NA)
        Rfiles <- dir(".", pattern="\\.[rR]$")
        for(f in Rfiles) {
            message(gettextf("  Running %s", sQuote(f)), domain = NA)
            outfile <- sub("rout$", "Rout", paste0(f, "out"))
            cmd <- paste(shQuote(file.path(R.home("bin"), "R")),
                         "CMD BATCH --vanilla --no-timing", Ropts,
                         shQuote(f), shQuote(outfile))
            cmd <- if (.Platform$OS.type == "windows") paste(cmd, "LANGUAGE=C")
            else paste("LANGUAGE=C", cmd)
            res <- system(cmd)
            if (res) {
                file.rename(outfile, paste0(outfile, ".fail"))
                message(gettextf("Error: running the tests in %s failed", sQuote(f)),
                        domain = NA)
                return(invisible(1L))
            }
            savefile <- paste0(outfile, ".save")
            if (file.exists(savefile)) {
                message(gettextf("  comparing %s to %s ...",
                                 sQuote(outfile), sQuote(savefile)),
                        appendLF = FALSE, domain = NA)
                res <- Rdiff(outfile, savefile, useDiff)
                if (!res) message(" OK")
            }
        }
        setwd(owd)
    }

    if ("vignettes" %in% types && dir.exists(file.path(pkgdir, "doc"))) {
        ## re-weave (but do not latex) the installed vignettes
        message(gettextf("Running vignettes for package %s", sQuote(pkg)),
                domain = NA)
        out <- format(checkVignettes(pkg, lib.loc = lib.loc,
                                     latex = FALSE, weave = TRUE))
        if (length(out)) {
            writeLines(out)
            return(invisible(1L))
        }
    }

    invisible(0L)
}

## run all the tests in a directory: for use by R CMD check.
## trackObjs has .Rin files

## used by R CMD check
.runPackageTestsR <- function(...)
{
    ## Run the package tests and terminate R, using the number of
    ## failures as the exit status.
    cat("\n")
    res <- .runPackageTests(...)
    q("no", status = res)
}

.runPackageTests <-
    function(use_gct = FALSE, use_valgrind = FALSE, Log = NULL,
             stop_on_error = TRUE, ...)
{
    ## Run all test files (*.Rin first, then *.R) in the current
    ## directory, as used by R CMD check.  Returns the number of failed
    ## tests; with stop_on_error it returns at the first failure.
    ## 'Log' may name a file to which progress is also written.

    ## per-test elapsed-time limit, taken from the most specific of
    ## these environment variables that is set
    tlim <- Sys.getenv("_R_CHECK_ONE_TEST_ELAPSED_TIMEOUT_",
            Sys.getenv("_R_CHECK_TESTS_ELAPSED_TIMEOUT_",
            Sys.getenv("_R_CHECK_ELAPSED_TIMEOUT_")))
    tlim <- get_timeout(tlim)
    if (!is.null(Log)) Log <- file(Log, "wt")
    WINDOWS <- .Platform$OS.type == "windows"
    ## only report timings of at least td0 seconds (NA -> never report)
    td0 <- as.numeric(Sys.getenv("_R_CHECK_TIMINGS_"))
    ## CPU/elapsed ratio above which a note is printed (NA: check disabled)
    theta <-
        as.numeric(Sys.getenv("_R_CHECK_TEST_TIMING_CPU_TO_ELAPSED_THRESHOLD_",
                              NA_character_))
    if (is.na(td0)) td0 <- Inf
    ## Report the time taken by one test (t1, t2 are proc.time() values),
    ## to the console and to Log (if any).
    print_time <- function(t1, t2, Log)
    {
        td <- t2 - t1
        if(td[3L] < td0) td2 <- ""
        else {
            ## report in minutes for long tests, seconds otherwise;
            ## on Windows only elapsed time, elsewhere CPU/elapsed
            td2 <- if (td[3L] > 600) {
                td <- td/60
                if(WINDOWS) sprintf(" [%dm]", round(td[3L]))
                else sprintf(" [%dm/%dm]", round(sum(td[-3L])), round(td[3L]))
            } else {
                if(WINDOWS) sprintf(" [%ds]", round(td[3L]))
                else sprintf(" [%ds/%ds]", round(sum(td[-3L])), round(td[3L]))
            }
        }
        message(td2, domain = NA)
        if (!is.null(Log)) cat(td2, "\n", sep = "",  file = Log)
    }
    ## Run a single .R test file via R CMD BATCH; returns 1L on failure
    ## (renaming the output to *.Rout.fail), 0L on success.
    runone <- function(f)
    {
        message(gettextf("  Running %s", sQuote(f)),
                appendLF = FALSE, domain = NA)
        if(!is.null(Log))
            cat("  Running ", sQuote(f), sep = "", file = Log)
        outfile <- sub("rout$", "Rout", paste0(f, "out"))
        cmd <- paste(shQuote(file.path(R.home("bin"), "R")),
                     "CMD BATCH --vanilla",
                     if(use_valgrind) "--debugger=valgrind",
                     shQuote(f), shQuote(outfile))
        if (WINDOWS) {
            ## set via the environment on Windows rather than the command
            Sys.setenv(LANGUAGE="C")
            Sys.setenv(R_TESTS="startup.Rs")
        } else
            cmd <- paste("LANGUAGE=C", "R_TESTS=startup.Rs", cmd)
        t1 <- proc.time()
        res <- system(cmd, timeout = tlim)
        t2 <- proc.time()
        print_time(t1, t2, Log)
        if (!WINDOWS && !is.na(theta)) {
            ## flag tests using much more CPU than elapsed time
            ## (e.g. unintended parallelism)
            td <- t2 - t1
            cpu <- sum(td[-3L])
            if(cpu >= pmax(theta * td[3L], 1)) {
                ratio <- round(cpu/td[3L], 1L)
                msg <- sprintf("Running R code in %s had CPU time %g times elapsed time\n",
                               sQuote(f), ratio)
                cat(msg)
                if (!is.null(Log)) cat(msg, file = Log)
            }
        }
        if (res) {
            ## status 124 from system() indicates the timeout was hit
            if(identical(res, 124L)) report_timeout(tlim)
            file.rename(outfile, paste0(outfile, ".fail"))
            return(1L)
        }
        ## compare the output to reference output, if there is any
        savefile <- paste0(outfile, ".save")
        if (file.exists(savefile)) {
            message(gettextf("  Comparing %s to %s ...",
                             sQuote(outfile), sQuote(savefile)),
                    appendLF = FALSE, domain = NA)
            if(!is.null(Log))
                cat("  Comparing ", sQuote(outfile), " to ",
                    sQuote(savefile), " ...", sep = "", file = Log)
            if(!is.null(Log)) {
                ans <- Rdiff(outfile, savefile, TRUE, Log = TRUE)
                writeLines(ans$out)
                writeLines(ans$out, Log)
                res <- ans$status
            } else res <- Rdiff(outfile, savefile, TRUE)
            if (!res) {
                message(" OK")
                if(!is.null(Log)) cat(" OK\n", file = Log)
            }
        }
        0L
    }

    ## common startup file for all tests, optionally with gctorture
    file.copy(file.path(R.home("share"), "R", "tests-startup.R"), "startup.Rs")
    if (use_gct) cat("gctorture(TRUE)" , file = "startup.Rs", append = TRUE)
    nfail <- 0L ## allow for later running all tests even if some fail.
    ## first generate and run the .R files from any .Rin files
    Rinfiles <- dir(".", pattern="\\.Rin$")
    for(f in Rinfiles) {
        message("  Processing ", sQuote(f), domain = NA)
        if (!is.null(Log))
            cat("  Processing ", sQuote(f), "\n", sep = "", file = Log)
        cmd <- paste(shQuote(file.path(R.home("bin"), "R")),
                     "CMD BATCH --no-timing --vanilla --no-echo", shQuote(f))
        if (system(cmd)) {
            nfail <- nfail + 1L
            file.rename(paste0(f, ".Rout"), paste0(f, ".Rout.fail"))
            if (stop_on_error) return(1L)
        }
    }

    ## then run the plain .R test files
    Rfiles <- dir(".", pattern="\\.[rR]$")
    for(f in Rfiles) {
        nfail <- nfail + runone(f)
        if (nfail > 0 && stop_on_error) return(nfail)
    }
    if (!is.null(Log)) close(Log)
    return(nfail)
}

## Defaults for commenting are the same as per-3.2.0 version.
.createExdotR <-
    function(pkg, pkgdir, silent = FALSE, use_gct = FALSE, addTiming = FALSE,
             ..., commentDontrun = TRUE, commentDonttest = TRUE,
             installed = TRUE)
{
    ## Extract the examples from all Rd files of 'pkg' into a single
    ## file <pkg>-Ex.R in the current directory (via massageExamples()).
    ## Returns the file name invisibly, or NULL if there are no examples.
    Rfile <- paste0(pkg, "-Ex.R")

    db <- if (installed)
              Rd_db(basename(pkgdir), lib.loc = dirname(pkgdir))
          else
              Rd_db(dir = pkgdir)
    if (!length(db)) {
        message("no parsed files found")
        return(invisible(NULL))
    }
    if (!silent)
        message("  Extracting from parsed Rd's ",
                appendLF = FALSE, domain = NA)

    keep <- names(db)
    ## grDevices has platform-specific Rd subdirectories: skip them
    if (pkg == "grDevices")
        keep <- keep[!grepl("^(unix|windows)/", keep)]

    ## write one .R file per Rd file into a scratch directory
    exdir <- tempfile()
    dir.create(exdir)
    on.exit(unlink(exdir, recursive = TRUE))
    done <- 0L
    for (rd in keep) {
        base <- sub("\\.[Rr]d$", "", basename(rd))
        Rd2ex(db[[rd]],
              file.path(exdir, paste0(base, ".R")),
              defines = NULL, commentDontrun = commentDontrun,
              commentDonttest = commentDonttest)
        done <- done + 1L
        ## progress dot every tenth file
        if (!silent && done %% 10L == 0L)
            message(".", appendLF = FALSE, domain = NA)
    }
    if (!silent) message()

    if (!length(Sys.glob(file.path(exdir, "*.R"))))
        return(invisible(NULL))

    ## combine the per-file extracts into the final <pkg>-Ex.R
    massageExamples(pkg, exdir, Rfile, use_gct, addTiming,
                    commentDonttest = commentDonttest, ...)
    invisible(Rfile)
}

## NB: Tests can be run even when they were *not* installed, via an appropriate testSrcdir = <dir>
testInstalledBasic <- function(scope = c("basic", "devel", "both", "internet", "all"),
                               outDir = file.path(R.home(), "tests"),
                               testSrcdir = getTestSrcdir(outDir))
{
    ## Run R's own basic/devel/internet regression test suites in
    ## 'outDir', taking the test sources from 'testSrcdir' (which can
    ## differ from outDir when R was built outside its source tree).
    ## Note the lazy evaluation of 'testSrcdir': getTestSrcdir() is
    ## defined further down, after setwd(outDir), and is only evaluated
    ## when 'testSrcdir' is first used.
    ## Returns (invisibly) 0L on success, 1L if a strict test fails.
    scope <- match.arg(scope)

    ## We need to force C collation: might not work
    oLCcoll <- Sys.getlocale("LC_COLLATE") ; on.exit(Sys.setlocale("LC_COLLATE", oLCcoll))
    Sys.setlocale("LC_COLLATE", "C")
### ---- "basic" tests ("devel", etc -------> further down (!)
    ## "strict specific" (test-src-strict-1):
    tests1 <- c("eval-etc", "simple-true", "arith-true", "lm-tests",
                "ok-errors", "method-dispatch", "array-subset",
                "p-r-random-tests", "d-p-q-r-tst-2",
                "any-all", "structure", "d-p-q-r-tests")
    ## "sloppy specific":
    tests2 <- c("complex", "print-tests", "lapack", "datasets", "datetime",
                "iec60559")
    ## regression tests (strict specific, too)
    tests3 <- c("reg-tests-1a", "reg-tests-1b", "reg-tests-1c", "reg-tests-1d",
                "reg-tests-1e", "reg-tests-2",
                "reg-examples1", "reg-examples2", "reg-packages",
                "reg-S4-examples",
                "classes-methods",
                ## reg-translation, reg-ex*3 ... see "devel" below
                "datetime3",
                "p-qbeta-strict-tst",
                "reg-IO", "reg-IO2", "reg-plot", "reg-S4", "reg-BLAS")

    useDiff <- nzchar(Sys.which("diff"))  # only check once
    ## Run one test file, returning 0L on success and 1L on failure.
    ## diffOK: differences from the reference output do not count as
    ##         failure;
    ## inC:    run the test in the C locale.
    runone <- function(f, diffOK = FALSE, inC = TRUE)
    {
        f <- fR <- paste0(f, ".R")
        if(srcDiffers)
            f <- file.path(testSrcdir, fR)
        ## already needed for .Rin -> .R :
        if (.Platform$OS.type == "windows") {
            ## NB: Sys.setlocale("LC_ALL", "....") fails on (some) Windows
            ## --  specific LC_* seems to work, even via *.setenv():
            Sys.set2 <- function(...) { # version returning previous setting
                stopifnot(...length() == 1L, nzchar(nm <- ...names()))
                oVal <- Sys.getenv(nm)
                Sys.setenv(...)
                oVal
            }
            oRD <- Sys.set2(R_DEFAULT_PACKAGES="")
            oLa <- Sys.set2(LANGUAGE = "C")
            oLC <- Sys.set2(LC_COLLATE="C")
            oLT <- Sys.set2(LC_TIME  = "C") # e.g for month names in reg-tests-1c.R
            oSR <- Sys.set2(SRCDIR = SRCDIR)
            ## restore all of the above when this runone() call ends
            on.exit(Sys.setenv(LANGUAGE = oLa, R_DEFAULT_PACKAGES = oRD,
                               LC_COLLATE = oLC, LC_TIME = oLT, SRCDIR = oSR))
            if (inC) { # breaks reg-plot-latin1, so restore between tests
                oenv <- Sys.getenv("LC_CTYPE", unset = NA)
                on.exit(if (is.na(oenv)) Sys.unsetenv("LC_CTYPE")
                        else Sys.setenv(LC_CTYPE=oenv), add = TRUE)
                Sys.setenv(LC_CTYPE="C")
            }
            ## ignore all 'extra' (incl. 'inC')  and hope
            mkCmd <- identity
        } else { ## non-Windows
            ## prefix commands with the environment settings from 'extra0'
            extra <- if(inC) paste(extra0,  "LC_ALL=C") else extra0
            mkCmd <- function(cmd) paste(extra, cmd)
        }
        if (!file.exists(f)) { # try *.Rin, creating *.R
            if (!file.exists(fin <- paste0(f, "in")))
                stop("file ", sQuote(f), " not found", domain = NA)
            f <- fR # in outDir (= our working dir) !
            message("creating ", sQuote(f), domain = NA)
            cmd <- mkCmd(paste(shQuote(file.path(R.home("bin"), "R")),
                               "--vanilla --no-echo -f", fin))
            if (system(cmd))
                stop("creation of ", sQuote(f), " failed", domain = NA)
            ## This needs an extra trailing space to match the .Rin.R rule
            cat("\n", file = f, append = TRUE)
            on.exit(unlink(f), add = TRUE)
        }
        message("  running code in ", sQuote(f), domain = NA)
        outfile <- sub("rout$", "Rout", paste0(fR, "out"))
        cmd <- paste(shQuote(file.path(R.home("bin"), "R")),
                     "CMD BATCH --vanilla --no-timing",
                     shQuote(f), shQuote(outfile))
        res <- system(mkCmd(cmd))
        if (res) {
            file.rename(outfile, paste0(outfile, ".fail"))
            message("FAILED")
            return(1L)
        }
        ## compare against reference output where available
        savefile <- paste0(outfile, ".save")
        if(srcDiffers)
            savefile <- file.path(testSrcdir, savefile)
        if (file.exists(savefile)) {
            message(gettextf("  comparing %s to %s ...",
                             sQuote(outfile), sQuote(savefile)),
                    appendLF = FALSE, domain = NA)
            res <- Rdiff(outfile, savefile, useDiff)
            if (!res) message(" OK")
            else if (!diffOK) return(1L)
        }
        0L
    } # end{runone}

    owd <- setwd(outDir)
    on.exit(setwd(owd), add=TRUE)
    if (!.is.writeable("."))
        stop(gettextf("directory %s is not writeable ", sQuote(outDir)), domain = NA)
    ## to get the *default* testSrcdir = getTestSrcdir(outDir) :
    getTestSrcdir <- function(odir) {
        ## Know here to be inside 'odir' or 'outDir'.
        if(file.exists("eval-etc.R")) # all is fine
            return(odir)
        ## now, on unix-alike, if(build != src) the '<build>/tests/Makefile' has something like
        ##  srcdir = ../../R/tests
        if(file.exists("Makefile")) {
            lns <- readLines("Makefile", 12L) # currently it's the 5-th lines
            srcdir <- sub(" +$", "", # trailing blanks
                          sub("^srcdir *= *", "", grep("^srcdir", lns, value=TRUE)))
            if(dir.exists(srcdir))
                return(srcdir)
        }
        ## give up
        odir
    }
    ## Compare a produced PDF against its reference copy; report only,
    ## never fail the run.
    comparePdf <- function(fnam) {
        ff <- paste0(fnam, ".pdf")
        fsv <- paste0(ff, ".save")
        if(srcDiffers) fsv <- file.path(testSrcdir, fsv)
        message("  comparing '",ff,"' to '",fsv,"' ...", appendLF = FALSE, domain = NA)
        res <- Rdiff(ff, fsv, TRUE)
        message(if(res != 0L) "DIFFERED" else "OK")
    }
    srcDiffers <- (normalizePath(testSrcdir) != normalizePath(outDir))
    SRCDIR <- if(srcDiffers) testSrcdir else "."
    ## environment settings common to all (non-Windows) test runs
    extra0 <- paste("LANGUAGE=en", "LC_COLLATE=C", "LC_TIME=C",
                    "R_DEFAULT_PACKAGES=", paste0("SRCDIR=",SRCDIR))
    if (scope %in% c("basic", "both", "all")) {
        message("running strict specific tests", domain = NA)
        for (f in tests1) if (runone(f)) return(1L)
        message("running sloppy specific tests", domain = NA)
        for (f in tests2) runone(f, TRUE)
        message("running regression tests", domain = NA)
        for (f in tests3) {
            if (runone(f)) return(invisible(1L))
            if (f == "reg-plot") {
                comparePdf(f)
            }
        }
        runone("reg-encodings", inC=FALSE)
        runone("reg-translation", inC=FALSE)
        runone("reg-tests-3", TRUE)
        runone("reg-examples3", TRUE)
        message("running tests of plotting Latin-1", domain = NA)
        message("  expect failure or some differences if not in a Latin or UTF-8 locale", domain = NA)

        if (runone("reg-plot-latin1", TRUE, inC=FALSE) == 0L) {
            comparePdf("reg-plot-latin1")
        }
    }

    if (scope %in% c("devel", "both", "all")) {
        message("running tests of date-time printing\n expect platform-specific differences", domain = NA)
        ## "datetime" and "datetime3" are in "basic" above
        runone("datetime2")
        runone("datetime4")
        runone("datetime5")
        message("running tests of consistency of as/is.*", domain = NA)
        runone("isas-tests")
        message("running tests of random deviate generation (should no longer ever fail)")
        runone("p-r-random-tests", TRUE)
        message("running miscellaneous strict devel checks", domain = NA)
        if (runone("misc-devel")) return(invisible(1L))
        message("running tests demos from base and stats", domain = NA)
        if (runone("demos")) return(invisible(1L))
        if (runone("demos2")) return(invisible(1L))
        message("running tests of primitives", domain = NA)
        if (runone("primitives")) return(invisible(1L))
        message("running regexp regression tests", domain = NA)
        if (runone("utf8-regex", inC = FALSE)) return(invisible(1L))
        if (runone("PCRE")) return(invisible(1L))
        message("running tests on encodings & iconv() - first with C, then current locale", domain = NA)
        if (runone("iconv"             )) return(invisible(1L))
        if (runone("iconv", inC = FALSE)) return(invisible(1L))
        message("running tests of CRAN tools", domain = NA)
        if (runone("CRANtools")) return(invisible(1L))
        message("running tests to possibly trigger segfaults", domain = NA)
        if (runone("no-segfault")) return(invisible(1L))
    }
    if (scope %in% c("internet", "all")) {
        message("running tests of Internet functions", domain = NA)
        runone("internet")
        message("running more Internet and socket tests", domain = NA)
        runone("internet2")
        runone("libcurl")
    }
    invisible(0L)
}

detachPackages <- function(pkgs, verbose = TRUE)
{
    ## Detach the given items from the search path (and optionally unload
    ## package namespaces, controlled by _R_CHECK_UNLOAD_NAMESPACES_),
    ## ordering package detaches so that a package is only detached once
    ## nothing else remaining depends on it.
    pkgs <- pkgs[pkgs %in% search()]
    if(!length(pkgs)) return()
    if(verbose){
        msg <- paste("detaching", paste(sQuote(pkgs), collapse = ", "))
        cat("", strwrap(msg, exdent = 2L), "", sep = "\n")
    }

    ## Normally 'pkgs' will be in reverse order of attachment (latest first)
    ## but not always (e.g. BioC package CMA attaches at the end).

    ## The items need not all be packages
    ## and non-packages can be on the list multiple times.
    isPkg <- startsWith(pkgs,"package:")
    for(item in pkgs[!isPkg]) {
        pos <- match(item, search())
        if(!is.na(pos)) .detach(pos)
    }

    pkgs <- pkgs[isPkg]
    if(!length(pkgs)) return()

    ## .Depends of each remaining package, used to order the detaches
    deps <- lapply(pkgs, function(x) if(exists(".Depends", x, inherits = FALSE)) get(".Depends", x) else character())
    names(deps) <- pkgs

    unload <- nzchar(Sys.getenv("_R_CHECK_UNLOAD_NAMESPACES_"))
    ## unloading 'grid' kills all devices
    ## tcltk is unhappy to have its DLL unloaded repeatedly
    exclusions <- c("grid", "tcltk")
    exclusions <- paste0("package:", exclusions)
    while(length(deps)) {
        ## pick the first remaining package that no other remaining
        ## package depends on (falling back to the last if none qualifies)
        unl <- unlist(deps)
        for(i in seq_along(deps)) {
            this <- names(deps)[i]
            if(.rmpkg(this) %in% unl) next else break
        }
        ## hopefully force = TRUE is never needed, but it does ensure
        ## that progress gets made
        try(detach(this, character.only = TRUE,
                   unload = unload && (this %notin% exclusions),
                   force = TRUE))
        deps <- deps[-i]
    }
}

## Wrapper for  R CMD Rdiff   based on Rdiff() above :
.Rdiff <- function(no.q = FALSE)
{
    ## Driver for 'R CMD Rdiff': parse the command line and compare two
    ## R output files via Rdiff(), exiting with the requested status.
    options(showErrorCalls=FALSE)

    usage <- function()
        cat("Usage: R CMD Rdiff [options] FROM-file TO-file EXITSTATUS",
            "",
            "Diff R output files FROM-FILE and TO-FILE discarding the R startup message,",
            "where FROM-FILE equal to '-' means stdin.",
            "",
            "Options:",
            "  -e, --forEx    uses 'forEx = TRUE' in Rdiff()",
            "  -h, --help     print this help message and exit",
            "  -v, --version  print version info and exit",
            "",
            "Report bugs at <https://bugs.R-project.org>.",
            sep = "\n")

    ## with no.q, signal the exit status instead of quitting R
    do_exit <- if (no.q) {
        function(status = 0L)
            (if (status) stop else message)(".Rdiff() exit status ", status)
    } else {
        function(status = 0L) q("no", status = status, runLast = FALSE)
    }

    args <- commandArgs(TRUE)
    if (!length(args)) {
        usage()
        do_exit(1L)
    }
    ## arguments are passed joined by 'nextArg' separators
    args <- strsplit(paste(args, collapse = " "),
                     "nextArg", fixed = TRUE)[[1L]][-1L]
    if (length(args) == 1L) {
        if (args[1L] %in% c("-h", "--help")) { usage(); do_exit(0) }
        if (args[1L] %in% c("-v", "--version")) {
            cat("R output diff: ",
                R.version[["major"]], ".", R.version[["minor"]],
                " (r", R.version[["svn rev"]], ")\n", sep = "")
            cat("",
                .R_copyright_msg(2000),
                "This is free software; see the GNU General Public License version 2",
                "or later for copying conditions.  There is NO warranty.",
                sep = "\n")
            do_exit(0)
        }
        usage()
        do_exit(1L)
    }
    if (length(args) == 0L) {
        usage()
        do_exit(1L)
    }
    ## options before file args {potentially allow multiple}:
    isEx <- args %in% c("-e", "--forEx")
    forEx <- any(isEx)
    if (forEx) args <- args[!isEx]
    ## status to report when the files differ (defaults to 0)
    exitstatus <- as.integer(args[3L])
    if (is.na(exitstatus)) exitstatus <- 0L
    from <- args[1L]
    if (from == "-") from <- "stdin"
    status <- Rdiff(from, args[2L], useDiff = TRUE, forEx = forEx)
    if (status) status <- exitstatus
    do_exit(status)
} ## .Rdiff()

#  File src/library/tools/R/toHTML.R
#  Part of the R package, https://www.R-project.org
#
#  Copyright (C) 1995-2025 The R Core Team

## Generic: convert an R object (package listing, NEWS db, citation, ...)
## to a character vector of HTML lines.
toHTML <- function(x, ...) UseMethod("toHTML")

## Build the standard header of an R help/doc HTML page as a character
## vector of lines: DOCTYPE, <head> with charset/viewport/CSS, the page
## title as <h1> (optionally with the R logo), and optional centered
## navigation links 'up' and 'top' (either may be NULL to omit it).
HTMLheader <-
function(title="R", logo=TRUE,
         up=NULL,
         top=file.path(Rhome, "doc/html/index.html"),
         Rhome="",
         css = file.path(Rhome, "doc/html/R.css"),
         headerTitle = paste("R:", title),
         outputEncoding = "UTF-8")
{
    ## Logo image line, or NULL (dropped by c()) when logo is FALSE.
    logoLine <- if (logo)
        paste0('<img class="toplogo" src="',
               file.path(Rhome, 'doc/html/Rlogo.svg'),
               '" alt="[R logo]">')

    ## Optional centered navigation block with [Up] and/or [Top] arrows.
    nav <- NULL
    if (!is.null(up) || !is.null(top)) {
        nav <- '<div style="text-align: center;">'
        if (!is.null(up))
            nav <- c(nav,
                     paste0('<a href="', up, '"><img class="arrow" src="',
                            file.path(Rhome, 'doc/html/left.jpg'),
                            '" alt="[Up]"></a>'))
        if (!is.null(top))
            nav <- c(nav,
                     paste0('<a href="', top, '"><img class="arrow" src="',
                            file.path(Rhome, 'doc/html/up.jpg'),
                            '" alt="[Top]"></a>'))
        nav <- c(nav, '</div>')
    }

    c('<!DOCTYPE html>',
      "<html>",
      paste0('<head><title>', headerTitle, '</title>'),
      paste0('<meta http-equiv="Content-Type" content="text/html; charset=',
             mime_canonical_encoding(outputEncoding), '">'),
      '<meta name="viewport" content="width=device-width, initial-scale=1.0, user-scalable=yes">',
      paste0('<link rel="stylesheet" type="text/css" href="', css, '">'),
      '</head><body><div class="container">',
      paste('<h1>', title),
      logoLine,
      '</h1>', '<hr>',
      nav)
}

## toHTML() method for "packageIQR" objects (listings such as those from
## utils::data() or utils::demo()): a complete HTML page with one table
## of Item/Title pairs per package.
toHTML.packageIQR <-
function(x, ...)
{
    db <- x$results

    # Re-encode as utf-8
    x$title <- iconv(x$title, to="UTF-8")
    x$footer <- iconv(x$footer, to="UTF-8")
    db <- iconv(db, to="UTF-8")

    ## Split according to Package.
    out <- if(nrow(db) == 0L)
         NULL
    else
        lapply(split(1:nrow(db), db[, "Package"]),
               function(ind) db[ind, c("Item", "Title"), drop = FALSE])

    result <- HTMLheader(...)

    ## One <h2> + two-column table per package; the paste0() call
    ## vectorizes over the rows of the per-package sub-matrix, giving
    ## one <tr> string per item.  (Loop is skipped when out is NULL.)
    for(pkg in names(out)) {
        result <- c(result,
		    paste0('<h2>', htmlify(x$title), ' in package &lsquo;',
			   htmlify(pkg), '&rsquo;</h2>'),
		    '<table cols="2" style="width: 100%;">',
		    paste0('<tr>\n',
			   ' <td style="text-align: left; vertical-align: top; width: 10%;">\n',
			   htmlify(out[[pkg]][, "Item"]),
			   '\n </td>\n',
                           ' <td style="text-align: left; vertical-align: top; width: 90%;">\n',
			   htmlify(out[[pkg]][, "Title"]),
			   '\n </td>\n</tr>\n'),
		    '</table>')
    }
    ## Optional footer paragraph (e.g. the "Use ..." hints from data()).
    if(!is.null(x$footer))
    	result <- c(result, '<p>',
    	                    htmlify(x$footer),
    	                    '</p>')
    result <- c(result, '</div></body></html>')
    result
}

## toHTML() method for "news_db" objects (structured R NEWS entries):
## renders a complete HTML page with one <h2> per version (newest first)
## and, when every item of a version carries a category, <h3> category
## subheadings above the item lists.
toHTML.news_db <-
function(x, ...)
{
    ## local version
    ## Escapes HTML metacharacters and maps the plain-text conventions
    ## of NEWS files (TeX-style quotes, em-dashes, <name> placeholders)
    ## to HTML entities; <name> placeholders become <var> elements via
    ## the @VAR@/@EVAR@ round trip so they survive the &lt;/&gt; escaping.
    htmlify2 <- function(x) {
        x <- psub("<([[:alnum:]._]+)>", "@VAR@\\1@EVAR@", x)
        x <- fsub("&", "&amp;", x)
        x <- fsub("---", "&mdash;", x)
        ## usually a flag like --timing
        ## x <- fsub("--", "&ndash;", x)
        x <- fsub("``", "&ldquo;", x)
        x <- fsub("''", "&rdquo;", x)
        x <- psub("`([^']+)'", "&lsquo;\\1&rsquo;", x)
        x <- fsub("`", "'", x)
        x <- fsub("<", "&lt;", x)
        x <- fsub(">", "&gt;", x)
        x <- fsub("@VAR@", "<var>", x)
        x <- fsub("@EVAR@", "</var>", x)
        x
    }

    ## For now, only do something if the NEWS file could be read without
    ## problems, see utils:::print.news_db():
    if(!.news_db_has_no_bad_entries(x))
        return(character())

    print_items <- function(x)
        c("<ul>", sprintf("<li>%s</li>", x), "</ul>")

    ## Compute the HTML form of the items once, unless already cached.
    if(is.null(x$HTML))
        x$HTML <- htmlify2(iconv(x$Text, to = "UTF-8"))

    ## One chunk per version, ordered by decreasing version number.
    ## "x.y patched" sorts as x.y.1; unparseable labels ("R-devel")
    ## become NA and are placed first (newest) via na.last = FALSE
    ## combined with decreasing = TRUE.
    vchunks <- split(x, x$Version)
    vchunks <-
        vchunks[order(numeric_version(sub(" *patched", ".1", names(vchunks)),
                                      strict = FALSE), # "R-devel" -> NA
                      na.last = FALSE, decreasing = TRUE)]
    dates <- vapply(vchunks, function(v) v$Date[1L], "")
    vheaders <- sprintf("<h2>Changes in version %s%s</h2>",
                        names(vchunks),
                        ifelse(is.na(dates), "",
                               sprintf(" (%s)", dates)))
    c(HTMLheader(...),
      unlist(lapply(seq_along(vchunks),
                    function(i) {
                        vchunk <- vchunks[[i]]
                        ## Use category subheadings only when every item
                        ## in this version has a non-empty category.
                        if(all(!is.na(category <- vchunk$Category)
                               & nzchar(category))) {
                            ## need to preserve order of headings.
                            cchunks <- split(vchunk,
                                             factor(category, levels=unique(category)))
                            c(vheaders[i],
                              Map(function(h, t)
                                  c(h, print_items(t$HTML)),
                                  sprintf("<h3>%s</h3>",
                                          htmlify2(names(cchunks))),
                                  cchunks))
                        } else {
                            c(vheaders[i],
                              print_items(vchunk$HTML))
                        }
                    })
             ),
      "</div></body></html>")
}

## toHTML() method for "news_db_from_md" objects (NEWS.md files whose
## items were already converted to HTML): a complete page with one <h2>
## per version, newest first, keeping any pre-rendered <h3> categories.
toHTML.news_db_from_md <-
function(x, ...)
{
    ## Render one version chunk: prefix non-empty category labels with
    ## <h3> tags, interleave them with the item HTML, drop empty lines.
    render_version <- function(vc) {
        heads <- vc$Category
        nonempty <- nzchar(heads)
        heads[nonempty] <- paste0("<h3>", heads[nonempty], "</h3>")
        body <- unlist(Map(c, heads, vc$HTML),
                       use.names = FALSE)
        body[nzchar(body)]
    }

    vchunks <- split(x, x$Version)
    ## Re-order according to decreasing version.
    vers <- numeric_version(names(vchunks), strict = FALSE)
    vchunks <- vchunks[order(vers, decreasing = TRUE)]

    ## First date of each chunk, shown in parentheses when available.
    stamps <- vapply(vchunks, function(vc) vc$Date[1L], "")
    vheaders <- sprintf("<h2>Changes in version %s%s</h2>",
                        names(vchunks),
                        ifelse(is.na(stamps), "",
                               sprintf(" (%s)", stamps)))

    c(HTMLheader(...),
      unlist(Map(c, vheaders, lapply(vchunks, render_version))),
      "</div></body></html>")
}

# To support static linking, URLs should be relative.
# Argument "depth" below says how far down in the hierarchy
# we are starting from, e.g. /library/stats/html/mean.html
# is depth 3
# .writeVignetteHtmlIndex() uses depth=NULL to omit the directory prefix.

## Build an HTML <table> of vignettes, one row per vignette, with links
## to the rendered output, the source file, and the tangled R code.
## 'vignettes' has columns "PDF", "Title", "File", "R" and "Package";
## 'depth' says how far below the document root the generated page
## lives, and depth = NULL omits the directory prefix entirely (used
## by .writeVignetteHtmlIndex, see comment above).
makeVignetteTable <- function(vignettes, depth=2) {
    out <- c('<table style="width: 100%;">',
             '<col style="width: 22%;">',
             '<col style="width:  2%;">',
             '<col style="width: 50%;">',
             '<col style="width:  8%;">',
             '<col style="width:  8%;">',
             '<col style="width:  8%;">')
    for (i in seq_len(nrow(vignettes))) {
	Outfile <- vignettes[i, "PDF"]
	topic <- file_path_sans_ext(Outfile)
	Title <- vignettes[i, "Title"]
	File  <- vignettes[i, "File"]
	R     <- vignettes[i, "R"]
	pkg   <- vignettes[i, "Package"]
        ## root is NULL when depth is NULL, so the c()'d URL pieces
        ## below are then relative to the doc directory itself.
        root <- if (!is.null(depth))
                    c(rep.int("../", depth), "library/", pkg, "/doc/")
	## Main link text "pkg::topic"; targets the rendered file if
	## there is one, otherwise the source file.
	link  <- c('<a href="', root,
		  if (nchar(Outfile)) Outfile else File, '">',
		  pkg, "::", topic, '</a>')
	## Row cells: link | spacer | title | output | source | R code.
	line <- c('<tr><td style="text-align: right; vertical-align: top;">', link,
		    '</td>\n<td></td><td style="vertical-align: top;">', Title,
		    '</td>\n<td style="vertical-align: top;">',
		    if (nchar(Outfile))
			c('<a href="', root, Outfile,'">', vignette_type(Outfile), '</a>'),
		    '</td>\n<td style="vertical-align: top;">',
		    '<a href="', root, File,'">source</a>',
		    '</td>\n<td style="vertical-align: top; white-space: nowrap">',
		    if (nchar(R))
		    	c('<a href="', root, R,'">R code</a>'),
		    '</td></tr>')
	out <- c(out, paste(line, collapse=''))
     }
     c(out, '</table>')
}

## Build an HTML <table> of demos, one row per demo, linking to the
## demo source and (for the dynamic help server) to a "Run demo" URL.
## 'demos' has columns "Topic", "Package", "Title" and "LibPath";
## 'depth' is the page's depth below the document root.
makeDemoTable <- function(demos, depth=2) {
    out <- c('<table style="width: 100%;">',
             '<col style="width: 22%;">',
             '<col style="width:  2%;">',
             '<col style="width: 54%;">',
             '<col style="width: 20%;">')
    for (i in seq_len(nrow(demos))) {
	topic <- demos[i, "Topic"]
	pkg <- demos[i, "Package"]
        root <- c(rep.int("../", depth), "library/", pkg, "/")
	Title <- demos[i, "Title"]
	path <- file.path(demos[i, "LibPath"], "demo")
	## Resolve the topic to an actual demo file in the installed
	## package; only link when the match is unambiguous.
	files <- basename(list_files_with_type(path, "demo", full.names=FALSE))
	file <- files[topic == file_path_sans_ext(files)]
	if (length(file) == 1) {
	    link <- c('<a href="', root, 'demo/', file, '">',
			  pkg, "::", topic, '</a>')
	    ## "Demo/<topic>" is the dynamic help server's run-demo
	    ## endpoint (distinct from the "demo/" source directory).
	    runlink <- c(' <a href="', root, 'Demo/', topic,
	                 '">(Run demo)</a>')
	} else {
	    link <- c(pkg, "::", topic)
	    runlink <- ""
	}
	line <- c('<tr><td style="text-align: right; vertical-align: top;">', link,
		    '</td>\n<td></td><td style="vertical-align: top;">', Title,
		    '</td>\n<td style="vertical-align: top; white-space: nowrap">', runlink,
		    '</td></tr>')
	out <- c(out, paste(line, collapse=''))
     }
     c(out, '</table>')
}

## Build an HTML <table> of help topics, one row per topic, each linking
## to ../..(depth times)/library/<pkg>/help/<topic>.  'help' has columns
## "Package", "Topic" and "Title"; 'depth' is the page's depth below the
## document root.  Fully vectorized: one string per row.
makeHelpTable <- function(help, depth=2) {
    pkgs   <- help[, "Package"]
    topics <- help[, "Topic"]
    titles <- help[, "Title"]
    ## Target could be ../library/pkg/help/topic or ../library/pkg/html/filename.html
    ## We only have topic, so can only do the former. Topics may contain
    ## special characters, so need to be encoded.
    prefix <- paste0(strrep("../", depth), "library/", pkgs, "/help/")
    ## Link text is "pkg::topic", or just the topic when no package name.
    anchors <- paste0('<a href="', prefix, topic2url(topics), '">',
                      ifelse(nchar(pkgs), paste0(pkgs, "::"), ""),
                      topics, '</a>')
    rows <- paste0('<tr><td style="text-align: right; vertical-align: top;">', anchors,
                   '</td>\n<td></td><td style="vertical-align: top;">', titles,
                   '</td></tr>')
    c('<table style="width: 100%;">',
      '<col style="width: 22%;">',
      '<col style="width:  2%;">',
      '<col style="width: 74%;">',
      rows,
      '</table>')
}

## toHTML() method for "citation" objects: renders the entries as HTML,
## each as a citation blockquote followed by its BibTeX source.
## 'header' controls the page wrapping: "R" uses HTMLheader() (full R
## doc page), TRUE (default) emits a minimal standalone <head>, FALSE
## emits no wrapping; any other value is warned about and treated as
## TRUE.
toHTML.citation <-
function(x, header = TRUE, ...)
{
    len <- length(x)
    if(!len) return(character())

    ## TRUE iff s is a single string containing a non-blank character.
    is_non_blank_string <- function(s) {
        (length(s) == 1L) && length(grep("[^[:blank:]]", s))
    }

    ## One bibentry as citation text: optional header/footer paragraphs
    ## around a <blockquote> with the citation itself.
    format_entry_as_text <- function(x) {
        c(if(is_non_blank_string(header <- x$header))
          c("<p>", htmlify(header), "</p>"),
          "<blockquote>",
          ## Proceed as in .format_bibentry_as_citation used by
          ## utils:::print.bibentry: use textVersion if given.
          ## <FIXME>
          ## Stop using textVersion eventually ...
          if(!is.null(tv <- x$textVersion)) {
              c("<p>", htmlify(tv), "</p>")
          } else {
              format(x, "html")
          },
          ## </FIXME>
          "</blockquote>",
          if(is_non_blank_string(footer <- x$footer))
          c("<p>", htmlify(footer), "</p>")
          )
    }

    ## One bibentry as re-wrapped BibTeX source inside <pre>, with the
    ## body lines re-flowed by strwrap() between the @type{ and } lines.
    format_entry_as_BibTeX <- function(x) {
        bib <- unclass(utils::toBibtex(x))
        len <- length(bib)
        out <- c(paste0("  ", bib[1L]),
                 strwrap(bib[-c(1L, len)], indent = 4L, exdent = 6L),
                 "  }")
        c("<pre>",
          htmlify(out, FALSE),
          "</pre>")
    }

    ## Local HTML escaping; with a = TRUE also turns <URL>, bare URLs
    ## and DOIs found in the text into hyperlinks.
    htmlify <- function(s, a = TRUE) {
        ## See <https://en.wikipedia.org/wiki/Character_encodings_in_HTML>
        ## which in turn refers to
        ## <http://www.w3.org/TR/REC-html40/sgml/sgmldecl.html>: HTML
        ## forbids characters with Unicode code points
        ##   0 to 31 except 9, 10 and 13 (\t, \n and \r)
        ## and
        ##   127 to 159
        ## (octal \000 to \037 and \177 to \237).
        ## Replace these by hex bytes.
        s <- .replace_chars_by_hex_subs(s, invalid_HTML_chars_re)
        s <- fsub("&", "&amp;", s)
        s <- fsub("<", "&lt;",  s)
        s <- fsub(">", "&gt;",  s)
        if(a) {
            ## Some people have <http://something> as recommended for
            ## in-text URLs.
            s <- .gsub_with_transformed_matches("&lt;(URL: *)?((https?|ftp)://[^[:space:]]+)[[:space:]]*&gt;",
                                                "&lt;<a href=\"%s\">\\2</a>&gt;",
                                                s,
                                                urlify,
                                                2L)
            ## Need to ignore results of the above translation ...
            ## Regexp based on Perl HTML::TextToHTML, note that the dash
            ## must be last ...
            s <- .gsub_with_transformed_matches("([[:space:]])((https?|ftp)://[[:alnum:]/.:@+\\_~%#?=&;,-]+[[:alnum:]/])",
                                                "\\1<a href=\"%s\">\\2</a>",
                                                s,
                                                urlify,
                                                2L)
            s <- .gsub_with_transformed_matches("&lt;(DOI|doi):[[:space:]]*([^<[:space:]]+[[:alnum:]])&gt;",
                                                "&lt;<a href=\"https://doi.org/%s\">doi:\\2</a>&gt;",
                                                s,
                                                urlify,
                                                2L)
            s <- .gsub_with_transformed_matches("[^>\"](DOI|doi):[[:space:]]*([^<[:space:]&]+[[:alnum:]])",
                                                "&lt;<a href=\"https://doi.org/%s\">doi:\\2</a>&gt;",
                                                s,
                                                urlify,
                                                2L)
        }
        s
    }

    package <- attr(x, "package")

    ## Normalize the 'header' argument, then build matching header and
    ## footer line vectors for the chosen wrapping.
    if (!(is.character(header) || is.logical(header))) {
        warning("unknown header specification")
	header <- TRUE
    }
    if (identical(header, "R")) {
        header <- HTMLheader(...)
	footer <- c("</div></body>", "</html>")
    } else if (isFALSE(header)) {
        header <- character(0L)
	footer <- character(0L)
    } else {
        if(isTRUE(header))
            header <-
                c("<head>",
                  if(is.null(package))
                      "<title>Citation information</title>"
                  else
                      sprintf("<title>%s citation information</title>",
                              package),
                  "<meta http-equiv=\"Content-Type\" content=\"text/html; charset=utf-8\">",
                  '<meta name="viewport" content="width=device-width, initial-scale=1.0, user-scalable=yes">',
                  "</head>")
        header <- c("<!DOCTYPE html>",
                  "<html>",
                  header,
		  '<body><div class="container">')
	footer <- c("</div></body>", "</html>")
    }

    ## Assemble: optional meta-header, all entries as text, optional
    ## meta-footer, then all entries again as BibTeX.
    c(header,
      if(is_non_blank_string(mheader <- attr(x, "mheader")))
      c("<p>", htmlify(mheader), "</p>"),
      do.call(c, lapply(x, format_entry_as_text)),
      if(is_non_blank_string(mfooter <- attr(x, "mfooter")))
      c("<p>", htmlify(mfooter), "</p>"),
      c("<p>",
        ngettext(len,
                 "Corresponding BibTeX entry:",
                 "Corresponding BibTeX entries:"),
        "</p>",
        do.call(c, lapply(x, format_entry_as_BibTeX))),
      footer)
}


## Similar to HTMLheader, but for internal use (for now at
## least). Refactors creation of HTML header and footer as previously
## done by Rd2HTML(), to allow re-use.

## Build the header and footer line vectors for an HTML help page, as
## used by Rd2HTML().  Returns list(header = <character>, footer =
## <character>).  'dynamic' selects dynamic-help-server resource URLs
## over the *_STATIC CDN/file URLs; 'prism' and 'doTexMath'/'texmath'
## control inclusion of syntax-highlighting and math-rendering assets.
## An optional <h1> title (with logo) and up/top navigation links are
## appended to the header when requested.
HTMLcomponents <- function(title = "R", logo = FALSE,
                           up = NULL,
                           top = NULL, # file.path(Rhome, "doc/html/index.html"),
                           Rhome = "",
                           css = file.path(Rhome, "doc/html/R.css"),
                           headerTitle = title,
                           outputEncoding = "UTF-8",

                           dynamic = FALSE, prism = TRUE,
                           doTexMath = TRUE, texmath = "katex",

                           ## URLs to be used for static HTML (only)
                           ## Ignored if dynamic = TRUE

                           KATEX_JS_STATIC = "https://cdn.jsdelivr.net/npm/katex@0.15.3/dist/katex.min.js",
                           KATEX_CSS_STATIC = "https://cdn.jsdelivr.net/npm/katex@0.15.3/dist/katex.min.css",
                           MATHJAX_JS_STATIC = "https://cdn.jsdelivr.net/npm/mathjax@3/es5/tex-chtml-full.js",
                           MATHJAX_CONFIG_STATIC = file.path(Rhome, "doc/html/mathjax-config.js"),
                           PRISM_JS_STATIC = c("https://cdnjs.cloudflare.com/ajax/libs/prism/1.29.0/prism.min.js",
                                               "https://cdnjs.cloudflare.com/ajax/libs/prism/1.29.0/components/prism-r.min.js"),
                           PRISM_CSS_STATIC = "https://cdnjs.cloudflare.com/ajax/libs/prism/1.29.0/themes/prism.min.css",
                           language = NA_character_
                           )
{
    header <- character(0)
    footer <- character(0)
    ## Accumulators appending to the enclosing header/footer vectors.
    addh <- function(...) { header <<- c(header, ...) }
    addf <- function(...) { footer <<- c(footer, ...) }

    ## KaTeX / Mathjax resources (if they are used)
    if (doTexMath && texmath == "katex") {
        KATEX_JS <-
            if (dynamic) "/doc/html/katex/katex.js"
            else KATEX_JS_STATIC
        KATEX_CSS <- if (dynamic) "/doc/html/katex/katex.css"
                     else KATEX_CSS_STATIC
        KATEX_CONFIG <-
            if (dynamic) "/doc/html/katex-config.js"
            else c(r"(const macros = { "\\R": "\\textsf{R}", "\\mbox": "\\text", "\\code": "\\texttt"};)",
                   "function processMathHTML() {",
                   "    var l = document.getElementsByClassName('reqn');",
                   "    for (let e of l) { katex.render(e.textContent, e, { throwOnError: false, macros }); }",
                   "    return;",
                   "}")
    }
    if (doTexMath && texmath == "mathjax") {
        MATHJAX_JS <-
            if (dynamic && requireNamespace("mathjaxr", quietly = TRUE))
                "/library/mathjaxr/doc/mathjax/es5/tex-chtml-full.js"
            else
                MATHJAX_JS_STATIC
        MATHJAX_CONFIG <-
            if (dynamic) "/doc/html/mathjax-config.js"
            else MATHJAX_CONFIG_STATIC
    }
    if (prism) {
        PRISM_JS <-
            if (dynamic) "/doc/html/prism.js"
            else PRISM_JS_STATIC
        PRISM_CSS <-
            if (dynamic) "/doc/html/prism.css"
            else PRISM_CSS_STATIC
    }

    addh('<!DOCTYPE html>',
         if(!is.na(language))
             sprintf('<html lang="%s">', language)
         else
             "<html>",
         '<head><title>')

    ## headtitle <- strwrap(.Rd_format_title(.Rd_get_title(Rd)),
    ##                      width=65, initial="R: ")
    ## if (length(headtitle) > 1) headtitle <- paste0(headtitle[1], "...")

    addh(htmlify(headerTitle))
    addh('</title>\n',
         '<meta http-equiv="Content-Type" content="text/html; charset=',
         mime_canonical_encoding(outputEncoding),
         '">\n')
    addh('<meta name="viewport" content="width=device-width, initial-scale=1.0, user-scalable=yes">\n')
    ## include CSS from prismjs.com for code highlighting
    if (prism && length(PRISM_CSS) == 1L)
        addh('<link href="', urlify(PRISM_CSS), '" rel="stylesheet">\n')
    if (doTexMath) {
        if (texmath == "katex") {
            addh('<link rel="stylesheet" href="', urlify(KATEX_CSS), '">\n',
                if (dynamic) paste0('<script src="', urlify(KATEX_CONFIG), '"></script>\n')
                else paste0('<script>\n', paste(KATEX_CONFIG, collapse = "\n"), '</script>\n'),
                '<script defer src="', urlify(KATEX_JS), '"\n    onload="processMathHTML();"></script>\n')
        }
        else if (texmath == "mathjax") {
            addh('<script src="', urlify(MATHJAX_CONFIG), '"></script>\n',
                 '<script async src="', urlify(MATHJAX_JS), '"></script>\n')
        }
    }
    addh(paste0('<link rel="stylesheet" type="text/css" href="', css, '">\n'),
         '</head><body>',
         '<div class="container">')


    ## Footer:
    addf('\n</div>\n') # closes div.container
    ## include JS from prismjs.com for code highlighting
    if (prism && length(PRISM_JS) > 0L)
        for (u in PRISM_JS)
            addf('<script src="', urlify(u), '"></script>\n')
    addf('</body></html>\n')

    ## Optional part of header (title + logo, up, top)

    ## BUG FIX: the condition was inverted (!nzchar(title)), which
    ## emitted the <h1> only for an EMPTY title and suppressed it for
    ## every real one (including the default "R").
    if (nzchar(title)) {
        addh('<h1>', title)
        if (logo)
            addh(paste0('<img class="toplogo" src="',
                        file.path(Rhome, 'doc/html/Rlogo.svg'),
                        '" alt="[R logo]">'))
        addh('</h1>', '<hr>')
    }
    if (!is.null(up) || !is.null(top)) {
    	addh('<div style="text-align: center;">')
    	if (!is.null(up))
    	    addh(paste0('<a href="', up, '"><img class="arrow" src="',
                        file.path(Rhome, 'doc/html/left.jpg'),
                        '" alt="[Up]"></a>'))
    	if (!is.null(top))
    	    addh(paste0('<a href="', top, '"><img class="arrow" src="',
                        file.path(Rhome, 'doc/html/up.jpg'),
                        '" alt="[Top]"></a>'))
    	addh('</div>')
    }

    return(list(header = header, footer = footer))
}
#  File src/library/tools/R/tools-defunct.R
#  Part of the R package, https://www.R-project.org
#
#  Copyright (C) 1995-2013 The R Core Team
#
#  This program is free software; you can redistribute it and/or modify
#  it under the terms of the GNU General Public License as published by
#  the Free Software Foundation; either version 2 of the License, or
#  (at your option) any later version.
#
#  This program is distributed in the hope that it will be useful,
#  but WITHOUT ANY WARRANTY; without even the implied warranty of
#  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
#  GNU General Public License for more details.
#
#  A copy of the GNU General Public License is available at
#  https://www.R-project.org/Licenses/

## Defunct 2009-08-19
## Removed for 3.0.0
## Rd_parse <-function(file, text = NULL) .Defunct("parse_Rd")

## Deprecated for 3.0.2
## Defunct for 3.1.0 (and there is no such 'file')
## Removed for 4.1.0
## readNEWS <- function(file = file.path(R.home(), "NEWS"),
##                      trace = FALSE, chop = c("first", "1", "par1", "keepAll"))
##     .Defunct()

## checkNEWS <- function(file = file.path(R.home(), "NEWS"))
##     .Defunct()


## <entry>
## Deprecated in 3.3.0 (r70156 (Sat, 13 Feb 2016))
## Defunct    in 4.1.0
## Defunct stub (deprecated in 3.3.0, defunct since 4.1.0): signals a
## defunct error pointing users at package_dependencies().
package.dependencies <- function(x, check = FALSE,
                                 depLevel = c("Depends", "Imports", "Suggests"))
    .Defunct("package_dependencies")

## Defunct stub: use package_dependencies() (or also dependsOnPkgs()?).
pkgDepends <- function(pkg, recursive=TRUE, local=TRUE,
                       reduce=TRUE, lib.loc=NULL)
    .Defunct("package_dependencies()")

## Defunct stub: use dependsOnPkgs() or package_dependencies().
getDepList <- function(depMtrx, instPkgs, recursive=TRUE,
                       local=TRUE, reduce=TRUE, lib.loc=NULL)
    .Defunct("dependsOnPkgs() or package_dependencies()")

## Defunct stub with no direct replacement.
installFoundDepends <- function(depPkgList, ...)
    .Defunct()
## <entry/>

## <entry>
## Deprecated in 3.6.0 (r75... (12 Sep 2018)) -- should have belonged to prev.
## Defunct stub: use vignetteInfo()$depends or package_dependencies().
vignetteDepends <- function(vignette, recursive = TRUE, reduce = TRUE,
                            local = TRUE, lib.loc = NULL)
    .Defunct("vignetteInfo()$depends or package_dependencies()")
## </entry>
#  File src/library/tools/R/tools-deprecated.R
#  Part of the R package, https://www.R-project.org
#
#  Copyright (C) 1995-2020 The R Core Team
#
#  This program is free software; you can redistribute it and/or modify
#  it under the terms of the GNU General Public License as published by
#  the Free Software Foundation; either version 2 of the License, or
#  (at your option) any later version.
#
#  This program is distributed in the hope that it will be useful,
#  but WITHOUT ANY WARRANTY; without even the implied warranty of
#  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
#  GNU General Public License for more details.
#
#  A copy of the GNU General Public License is available at
#  https://www.R-project.org/Licenses/


## keep one entry (possibly commented) here :

## <entry>
## Deprecated in 3.6.0 (r75... (12 Sep 2018)) -- should have belonged to prev.
## vignetteDepends <-
##     function(vignette, recursive = TRUE, reduce = TRUE,
##              local = TRUE, lib.loc = NULL)
## {
##     .Deprecated("vignetteInfo()$depends or package_dependencies()")

##     if (length(vignette) != 1L)
##         stop("argument 'vignette' must be of length 1")
##     if (!nzchar(vignette)) return(invisible()) # lets examples work.
##     if (!file.exists(vignette))
##         stop(gettextf("file '%s' not found", vignette), domain = NA)

##     vigDeps <- vignetteInfo(vignette)$depends

##     depMtrx <- getVigDepMtrx(vigDeps)
##     instPkgs <- utils::installed.packages(lib.loc=lib.loc)
##     getDepList(depMtrx, instPkgs, recursive, local, reduce)
## }
## </entry>
#  File src/library/tools/R/translations.R
#  Part of the R package, https://www.R-project.org
#
#  Copyright (C) 1995-2023 The R Core Team
#
#  This program is free software; you can redistribute it and/or modify
#  it under the terms of the GNU General Public License as published by
#  the Free Software Foundation; either version 2 of the License, or
#  (at your option) any later version.
#
#  This program is distributed in the hope that it will be useful,
#  but WITHOUT ANY WARRANTY; without even the implied warranty of
#  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
#  GNU General Public License for more details.
#
#  A copy of the GNU General Public License is available at
#  https://www.R-project.org/Licenses/

#### R based engine for managing translations

## This only works in a UTF-8 locale: specifically substr needs to count
## UTF-8 chars
## Create the en@quot translation 'outfile' from 'potfile': run msginit
## (identity English translation) and msgconv (to UTF-8), then replace
## ASCII 'single quotes' in each msgstr with Unicode directional quotes
## while preserving the original line-wrapping of multi-line msgstrs.
## As noted above, this requires a UTF-8 locale so nchar()/substr()
## count characters, not bytes.
en_quote <- function(potfile, outfile)
{
    tfile <- tempfile()
    cmd <- paste("msginit -i", potfile, "--no-translator -l en -o", tfile)
    if(system(cmd, ignore.stderr = TRUE) != 0L)
        stop("running msginit failed", domain = NA)
    tfile2 <- tempfile()
    cmd <- paste("msgconv -t UTF-8 -o", tfile2, tfile)
    if(system(cmd) != 0L) stop("running msgconv failed", domain = NA)
    lines <- readLines(tfile2) # will be in UTF-8
    ## Each msgstr entry starts at a "msgstr..." line and may continue
    ## on following lines that begin with a double quote.
    starts <- which(startsWith(lines, "msgstr"))
    current <- 1L; out <- character()
    for (s in starts) {
        ## Pass through everything between the previous entry and this one.
        if (current < s)
            out <- c(out, lines[seq.int(current, s-1L, 1L)])
        ## 'start' is the msgstr keyword part, 'this' the quoted payload.
        start <- sub('([^"]*)"(.*)"$', "\\1", lines[s])
        this <- sub('([^"]*)"(.*)"$', "\\2", lines[s])
        current <- s+1L
        ## Collect continuation lines (NA check guards running off the end).
        while(!is.na(line <- lines[current]) && startsWith(line, '"')) {
            this <- c(this, sub('^"(.*)"$', "\\1", line))
            current <- current + 1L
        }
        ## Remember per-line character counts so the joined string can
        ## be split back at the same positions afterwards.
        nc <- nchar(this); n <- length(nc)
        this <- paste0(this, collapse="")
        ## This is the fixup: need to avoid apostrophes, which follow alnum
        this <- gsub("^'([^`']*)'",'\u2018\\1\u2019', this)
        this <- gsub("([^[:alpha:]]|\\\\n)'([^`']*)'",'\\1\u2018\\2\u2019', this)
        out <- if (n > 1L) {
            ## now split where it was before
            this1 <- character()
            sc <- c(0, cumsum(nc))
            for(i in seq_along(nc)) {
                if(!nc[i]) this1 <- c(this1, "")
                else {
                    this1 <- c(this1, substr(this, sc[i]+1L, sc[i+1]))
                }
            }
            c(out,
              paste0(start, '"', this1[1L] , '"'),
              paste0('"', this1[-1L] , '"'))
        } else
            c(out, paste0(start, '"', this , '"'))
    }
    ## Pass through any trailing non-msgstr lines.
    if(current <= length(lines))
        out <- c(out, lines[seq.int(current, length(lines), 1L)])
    ## in case this is done on Windows, force LF line endings
    con <- file(outfile, "wb")
    writeLines(out, con, useBytes = TRUE)
    close(con)
}

## Update the message translation templates (.pot) and catalogs
## (.po/.mo) for a package's R-level and (if present) C-level domains.
##
## pkgdir:    path to the package sources.
## pkg:       package name; taken from DESCRIPTION if NULL (base
##            packages are special-cased below).
## version:   version string recorded in the generated templates.
## pot_make:  if FALSE, reuse the existing .pot files instead of
##            regenerating them via xgettext.
## mo_make:   if FALSE, only merge templates into the .po files and do
##            not compile .mo files via msgfmt.
## verbose:   print the external commands being run?
## mergeOpts: options for msgmerge *in addition* to --update.
## copyright, bugs: copyright holder and bug-report address recorded
##            in the templates; defaulted for base packages.
##
## Requires the GNU gettext tools (xgettext, msgmerge, msgfmt) on the
## path.  Returns invisibly.
update_pkg_po <- function(pkgdir, pkg = NULL, version = NULL,
                          pot_make = TRUE, mo_make = TRUE,
                          verbose = getOption("verbose"),
                          mergeOpts = "", # only those *in addition* to --update
                          copyright, bugs)
{
    ## Compare two po/pot files, ignoring the POT-Creation-Date
    ## header line (which changes on every regeneration).
    same <- function(a, b)
    {
        tmpa <- readLines(a); tmpb <- readLines(b)
        tmpa <- filtergrep('^"POT-Creation-Date:', tmpa)
        tmpb <- filtergrep('^"POT-Creation-Date:', tmpb)
        identical(tmpa, tmpb)
    }

    ## Follow previous version by always collating in C.
    pwd <- getwd()
    coll <-  Sys.getlocale("LC_COLLATE")
    on.exit({Sys.setlocale("LC_COLLATE", coll); setwd(pwd)})
    Sys.setlocale("LC_COLLATE", "C")
    setwd(pkgdir)
    dir.create("po", FALSE)
    files <- dir("po")

    desc <- "DESCRIPTION"
    if(file.exists(desc)) {
        desc <- read.dcf(desc, fields = c("Package", "Version"))
        name <- desc[1L]
        if (is.null(pkg))	pkg <- name
        if (is.null(version))	version <- desc[2L]
        if (missing(copyright)) copyright <- NULL
        if (missing(bugs))	bugs <- NULL
        stem <- file.path("inst", "po")
    }
    if (is.null(pkg) || pkg %in% .get_standard_package_names()$base) { # A base package
        pkg <- basename(pkgdir)
        name <- "R"
        version <- as.character(getRversion())
        copyright <- "The R Core Team"
        bugs <- "bugs.r-project.org"
        stem <- file.path("..", "translations", "inst")
    }

    ## The interpreter is 'src' for the base package.
    is_base <- (pkg == "base")
    have_src <- paste0(pkg, ".pot") %in% files
    mergeCmd <- paste("msgmerge", if(is.character(mergeOpts)) paste("--update", mergeOpts))

    ## do R-pkg domain first
  if(pot_make) {
    ofile <- tempfile()
    if(verbose) cat("Creating pot: .. ")
    xgettext2pot(".", ofile, name, version, bugs)
    potfile <- file.path("po", paste0("R-", pkg, ".pot"))
    ## Only copy the new template over if it really changed, to
    ## avoid spurious date-only updates.
    if(file.exists(potfile) && same(potfile, ofile)) {
        if(verbose) cat("the same() as previous: not copying.\n")
    } else {
        if(verbose) cat("copying to potfile", potfile, "\n")
        file.copy(ofile, potfile, overwrite = TRUE)
    }
  } else {
        if(!file.exists(potfile <- file.path("po", paste0("R-", pkg, ".pot"))))
            stop(gettextf("file '%s' does not exist", potfile), domain = NA)
    }
    pofiles <- dir("po", pattern = "R-.*[.]po$", full.names = TRUE)
    pofiles <- pofiles[pofiles != "po/R-en@quot.po"]
    ## .po file might be newer than .mo
    for (f in pofiles) {
        lang <- sub("^R-(.*)[.]po$", "\\1", basename(f))
        ## Interestingly does *not* update the file dates
        ## (shQuote both paths, for consistency with the C-domain
        ## loop below)
        cmd <- paste(mergeCmd, shQuote(f), shQuote(potfile))
        if(verbose) cat("Running cmd", cmd, ":\n") else
        message("  R-", lang, ":", appendLF = FALSE, domain = NA)
        if(system(cmd) != 0L) {
            warning("running msgmerge on ", sQuote(f), " failed", domain = NA)
            next
        }
        ## Do not install catalogs which fail validity checking.
        res <- checkPoFile(f, TRUE)
        if(nrow(res)) {
            print(res)
            message("not installing", domain = NA)
            next
        }
        if(!mo_make) next
        dest <- file.path(stem, lang, "LC_MESSAGES")
        dir.create(dest, FALSE, TRUE)
        dest <- file.path(dest, sprintf("R-%s.mo", pkg))
 #       if(file_test("-ot", f, dest)) next
        cmd <- paste("msgfmt -c --statistics -o", shQuote(dest), shQuote(f))
        if(verbose) cat("Running cmd", cmd, ":\n")
        if(system(cmd) != 0L)
            warning(sprintf("running msgfmt on %s failed", basename(f)),
                    domain = NA, immediate. = TRUE)
    }

    ## do en@quot
    ## (generated automatically from the template, only in UTF-8 locales)
    if (l10n_info()[["UTF-8"]] && mo_make) {
        lang <- "en@quot"
        message("  R-", lang, ":", domain = NA)
        # f <- "po/R-en@quot.po"
        f <- tempfile()
        en_quote(potfile, f)
        dest <- file.path(stem, lang, "LC_MESSAGES")
        dir.create(dest, FALSE, TRUE)
        dest <- file.path(dest, sprintf("R-%s.mo", pkg))
        cmd <- paste("msgfmt -c --statistics -o", shQuote(dest), shQuote(f))
        if(verbose) cat("Running cmd", cmd, ":\n")
        if(system(cmd) != 0L)
            warning(sprintf("running msgfmt on %s failed", basename(f)),
                    domain = NA, immediate. = TRUE)
    }

    ## Nothing more to do unless there is a C-level domain.
    if(!(is_base || have_src)) return(invisible())

  if(pot_make) {
    ofile <- tempfile()
    if (!is_base) {
        dom <- pkg
        od <- setwd("src")
        exts <- "[.](c|cc|cpp|m|mm)$"
        cfiles <- dir(".", pattern = exts)
        for (subdir in c("windows", "cairo")) { # only grDevices/src/cairo
          if(dir.exists(subdir))
            cfiles <- c(cfiles,
                        dir(subdir, pattern = exts, full.names = TRUE))
        }
    } else {
        dom <- "R"
        od <- setwd("../../..")
        cfiles <- filtergrep("^#", readLines("po/POTFILES"))
    }
    cmd <- sprintf("xgettext --keyword=_ --keyword=N_ -o %s", shQuote(ofile))
    cmd <- c(cmd, paste0("--package-name=", name),
             paste0("--package-version=", version),
             "--add-comments=TRANSLATORS:",
             if(!is.null(copyright))
                 sprintf('--copyright-holder="%s"', copyright),
             if(!is.null(bugs))
                 sprintf('--msgid-bugs-address="%s"', bugs),
             if(is_base) "-C") # avoid messages about .y
    cmd <- paste(c(cmd, cfiles), collapse=" ")
    if(verbose) cat("Running cmd", cmd, ":\n")
    if(system(cmd) != 0L) stop("running xgettext failed", domain = NA)
    setwd(od)

    ## compare ofile and po/dom.pot, ignoring dates.
    potfile <- file.path("po", paste0(dom, ".pot"))
    if(!same(potfile, ofile)) file.copy(ofile, potfile, overwrite = TRUE)

  } else { # not pot_make
        dom <- if(is_base) "R" else pkg
        if(!file.exists(potfile <- file.path("po", paste0(dom, ".pot"))))
            stop(gettextf("file '%s' does not exist", potfile), domain = NA)
    }
    pofiles <- dir("po", pattern = "^[^R].*[.]po$", full.names = TRUE)
    pofiles <- pofiles[pofiles != "po/en@quot.po"]
    for (f in pofiles) {
        lang <- sub("[.]po", "", basename(f))
        cmd <- paste(mergeCmd, shQuote(f), shQuote(potfile))
        if(verbose) cat("Running cmd", cmd, ":\n") else
        message("  ", lang, ":", appendLF = FALSE, domain = NA)
        if(system(cmd) != 0L) {
            warning("running msgmerge on ",  f, " failed", domain = NA)
            next
        }
        res <- checkPoFile(f, TRUE)
        if(nrow(res)) {
            print(res)
            message("not installing", domain = NA)
            next
        }
        if(!mo_make) next
        dest <- file.path(stem, lang, "LC_MESSAGES")
        dir.create(dest, FALSE, TRUE)
        dest <- file.path(dest, sprintf("%s.mo", dom))
#        if(file_test("-ot", f, dest)) next
        cmd <- paste("msgfmt -c --statistics -o", shQuote(dest), shQuote(f))
        if(verbose) cat("Running cmd", cmd, ":\n")
        if(system(cmd) != 0L)
            warning(sprintf("running msgfmt on %s failed", basename(f)),
                    domain = NA)
    }
    ## do en@quot
    if (l10n_info()[["UTF-8"]] && mo_make) {
        lang <- "en@quot"
        message("  ", lang, ":", domain = NA)
        f <- tempfile()
        en_quote(potfile, f)
        dest <- file.path(stem, lang, "LC_MESSAGES")
        dir.create(dest, FALSE, TRUE)
        dest <- file.path(dest, sprintf("%s.mo", dom))
        cmd <- paste("msgfmt -c --statistics -o", shQuote(dest), shQuote(f))
        if(verbose) cat("Running cmd", cmd, ":\n")
        if(system(cmd) != 0L)
            warning(sprintf("running msgfmt on %s failed", basename(f)),
                    domain = NA)
    }

    invisible()
}

# (not exported)
# (not exported)
## Update the RGui message domain (Windows GUI strings) in an R source
## tree rooted at 'srcdir': regenerate src/library/base/po/RGui.pot
## (unless pot_make is FALSE), merge it into each RGui-<lang>.po
## catalog, and (unless mo_make is FALSE) compile the .mo files into
## src/library/translations/inst.  Requires the GNU gettext tools.
update_RGui_po <- function(srcdir,
                           pot_make = TRUE, mo_make = TRUE,
                           mergeOpts = "")
{
    ## Compare two po/pot files, ignoring the POT-Creation-Date
    ## header line (which changes on every regeneration).
    same <- function(a, b)
    {
        tmpa <- readLines(a); tmpb <- readLines(b)
        tmpa <- filtergrep('^"POT-Creation-Date:', tmpa)
        tmpb <- filtergrep('^"POT-Creation-Date:', tmpb)
        identical(tmpa, tmpb)
    }
    ## Follow previous version by always collating in C.
    pwd <- getwd()
    coll <-  Sys.getlocale("LC_COLLATE")
    on.exit({Sys.setlocale("LC_COLLATE", coll); setwd(pwd)})
    Sys.setlocale("LC_COLLATE", "C")
    setwd(srcdir)
    potfile <- "src/library/base/po/RGui.pot"
  if(pot_make) {
    ## The fixed list of C sources containing G_/GN_ marked strings.
    cfiles <- c(file.path("src/gnuwin32",
                          c("console.c", "editor.c",  "extra.c",
                            "pager.c", "preferences.c", "rui.c", "system.c")),
                file.path("src/extra/graphapp",
                          c("clipboard.c", "dialogs.c", "gmenus.c",
                            "metafile.c", "printer.c")),
                "src/library/utils/src/windows/dataentry.c",
                "src/library/utils/src/windows/widgets.c",
                "src/library/grDevices/src/devWindows.c")
    ofile <- tempfile()
    cmd <- sprintf("xgettext --keyword --keyword=G_ --keyword=GN_ -o %s", shQuote(ofile))
    cmd <- c(cmd, "--package-name=R",
             paste0("--package-version=", getRversion()),
             "--add-comments=TRANSLATORS:",
             '--copyright-holder="The R Core Team"',
             '--msgid-bugs-address="bugs.r-project.org"')
    cmd <- paste(c(cmd, cfiles), collapse=" ")
    if(system(cmd) != 0L) stop("running xgettext failed", domain = NA)
    ## compare ofile and po/RGui.pot, ignoring dates.
    if(!same(potfile, ofile)) file.copy(ofile, potfile, overwrite = TRUE)
  }
    pofiles <- dir("src/library/base/po", pattern = "^RGui-.*[.]po$", full.names = TRUE)
    for (f in pofiles) {
        lang <- sub("^RGui-(.*)[.]po$", "\\1", basename(f))
        lang2 <- sub("[.]po", "", basename(f))
        message("  ", lang2, ":", appendLF = FALSE, domain = NA)
        ## shQuote the paths, for consistency with update_pkg_po()
        cmd <- paste("msgmerge --update", mergeOpts, shQuote(f), shQuote(potfile))
        if(system(cmd) != 0L) {
            warning("running msgmerge failed", domain = NA)
            next
        }
        ## Do not install catalogs which fail validity checking.
        res <- checkPoFile(f, FALSE)
        if(nrow(res)) {
            print(res)
            next
        }
        if(!mo_make) next
        dest <- file.path("src/library/translations/inst", lang, "LC_MESSAGES")
        dir.create(dest, FALSE, TRUE)
        dest <- file.path(dest, "RGui.mo")
        ## Skip recompilation when the .mo is not older than the .po.
        if (file_test("-ot", f, dest)) next
        cmd <- paste("msgfmt -c --statistics -o", shQuote(dest), shQuote(f))
        if(system(cmd) != 0L)
            warning(sprintf("running msgfmt on %s failed", basename(f)),
                    domain = NA)
    }

    invisible()
}



## make package out of current translations.
## Build a source tarball of the current translations as a standalone
## 'translations' package, from the R sources rooted at 'srcdir'.
## The version is the current R version with 'append' pasted onto it,
## and a Depends field restricts the package to the matching x.y
## series of R.  The tarball is moved into 'outDir'.
make_translations_pkg <- function(srcdir, outDir = ".", append = "-1")
{
    transdir <- file.path(srcdir, "src/library/translations")
    pkgdir <- file.path(tempdir(), "translations")
    dir.create(pkgdir, FALSE)
    ## Copy the installed catalogs into the skeleton package.
    file.copy(file.path(transdir, "inst"), pkgdir, recursive = TRUE)
    ## Fill in the DESCRIPTION template: substitute the version and
    ## append the suffix to the Version line (line 2).
    rver <- getRversion()
    dlines <- readLines(file.path(transdir, "DESCRIPTION.in"))
    dlines <- gsub("@VERSION@", rver, dlines, fixed = TRUE)
    dlines[2] <- paste0(dlines[2], append)
    ## Restrict to the current x.y series of R.
    vnum <- unclass(rver)[[1]]
    dlines <- c(dlines,
                sprintf("Depends: R (>= %s.%d.0), R (< %d.%d.0)",
                        vnum[1], vnum[2], vnum[1], vnum[2] + 1))
    writeLines(dlines, file.path(pkgdir, "DESCRIPTION"))
    ## Build the tarball with R CMD build and move it to outDir.
    buildcmd <- paste(shQuote(file.path(R.home(), "bin", "R")),
                      "CMD", "build", shQuote(pkgdir))
    if(system(buildcmd) != 0L) stop("R CMD build failed")
    tarball <- Sys.glob(file.path(tempdir(), "translations_*.tar.gz"))
    file.rename(tarball, file.path(outDir, basename(tarball)))
    invisible()
}
#  File src/library/tools/R/update_packages.R
#  Part of the R package, https://www.R-project.org
#
#  Copyright (C) 1995-2017 The R Core Team
#
#  This program is free software; you can redistribute it and/or modify
#  it under the terms of the GNU General Public License as published by
#  the Free Software Foundation; either version 2 of the License, or
#  (at your option) any later version.
#
#  This program is distributed in the hope that it will be useful,
#  but WITHOUT ANY WARRANTY; without even the implied warranty of
#  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
#  GNU General Public License for more details.
#
#  A copy of the GNU General Public License is available at
#  https://www.R-project.org/Licenses/

## Code in this file adapted by Gabriel Becker from
## code distributed as part of the switchr R package.
## The modifications in this file and copyright thereof are
## donated without restriction to the R project.
##
## Original code and the switchr R package are
## Copyright 2018 Genentech Inc. All Rights Reserved.
## Author: Gabriel Becker <gabembecker@gmail.com>
## Distributed under the Artistic 2.0 License
## (re-licensed here to GPL 2+)

## canonical field order, from calling available.packages
## on CRAN repository (same result for Bioconductor)
## Canonical field order, from calling available.packages()
## on a CRAN repository (same result for Bioconductor).
## (Use '<-' for assignment, per R style conventions.)
fieldorder <- c("Package", "Version", "Priority", "Depends",
                "Imports", "LinkingTo", "Suggests", "Enhances",
                "License", "License_is_FOSS", "License_restricts_use",
                "OS_type", "Archs", "MD5sum", "NeedsCompilation",
                "File", "Repository")

## Update an existing PACKAGES repository index in 'dir' to reflect
## added, removed or changed package files, without reprocessing
## tarballs that already have an up-to-date entry.
##
## dir:           repository directory containing the PACKAGES file.
## fields:        extra DESCRIPTION fields to include (must already be
##                present in the existing PACKAGES file).
## type:          repository type, as for write_PACKAGES().
## verbose.level: 0 = silent, 1 = progress messages, 2 = also verbose
##                tarball processing; defaults to as.integer(dryrun).
## latestOnly:    keep only the latest version of each package?
## addFiles:      record the originating file for each new entry?
## rds_compress:  compression used for the PACKAGES.rds serialization.
## strict:        verify MD5 sums of tarballs against existing entries?
## dryrun:        if TRUE, do everything except write the result.
##
## Falls back to calling write_PACKAGES() directly when the existing
## PACKAGES file is missing, empty, or otherwise unusable.
update_PACKAGES <- function(dir = ".", fields = NULL,
                            type = c("source", "mac.binary",
                                     "win.binary"),
                            verbose.level = as.integer(dryrun),
                            latestOnly = TRUE,
                            addFiles = FALSE,
                            rds_compress = "xz",
                            strict = TRUE,
                            dryrun = FALSE)
{
    if(!is.integer(verbose.level))
        verbose.level <- as.integer(verbose.level)
    type <- match.arg(type)
    stopifnot(verbose.level >= 0L && verbose.level <= 2L)
    PKGSfile <- file.path(dir, "PACKAGES")
    ## whether we will call write_PACKAGES directly/immediately
    calldown <- FALSE
    retdat <- NULL
    if(type == "win.binary" && strict) {
        warning("PACKAGES files do not include MD5 sums in the win.binary case",
                ", so strict checking is impossible. Calling down to write_PACKAGES ",
                "directly.")
        calldown <- TRUE
    } else if (!file.exists(PKGSfile)) {
        ## no PACKAGES file to update
        warning("No existing PACKAGES file found at ", PKGSfile)
        calldown <- TRUE
    } else if (!all(dim(retdat <- as.data.frame(read.dcf(PKGSfile),
                                         stringsAsFactors = FALSE)) > 0L)) {
        ## retdat is populated in the if condition here.
        ## read without fields restriction, because reducing number
        ## of fields is ok, adding fields means we need reprocessing

        ## 0 rows and/or 0 columns
        warning("Existing PACKAGES file contained no rows and/or no columns")
        calldown <- TRUE
    }
    okfields <- names(retdat)

    ## can't update PACKAGES file if existing entries don't have all
    ## the required fields
    if(!calldown && !is.null(fields) && !all(fields %in% okfields)) {
        warning("Specified fields not present in existing PACKAGES file: ",
                paste(setdiff(fields, okfields), collapse = " "))
        calldown <- TRUE
    }

    ## call straight down to write_PACKAGES if:
    ## 1. type is win.binary and strict is TRUE (no MD5 sums to check against,
    ##    only way to get full strictness is write_PACKAGES
    ## 2. no PACKAGES file already exists or it's empty
    ## 3. 1+ specified field not present in existing PACKAGES file
    if(calldown) {
        if(verbose.level > 0L)
            message("Unable to update existing PACKAGES file. Calling write_PACKAGES directly.")
        return(write_PACKAGES(dir = dir, fields = fields, type = type,
                              verbose = verbose.level == 2,
                              latestOnly = latestOnly,
                              addFiles = addFiles, rds_compress = rds_compress))
    }
    ## we know file exists by this point
    pmtime <- file.info(PKGSfile)$mtime
    if(verbose.level > 0L) {
        message("Updating existing repository [strict mode: ",
                if(strict) "ON" else "OFF",
                "]\nDetected PACKAGES file with ", nrow(retdat),
                " entries at ", PKGSfile)
    }

    if(!is.null(fields))
        retdat <- retdat[, fields]

    pkgfiles <- list.files(dir, pattern = .get_pkg_file_pattern(type),
                           full.names = TRUE)
    if(length(pkgfiles) == 0L)
        stop("unable to find any package tarballs in ", dir)

    ## Associate each existing entry with its tarball on disk.
    if(is.null(retdat$File)) {
        tbmatches <- match(paste(retdat$Package,
                                 retdat$Version,
                                 sep = "_"),
                           ## above doesn't have the extensions
                           gsub(.get_pkg_file_pattern(type, ext.only = TRUE),
                                "",
                                basename(pkgfiles)))
        ## this gets NAs for entries that don't have tarballs
        ## taken care of via keeprows below.
        retdat$tarball <- pkgfiles[tbmatches]
    } else
        retdat$tarball <- retdat$File

    ## for accounting purposes, removed before final write
    retdat$IsNew <- FALSE

    ## detect and remove entries whose files have been deleted
    ## file.exists(NA_character_) returns FALSE, so this
    ## is ok without an explicit NA check
    keeprows <- file.exists(retdat$tarball)
    if(verbose.level > 0L) {
        msg <- paste("Tarballs found for", sum(keeprows), " of ",
                     nrow(retdat), "existing PACKAGES entries.")
        message(msg)
    }
    retdat <- retdat[keeprows,]

    ## check for tarballs that are too new
    ## remove entries which might appear to match them
    ## because the new tarball takes precedence.
    tbmtimes <- file.info(retdat$tarball)$mtime
    toonew <- which(tbmtimes > pmtime)
    if(length(toonew) > 0L) {
        if(verbose.level > 0L){
            msg <- paste(length(toonew), " tarball(s) matching existing entries are ",
                         "newer than PACKAGES file and must be reprocessed.")
            message(msg)
        }
        retdat <- retdat[-toonew, ]
    }

    ## If in strict mode we confirm that the MD5 sums match for
    ## tarballs which match pre-existing PACKAGES entries.
    ##
    ## Otherwise we skip this check for speed, assuming that
    ## any tarball we find is the one used to create the entry.
    ##
    ## Note: skipping the check can lead to a 'bad' repo in rare
    ## cases, but the installation machinery would still protect
    ## against non-malicious cases of this by failing out when the
    ## MD5 sum didn't match what PACKAGES said it should be.
    ##
    ## In the win.binary case the existing PACKAGES file has no MD5
    ## sums, but we caught that above, so if strict is TRUE, we know
    ## type != win.binary.
    if(strict && NROW(retdat) > 0L) {
        if(verbose.level > 0L) {
            msg <- paste("[strict mode] Checking if MD5sums match ",
                         "for existing tarballs")
            message(msg)
        }
        curMD5sums <- md5sum(normalizePath(retdat$tarball))
        ## There are no NAs in retdat$MD5sum here, as the only data in
        ## there now is from the existing PACKAGES file.
        notokinds <- which(retdat$MD5sum != curMD5sums)
        if(length(notokinds) > 0L) {
            msg <- paste0("Detected ", length(notokinds), " MD5sum mismatches",
                          " between existing PACKAGES file and tarballs")
            warning(msg)
        } else if(verbose.level > 0L) {
            message("All existing entry MD5sums match tarballs.")
        }
        ## tarballs that don't already have an entry
        ## OR that mismatched their existing entry
        ## possibly needing to be added
        if(length(notokinds) > 0L) {
            retdat <- retdat[-notokinds,]
        }
    }

    newpkgfiles <- setdiff(normalizePath(pkgfiles),
                           normalizePath(retdat$tarball))

    ## If we're willing to assume the filenames are honest and
    ## accurate, we can skip non-newest package versions without
    ## ever untaring them and reading their DESCRIPTION files.
    ##
    ## this is not the default because it is technically speaking
    ## less safe than what write_PACKAGES(,latestOnly=TRUE) does
    ## which is always process everything then prune.
    if(!strict &&
       latestOnly &&
       length(newpkgfiles) > 0L) {
        ## strip extension, left with pkgname_version
        newpkgtmp <- gsub(.get_pkg_file_pattern(type, ext.only = TRUE),
                          "",
                          basename(newpkgfiles))
        newpkgspl <- strsplit(basename(newpkgtmp), "_")
        newpkgdf <- do.call(rbind.data.frame,
                            c(newpkgspl, stringsAsFactors = FALSE))
        ## We create a dummy new repository db with only
        ## Package and Version columns, then fill them
        ## out with NAs so we can hit .remove_stale_dups
        ## before ever reading in the DESCRIPTION files
        ##
        ## These dummy db rows will all be replaced by the
        ## real data later in the process before the
        ## new PACKAGES files are written.

        newpkgdf <- newpkgdf[,1:2]
        names(newpkgdf) <- c("Package", "Version")
        newpkgdf <- .filldfcols(newpkgdf, retdat)
        ## for accounting purposes, taken back off later
        newpkgdf$IsNew <- TRUE
        newpkgdf$tarball <- newpkgfiles
        retdat <- rbind(retdat, newpkgdf)
        ## remove non-latest ones now to avoid the expensive stuff
        ## this is non-strict because it assumes the package name and
        ## version in the filename are accurate. Technically, not
        ## guaranteed.
        retdat <- .remove_stale_dups(retdat)
        newpkgfiles <- retdat$tarball[retdat$IsNew]
    }

    ## Do any packages/package versions need to be added?
    numnew <- length(newpkgfiles)
    if(numnew > 0L) {
        if(verbose.level > 0L) {
            message("Found ", numnew, " package versions to process.")
        }

        ## returns a list of character vectors suitable for construction
        ## into a read.dcf output-style character matrix
        newpkgdat <- .process_package_files_for_repository_db(newpkgfiles,
                                                              type,
                                                              fields,
                                                              verbose.level > 1)
        newpkgdat <- .process_repository_package_db_to_matrix(newpkgdat,
                                                              path = "", #unused here
                                                              addFiles,
                                                              addPaths = FALSE,
                                                              latestOnly)

        newpkgdf <- as.data.frame(newpkgdat, stringsAsFactors = FALSE)

        if(!identical(names(newpkgdf), names(retdat))) {
            ## make sure we catch columns only present in one or
            ## the other in both directions.
            ##
            ## the order of columns that comes out of this is columns
            ## in retdat (ie the original PACKAGES) in the order
            ## they appear there, THEN fields unique to the new tarballs
            ## appended in the order they appear there.
            newpkgdf <- .filldfcols(newpkgdf, retdat)
            retdat <- .filldfcols(retdat, newpkgdf)
        }

        if(verbose.level > 0L) {
            msg <- paste("Processed", nrow(newpkgdf), "entries from ",
                         "package tarballs.")
            message(msg)
        }

        ## just for accounting purposes
        ## taken back off later

        newpkgdf$IsNew <- TRUE
        retdat <- rbind(retdat[!retdat$IsNew,],
                        newpkgdf)
        if(latestOnly) {
            retdat <- .remove_stale_dups(retdat)
        }
        if(verbose.level > 0L) {
            msg <- paste(sum(retdat$IsNew), "entries added or updated, ",
                         sum(!retdat$IsNew), " entries retained unchanged.")
            message(msg)
        }

    } else if (verbose.level > 0L) {
        message("No new packages or updated package versions detected")
    }

    if(verbose.level > 0L) {
        msg <- paste("Final updated PACKAGES db contains ",
                     nrow(retdat), " entries.")
        message(msg)
    }

    ## write_PACKAGES docs don't define an order of entries, but I
    ## think it should(?) be sort order of files it processes. We
    ## reorder our db to give the same order.
    retdat <- retdat[order(paste0(retdat$Package, retdat$Version)),]

    ## clean up temp columns (note this works even if they aren't
    ## there so we don't need to worry about ones that are only
    ## defined within if blocks
    retdat$IsNew <- NULL
    retdat$tarball <- NULL

    ## guarantee canonical field order, with non-canonical fields
    ## appearing after
    noncanonfs <- setdiff(names(retdat), fieldorder)
    canonfs <- fieldorder[fieldorder %in% names(retdat)]
    retdat <- retdat[,c(canonfs, noncanonfs)]


    if(dryrun) {
        if(verbose.level > 0L)
            message("[dryrun mode] Dryrun complete.")
    } else {
        if(verbose.level > 0L)
            message("Writing final updated PACKAGES files.")
        ## crucial that db is written as a matrix
        ## otherwise available.packages, etc will fail
        db <- as.matrix(retdat)
        ## (return value of the writer is not used)
        .write_repository_package_db(db, dir, rds_compress)
        if(verbose.level > 0L)
            message("update_PACKAGES complete.")
    }
}


## pad df with columns from srcdf that it is missing,
## populated with NAs of the type appropriate for the
## column in srcdf. Must work for 0 row df or 0 row srcdf
##
## final col order: names(srcdf) followed by
## any columns unique to df
## Pad 'df' with any columns of 'srcdf' that it lacks, populated with
## missing values whose type matches the corresponding 'srcdf' column
## (a zero-row extraction from 'srcdf' carries the type but no data).
## Must work when either 'df' or 'srcdf' has zero rows.
##
## Final column order: names(srcdf), followed by any columns that
## appear only in 'df'.
.filldfcols <- function(df, srcdf) {
    src_names <- names(srcdf)
    df_names <- names(df)
    needed <- setdiff(src_names, df_names)
    ## zero-row subset preserves each column's type while adding NAs
    df[, needed] <- srcdf[integer(), needed]
    df[, unique(c(src_names, df_names))]
}

#  File src/library/tools/R/urltools.R
#  Part of the R package, https://www.R-project.org
#
#  Copyright (C) 2015-2024 The R Core Team
#
#  This program is free software; you can redistribute it and/or modify
#  it under the terms of the GNU General Public License as published by
#  the Free Software Foundation; either version 2 of the License, or
#  (at your option) any later version.
#
#  This program is distributed in the hope that it will be useful,
#  but WITHOUT ANY WARRANTY; without even the implied warranty of
#  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
#  GNU General Public License for more details.
#
#  A copy of the GNU General Public License is available at
#  https://www.R-project.org/Licenses/

## See RFC 3986 <https://www.rfc-editor.org/rfc/rfc3986> and
## <https://url.spec.whatwg.org/>.

get_IANA_URI_scheme_db <-
function()
{
    ## Download the IANA URI scheme registry as a data frame; see
    ## <https://www.iana.org/assignments/uri-schemes/uri-schemes.xhtml>.
    baseurl <- "https://www.iana.org/assignments/uri-schemes/"
    con <- url(paste0(baseurl, "uri-schemes-1.csv"))
    tab <- utils::read.csv(con, stringsAsFactors = FALSE,
                           encoding = "UTF-8")
    ## Use '_' instead of '.' in the variable names.
    names(tab) <- chartr(".", "_", names(tab))
    ## Keep only the scheme token itself (drop trailing annotations
    ## such as " (OBSOLETE)").
    tab$URI_Scheme <- sub(" .*", "", tab$URI_Scheme)
    tab
}

parse_URI_reference <-
function(x)
{
    ## Split URI references into their five components, using the
    ## parsing regular expression of RFC 3986 Appendix B.
    re <- "^(([^:/?#]+):)?(//([^/?#]*))?([^?#]*)(\\?([^#]*))?(#(.*))?"
    comps <- c("scheme", "authority", "path", "query", "fragment")
    if(!length(x)) {
        m <- matrix(character(), 0L, 5L)
    } else {
        hits <- regmatches(x, regexec(re, x))
        ## columns 3/5/6/8/10 hold the capture groups for the five
        ## components (1 is the whole match; 2, 4, 7, 9 include the
        ## surrounding delimiters).
        m <- do.call(rbind, hits)[, c(3L, 5L, 6L, 8L, 10L), drop = FALSE]
    }
    out <- as.data.frame(m)
    names(out) <- comps
    out
}

.get_urls_from_Rd <-
function(x, href = TRUE, ifdef = FALSE)
{
    ## Walk a parsed Rd object collecting the targets of \url (and
    ## optionally \href) markup; when 'ifdef' is true, also descend
    ## into \if/\ifelse branches that an HTML renderer would take.
    found <- character()
    visit <- function(node) {
        tag <- attr(node, "Rd_tag")
        ## Rd2HTML and Rd2latex remove whitespace and \n from URLs.
        if(identical(tag, "\\url")) {
            found <<- c(found, lines2str(.Rd_deparse(node, tag = FALSE)))
        } else if(href && identical(tag, "\\href")) {
            ## One could also record the \href text argument in the
            ## names, but then one would need to process named and
            ## unnamed extracted URLs separately.
            found <<- c(found,
                        lines2str(.Rd_deparse(node[[1L]], tag = FALSE)))
        } else if(ifdef && length(tag) && (tag %in% c("\\if", "\\ifelse"))) {
            ## cf. testRdConditional()
            condition <- node[[1L]]
            if(all(RdTags(condition) == "TEXT")) {
                flags <- trimws(strsplit(paste(condition, collapse = ""),
                                         ",")[[1L]])
                if(any(c("TRUE", "html") %in% flags))
                    visit(node[[2L]])
                else if(tag == "\\ifelse")
                    visit(node[[3L]])
            }
        } else if(is.list(node))
            lapply(node, visit)
    }
    lapply(x, visit)
    unique(trimws(found))
}

.get_urls_from_HTML_file <-
function(f)
{
    ## Collect the href targets of all <a> elements in an HTML file,
    ## dropping missing ones, within-document (#anchor) links, and
    ## duplicates.
    doc <- xml2::read_html(f)
    if(!inherits(doc, "xml_node")) return(character())
    anchors <- xml2::xml_find_all(doc, "//a")
    targets <- xml2::xml_attr(anchors, "href")
    keep <- !is.na(targets) & !startsWith(targets, "#")
    unique(targets[keep])
}

.get_urls_from_PDF_file <-
function(f)
{
    ## Seems there is no straightforward way to extract hyperrefs from
    ## a PDF, hence first convert to (XML-flavoured) HTML and scan
    ## that.  Note that pdftohtml always outputs in cwd, so do the
    ## conversion inside a scratch directory.
    owd <- getwd()
    dir.create(d <- tempfile())
    on.exit({ unlink(d, recursive = TRUE); setwd(owd) })
    file.copy(normalizePath(f), d)
    setwd(d)
    g <- tempfile(tmpdir = d, fileext = ".xml")
    system2("pdftohtml",
            c("-s -q -i -c -xml", shQuote(basename(f)), shQuote(basename(g))))
    ## Oh dear: seems that pdftohtml can fail without a non-zero exit
    ## status, so test for the output file instead.
    if(file.exists(g)) .get_urls_from_HTML_file(g) else character()
}

url_db <-
function(urls, parents)
{
    ## Construct a two-column ("URL", "Parent") data frame of class
    ## "url_db".  Some people get leading LFs in URLs, so trim the
    ## URLs before storing them.
    res <- list2DF(list(URL = trimws(as.character(urls)),
                        Parent = as.character(parents)))
    class(res) <- c("url_db", "data.frame")
    res
}

url_db_from_HTML_files <-
function(dir, recursive = FALSE, files = NULL, verbose = FALSE)
{
    ## Build a url_db from the hyperlinks in the given HTML files
    ## (default: all *.html under 'dir'); parents are the file paths
    ## relative to 'dir'.
    if(is.null(files))
        files <- list.files(dir, pattern = "[.]html$",
                            full.names = TRUE,
                            recursive = recursive)
    extract <- function(f) {
        if(verbose)
            message(sprintf("processing %s",
                            .file_path_relative_to_dir(f, dir)))
        .get_urls_from_HTML_file(f)
    }
    found <- lapply(files, extract)
    names(found) <- files
    ## Drop files contributing no URLs before expanding parents.
    found <- Filter(length, found)
    urls <- parents <- character()
    if(length(found)) {
        parents <- rep.int(.file_path_relative_to_dir(names(found), dir),
                           lengths(found))
        urls <- unlist(found, use.names = FALSE)
    }
    url_db(urls, parents)
}

url_db_from_PDF_files <-
function(dir, recursive = FALSE, files = NULL, verbose = FALSE)
{
    ## Build a url_db from the hyperlinks in the given PDF files
    ## (default: all *.pdf under 'dir'); parents are the file paths
    ## relative to 'dir'.
    if(is.null(files))
        files <- list.files(dir, pattern = "[.]pdf$",
                            full.names = TRUE,
                            recursive = recursive)
    extract <- function(f) {
        if(verbose)
            message(sprintf("processing %s",
                            .file_path_relative_to_dir(f, dir)))
        .get_urls_from_PDF_file(f)
    }
    found <- lapply(files, extract)
    names(found) <- files
    ## Drop files contributing no URLs before expanding parents.
    found <- Filter(length, found)
    urls <- parents <- character()
    if(length(found)) {
        parents <- rep.int(.file_path_relative_to_dir(names(found), dir),
                           lengths(found))
        urls <- unlist(found, use.names = FALSE)
    }
    url_db(urls, parents)
}

url_db_from_package_Rd_db <-
function(db)
{
    ## Build a url_db from a package Rd db: one parent entry per Rd
    ## file contributing URLs, recorded as man/<file>, repeated once
    ## per URL found in that file.
    per_file <- Filter(length, lapply(db, .get_urls_from_Rd))
    url_db(unlist(per_file, use.names = FALSE),
           rep.int(file.path("man", names(per_file)),
                   lengths(per_file)))
}

url_db_from_package_metadata <-
function(meta)
{
    ## Collect URLs from the URL, BugReports and Description fields
    ## of package metadata; all are attributed to the DESCRIPTION
    ## file.
    urls <- character()
    for(v in meta[c("URL", "BugReports")]) {
        if(is.na(v)) next
        urls <- c(urls, .get_urls_from_DESCRIPTION_URL_field(v))
    }
    v <- meta["Description"]
    if(!is.na(v))
        urls <- c(urls, .get_urls_from_DESCRIPTION_Description_field(v))
    url_db(urls, rep.int("DESCRIPTION", length(urls)))
}

.get_urls_from_DESCRIPTION_URL_field <-
function(v)
{
    ## Extract URLs from a DESCRIPTION URL/BugReports field value:
    ## first those wrapped in angle brackets (optionally prefixed
    ## with "URL:"), then any remaining bare http(s)/ftp URLs.
    if(is.na(v)) return(character())
    out <- character()
    pattern <-
        "<(URL: *)?((https?|ftp)://[^[:space:],]*)[[:space:]]*>"
    m <- gregexpr(pattern, v)
    out <- c(out, .gregexec_at_pos(pattern, v, m, 3L))
    ## Blank out the bracketed matches before scanning for bare URLs.
    regmatches(v, m) <- ""
    pattern <- "(^|[^>\"?])((https?|ftp)://[^[:space:],]*)"
    m <- gregexpr(pattern, v)
    out <- c(out, .gregexec_at_pos(pattern, v, m, 3L))
    out
}

.get_urls_from_DESCRIPTION_Description_field <-
function(v)
{
    ## Extract URLs from a DESCRIPTION Description field value, in
    ## three passes: angle-bracketed http(s)/ftp URLs (optionally
    ## prefixed "URL:"), then bare http(s)/ftp URLs, then any other
    ## angle-bracketed URI with a valid scheme.
    if(is.na(v)) return(character())
    out <- character()
    pattern <-
        "<(URL: *)?((https?|ftp)://[^[:space:]]+)[[:space:]]*>"
    m <- gregexpr(pattern, v)
    out <- c(out, .gregexec_at_pos(pattern, v, m, 3L))
    regmatches(v, m) <- ""
    pattern <-
        "([^>\"?])((https?|ftp)://[[:alnum:]/.:@+\\_~%#?=&;,-]+[[:alnum:]/])"
    m <- gregexpr(pattern, v)
    out <- c(out, .gregexec_at_pos(pattern, v, m, 3L))
    regmatches(v, m) <- ""
    pattern <- "<([A-Za-z][A-Za-z0-9.+-]*:[^>]+)>"
    ##   scheme      = ALPHA *( ALPHA / DIGIT / "+" / "-" / "." )
    m <- gregexpr(pattern, v)
    out <- c(out, .gregexec_at_pos(pattern, v, m, 2L))
    out
}

url_db_from_package_citation <-
function(dir, meta, installed = FALSE)
{
    ## Collect the URLs recorded in the package CITATION file (located
    ## at top level for installed packages, in inst/ for sources).
    rel <- if(installed) "CITATION" else file.path("inst", "CITATION")
    urls <- character()
    cfile <- file.path(dir, rel)
    if(file.exists(cfile)) {
        cit <- .read_citation_quietly(cfile, meta)
        if(!inherits(cit, "error"))
            urls <- trimws(unique(unlist(cit$url, use.names = FALSE)))
    }
    url_db(urls, rep.int(rel, length(urls)))
}

url_db_from_package_news <-
function(dir, installed = FALSE)
{
    ## Collect the URLs from the package's NEWS.Rd file, if any.
    rel <- if(installed) "NEWS.Rd" else file.path("inst", "NEWS.Rd")
    nfile <- file.path(dir, rel)
    urls <- character()
    if(file.exists(nfile)) {
        rd <- prepare_Rd(parse_Rd(nfile, macros = initialRdMacros()),
                         stages = "install")
        urls <- .get_urls_from_Rd(rd)
    }
    url_db(urls, rep.int(rel, length(urls)))
}

url_db_from_package_HTML_files <-
function(dir, installed = FALSE)
{
    ## Collect URLs from the package's HTML documents (doc/ for
    ## installed packages, inst/doc for sources), plus README.html for
    ## installed packages.
    subdir <- if(installed) "doc" else file.path("inst", "doc")
    files <- Sys.glob(file.path(dir, subdir, "*.html"))
    if(installed) {
        readme <- file.path(dir, "README.html")
        if(file.exists(readme))
            files <- c(files, readme)
    }
    url_db_from_HTML_files(dir, files = files)
}

url_db_from_package_README_md <-
function(dir, installed = FALSE)
{
    ## Collect URLs from the package README.md (first match among
    ## inst/README.md for sources, then top-level README.md), by
    ## converting it to HTML via pandoc and scanning the result.
    urls <- path <- character()
    candidates <- c(if(!installed) file.path(dir, "inst", "README.md"),
                    file.path(dir, "README.md"))
    rfile <- Filter(file.exists, candidates)[1L]
    if(!is.na(rfile) && nzchar(Sys.which("pandoc"))) {
        path <- .file_path_relative_to_dir(rfile, dir)
        html <- tempfile("README", fileext = ".html")
        on.exit(unlink(html))
        ## A zero status means the pandoc conversion succeeded.
        if(!.pandoc_md_for_CRAN(rfile, html)$status)
            urls <- .get_urls_from_HTML_file(html)
    }
    url_db(urls, rep.int(path, length(urls)))
}

url_db_from_package_NEWS_md <-
function(dir, installed = FALSE)
{
    ## Collect URLs from the package NEWS.md (first match among
    ## inst/NEWS.md for sources, then top-level NEWS.md), by converting
    ## it to HTML via pandoc and scanning the result.
    urls <- path <- character()
    candidates <- c(if(!installed) file.path(dir, "inst", "NEWS.md"),
                    file.path(dir, "NEWS.md"))
    nfile <- Filter(file.exists, candidates)[1L]
    if(!is.na(nfile) && nzchar(Sys.which("pandoc"))) {
        path <- .file_path_relative_to_dir(nfile, dir)
        html <- tempfile("NEWS", fileext = ".html")
        on.exit(unlink(html))
        ## A zero status means the pandoc conversion succeeded.
        if(!.pandoc_md_for_CRAN(nfile, html)$status)
            urls <- .get_urls_from_HTML_file(html)
    }
    url_db(urls, rep.int(path, length(urls)))
}

url_db_from_package_sources <-
function(dir, add = FALSE) {
    ## Build the full URL db for a source package: metadata, Rd files,
    ## CITATION and NEWS.Rd, plus (when xml2 is available) HTML files
    ## and the pandoc-converted README.md/NEWS.md.
    meta <- .get_package_metadata(dir, FALSE)
    dbs <- list(url_db_from_package_metadata(meta),
                url_db_from_package_Rd_db(Rd_db(dir = dir)),
                url_db_from_package_citation(dir, meta),
                url_db_from_package_news(dir))
    if(requireNamespace("xml2", quietly = TRUE))
        dbs <- c(dbs,
                 list(url_db_from_package_HTML_files(dir),
                      url_db_from_package_README_md(dir),
                      url_db_from_package_NEWS_md(dir)))
    db <- do.call(rbind, dbs)
    ## Optionally prefix parents with the package directory name.
    if(add)
        db$Parent <- file.path(basename(dir), db$Parent)
    db
}

url_db_from_installed_packages <-
function(packages, lib.loc = NULL, verbose = FALSE)
{
    ## Build a combined URL db for the given installed packages; each
    ## parent path is prefixed with the package name.
    if(!length(packages)) return()
    db_for_package <- function(p) {
        if(verbose)
            message(sprintf("processing %s", p))
        dir <- system.file(package = p, lib.loc = lib.loc)
        if(dir == "") return()
        meta <- .read_description(file.path(dir, "DESCRIPTION"))
        parts <-
            list(url_db_from_package_metadata(meta),
                 url_db_from_package_Rd_db(Rd_db(p,
                                                 lib.loc = dirname(dir))),
                 url_db_from_package_citation(dir, meta,
                                              installed = TRUE),
                 url_db_from_package_news(dir, installed = TRUE))
        if(requireNamespace("xml2", quietly = TRUE))
            parts <-
                c(parts,
                  list(url_db_from_package_HTML_files(dir,
                                                      installed = TRUE),
                       url_db_from_package_README_md(dir,
                                                     installed = TRUE),
                       url_db_from_package_NEWS_md(dir,
                                                   installed = TRUE)))
        db <- do.call(rbind, parts)
        db$Parent <- file.path(p, db$Parent)
        db
    }
    do.call(rbind,
            c(lapply(packages, db_for_package),
              list(make.row.names = FALSE)))
}

get_IANA_HTTP_status_code_db <-
function()
{
    ## Download the IANA HTTP status code registry, see
    ## <https://www.iana.org/assignments/http-status-codes/http-status-codes.xhtml>
    registry <- "https://www.iana.org/assignments/http-status-codes/"
    codes <- utils::read.csv(url(paste0(registry,
                                        "http-status-codes-1.csv")),
                             stringsAsFactors = FALSE)
    ## Drop the "Unassigned" placeholder rows.
    codes[codes$Description != "Unassigned", ]
}

## See <https://en.wikipedia.org/wiki/List_of_FTP_server_return_codes>
## and <https://www.rfc-editor.org/rfc/rfc959>,
## Section 4.2.2 "Numeric Order List of Reply Codes",
## and <https://www.rfc-editor.org/rfc/rfc2228>,
## Section 5 "New FTP Replies".
## Only need those >= 400.
## Named character vector mapping FTP server reply codes (as character
## strings) to their standard messages; used by check_url_db() to report
## ftp failures.
table_of_FTP_server_return_codes <-
    c("421" = "Service not available, closing control connection.",
      "425" = "Can't open data connection.",
      "426" = "Connection closed; transfer aborted.",
      "430" = "Invalid username or password",
      "431" = "Need some unavailable resource to process security.",
      "434" = "Requested host unavailable.",
      "450" = "Requested file action not taken.",
      "451" = "Requested action aborted: local error in processing.",
      "452" = "Requested action not taken.  Insufficient storage space in system.",
      "500" = "Syntax error, command unrecognized.",
      "501" = "Syntax error in parameters or arguments.",
      "502" = "Command not implemented.",
      "503" = "Bad sequence of commands.",
      "504" = "Command not implemented for that parameter.",
      "530" = "Not logged in.",
      "532" = "Need account for storing files.",
      "533" = "Command protection level denied for policy reasons.",
      "534" = "Request denied for policy reasons.",
      "535" = "Failed security check (hash, sequence, etc).",
      "536" = "Requested PROT level not supported by mechanism.",
      "537" = "Command protection level not supported by security mechanism.",
      "550" = "Requested action not taken.  File unavailable",
      "551" = "Requested action aborted: page type unknown.",
      "552" = "Requested file action aborted.  Exceeded storage allocation (for current directory or dataset).",
      "553" = "Requested action not taken.  File name not allowed.",
      "631" = "Integrity protected reply.",
      "632" = "Confidentiality and integrity protected reply.",
      "633" = "Confidentiality protected reply."
      )

check_url_db <-
function(db, remote = TRUE, verbose = FALSE, parallel = FALSE, pool = NULL)
{
    ## Check the URLs in a URL db (URL/parent pairs, see url_db()),
    ## returning a "check_url_db" data frame with one row per
    ## problematic URL.  With remote = FALSE only local checks are
    ## performed (scheme validity, CRAN/R-project canonical form,
    ## embedded spaces); with parallel = TRUE headers are fetched
    ## concurrently via a curl multi pool.

    ## Whether to re-check non-200 http results with a full GET via the
    ## curl package (only when not already fetching in parallel, and
    ## unless disabled via _R_CHECK_URLS_USE_CURL_).
    use_curl <-
        !parallel &&
        config_val_to_logical(Sys.getenv("_R_CHECK_URLS_USE_CURL_",
                                         "TRUE")) &&
        requireNamespace("curl", quietly = TRUE)

    if(parallel && is.null(pool))
        pool <- curl::new_pool()

    ## Constructor for the result data frame: 'From' is a list column
    ## (a URL can occur in several files).
    .gather <- function(u = character(),
                        p = list(),
                        s = rep.int("", length(u)),
                        m = rep.int("", length(u)),
                        new = rep.int("", length(u)),
                        cran = rep.int("", length(u)),
                        spaces = rep.int("", length(u)),
                        R = rep.int("", length(u))) {
        y <- list2DF(list(URL = u, From = p, Status = s, Message = m,
                          New = new, CRAN = cran, Spaces = spaces, R = R))
        class(y) <- c("check_url_db", "data.frame")
        y
    }

    ## Header fetcher: curl multi pool when parallel, otherwise
    ## sequential curlGetHeaders() calls.
    .fetch_headers <-
        if(parallel)
            function(urls)
                .fetch_headers_via_curl(urls, verbose, pool)
        else
            function(urls)
                .fetch_headers_via_base(urls, verbose)

    ## Status and message for one ftp URL ("-1" signals a fetch error).
    .check_ftp <- function(u, h) {
        if(inherits(h, "error")) {
            s <- "-1"
            msg <- sub("[[:space:]]*$", "", conditionMessage(h))
        } else {
            s <- as.character(attr(h, "status"))
            msg <- table_of_FTP_server_return_codes[s]
        }
        c(s, msg, "", "")
    }

    ## http/https checks: part A needs the fetched headers (status,
    ## message, redirect target) and is skipped when remote = FALSE;
    ## part B is purely textual (canonical form, spaces).
    .check_http <- if(remote)
                       function(u, h) c(.check_http_A(u, h),
                                        .check_http_B(u))
                   else
                       function(u, h) c(rep.int("", 3L),
                                        .check_http_B(u))

    ## Returns c(status, message, newLocation) for one http/https URL.
    .check_http_A <- function(u, h) {
        newLoc <- ""
        if(inherits(h, "error")) {
            s <- "-1"
            msg <- sub("[[:space:]]*$", "", conditionMessage(h))
            ## For certificate problems, retry without verification so
            ## the report can also show the status the site would give.
            if(grepl(paste(c("server certificate verification failed",
                             "failed to get server cert",
                             "libcurl error code (51|60)"),
                           collapse = "|"),
                     msg)) {
                h2 <- tryCatch(curlGetHeaders(u, verify = FALSE),
                               error = identity)
                s2 <- as.character(attr(h2, "status"))
                msg <- paste0(msg, "\n\t(Status without verification: ",
                              table_of_HTTP_status_codes[s2], ")")
            }
        } else {
            s <- as.character(attr(h, "status"))
            msg <- table_of_HTTP_status_codes[s]
        }
        ## Look for redirected URLs
        ## According to
        ## <https://www.rfc-editor.org/rfc/rfc7230#section-3.1.2> the first
        ## line of a response is the status-line, with "a possibly empty
        ## textual phrase describing the status code", so only look for
        ## a 301 status code in the first line.
        if(grepl(" 301 ", h[1L], useBytes = TRUE)) {
            ## Get the new location from the last consecutive 301
            ## obtained.
            ## (Split the headers into one chunk per response: responses
            ## are separated by blank "\r\n" lines.)
            h <- split(h, c(0L, cumsum(h == "\r\n")[-length(h)]))
            i <- vapply(h,
                        function(e)
                            grepl(" 301 ", e[1L], useBytes = TRUE),
                        NA)
            h <- h[[which(!i)[1L] - 1L]]
            pos <- grep("^[Ll]ocation: ", h, useBytes = TRUE)
            if(length(pos)) {
                loc <- sub("^[Ll]ocation: ([^\r]*)\r\n", "\\1",
                           h[pos[1L]])
                ## Ouch.  According to RFC 7231, the location is a URI
                ## reference, and may be relative in which case it needs
                ## resolving against the effect request URI.
                ## <https://www.rfc-editor.org/rfc/rfc7231#section-7.1.2>.
                ## Not quite straightforward, hence do not report such
                ## 301s.
                ## (Alternatively, could try reporting the 301 but no
                ## new location.)
                newParts <- parse_URI_reference(loc)
                if(nzchar(newParts[1L, "scheme"])) {
                    newLoc <- loc
                    ## Handle fragments. If the new URL does have one,
                    ## use it. Otherwise, if the old has one, use that.
                    ## (From section 7.1.2).
                    if (newParts[1L, "fragment"] == "") {
                        uParts <- parse_URI_reference(u)
                        if (nzchar(uFragment <- uParts[1L, "fragment"])) {
                            newLoc <- paste0(newLoc, "#", uFragment)
                        }
                    }
                }
            }
        }
        ## Re-check non-200 results with a full GET (some servers only
        ## mishandle HEAD requests).
        if((s != "200") && use_curl) {
            g <- .curl_fetch_memory_status_code(u)
            if(g == "200") {
                s <- g
                msg <- "OK"
            }
        }
        ## A mis-configured site
        if (s == "503" && any(grepl("www.sciencedirect.com", c(u, newLoc))))
            s <- "405"
        c(s, msg, newLoc)
    }

    ## Textual checks: returns c(cran, spaces, R), each either "" or the
    ## offending URL (non-canonical CRAN URL, URL containing spaces,
    ## http:// R-project URL).
    .check_http_B <- function(u) {
        ul <- tolower(u)
        cran <- ((grepl("^https?://cran.r-project.org/web/packages", ul) &&
                  !grepl("^https?://cran.r-project.org/web/packages/([.[:alnum:]_]+(html|pdf|rds))?$",
                         ul)) ||
                 (grepl("^https?://cran.r-project.org/web/views/[[:alnum:]]+[.]html$",
                        ul)) ||
                 startsWith(ul, "http://cran.r-project.org") ||
                 any(startsWith(ul, mirrors)))
        R <- grepl("^http://(www|bugs|journal).r-project.org", ul)
        spaces <- grepl(" ", u)
        c(if(cran) u else "", if(spaces) u else "", if(R) u else "")
    }

    ## Accumulator for problematic URLs.
    bad <- .gather()

    if(!NROW(db)) return(bad)

    ## Could also use utils::getCRANmirrors(local.only = TRUE).
    mirrors <- c(utils::read.csv(file.path(R.home("doc"),
                                           "CRAN_mirrors.csv"),
                                 as.is = TRUE, encoding = "UTF-8")$URL,
                 "http://cran.rstudio.com/",
                 "https://cran.rstudio.com/")
    mirrors <- tolower(sub("/$", "", mirrors))

    if(inherits(db, "check_url_db")) {
        ## Allow re-checking check results.
        parents <- db$From
        urls <- db$URL
    } else {
        ## Deduplicate: each unique URL keeps the list of its parents.
        parents <- split(db$Parent, db$URL)
        urls <- names(parents)
    }

    parts <- parse_URI_reference(urls)

    ## Empty URLs.
    ind <- apply(parts == "", 1L, all)
    if(any(ind)) {
        len <- sum(ind)
        bad <- rbind(bad,
                     .gather(urls[ind],
                             parents[ind],
                             m = rep.int("Empty URL", len)))
    }

    ## Invalid URI schemes.
    schemes <- parts[, 1L]
    ind <- is.na(match(tolower(schemes),
                       c("",
                         IANA_URI_scheme_db$URI_Scheme,
                         "arxiv",
                         ## Also allow 'isbn' and 'issn', which in fact
                         ## are registered URN namespaces but not
                         ## registered URI schemes, see
                         ## <https://www.iana.org/assignments/urn-formal/isbn>
                         ## <https://www.iana.org/assignments/urn-formal/issn>
                         ## <https://doi.org/10.17487/rfc3986>
                         ## <https://doi.org/10.17487/rfc8141>.
                         "isbn", "issn",
                         ## Also allow 'javascript' scheme, see
                         ## <https://tools.ietf.org/html/draft-hoehrmann-javascript-scheme-03>
                         ## (but apparently never registered with IANA).
                         "javascript")))
    if(any(ind)) {
        len <- sum(ind)
        msg <- rep.int("Invalid URI scheme", len)
        doi <- schemes[ind] == "doi"
        if(any(doi))
            msg[doi] <- paste(msg[doi], "(use \\doi for DOIs in Rd markup)")
        bad <- rbind(bad,
                     .gather(urls[ind], parents[ind], m = msg))
    }

    ## Could check urn URIs at least for appropriate namespaces using
    ## <https://www.iana.org/assignments/urn-namespaces/urn-namespaces-1.csv>

    ## ftp: report fetch errors and statuses >= 400.
    pos <- which(schemes == "ftp")
    if(length(pos) && remote) {
        urlspos <- urls[pos]
        headers <- .fetch_headers(urlspos)
        results <- do.call(rbind, Map(.check_ftp, urlspos, headers))
        status <- as.numeric(results[, 1L])
        ind <- (status < 0L) | (status >= 400L)
        if(any(ind)) {
            pos <- pos[ind]
            s <- as.character(status[ind])
            s[s == "-1"] <- "Error"
            m <- results[ind, 2L]
            m[is.na(m)] <- ""
            bad <- rbind(bad,
                         .gather(urls[pos], parents[pos], s, m))
        }
    }

    ## http/https.
    pos <- which(schemes == "http" | schemes == "https")
    if(length(pos) && remote) {
        urlspos <- urls[pos]
        ## Check DOI URLs via the DOI handle API, as we nowadays do for
        ## checking DOIs.
        myparts <- parts[pos, , drop = FALSE]
        ind <- (((myparts[, 2L] == "doi.org") |
                 (myparts[, 2L] == "dx.doi.org")) &
                startsWith(myparts[, 3L], "/10.") &
                !nzchar(myparts[, 4L]) &
                !nzchar(myparts[, 5L]))
        if(any(ind))
            urlspos[ind] <- paste0("https://doi.org/api/handles",
                                   myparts[ind, 3L])
        ## Could also use regexps, e.g.
        ##    pat <- "^https?://(dx[.])?doi.org/10[.]([^?#]+)$"
        ##    ind <- grep(pat, urlspos)
        ##    if(length(ind))
        ##         urlspos[ind] <-
        ##             paste0("https://doi.org/api/handles/10.",
        ##                     sub(pat, "\\2", urlspos[ind]))
        ## but using the parts is considerably faster ...
        headers <- .fetch_headers(urlspos)
        ## When fetching in parallel, sequentially re-fetch (with a full
        ## GET) everything that did not give a 200.
        if(parallel &&
           any(ind <- vapply(headers,
                             function(e) {
                                 if(inherits(e, "error")) -1L
                                 else attr(e, "status")
                             },
                             0L) != 200)) {
            ## We also re-check non-200 results in .check_http_A().
            ## Not very useful the way we currently show progress:
            ##   if(verbose)
            ##       message(sprintf("found %d non-OK responses, re-fetching ...",
            ##                       sum(ind)))
            headers[ind] <-
                .fetch_headers_via_curl(urlspos[ind],
                                        verbose, pool, FALSE)
        }
        results <- do.call(rbind, Map(.check_http, urlspos, headers))
        status <- as.numeric(results[, 1L])
        ## 405 is HTTP not allowing HEAD requests
        ## maybe also skip 500, 503, 504 as likely to be temporary issues
        ## Keep everything that is neither OK nor 405, or has any of the
        ## New/CRAN/Spaces/R columns non-empty.
        ind <- is.na(match(status, c(200L, 405L, NA))) |
            nzchar(results[, 3L]) |
            nzchar(results[, 4L]) |
            nzchar(results[, 5L]) |
            nzchar(results[, 6L])
        if(any(ind)) {
            pos <- pos[ind]
            s <- as.character(status[ind])
            s[is.na(s)] <- ""
            s[s == "-1"] <- "Error"
            m <- results[ind, 2L]
            m[is.na(m)] <- ""
            bad_https <- .gather(urls[pos], parents[pos], s, m,
                                 results[ind, 3L],
                                 results[ind, 4L],
                                 results[ind, 5L],
                                 results[ind, 6L])

            ## omit some typically false positives
            ## for efficiency reasons two separate false positives tables for 403 and 404:
            false_pos_db_403 <- c(
                "^https?://twitter.com/",
                "^https?://www.jstor.org/",
                "^https?://.+\\.wiley.com/",
                "^https?://www.science.org/",
                "^https?://www.researchgate.net/",
                "^https?://www.tandfonline.com/",
                "^https?://pubs.acs.org/",
                "^https?://journals.aom.org/",
                "^https?://journals.sagepub.com/",
                "^https?://www.pnas.org/")
            false_pos_db_404 <- c(
                "^https?://finance.yahoo.com/")
            bad_https <- bad_https[!((grepl(paste(false_pos_db_403, collapse="|"), bad_https$URL) &
                                        bad_https$Status == "403") |
                                     (grepl(paste(false_pos_db_404, collapse="|"), bad_https$URL) &
                                        bad_https$Status == "404")), , drop=FALSE]
            bad <- rbind(bad, bad_https)
        }
    }
    bad
}

format.check_url_db <-
function(x, ...)
{
    ## Format a check_url_db object as one multi-line chunk per record
    ## (URL, From, then any non-empty Status/Message/notes lines).
    if(!NROW(x))
        return(character())

    urls <- x$URL
    moved <- nzchar(x$New)
    if(any(moved)) {
        urls[moved] <- sprintf("%s (moved to %s)", urls[moved],
                               x$New[moved])
        ## Optionally report the redirections as 301s.
        show301 <- Sys.getenv("_R_CHECK_URLS_SHOW_301_STATUS_", "FALSE")
        if(config_val_to_logical(show301)) {
            x$Message[moved] <- "Moved Permanently"
            x$Status[moved] <- "301"
        }
    }

    from <- vapply(x$From, paste, "", collapse = "\n      ")
    status <- x$Status
    msg <- x$Message
    paste0(sprintf("URL: %s", urls),
           sprintf("\nFrom: %s", from),
           ifelse(status == "", "", sprintf("\nStatus: %s", status)),
           ifelse(msg == "", "",
                  sprintf("\nMessage: %s",
                          gsub("\n", "\n  ", msg, fixed = TRUE))),
           ifelse(x$Spaces == "", "", "\nURL contains spaces"),
           ifelse(x$CRAN == "", "", "\nCRAN URL not in canonical form"),
           ifelse(x$R == "", "", "\nR-project URL not in canonical form"))
}

print.check_url_db <-
function(x, ...)
{
    ## Print the formatted records, blank-line separated; returns the
    ## object invisibly.
    if(NROW(x) > 0)
        writeLines(paste(format(x), collapse = "\n\n"))
    invisible(x)
}

as.matrix.check_url_db <-
function(x, ...)
{
    ## Flatten the db to a character matrix with one row per
    ## (URL, parent) pair; the From list column becomes Parent.
    counts <- lengths(x[["From"]])
    cols <- c(list(URL = rep.int(x[["URL"]], counts),
                   Parent = unlist(x[["From"]])),
              lapply(x[-c(1L, 2L)], rep.int, counts))
    m <- do.call(cbind, cols)
    rownames(m) <- NULL
    m
}

.fetch_headers_via_base <-
function(urls, verbose = FALSE, ids = urls)
{
    ## Sequentially fetch response headers via curlGetHeaders(); a
    ## failed fetch yields the error condition object instead.
    fetch1 <- function(u, verbose, i) {
        if(verbose)
            message(sprintf("processing %s", i))
        tryCatch(curlGetHeaders(u), error = identity)
    }
    Map(fetch1, urls, verbose, ids)
}

.fetch_headers_via_curl <-
function(urls, verbose = FALSE, pool = NULL, nobody = TRUE)
{
    ## Fetch response headers concurrently via a curl multi pool and
    ## convert successful responses to the curlGetHeaders()-like form
    ## (character vector of header lines with a "status" attribute).
    res <- .curl_multi_run_worker(urls, nobody, verbose, pool)
    split_headers <- function(x) {
        lines <- strsplit(rawToChar(x$headers), "(?<=\r\n)",
                          perl = TRUE)[[1L]]
        attr(lines, "status") <- x$status_code
        lines
    }
    ok <- !vapply(res, inherits, NA, "error")
    res[ok] <- lapply(res[ok], split_headers)
    res
}

.curl_multi_run_worker <-
function(urls, nobody = FALSE, verbose = FALSE, pool = NULL,
         opts = NULL, hdrs = NULL)
{
    ## Fetch the given URLs concurrently via a curl multi pool,
    ## returning a list parallel to 'urls' with, per URL, either the
    ## curl response object or a "curl_error" condition object.
    ## Use 'nobody = TRUE' to fetch only headers.

    ## Minimal textual progress bar on stderr; all of its state lives
    ## in the environment 'bar'.
    .progress_bar <- function(length, msg = "") {
        bar <- new.env(parent = baseenv())
        ## A NULL length (non-verbose mode) disables the bar.
        if(is.null(length)) {
            length <- 0L
        }
        done <- fmt <- NULL             # make codetools happy
        bar$length <- length
        bar$done <- -1L
        digits <- trunc(log10(length)) + 1L
        bar$fmt <- paste0("\r", msg, "[ %", digits, "i / %", digits, "i ]")
        bar$update <- function() {
            ## Increment 'done' in the bar environment (update()'s
            ## enclosing environment is reset to 'bar' below).
            assign("done", inherits = TRUE, done + 1L)
            if (length <= 0L) {
                return()
            }
            if (done >= length) {
                ## Finished: blank out the bar.
                cat("\r", strrep(" ", nchar(fmt)), "\r", sep = "",
                    file = stderr())
            } else {
                cat(sprintf(fmt, done, length), sep = "",
                    file = stderr())
            }
        }
        environment(bar$update) <- bar
        bar$update()
        bar
    }

    if(is.null(pool))
        pool <- curl::new_pool()

    ## Default handle options, plus connect/total timeouts taken from
    ## getOption("timeout") when set.
    if(is.null(opts))
        opts <- .curl_handle_default_opts
    opts <- c(opts, list(nobody = nobody))
    timeout <- as.integer(getOption("timeout"))
    if(!is.na(timeout) && (timeout > 0L))
        opts <- c(opts,
                  list(connecttimeout = timeout,
                       timeout = timeout))

    if(is.null(hdrs))
        hdrs <- .curl_handle_default_hdrs

    ## Show progress only when verbose.
    bar <- .progress_bar(if (verbose) length(urls), msg = "fetching ")

    out <- vector("list", length(urls))

    for(i in seq_along(out)) {
        u <- urls[[i]]
        h <- curl::new_handle(url = u)
        curl::handle_setopt(h, .list = opts)
        if(length(hdrs))
            curl::handle_setheaders(h, .list = hdrs)
        ## Authenticate GitHub requests with GITHUB_PAT when set.
        if((startsWith(u, "https://github.com/") ||
            (u == "https://github.com")) &&
           nzchar(a <- Sys.getenv("GITHUB_PAT", ""))) {
            curl::handle_setheaders(h, "Authorization" = paste("token", a))
        }
        ## Each callback captures its own copy of 'i' (via local()) so
        ## the result lands in the right slot of 'out'.
        handle_result <- local({
            i <- i
            function(x) {
                out[[i]] <<- x
                bar$update()
            }
        })
        handle_error <- local({
            i <- i
            function(x) {
                out[[i]] <<-
                    structure(list(message = x),
                              class = c("curl_error", "error", "condition"))
                bar$update()
            }
        })
        curl::multi_add(h,
                        done = handle_result,
                        fail = handle_error,
                        pool = pool)
    }

    curl::multi_run(pool = pool)

    out
}

.curl_fetch_memory_status_code <-
function(u, verbose = FALSE, opts = NULL, hdrs = NULL)
{
    ## Perform a full GET of 'u' via curl and return the HTTP status
    ## code, or -1L if the fetch failed.
    if(verbose)
        message(sprintf("processing %s", u))

    if(is.null(opts))
        opts <- .curl_handle_default_opts
    ## Add connect/total timeouts from getOption("timeout") when set.
    timeout <- as.integer(getOption("timeout"))
    if(!is.na(timeout) && (timeout > 0L))
        opts <- c(opts,
                  list(connecttimeout = timeout,
                       timeout = timeout))

    if(is.null(hdrs))
        hdrs <- .curl_handle_default_hdrs

    ## Configure curl handle for better luck with JSTOR URLs/DOIs.
    ## Alternatively, special-case requests to
    ##   https?://doi.org/10.2307
    ##   https?://www.jstor.org
    handle <- curl::new_handle()
    curl::handle_setopt(handle, .list = opts)
    if(length(hdrs))
        curl::handle_setheaders(handle, .list = hdrs)
    ## Authenticate GitHub requests with GITHUB_PAT when set.
    if((u == "https://github.com" ||
        startsWith(u, "https://github.com/")) &&
       nzchar(pat <- Sys.getenv("GITHUB_PAT", "")))
        curl::handle_setheaders(handle,
                                "Authorization" = paste("token", pat))

    resp <- tryCatch(curl::curl_fetch_memory(u, handle = handle),
                     error = identity)
    .curl_response_status_code(resp)
}

.curl_response_status_code <-
function(x)
{
    ## Status code of a curl response object; -1L for error conditions.
    if(!inherits(x, "error"))
        x$status_code
    else
        -1L
}

## Default curl handle options used by the URL checking helpers:
## start a new cookie session and follow redirects.
.curl_handle_default_opts <-
    list(cookiesession = 1L,
         followlocation = 1L)

## Default request headers: the User-Agent can be overridden via the
## _R_CHECK_URLS_CURL_USER_AGENT_ environment variable (default "curl").
.curl_handle_default_hdrs <-
    list("User-Agent" =
             Sys.getenv("_R_CHECK_URLS_CURL_USER_AGENT_", "curl"))

check_package_urls <-
function(dir, verbose = FALSE)
{
    ## Convenience wrapper: build the URL db for the package sources in
    ## 'dir' and check it (fetching headers in parallel).
    check_url_db(url_db_from_package_sources(dir),
                 verbose = verbose, parallel = TRUE)
}
#  File src/library/tools/R/userdir.R
#  Part of the R package, https://www.R-project.org
#
#  Copyright (C) 2020 The R Core Team
#
#  This program is free software; you can redistribute it and/or modify
#  it under the terms of the GNU General Public License as published by
#  the Free Software Foundation; either version 2 of the License, or
#  (at your option) any later version.
#
#  This program is distributed in the hope that it will be useful,
#  but WITHOUT ANY WARRANTY; without even the implied warranty of
#  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
#  GNU General Public License for more details.
#
#  A copy of the GNU General Public License is available at
#  https://www.R-project.org/Licenses/

R_user_dir <-
function(package, which = c("data", "config", "cache"))
{
    ## Per-package, per-user directory for data, config or cache files.
    ## Resolution order: R-specific environment variable, then the
    ## corresponding XDG variable, then a platform default (Windows
    ## APPDATA/LOCALAPPDATA, macOS ~/Library, otherwise the XDG
    ## defaults under the home directory).
    stopifnot(is.character(package), length(package) == 1L)

    which <- match.arg(which)

    home <- normalizePath("~")

    r_var <- switch(which,
                    data = "R_USER_DATA_DIR",
                    config = "R_USER_CONFIG_DIR",
                    cache = "R_USER_CACHE_DIR")
    xdg_var <- switch(which,
                      data = "XDG_DATA_HOME",
                      config = "XDG_CONFIG_HOME",
                      cache = "XDG_CACHE_HOME")

    path <-
        if(nzchar(p <- Sys.getenv(r_var)))
            p
        else if(nzchar(p <- Sys.getenv(xdg_var)))
            p
        else if(.Platform$OS.type == "windows")
            file.path(Sys.getenv(if(which == "cache")
                                     "LOCALAPPDATA"
                                 else
                                     "APPDATA"),
                      "R", which)
        else if(Sys.info()["sysname"] == "Darwin")
            file.path(home, "Library",
                      switch(which,
                             data = "Application Support",
                             config = "Preferences",
                             cache = "Caches"),
                      "org.R-project.R")
        else
            file.path(home,
                      switch(which,
                             data = file.path(".local", "share"),
                             config = ".config",
                             cache = ".cache"))

    file.path(path, "R", package)
}
#  File src/library/tools/R/utils.R
#  Part of the R package, https://www.R-project.org
#
#  Copyright (C) 1995-2025 The R Core Team
#
#  This program is free software; you can redistribute it and/or modify
#  it under the terms of the GNU General Public License as published by
#  the Free Software Foundation; either version 2 of the License, or
#  (at your option) any later version.
#
#  This program is distributed in the hope that it will be useful,
#  but WITHOUT ANY WARRANTY; without even the implied warranty of
#  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
#  GNU General Public License for more details.
#
#  A copy of the GNU General Public License is available at
#  https://www.R-project.org/Licenses/

### * File utilities.

### ** file_ext

file_ext <-
function(x)
{
    ## Return the file extensions, "" where there is none.
    ## (Only purely alphanumeric extensions are recognized.)
    m <- regexpr("\\.([[:alnum:]]+)$", x)
    ifelse(m > -1L, substring(x, m + 1L), "")
}

### ** file_path_as_absolute

file_path_as_absolute <-
function(x)
{
    ## Return the canonical absolute form of an existing path,
    ## performing tilde expansion if necessary.
    if(length(x) != 1L)
        stop(gettextf("'%s' must be a character string", "x"),
             domain = NA)
    epath <- path.expand(x)
    if(!file.exists(epath))
        stop(gettextf("file '%s' does not exist", x), domain = NA)
    normalizePath(epath, "/", TRUE)
}

### ** file_path_relative_to

file_path_relative_to <-
function(x, start = getwd(), parent = TRUE)
{
    ## Express the paths in 'x' relative to 'start'.  With parent =
    ## FALSE, only strip a 'start' prefix; otherwise also use ".."
    ## segments to step up from 'start' to a common ancestor.
    x <- normalizePath(x, "/", mustWork = FALSE)
    if(!parent) {
        base <- normalizePath(start[1L], "/", mustWork = TRUE)
        hits <- startsWith(x, base)
        if(any(hits)) {
            ## Assume .Platform$file.sep is a single character.
            x[hits] <- substring(x[hits], nchar(base) + 2L)
        }
        return(x)
    }
    ref <- strsplit(normalizePath(start, "/", mustWork = FALSE),
                    "/", fixed = TRUE)[[1L]]
    relativize <- function(orig, comps) {
        ## Count the shared leading components.
        k <- 0L
        while(k < min(length(comps), length(ref)) &&
              comps[k + 1L] == ref[k + 1L])
            k <- k + 1L
        if(k == 0L)
            ## Paths start differently, so relative cannot work.
            orig
        else
            paste(c(rep_len("..", length(ref) - k),
                    comps[-seq_len(k)]),
                  collapse = .Platform$file.sep)
    }
    unlist(Map(relativize, x, strsplit(x, "/", fixed = TRUE),
               USE.NAMES = FALSE))
}

### ** file_path_sans_ext

file_path_sans_ext <-
function(x, compression = FALSE)
{
    ## Strip a purely alphanumeric file extension from each path,
    ## first removing a compression extension (.gz/.bz2/.xz) when
    ## 'compression' is TRUE.
    y <- if(compression) sub("[.](gz|bz2|xz)$", "", x) else x
    sub("([^.]+)\\.[[:alnum:]]+$", "\\1", y)
}

### ** file_test

## exported/documented copy is in utils.

file_test <-
function(op, x, y)
{
    ## Shell-style file tests: '-f', '-d', '-h'/'-L', '-x', '-w',
    ## '-r', '-nt' and '-ot'.  Vectorized in x and y.
    ## file.exists() only tests existence ('test -e' on some systems);
    ## '-f' here tests existence AND not being a directory (the GNU
    ## variant tests for being a regular file).
    switch(op,
           "-f" = {
               isdir <- file.info(x, extra_cols = FALSE)$isdir
               !is.na(isdir) & !isdir
           },
           "-d" = dir.exists(x),
           "-h" = ,
           "-L" = {
               lnk <- Sys.readlink(x)
               !is.na(lnk) & nzchar(lnk)
           },
           "-nt" = {
               mx <- file.mtime(x)
               my <- file.mtime(y)
               !is.na(mx) & !is.na(my) & (mx > my)
           },
           "-ot" = {
               mx <- file.mtime(x)
               my <- file.mtime(y)
               !is.na(mx) & !is.na(my) & (mx < my)
           },
           "-x" = file.access(x, 1L) == 0L,
           "-w" = file.access(x, 2L) == 0L,
           "-r" = file.access(x, 4L) == 0L,
           stop(gettextf("test '%s' is not available", op),
                domain = NA))
}

### ** list_files_with_exts

list_files_with_exts <-
function(dir, exts, all.files = FALSE, full.names = TRUE)
{
    ## List the files in 'dir' whose extension is one of 'exts',
    ## as paths (full.names = TRUE) or bare names.
    ## Does not cope with exts containing a '.', hence a regexp
    ## rather than comparing sub(".*\\.", "", files) against exts.
    re <- paste0("\\.(", paste(exts, collapse="|"), ")$")
    found <- grep(re, list.files(dir, all.files = all.files),
                  value = TRUE)
    if(!full.names)
        found
    else if(length(found))
        file.path(dir, found)
    else
        character()
}

### ** list_files_with_type

list_files_with_type <-
function(dir, type, all.files = FALSE, full.names = TRUE,
         OS_subdirs = .OStype())
{
    ## Paths of the files in 'dir' of the given 'type' (as in
    ## .make_file_exts()).  For R code and documentation files, the
    ## OS-specific subdirectories named in 'OS_subdirs' are searched
    ## as well (if present).
    exts <- .make_file_exts(type)
    files <- list_files_with_exts(dir, exts, all.files = all.files,
                                  full.names = full.names)

    if(type %in% c("code", "docs")) {
        for(os in OS_subdirs) {
            subdir <- file.path(dir, os)
            if(!dir.exists(subdir)) next
            extra <- list_files_with_exts(subdir, exts,
                                          all.files = all.files,
                                          full.names = FALSE)
            files <- c(files,
                       file.path(if(full.names) subdir else os, extra))
        }
        ## Only certain filenames are valid; the alternatives are
        ## spelled out since ranges depend on the collation order of
        ## the locale.
        files <- files[grep("^[ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789]", basename(files))]
    }
    if(type %in% "demo") {           # only certain filenames are valid.
        files <- files[grep("^[ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz]", basename(files))]
    }
    files
}

### ** list_code_files_in_package

list_code_files_in_package <-
function(dir) {
    ## R code files of the package rooted at 'dir', honoring a
    ## Collate (or OS-specific Collate.<OS>) metadata field when one
    ## is present.
    dir <- normalizePath(dir)
    code_dir <- file.path(dir, "R")
    code_files <- list_files_with_type(code_dir, "code")
    if(!length(code_files)) return(code_files)
    meta <- .get_package_metadata(dir)
    fields <- c(paste0("Collate.", .OStype()), "Collate")
    use <- fields %in% names(meta)
    if(any(use)) {
        ## Note that matching code files and collate spec really only
        ## is appropriate after having run configure as part of
        ## installing, as this can create code files (e.g., from a
        ## .R.in code file).  Set ops are not appropriate here either,
        ## as these re-sort according to the current locale: subset
        ## 'collate' to keep its order.
        collate <- file.path(code_dir,
                             .read_collate_field(meta[fields[use][1L]]))
        code_files <- collate[collate %in% code_files]
    }
    code_files
}


### ** reQuote

reQuote <-
function(x)
{
    ## Backslash-escape the regexp metacharacters in x so that the
    ## result matches x literally.
    m <- gregexpr("[.*?+^$\\()[]", x)
    regmatches(x, m) <- lapply(regmatches(x, m),
                               function(ch) paste0("\\", ch))
    x
}

### ** showNonASCII

showNonASCII <-
function(x)
{
    ## Print (via message(), prefixed by their indices) the elements
    ## of x flagged as non-ASCII by the C helper, and return them
    ## invisibly.
    nonascii <- .Call(C_nonASCII, x)
    if(any(nonascii)) {
        ## iconv will usually substitute, but implementations
        ## including macOS 14 may translate to ASCII.
        shown <- iconv(x[nonascii], "", "ASCII", sub = "byte")
        message(paste0(which(nonascii), ": ", shown, collapse = "\n"),
                domain = NA)
    }
    invisible(x[nonascii])
}

showNonASCIIfile <-
function(file)
{
    ## Report the non-ASCII lines of a file via showNonASCII().
    showNonASCII(readLines(file, warn = FALSE))
}

## Join path components with the platform's path-list separator
## (as used in TEXINPUTS-style environment variables).
env_path <-
function(...)
    file.path(..., fsep = .Platform$path.sep)

### * Text utilities.

### ** delimMatch
delimMatch <-
function(x, delim = c("{", "}"), syntax = "Rd")
{
    ## Match balanced delimiter pairs (by default braces) in the
    ## elements of the character vector 'x', delegating the actual
    ## matching to the C routine after validating the arguments.
    if(!is.character(x))
        stop("argument 'x' must be a character vector")
    ## FIXME: bytes or chars?
    if((length(delim) != 2L) || any(nchar(delim) != 1L))
        stop("argument 'delim' must specify two characters")
    ## Only the Rd escaping convention is handled by the C code.
    if(syntax != "Rd")
        stop("only Rd syntax is currently supported")

    .Call(C_delim_match, x, delim)
}

### ** lines2str
lines2str <-
function(txt, sep = "") {
    ## Collapse the lines in 'txt' into one trimmed UTF-8 string,
    ## replacing embedded newlines by 'sep' as well.
    joined <- paste(txt, collapse = sep)
    flat <- gsub("\n", sep, joined, fixed = TRUE, useBytes = TRUE)
    trimws(iconv(flat, to = "UTF-8", sub = "byte"))
}


### * LaTeX utilities

### ** texi2pdf
texi2pdf <-
function(file, clean = FALSE, quiet = TRUE,
         texi2dvi = getOption("texi2dvi"),
         texinputs = NULL, index = TRUE)
{
    ## Convenience wrapper: run texi2dvi() in PDF mode.
    texi2dvi(file = file, pdf = TRUE, clean = clean, quiet = quiet,
             texi2dvi = texi2dvi, texinputs = texinputs, index = index)
}

### ** texi2dvi

texi2dvi <-
function(file, pdf = FALSE, clean = FALSE, quiet = TRUE,
         texi2dvi = getOption("texi2dvi"),
         texinputs = NULL, index = TRUE)
{
    ## Run texi2dvi (or texify, or an emulation via latex/bibtex/
    ## makeindex) on a (La)TeX 'file', producing a DVI or PDF file in
    ## the current directory.  On failure, diagnostics are extracted
    ## from the .log/.blg files and signalled via stop().
    if (clean) pre_files <- list.files(all.files = TRUE)
    do_cleanup <- function(clean)
        if(clean) {
            ## output file will be created in the current directory
            out_file <- paste(basename(file_path_sans_ext(file)),
                              if(pdf) "pdf" else "dvi", sep = ".")
            files <- setdiff(list.files(all.files = TRUE),
                             c(".", "..", out_file, pre_files))
            file.remove(files)
        }

    ## Run texi2dvi on a latex file, or emulate it.

    if(identical(texi2dvi, "emulation")) texi2dvi <- ""
    else {
        if(is.null(texi2dvi) || !nzchar(texi2dvi) || texi2dvi == "texi2dvi") {
            texi2dvi <- Sys.which("texi2dvi")
            if(.Platform$OS.type == "windows" && !nzchar(texi2dvi))
                texi2dvi <- Sys.which("texify")
        } else if (!nzchar(Sys.which(texi2dvi))) { # check provided path
            warning("texi2dvi script/program not available, using emulation")
            texi2dvi <- ""
        } # else the provided one should work
    }

    paths2env <- function(x) paste(x, collapse = .Platform$path.sep)
    ## not clear if this is needed, but works
    if(.Platform$OS.type == "windows")
        texinputs <- gsub("\\", "/", texinputs, fixed = TRUE)
    Rtexmf <- file.path(R.home("share"), "texmf", fsep = "/")
    Rtexinputs <- file.path(Rtexmf, "tex", "latex", fsep = "/")
    Rbibinputs <- file.path(Rtexmf, "bibtex", "bib", fsep = "/")
    Rbstinputs <- file.path(Rtexmf, "bibtex", "bst", fsep = "/")

    ## Save and restore (or unset) TEXINPUTS/BIBINPUTS/BSTINPUTS;
    ## note that after the first on.exit() all further registrations
    ## must use add = TRUE.
    otexinputs <- Sys.getenv("TEXINPUTS", unset = NA_character_)
    if(is.na(otexinputs)) {
        on.exit(Sys.unsetenv("TEXINPUTS"))
        otexinputs <- "."
    } else on.exit(Sys.setenv(TEXINPUTS = otexinputs))
    ## "" below represents system paths
    Sys.setenv(TEXINPUTS = paths2env(c(texinputs, otexinputs, Rtexinputs, "")))
    obibinputs <- Sys.getenv("BIBINPUTS", unset = NA_character_)
    if(is.na(obibinputs)) {
        on.exit(Sys.unsetenv("BIBINPUTS"), add = TRUE)
        obibinputs <- "."
    } else
        ## 'add = TRUE' must be an argument of on.exit(), not of
        ## Sys.setenv(): otherwise the TEXINPUTS restore handler would
        ## be overwritten (and a bogus 'add' environment variable set).
        on.exit(Sys.setenv(BIBINPUTS = obibinputs), add = TRUE)
    Sys.setenv(BIBINPUTS = paths2env(c(texinputs, obibinputs, Rbibinputs, "")))
    obstinputs <- Sys.getenv("BSTINPUTS", unset = NA_character_)
    if(is.na(obstinputs)) {
        on.exit(Sys.unsetenv("BSTINPUTS"), add = TRUE)
        obstinputs <- "."
    } else on.exit(Sys.setenv(BSTINPUTS = obstinputs), add = TRUE)
    Sys.setenv(BSTINPUTS = paths2env(c(texinputs, obstinputs, Rbstinputs, "")))

    if(index && nzchar(texi2dvi) && .Platform$OS.type != "windows") {
        ## switch off the use of texindy in texi2dvi >= 1.157
        Sys.setenv(TEXINDY = "false")
        on.exit(Sys.unsetenv("TEXINDY"), add = TRUE)
        opt_pdf <- if(pdf) "--pdf" else ""
        opt_quiet <- if(quiet) "--quiet" else ""
        opt_extra <- ""

        ## the current heuristics for finding error messages in log files
        ## have better coverage with the default '!' error indicator, but
        ## texi2dvi enables the file:line:error style, so:
        out <- .system_with_capture(texi2dvi, "--help")
        if(length(grep("--no-line-error", out$stdout)))
            opt_extra <- "--no-line-error"

        ## and work around a bug in texi2dvi
        ## https://stat.ethz.ch/pipermail/r-devel/2011-March/060262.html
        ## That has [A-Za-z], earlier versions [A-z], both of which may be
        ## invalid in some locales.
        env0 <- "LC_COLLATE=C"
        ## texi2dvi, at least on macOS (4.8) does not accept TMPDIR with spaces.
        if (grepl(" ", Sys.getenv("TMPDIR")))
            env0 <- paste(env0,  "TMPDIR=/tmp")
        out <- .system_with_capture(texi2dvi,
                                    c(opt_pdf, opt_quiet, opt_extra,
                                      shQuote(file)),
                                    env = env0)

        log <- paste0(file_path_sans_ext(file), ".log")

        ## With Texinfo 6.1 (precisely, c6637), texi2dvi may not rerun
        ## often enough and give a non-zero status value when it should
        ## have continued iterating.
        ## Try to catch and correct cases seen on CRAN ...
        ## (Note that texi2dvi may have been run quietly, in which case
        ## diagnostics will only be in the log file.)
        if(out$status &&
           file_test("-f", log) &&
           any(grepl("(Rerun to get|biblatex.*\\(re\\)run)",
                     readLines(log, warn = FALSE), useBytes = TRUE))) {
            out <- .system_with_capture(texi2dvi,
                                        c(opt_pdf, opt_quiet, opt_extra,
                                          shQuote(file)),
                                        env = env0)
        }

        ## We cannot necessarily rely on out$status, hence let us
        ## analyze the log files in any case.
        errors <- character()
        ## (La)TeX errors.
        log <- paste0(file_path_sans_ext(file), ".log")
        if(file_test("-f", log)) {
            lines <- .get_LaTeX_errors_from_log_file(log)
            if(length(lines))
                errors <- paste0("LaTeX errors:\n",
                                 paste(lines, collapse = "\n"))
        }
        ## BibTeX errors.
        log <- paste0(file_path_sans_ext(file), ".blg")
        if(file_test("-f", log)) {
            lines <- .get_BibTeX_errors_from_blg_file(log)
            if(length(lines))
                errors <- paste0("BibTeX errors:\n",
                                 paste(lines, collapse = "\n"))
        }

        msg <- ""
        if(out$status) {
            ## <NOTE>
            ## If we cannot rely on out$status, we could test for
            ##   if(out$status || length(errors))
            ## But shouldn't we be able to rely on out$status on Unix?
            ## </NOTE>
            msg <- gettextf("Running 'texi2dvi' on '%s' failed.", file)
            ## Error messages from GNU texi2dvi are rather terse, so
            ## only use them in case no additional diagnostics are
            ## available (e.g, makeindex errors).
            if(length(errors))
                msg <- paste(msg, errors, sep = "\n")
            else if(length(out$stderr))
                msg <- paste(msg, "Messages:",
                             paste(out$stderr, collapse = "\n"),
                             sep = "\n")
            if(!quiet)
                msg <- paste(msg, "Output:",
                             paste(out$stdout, collapse = "\n"),
                             sep = "\n")
        }

        do_cleanup(clean)

        if(nzchar(msg))
            stop(msg, domain = NA)
        else if(!quiet)
            message(paste(paste(out$stderr, collapse = "\n"),
                          paste(out$stdout, collapse = "\n"),
                          sep = "\n"))
    } else if(index && nzchar(texi2dvi)) { # MiKTeX on Windows
        extra <- ""

        ## look for MiKTeX (which this almost certainly is)
        ## http://docs.miktex.org/manual/texify.html
        ## ver <- system(paste(shQuote(texi2dvi), "--version"), intern = TRUE)
        ## if(length(grep("MiKTeX", ver[1L]))) {
        ##     extra <- "--max-iterations=20"
        ## }

        ## 'file' could be a file path
        base <- basename(file_path_sans_ext(file))
        ## this only gives a failure in some cases, e.g. not for bibtex errors.
        system(paste(shQuote(texi2dvi),
                     if(quiet) "--quiet" else "",
                     if(pdf) "--pdf" else "",
                     shQuote(file), extra),
               intern=TRUE, ignore.stderr=TRUE)
        msg <- ""
        ## (La)TeX errors.
        logfile <- paste0(base, ".log")
        if(file_test("-f", logfile)) {
            lines <- .get_LaTeX_errors_from_log_file(logfile)
            if(length(lines))
                msg <- paste(msg, "LaTeX errors:",
                             paste(lines, collapse = "\n"),
                             sep = "\n")
        }
        ## BibTeX errors.
        logfile <- paste0(base, ".blg")
        if(file_test("-f", logfile)) {
            lines <- .get_BibTeX_errors_from_blg_file(logfile)
            if(length(lines))
                msg <- paste(msg, "BibTeX errors:",
                             paste(lines, collapse = "\n"),
                             sep = "\n")
        }

        do_cleanup(clean)
        if(nzchar(msg)) {
            msg <- paste(gettextf("running 'texi2dvi' on '%s' failed", file),
                         msg, "", sep = "\n")
            stop(msg, call. = FALSE, domain = NA)
        }
    } else {
        ## Do not have texi2dvi or don't want to index
        ## Needed on Windows except for MiKTeX (prior to Sept 2015)

        texfile <- shQuote(file)
        ## 'file' could be a file path
        base <- basename(file_path_sans_ext(file))
        idxfile <- paste0(base, ".idx")
        latex <- if(pdf) Sys.getenv("PDFLATEX", "pdflatex")
        else  Sys.getenv("LATEX", "latex")
        if(!nzchar(Sys.which(latex)))
            stop(if(pdf) "pdflatex" else "latex", " is not available",
                 domain = NA)

        sys2 <- if(quiet)
            function(...) system2(..., stdout = FALSE, stderr = FALSE)
        else system2
        bibtex <- Sys.getenv("BIBTEX", "bibtex")
        makeindex <- Sys.getenv("MAKEINDEX", "makeindex")
        ltxargs <- c("-interaction=nonstopmode", texfile)
        if(sys2(latex, ltxargs)) {
            lines <- .get_LaTeX_errors_from_log_file(paste0(base, ".log"))
            errors <- if(length(lines))
                          paste0("LaTeX errors:\n",
                                 paste(lines, collapse = "\n"))
                      else character()
            stop(paste(gettextf("unable to run %s on '%s'", latex, file),
                       errors, sep = "\n"),
                 domain = NA)
        }
        nmiss <- length(grep("Warning:.*Citation.*undefined",
                             readLines(paste0(base, ".log")),
                             useBytes = TRUE))
        for(iter in 1L:10L) { ## safety check
            ## This might fail as the citations have been included in the Rnw
            if(nmiss) sys2(bibtex, shQuote(base))
            nmiss_prev <- nmiss
            if(index && file.exists(idxfile)) {
                if(sys2(makeindex, shQuote(idxfile)))
                    stop(gettextf("unable to run '%s' on '%s'",
                                  makeindex, idxfile),
                         domain = NA)
            }
            if(sys2(latex, ltxargs)) {
                lines <- .get_LaTeX_errors_from_log_file(paste0(base, ".log"))
                errors <- if(length(lines))
                              paste0("LaTeX errors:\n",
                                     paste(lines, collapse = "\n"))
                else character()
                stop(paste(gettextf("unable to run %s on '%s'", latex, file),
                           errors, sep = "\n"),
                     domain = NA)
            }
            Log <- readLines(paste0(base, ".log"))
            nmiss <- length(grep("Warning:.*Citation.*undefined", Log,
                                 useBytes = TRUE))
            if(nmiss == nmiss_prev &&
               !any(grepl("(Rerun to get|biblatex.*\\(re\\)run)", Log,
                          useBytes = TRUE)) ) break
        }
        do_cleanup(clean)
    }
    invisible(NULL)
}

### * Internal utility variables.

### ** .vc_dir_names

## Version control directory names: CVS, .svn (Subversion), .arch-ids
## (arch), .bzr, .git, .hg (mercurial) and _darcs (Darcs).
## And it seems .metadata (eclipse) is in the same category.

.vc_dir_names <-
    c("CVS", ".svn", ".arch-ids", ".bzr", ".git", ".hg", "_darcs", ".metadata")

## Regexp version of .vc_dir_names, matching such a name as a full
## path component (beware of the need for escapes if amending).

.vc_dir_names_re <-
    "/(CVS|\\.svn|\\.arch-ids|\\.bzr|\\.git|\\.hg|_darcs|\\.metadata)(/|$)"

## Hidden file names/extensions to be excluded.  We are told:
##   .Rproj.user is RStudio;
##   .cproject, .project and .settings are Eclipse;
##   .exrc is for vi;
##   .tm_properties is Mac's TextMate.
.hidden_file_exclusions <-
    c(".Renviron", ".Rprofile", ".Rproj.user",
      ".Rhistory", ".Rapp.history",
      ".tex", ".log", ".aux", ".pdf", ".png",
      ".backups", ".cvsignore", ".cproject", ".directory",
      ".dropbox", ".exrc", ".gdb.history",
      ".gitattributes", ".gitignore", ".gitmodules",
      ".hgignore", ".hgtags",
      ".htaccess",
      ".latex2html-init",
      ".project", ".seed", ".settings", ".tm_properties")

### * Internal utility functions.

### ** filtergrep

filtergrep <-
function(pattern, x, ...)
{
    ## The elements of x NOT matching 'pattern'.
    grep(pattern, x, invert = TRUE, value = TRUE, ...)
}

### ** %notin%

`%notin%` <-
function(x, y)
{
    ## Negation of %in%: TRUE where x has no match in y.
    is.na(match(x, y))
}

### ** %w/o%

## x without y, as in the examples of ?match.
`%w/o%` <-
function(x, y)
{
    ## x without y, as in the examples of ?match.
    x[is.na(match(x, y))]
}

### ** .OStype

.OStype <-
function()
{
    ## The OS type to use: the R_OSTYPE environment variable if set,
    ## otherwise .Platform$OS.type.
    Sys.getenv("R_OSTYPE", unset = .Platform$OS.type, names = FALSE)
}

### ** .R_copyright_msg

.R_copyright_msg <-
function(year)
{
    ## Standard R Core copyright line from 'year' to the current
    ## version's release year.
    sprintf("Copyright (C) %s-%s The R Core Team.", year, R.version$year)
}

### ** .R_top_srcdir

## Find the root directory of the source tree used for building this
## version of R (corresponding to Unix configure @top_srcdir@).
## Seems this is not recorded anywhere, but we can find our way ...

.R_top_srcdir_from_Rd <-
function() {
    ## Find the root directory of the source tree used for building
    ## this R, recovered from the path recorded in an Rd file of the
    ## installed tools help database.
    filebase <-
        file_path_sans_ext(system.file("help", "tools.rdb",
                                       package = "tools"))
    path <- attr(fetchRdDB(filebase, "QC"), "Rdfile")
    ## Strip the trailing "src/library/tools/man/QC.Rd" part (28
    ## characters including the separator) instead of using five
    ## dirname() calls.
    substr(path, 1L, nchar(path) - 28L)
}

## Unfortunately,
##   .R_top_srcdir <- .R_top_srcdir_from_Rd()
## does not work because when tools is installed there are no Rd pages
## yet ...

### ** config_val_to_logical

config_val_to_logical <-
function(val, na.ok = TRUE)
{
    ## Coerce a configuration value string to logical, delegating to
    ## utils:::str2logical().
    utils:::str2logical(val, na.ok = na.ok)
}

### ** .canonicalize_doi

.canonicalize_doi <-
function(x)
{
    ## Reduce the various DOI spellings ("doi:NAME", "DOI: NAME",
    ## "https://doi.org/NAME", ...) to the bare DOI name.
    x <- sub("^((doi|DOI):)?[[:space:]]*https?://(dx[.])?doi[.]org/", "",
             x)
    sub("^(doi|DOI):", "", x)
}

### ** .canonicalize_quotes

.canonicalize_quotes <-
function(txt)
{
    ## Replace directional single and double quotation marks by their
    ## ASCII counterparts, preserving the declared encodings.
    txt <- as.character(txt)
    if(!length(txt)) return(txt)
    encs <- Encoding(txt)
    txt <- gsub("(\u2018|\u2019)", "'", txt, perl = TRUE, useBytes = TRUE)
    txt <- gsub("(\u201c|\u201d)", '"', txt, perl = TRUE, useBytes = TRUE)
    Encoding(txt) <- encs
    txt
}

### ** .enc2latin1

.enc2latin1 <-
function(x)
{
    ## Re-encode the UTF-8-marked elements of x to latin1,
    ## substituting bytes which cannot be represented.
    utf8 <- Encoding(x) == "UTF-8"
    if(any(utf8))
        x[utf8] <- iconv(x[utf8], "UTF-8", "latin1", sub = "byte")
    x
}

### ** .eval_with_capture

.eval_with_capture <-
function(expr, type = NULL)
{
    ## Evaluate the given expression and return a list with elements
    ## 'value', 'output' and 'message' (with obvious meanings).
    ## 'type' selects what to capture: NULL (default) captures both,
    ## "output" or "message" captures only that stream.

    ## <NOTE>
    ## The current implementation gives character() if capturing was not
    ## attempted or gave nothing.  If desired, one could modify the code
    ## to return NULL in the former case.
    ## </NOTE>

    if(is.null(type))
        capture_output <- capture_message <- TRUE
    else {
        type <- match.arg(type, c("output", "message"))
        capture_output <- type == "output"
        capture_message <- !capture_output
    }

    ## Capture into anonymous read-write file connections, declared
    ## UTF-8.
    outcon <- file(open = "w+", encoding = "UTF-8")
    msgcon <- file(open = "w+", encoding = "UTF-8")
    if(capture_output) {
        sink(outcon, type = "output")
        on.exit(sink(type = "output"))
    }
    if(capture_message) {
        sink(msgcon, type = "message")
        ## add = capture_output: only append if an output-sink exit
        ## handler was registered above.
        on.exit(sink(type = "message"), add = capture_output)
    }
    on.exit({ close(outcon) ; close(msgcon) }, add = TRUE)

    value <- eval(expr)
    list(value = value,
         output = readLines(outcon, warn = FALSE),
         message = readLines(msgcon, warn = FALSE))
}

### ** .expand_anchored_Rd_xrefs

.expand_anchored_Rd_xrefs <-
function(db)
{
    ## Expand anchored Rd cross-references: 'db' must have columns
    ## Target and Anchor; the result is a matrix with columns
    ## T_Package and T_File (see .check_Rd_xrefs()).
    db <- db[, c("Target", "Anchor"), drop = FALSE]
    ## "=TOPIC" anchors: the target is TOPIC itself, with no anchor.
    anchor <- db[, 2L]
    eq <- startsWith(anchor, "=")
    if(any(eq))
        db[eq, ] <- cbind(sub("^=", "", anchor[eq]), "")
    ## "PKG:FILE" anchors split into package and file parts.
    anchor <- db[, 2L]
    colon <- grepl(":", anchor, fixed = TRUE)
    out <- cbind(T_Package = anchor, T_File = db[, 1L])
    out[colon, ] <-
        cbind(sub("([^:]*):(.*)", "\\1", anchor[colon]),
              sub("([^:]*):(.*)", "\\2", anchor[colon]))
    out
}

### ** .file_append_ensuring_LFs

.file_append_ensuring_LFs <-
function(file1, file2)
{
    ## Use a fast version of file.append() that ensures LF between
    ## files.  Both arguments are passed straight to the C routine;
    ## vectorization/return value are as implemented there.
    .Call(C_codeFilesAppend, file1, file2)
}

### ** .file_path_to_LaTeX_graphicspath

.file_path_to_LaTeX_graphicspath <-
function(x)
{
    ## Turn a directory path into a LaTeX \graphicspath setting.
    ## Older versions of (PDF)LaTeX need double quotes in case of
    ## spaces etc.; newer versions of XeLaTeX and LuaLaTeX cannot
    ## handle these, so emit both guarded by a conditional.  Argh ...
    x <- normalizePath(x, "/")
    paste0("\\makeatletter",
           "\\ifthenelse",
           "{\\boolean{Rd@graphicspath@needs@quotes}}",
           "{\\graphicspath{{\"", x, "/\"}}}",
           "{\\graphicspath{{", x, "/}}}",
           "\\makeatother")
}

### ** .file_path_relative_to_dir

.file_path_relative_to_dir <-
function(x, dir, add = FALSE)
{
    ## Strip a leading 'dir' (and the following separator, assumed a
    ## single character) from the elements of x which start with it;
    ## with add = TRUE, keep basename(dir) as the leading component.
    hits <- startsWith(x, dir)
    if(any(hits)) {
        tails <- substring(x[hits], nchar(dir) + 2L)
        x[hits] <- if(add) file.path(basename(dir), tails) else tails
    }
    x
}

### ** .find_calls

.find_calls <-
function(x, predicate = NULL, recursive = FALSE)
{
    ## Collect the calls in object x which satisfy 'predicate'
    ## (default: all calls), optionally descending recursively into
    ## subexpressions.  Returns a list of calls.
    found <- list()

    if(!is.recursive(x) || isS4(x)) return(found)

    x <- if(is.call(x))
             list(x)
         else {
             if(is.object(x))
                 class(x) <- NULL
             as.list(x)
         }

    matches <- if(is.null(predicate))
        function(e) is.call(e)
    else ## no check predicate returns a scalar, so any() added for 4.2.0
        function(e) is.call(e) && any(predicate(e))

    if(!recursive) return(Filter(matches, x))

    walk <- function(e) {
        if(matches(e)) found <<- c(found, list(e))
        if(is.recursive(e) && !is.environment(e) && !isS4(e)) {
            if(is.object(e))
                class(e) <- NULL
            for(sub in as.list(e)) walk(sub)
        }
    }

    walk(x)

    found
}

### ** .find_calls_in_file

.find_calls_in_file <-
function(file, encoding = NA, predicate = NULL, recursive = FALSE)
{
    ## Convenience wrapper: parse a code file and collect the calls
    ## in it via .find_calls().
    .find_calls(.parse_code_file(file, encoding), predicate, recursive)
}

### ** .find_calls_in_package_code

.find_calls_in_package_code <-
function(dir, predicate = NULL, recursive = FALSE, .worker = NULL,
         which = "code")
{
    ## Collect calls (matching 'predicate', see .find_calls()) from
    ## the files of the package with root 'dir'.  'which' selects the
    ## file groups searched: R code, installed vignette code, tests,
    ## NAMESPACE, CITATION and/or rendered Rd examples ("docs").
    ## Returns a list of per-file results named by the file paths
    ## relative to the package's parent directory.
    dir <- file_path_as_absolute(dir)

    ## The DESCRIPTION Encoding field (if any) is needed to parse the
    ## code files.
    dfile <- file.path(dir, "DESCRIPTION")
    encoding <- if(file.exists(dfile))
        .read_description(dfile)["Encoding"] else NA

    ## Default worker: parse one file and collect its matching calls.
    if(is.null(.worker))
        .worker <- function(file, encoding)
            .find_calls_in_file(file, encoding, predicate, recursive)

    which <- match.arg(which,
                       c("code", "vignettes", "tests",
                         "NAMESPACE", "CITATION", "docs"),
                       several.ok = TRUE)
    code_files <-
        c(character(),
          if("code" %in% which)
              list_files_with_type(file.path(dir, "R"), "code",
                                   OS_subdirs = c("unix", "windows")),
          ## Vignette code is only looked up in inst/doc (i.e., as
          ## tangled during installation), and only if a vignettes dir
          ## exists as well.
          if(("vignettes" %in% which) &&
             dir.exists(file.path(dir, "vignettes")) &&
             dir.exists(fp <- file.path(dir, "inst", "doc")))
              list_files_with_type(fp, "code"),
          ## cf. .check_packages_used_in_tests() ...
          if(("tests" %in% which) &&
             dir.exists(fp <- file.path(dir, "tests")))
              c(list.files(fp, pattern = "\\.[rR]$",
                           full.names = TRUE),
                if(dir.exists(fp <- file.path(fp, "testthat")))
                    list.files(fp, pattern = "\\.[rR]$",
                               full.names = TRUE)),
          if(("NAMESPACE" %in% which) &&
             file.exists(fp <- file.path(dir, "NAMESPACE")))
              fp,
          if(("CITATION" %in% which) &&
             file.exists(fp <- file.path(dir, "inst", "CITATION")))
              fp)

    calls <- lapply(code_files, .worker, encoding)
    names(calls) <-
        .file_path_relative_to_dir(code_files, dirname(dir))

    ## For "docs", extract the examples of each Rd file to a tempfile
    ## and run the worker on that (Rd2ex output is UTF-8).
    if("docs" %in% which) {
        db <- Rd_db(dir = dir)
        names(db) <- file.path(basename(dir), "man", names(db))
        calls <-
            c(calls,
              Filter(length,
                     lapply(db,
                            function(e) {
                                f <- tempfile()
                                on.exit(unlink(f))
                                Rd2ex(e, f)
                                if(file.exists(f))
                                    .worker(f, "UTF-8")
                            })))
    }
    
    calls
}

### ** .predicate_for_calls_with_names

.predicate_for_calls_with_names <-
function(funnames, pkgnames = character(), colons = c("::", ":::"))
{
    ## Build a predicate (for .find_calls()) matching calls to any FUN
    ## in 'funnames', either directly or in PKG::FUN / PKG:::FUN form
    ## with PKG in 'pkgnames'.
    ## Use pkgnames = NA_character_ to match *any* PKG::FUN call with
    ## FUN in funnames.  Strange but why not?  Or better to use "*"?
    function(e) {
        ## Note: the '||' must be grouped INSIDE the is.call(e) test;
        ## otherwise e[[1L]] is evaluated for non-calls (e.g. symbols),
        ## which errors.
        (is.call(e) &&
         ((is.name(x <- e[[1L]]) &&
           as.character(x) %in% funnames) ||
          (is.call(x <- e[[1L]]) &&
           is.name(x[[1L]]) &&
           (as.character(x[[1L]]) %in% colons) &&
           (((length(pkgnames) == 1L) && is.na(pkgnames)) ||
            as.character(x[[2L]]) %in% pkgnames) &&
           as.character(x[[3L]]) %in% funnames)))
    }
}

### ** .find_owner_env

.find_owner_env <-
function(v, env, last = NA, default = NA) {
    ## Walk the environment chain from 'env' (stopping at 'last') and
    ## return the first environment owning a binding for 'v', or
    ## 'default' when none is found.
    repeat {
        if(identical(env, last))
            return(default)
        if(exists(v, envir = env, inherits = FALSE))
            return(env)
        env <- parent.env(env)
    }
}

### ** .find_tidy_cmd

.find_tidy_cmd <-
function(Tidy = Sys.getenv("R_TIDYCMD", "tidy"))
{
    ## Locate a recent enough version of HTML Tidy: we really need
    ## HTML Tidy 5.0.0 or later, and all these versions should have
    ## 'tidy --version' match
    ##   ^HTML Tidy .*version (\\d+\\.\\d+\\.\\d+)
    ## See
    ## <https://github.com/htacg/tidy-html5/blob/next/README/VERSION.md>
    ## and
    ## <https://bugs.r-project.org/show_bug.cgi?id=18731>.
    ## Returns the command, or "" with a "msg" attribute explaining
    ## the failure.
    fail <- function(msg) {
        res <- ""
        attr(res, "msg") <- msg
        res
    }
    if(!nzchar(Sys.which(Tidy)))
        return(fail(sprintf("no command '%s' found", Tidy)))
    info <- system2(Tidy, "--version", stdout = TRUE)
    ## Argh.  We used to match with
    ##   ^HTML Tidy .*version (\\d+\\.\\d+\\.\\d+)$
    ## but HTML Tidy 5.8.0 has added l10n to its version info.  For
    ## now, this always seems to match
    ##   ^HTML Tidy .* (\\d+\\.\\d+\\.\\d+)$
    ## if this changes, we could try getting the version info with
    ## LC_MESSAGES= (set to empty) which seems to get the English
    ## default.
    m <- regexec("^HTML Tidy .* (\\d+\\.\\d+\\.\\d+)$", info)
    version <- regmatches(info, m)[[1L]][2L]
    if(is.na(version))
        return(fail(sprintf("'%s' doesn't look like recent enough HTML Tidy",
                            Tidy)))
    ## Minimum version requirement.
    required <- "5.0.0"
    if(numeric_version(version) < required)
        return(fail(sprintf("'%s' is too old: need version %s, found %s",
                            Tidy, required, version)))
    Tidy
}

### ** .get_BibTeX_errors_from_blg_file

.get_BibTeX_errors_from_blg_file <-
function(con)
{
    ## Get BibTeX error info from a .blg file: the non-header lines
    ## up to the first warning or summary line, hoping for the best ...
    lines <- readLines(con, warn = FALSE)
    ## Re-encode lines with invalid bytes.
    bad <- is.na(nchar(lines, allowNA = TRUE))
    if(any(bad))
        lines[bad] <- iconv(lines[bad], "", "", sub = "byte")

    ## How can we find out for sure that there were errors?  Try
    ## guessing ... and peeking at tex-buf.el from AUCTeX.
    has_errors <-
        (any(startsWith(lines, "---")) ||
         regexpr("There (was|were) ([0123456789]+) error messages?",
                 lines[length(lines)]) > -1L)
    ## (Note that warnings are ignored for now.)
    ## MiKTeX does not give usage, so '(There were n error messages)' is
    ## last.
    stops <- grep("^(Warning|You|\\(There)", lines)
    if(!has_errors || !length(stops)) return(character())
    ## Skip the two header lines.
    lines[seq.int(from = 3L, length.out = stops[1L] - 3L)]
}

### ** .get_LaTeX_errors_from_log_file

.get_LaTeX_errors_from_log_file <-
function(con, n = 4L)
{
    ## Extract (La)TeX error lines plus up to n (default 4) lines of
    ## trailing context each.  Returns character() if no error lines
    ## are found.
    lines <- readLines(con, warn = FALSE)
    ## Fix up lines that are invalid in the current encoding.
    bad <- is.na(nchar(lines, allowNA = TRUE))
    if(any(bad))
        lines[bad] <- iconv(lines[bad], "", "", sub = "byte")

    ## Match both the regular error indicator ('!') and the file line
    ## error indicator ('file:line:').
    ## The latter misses some messages like "Missing $ inserted"
    ## (a more general pattern caught false positives).
    ## Typical forms:
    ##   ! LaTeX Error:
    ##   !pdfTeX error:
    ##   ! Emergency stop
    ##   !  ==> Fatal error occurred, no output PDF file produced!
    ##   .../inst/doc/ReadingFiles.tex:395: Package inputenc Error:
    pos <- grep("(^! |^!pdfTeX error:|:[0123456789]+:.*[Ee]rror)", lines)
    if(!length(pos))
        return(character())
    ## Each chunk runs from the error line to at most the next error
    ## line (or n lines of context, or end of file, whichever first).
    ends <- pmin(pos + n, c(pos[-1L], length(lines)))
    vapply(seq_along(pos),
           function(i) paste(lines[pos[i] : ends[i]], collapse = "\n"),
           character(1L))
}

### ** .get_internal_S3_generics

.get_internal_S3_generics <-
function(primitive = TRUE) # primitive means 'include primitives'
{
    ## Names of the internally dispatched S3 generics; optionally
    ## (by default) also the primitive generics.
    extras <- if(primitive)
        c("[", "[[", "$", "[<-", "[[<-", "$<-", "@", "@<-",
          ## The above are actually primitive but not listed in
          ## base::.S3PrimitiveGenerics et al: not sure why?
          .get_S3_primitive_generics()
          ## ^^^^^^^ now contains the members of the group generics
          ## from groupGeneric.Rd.
          )
    else
        character()
    c(.internalGenerics, extras)
}

### ** .get_namespace_package_depends

.get_namespace_package_depends <-
function(dir, selective_only = FALSE)
{
    ## Sorted unique names of the packages imported (wholly, or — with
    ## selective_only = TRUE — only selectively) by the NAMESPACE of the
    ## package rooted at 'dir'.
    nsInfo <- .check_namespace(dir)
    ## Each import spec is a pair (package, names); for selective-only
    ## we keep the package only when specific names were requested.
    extract <- if(selective_only)
        function(e) {
            if(is.list(e) && length(e[[2L]])) e[[1L]] else character()
        }
    else
        function(e) e[[1L]]
    deps <- c(lapply(nsInfo$imports, extract),
              lapply(nsInfo$importClasses, extract),
              lapply(nsInfo$importMethods, extract))
    unique(sort(as.character(unlist(deps, use.names = FALSE))))
}

### ** .get_namespace_S3_methods_db

.get_namespace_S3_methods_db <-
function(nsInfo)
{
    ## Get the registered S3 methods for an 'nsInfo' object returned by
    ## parseNamespaceFile(), as a 4-column character matrix with the
    ## names of the generic, class and method (as a function), plus the
    ## package for delayed registrations (NA otherwise).
    methods <- nsInfo$S3methods
    if(!length(methods))
        return(matrix(character(), ncol = 4L))
    ## Missing method names default to "generic.class".
    unnamed <- is.na(methods[, 3L])
    if(any(unnamed))
        methods[unnamed, 3L] <-
            paste0(methods[unnamed, 1L], ".", methods[unnamed, 2L])
    methods
}

### ** .get_namespace_S3_methods_with_homes

.get_namespace_S3_methods_with_homes <-
function(package, lib.loc = NULL)
{
    ## Get the registered S3 methods with the 'homes' of the generics
    ## they are registered for.
    ## Original code provided by Luke Tierney.
    ## Returns NULL if the package cannot be found or registers no S3
    ## methods; otherwise a data frame (via list2DF) with columns
    ## 'generic', 'home' (the namespace providing the generic),
    ## 'class', and 'delayed' (TRUE for delayed registrations).
    ## NOTE(review): loads the package namespace as a side effect.
    path <- system.file(package = package, lib.loc = lib.loc)
    if(!nzchar(path)) return(NULL)
    ## base is special-cased: its methods come from the static
    ## .S3_methods_table, and all generics live in base itself.
    if(package == "base") {
        len <- nrow(.S3_methods_table)
        return(list2DF(list(generic = .S3_methods_table[, 1L],
                            home = rep_len("base", len),
                            class = .S3_methods_table[, 2L],
                            delayed = rep_len(FALSE, len))))
    }
    lib.loc <- dirname(path)
    nsinfo <- parseNamespaceFile(package, lib.loc)
    S3methods <- nsinfo$S3methods
    if(!length(S3methods)) return(NULL)
    tab <- NULL
    ## Column 4 carries the package of a delayed registration (NA for
    ## regular registrations), which directly gives the generic's home.
    ind <- is.na(S3methods[, 4L])
    if(!all(ind)) {
        ## Delayed registrations can be handled directly.
        pos <- which(!ind)
        tab <- list2DF(list(generic = S3methods[pos, 1L],
                            home = S3methods[pos, 4L],
                            class = S3methods[pos, 2L],
                            delayed = rep_len(TRUE, length(pos))))
        S3methods <- S3methods[ind, , drop = FALSE]
    }
    ## For the remaining (regular) registrations the generic's home is
    ## found by looking the generic up from the package's namespace.
    generic <- S3methods[, 1L]
    nsenv <- loadNamespace(package, lib.loc)
    ## Possibly speed things up by only looking up the unique generics.
    generics <- unique(generic)
    homes <- character(length(generics))
    ind <- is.na(match(generics, .get_S3_group_generics()))
    ## Home = the namespace in which the generic's function was defined
    ## (topenv of its function environment).
    homes[ind] <-
        unlist(lapply(generics[ind],
                      function(f) {
                          f <- get(f, nsenv)
                          getNamespaceName(topenv(environment(f)))
                      }),
               use.names = FALSE)
    ## S3 group generics belong to base.
    homes[!ind] <- "base"
    rbind(list2DF(list(generic = generic,
                       home = homes[match(generic, generics)],
                       class = S3methods[, 2L],
                       delayed = rep_len(FALSE, length(generic)))),
          tab)
}

### ** .get_package_metadata

.get_package_metadata <-
function(dir, installed = FALSE)
{
    ## Get the package DESCRIPTION metadata for a package with root
    ## directory 'dir'.  If an unpacked source (uninstalled) package,
    ## base packages (have only a DESCRIPTION.in file with priority
    ## "base") need special attention.
    dir <- file_path_as_absolute(dir)
    dfile <- file.path(dir, "DESCRIPTION")
    if(file_test("-f", dfile))
        return(.read_description(dfile))
    if(installed)
        stop("File 'DESCRIPTION' is missing.")
    ## Uninstalled base packages only ship DESCRIPTION.in.
    dfile <- file.path(dir, "DESCRIPTION.in")
    if(!file_test("-f", dfile))
        stop("Files 'DESCRIPTION' and 'DESCRIPTION.in' are missing.")
    meta <- .read_description(dfile)
    if(!identical(as.character(meta["Priority"]), "base"))
        stop("invalid package layout")
    meta
}

### ** .get_requires_from_package_db

.get_requires_from_package_db <-
function(db,
         category = c("Depends", "Imports", "LinkingTo", "VignetteBuilder",
         "Suggests", "Enhances", "RdMacros"))
{
    ## Package names listed in the given dependency field of the
    ## DESCRIPTION metadata 'db' (version requirements stripped).
    ## "R" itself is dropped from Depends.
    category <- match.arg(category)
    if(!(category %in% names(db)))
        return(character())
    entries <- unlist(strsplit(db[category], ","))
    ## Keep only the leading package name of each entry.
    pkgs <- sub("^[[:space:]]*([[:alnum:].]+).*$", "\\1", entries)
    if(category == "Depends")
        pkgs <- pkgs[pkgs != "R"]
    pkgs
}

### ** .get_requires_with_version_from_package_db

.get_requires_with_version_from_package_db <-
function(db,
         category = c("Depends", "Imports", "LinkingTo", "VignetteBuilder",
         "Suggests", "Enhances"))
{
    ## Like .get_requires_from_package_db(), but keeps the version
    ## requirements, returning the list from .split_dependencies()
    ## (empty list if the field is absent).
    category <- match.arg(category)
    if(!(category %in% names(db)))
        return(list())
    res <- .split_dependencies(db[category])
    ## Drop the entry for R itself from Depends.
    if(category == "Depends")
        res <- res[names(res) != "R"]
    res
}

### ** .get_S3_generics_as_seen_from_package

## .get_S3_generics_as_seen_from_package <-
## function(dir, installed = TRUE, primitive = FALSE)
## {
##     ## Get the S3 generics "as seen from a package" rooted at
##     ## @code{dir}.  Tricky ...
##     if(basename(dir) == "base")
##         env_list <- list()
##     else {
##         ## Always look for generics in the whole of the former base.
##         ## (Not right, but we do not perform run time analyses when
##         ## working off package sources.)  Maybe change this eventually,
##         ## but we still cannot rely on packages to fully declare their
##         ## dependencies on base packages.
##         env_list <-
##             list(baseenv(),
##                  as.environment("package:graphics"),
##                  as.environment("package:stats"),
##                  as.environment("package:utils"))
##         if(installed) {
##             ## Also use the loaded namespaces and attached packages
##             ## listed in the DESCRIPTION Depends and Imports fields.
##             ## Not sure if this is the best approach: we could also try
##             ## to determine which namespaces/packages were made
##             ## available by loading the package (which should work at
##             ## least when run from R CMD check), or we could simply
##             ## attach every package listed as a dependency ... or
##             ## perhaps do both.
##             db <- .read_description(file.path(dir, "DESCRIPTION"))
##             depends <- .get_requires_from_package_db(db, "Depends")
##             imports <- .get_requires_from_package_db(db, "Imports")
##             reqs <- intersect(c(depends, imports), loadedNamespaces())
##             if(length(reqs))
##                 env_list <- c(env_list, lapply(reqs, getNamespace))
##             reqs <- intersect(setdiff(depends, loadedNamespaces()),
##                               .packages())
##             if(length(reqs))
##                 env_list <- c(env_list, lapply(reqs, .package_env))
##             env_list <- unique(env_list)
##         }
##     }
##     ## some BioC packages warn here
##     suppressWarnings(
##     unique(c(.get_internal_S3_generics(primitive),
##              unlist(lapply(env_list, .get_S3_generics_in_env))))
##     )
## }

### ** .get_S3_generics_in_base

.get_S3_generics_in_base <-
function()
{
    ## All S3 generics in base:
    ## * .get_S3_generics_in_env(.BaseNamespaceEnv) finds the
    ##   UseMethod() generics,
    ## * .get_internal_S3_generics() adds the internal S3 generics (and,
    ##   by default, the primitive generics),
    ## * .get_S3_group_generics() adds the S3 group generics.
    ## Note that .make_S3_group_generic_env() generates an env with the
    ## group generics and appropriate signatures, so we should always
    ## have
    ##    identical(sort(.get_S3_group_generics()),
    ##              sort(names(.make_S3_group_generic_env())))
    ## and similarly .make_S3_primitive_generic_env() generates an env
    ## with the primitive generics and appropriate signatures (in turn
    ## using base::.GenericArgsEnv), so we should always have
    ##    identical(sort(.get_S3_primitive_generics()),
    ##              sort(names(.make_S3_primitive_generic_env())))
    ume_generics <- .get_S3_generics_in_env(.BaseNamespaceEnv)
    c(ume_generics,
      .get_internal_S3_generics(),
      .get_S3_group_generics())
}

### ** .get_S3_generics_in_env

.get_S3_generics_in_env <-
function(env, nms = NULL)
{
    ## Names in 'env' (or the subset 'nms') that are S3 generics.
    ## An env containing the sentinel '.no_S3_generics' opts out.
    if(is.null(nms))
        nms <- sort(names(env))
    if(".no_S3_generics" %in% nms)
        return(character())
    Filter(function(f) .is_S3_generic(f, envir = env), nms)
}

### ** .get_S3_group_generics

.get_S3_group_generics <-
function()
{
    ## The S3 group generics (see ?groupGeneric).
    c("Ops", "Math", "Summary", "Complex", "matrixOps")
}

### ** .get_S3_primitive_generics

.get_S3_primitive_generics <-
function(include_group_generics = TRUE)
{
    ## The primitive S3 generics from base::.S3PrimitiveGenerics, by
    ## default extended with the members of the group generics.
    if(!include_group_generics)
        return(base::.S3PrimitiveGenerics)
    ## Keep these in sync with ? groupGeneric.
    math_members <-
        c("abs", "sign", "sqrt",
          "floor", "ceiling", "trunc",
          "round", "signif",
          "exp", "log", "expm1", "log1p",
          "cos", "sin", "tan",
          "cospi", "sinpi", "tanpi",
          "acos", "asin", "atan",
          "cosh", "sinh", "tanh",
          "acosh", "asinh", "atanh",
          "lgamma", "gamma", "digamma", "trigamma",
          "cumsum", "cumprod", "cummax", "cummin")
    ops_members <-
        c("+", "-", "*", "/",
          "^", "%%", "%/%",
          "&", "|", "!",
          "==", "!=",
          "<", "<=", ">=", ">")
    summary_members <-
        c("all", "any", "sum", "prod", "max", "min", "range")
    complex_members <-
        c("Arg", "Conj", "Im", "Mod", "Re")
    matrixops_members <- "%*%"
    c(base::.S3PrimitiveGenerics,
      math_members,
      ops_members,
      summary_members,
      complex_members,
      matrixops_members)
}

### ** .get_standard_Rd_keywords

.get_standard_Rd_keywords <-
function()
{
    ## Extract the standard Rd keywords from the shipped KEYWORDS.db,
    ## whose entries have the form "...|keyword: description".
    db <- readLines(file.path(R.home("doc"), "KEYWORDS.db"))
    entries <- grep("^.*\\|([^:]*):.*", db, value = TRUE)
    sub("^.*\\|([^:]*):.*", "\\1", entries)
}

### ** .get_standard_package_names

standard_package_names <-
.get_standard_package_names <-
local({
    ## Computed once when this file is evaluated: parse the R_PKGS_*
    ## variables from share/make/vars.mk into a named list of package
    ## name vectors, with names derived from the variable suffixes
    ## lowercased (e.g. R_PKGS_BASE -> "base").
    lines <- readLines(file.path(R.home("share"), "make", "vars.mk"))
    lines <- grep("^R_PKGS_[[:upper:]]+ *=", lines, value = TRUE)
    out <- strsplit(sub("^R_PKGS_[[:upper:]]+ *= *", "", lines), " +")
    names(out) <-
        tolower(sub("^R_PKGS_([[:upper:]]+) *=.*", "\\1", lines))
    ## Bake the precomputed list into the body of an argument-less
    ## accessor function defined in topenv(), so the closure does not
    ## keep a reference to this local() environment.
    eval(substitute(function() {out}, list(out=out)), envir = topenv())
    })


### ** .get_standard_package_dependencies

.get_standard_package_dependencies <-
function(reverse = FALSE, recursive = FALSE)
{
    ## Dependencies (Depends/Imports) among the standard packages,
    ## computed from their installed DESCRIPTION files via
    ## package_dependencies().
    pkgs <- unlist(.get_standard_package_names())
    dfiles <- file.path(.Library, pkgs, "DESCRIPTION")
    ## Be nice: silently skip packages without a DESCRIPTION.
    dfiles <- dfiles[file.exists(dfiles)]
    which <- c("Depends", "Imports")
    fields <- c("Package", which)
    ## Create a minimal available packages db.
    db <- do.call(rbind,
                  lapply(dfiles,
                         function(p) .read_description(p)[fields]))
    colnames(db) <- fields
    package_dependencies(pkgs, db, which = which,
                         reverse = reverse, recursive = recursive)
}

### ** .get_standard_repository_URLs

## Usage in e.g. CRAN_baseurl_for_web_area assumes this returns a
## valid CRAN mirror as its first element.
## That used not to be guaranteed, and it is still unchecked.
.get_standard_repository_URLs <-
function(ForXrefs = FALSE)
{
    ## The standard repository URLs (CRAN plus the BioC software,
    ## annotation and experiment repositories), as a named character
    ## vector with a usable CRAN entry.
    ## For cross-reference checking, an environment variable can
    ## override the whole set.
    if(ForXrefs &&
       nzchar(env <- Sys.getenv("_R_CHECK_XREFS_REPOSITORIES_", "")))
        return(utils:::.expand_BioC_repository_URLs(strsplit(env,
                                                             " +")[[1L]]))

    nms <- c("CRAN", "BioCsoft", "BioCann", "BioCexp")
    repos <- getOption("repos")
    ## This is set by utils:::.onLoad(), hence may be NULL.
    if(!is.null(repos) && !anyNA(repos[nms]) && (repos["CRAN"] != "@CRAN@"))
        return(repos[nms])
    repos <- utils:::.get_repositories()[nms, "URL"]
    names(repos) <- nms
    ## That might not contain an entry for CRAN.
    if(is.na(repos["CRAN"]) || repos["CRAN"] == "@CRAN@")
        repos["CRAN"] <- "https://CRAN.R-project.org"
    repos
}

.get_CRAN_repository_URL <-
function()
{
    ## A usable CRAN repository URL: the "repos" option when it has a
    ## real CRAN entry, else the repositories file, else the master
    ## CRAN URL.
    opt <- getOption("repos")
    if(!is.null(opt)) {
        cr <- opt["CRAN"]
        if(!is.na(cr) && cr != "@CRAN@")
            return(cr)
    }
    cr <- utils:::.get_repositories()["CRAN", "URL"]
    ## That might not contain an entry for CRAN.
    if(is.na(cr) || cr == "@CRAN@")
        cr <- "https://CRAN.R-project.org"
    cr
}

### ** .get_standard_repository_db_fields

.get_standard_repository_db_fields <-
function(type = c("source", "mac.binary", "win.binary"))
{
    ## DESCRIPTION fields recorded in a repository's PACKAGES db;
    ## "NeedsCompilation" applies to source packages only.
    type <- match.arg(type)
    fields <- c("Package", "Version", "Priority",
                "Depends", "Imports", "LinkingTo", "Suggests", "Enhances",
                "License", "License_is_FOSS", "License_restricts_use",
                "OS_type", "Archs", "MD5sum")
    if(type == "source")
        fields <- c(fields, "NeedsCompilation")
    fields
}

### ** .get_standard_DESCRIPTION_fields

.get_standard_DESCRIPTION_fields <-
function()
{
    ## All "standard" DESCRIPTION fields: the repository db fields,
    ## those documented in R-exts, and a hand-maintained list of
    ## others in common use.

    ## Extracted from R-exts via .get_DESCRIPTION_fields_in_R_exts():
    r_exts_fields <-
        c("Additional_repositories",
          "Author",
          "Authors@R",
          "Biarch",
          "BugReports",
          "BuildKeepEmpty",
          "BuildManual",
          "BuildResaveData",
          "BuildVignettes",
          "Built",
          "ByteCompile",
          "Classification/ACM",
          "Classification/ACM-2012",
          "Classification/JEL",
          "Classification/MSC",
          "Classification/MSC-2010",
          "Collate",
          "Collate.unix",
          "Collate.windows",
          "Contact",
          "Copyright",
          "Date",
          "Depends",
          "Description",
          "Encoding",
          "Enhances",
          "Imports",
          "KeepSource",
          "Language",
          "LazyData",
          "LazyDataCompression",
          "LazyLoad",
          "License",
          "LinkingTo",
          "MailingList",
          "Maintainer",
          "Note",
          "OS_type",
          "Package",
          "Packaged",
          "Priority",
          "RdMacros",
          "Suggests",
          "StagedInstall",
          "SysDataCompression",
          "SystemRequirements",
          "Title",
          "Type",
          "URL",
          "UseLTO",
          "Version",
          "VignetteBuilder",
          "ZipData")

    ## Others: adjust as needed.
    other_fields <-
        c("Repository",
          "Path",
          "Date/Publication",
          "LastChangedDate",
          "LastChangedRevision",
          "Revision",
          "RcmdrModels",
          "RcppModules",
          "Roxygen",
          "Acknowledgements",
          "Acknowledgments", # USA/Canadian usage.
          "biocViews")

    unique(c(.get_standard_repository_db_fields(),
             r_exts_fields,
             other_fields))
}

### ** .get_DESCRIPTION_fields_in_R_exts

.get_DESCRIPTION_fields_in_R_exts <-
function(texi = NULL)
{
    ## Sorted unique DESCRIPTION field names marked in the R-exts
    ## Texinfo sources by "@c DESCRIPTION field <name>" comments.
    if(is.null(texi))
        texi <- file.path(.R_top_srcdir_from_Rd(),
                          "doc", "manual", "R-exts.texi")
    marker <- "^@c DESCRIPTION field "
    fields <- sub(marker, "", grep(marker, readLines(texi), value = TRUE))
    sort(unique(fields))
}

### ** .get_top_call_in_fun

.get_top_call_in_fun <-
function(f)
{
    ## The first top-level call in the body of 'f': peel off any
    ## leading '{' wrappers and return the first inner call, or NULL
    ## if no call is reached.
    e <- body(f)
    while(is.call(e)) {
        if((length(e) > 1L) && (e[[1L]] == as.name("{")))
            e <- e[[2L]]
        else
            return(e)
    }
    NULL
}

### ** .gregexec_at_pos

.gregexec_at_pos <-
function(pattern, x, m, pos)
{
    ## For the matches of 'x' given by match data 'm' (from gregexpr),
    ## re-match with regexec(pattern, .) and return column 'pos' of the
    ## rbind-ed capture matrices (column 1 is the whole match, column
    ## k+1 the k-th parenthesized subexpression).
    extract <- function(matches) {
        caps <- regmatches(matches, regexec(pattern, matches))
        do.call(rbind, caps)[, pos]
    }
    unlist(lapply(regmatches(x, m), extract), use.names = FALSE)
}

### ** .gsub_with_transformed_matches

.gsub_with_transformed_matches <-
function(pattern, replacement, x, trafo, count, ...)
{
    ## gsub() with replacements featuring transformations of matches.
    ##
    ## Character string (%s) conversion specifications in 'replacement'
    ## will be replaced by applying the respective transformations in
    ## 'trafo' to the respective matches (parenthesized subexpressions of
    ## 'pattern') specified by 'count'.
    ##
    ## Argument 'trafo' should be a single unary function, or a list of
    ## such functions.
    ## Argument 'count' should be a vector of with the numbers of
    ## parenthesized subexpressions to be transformed (0 gives the whole
    ## match).

    ## 'yi' is one regexec() match vector: element 1 is the whole match
    ## and element k+1 the k-th subexpression, hence 'count + 1L' below.
    ## Backslashes in the transformed text are doubled (via fsub,
    ## defined elsewhere in tools) so sub() treats them literally.
    replace <- function(yi) {
        do.call(sprintf,
                c(list(replacement),
                  Map(function(tr, co) fsub("\\", "\\\\", tr(yi[co])),
                      trafo, count + 1L)))
    }

    if(!is.list(trafo)) trafo <- list(trafo)
    ## Locate all matches, rewrite each one individually, then splice
    ## the rewritten texts back into 'x' via regmatches<-.
    m <- gregexpr(pattern, x, ...)
    v <- lapply(regmatches(x, m),
                function(e) {
                    y <- regmatches(e, regexec(pattern, e, ...))
                    unlist(Map(function(ei, yi) {
                        sub(pattern, replace(yi), ei, ...)
                    },
                               e,
                               y))
                })
    regmatches(x, m) <- v
    x
}

### imports_for_undefined_globals

imports_for_undefined_globals <-
function(txt, lst, selective = TRUE)
{
    ## Suggest NAMESPACE import directives for the undefined globals
    ## given either as text ('txt', whitespace-separated) or as a
    ## character vector ('lst'), by locating each symbol on the search
    ## path via utils::find().
    if(!missing(txt))
        lst <- scan(what = character(), text = txt, quiet = TRUE)
    lst <- sort(unique(lst))
    locations <- lapply(lst, utils::find)
    found <- lengths(locations) > 0L
    ## Strip the "package:" prefix (8 characters) from the locations.
    pkgs <- substring(unlist(locations[found]), 9L)
    imp <- split(lst[found], pkgs)
    if(!selective)
        return(sprintf("import(\"%s\")", names(imp)))
    sprintf("importFrom(%s)",
            vapply(Map(c, names(imp), imp),
                   function(e)
                       paste0("\"", e, "\"", collapse = ", "),
                   ""))
}

### ** .is_ASCII

.is_ASCII <-
function(x)
{
    ## Determine, elementwise, whether the strings in a character
    ## vector consist entirely of ASCII bytes (<= 0x7f).
    ascii_max <- as.raw(127L)
    vapply(as.character(x),
           function(s) all(charToRaw(s) <= ascii_max),
           NA)
}

### ** .is_ISO_8859

.is_ISO_8859 <-
function(x)
{
    ## Determine, elementwise, whether the strings could be in some
    ## ISO 8859 character set: every byte must be ASCII (<= 0x7f) or
    ## in the upper range (>= 0xa0); the 0x80-0x9f control range is
    ## not used by ISO 8859.
    ascii_max <- as.raw(0x7f)
    upper_min <- as.raw(0xa0)
    vapply(as.character(x),
           function(s) {
               bytes <- charToRaw(s)
               all(bytes <= ascii_max | bytes >= upper_min)
           },
           NA)
}

### ** .is_primitive_in_base

.is_primitive_in_base <-
function(fname)
{
    ## Determine whether object named 'fname' found in the base
    ## environment is a primitive function.
    obj <- baseenv()[[fname]]
    is.primitive(obj)
}

### ** .is_S3_generic

.is_S3_generic <-
function(fname, envir, mustMatch = TRUE)
{
    ## Determine whether object named 'fname' found in environment
    ## 'envir' is (to be considered) an S3 generic function.  Note,
    ## found *in* not found *from*, so envir does not have a default.
    ##
    ## If it is, does it dispatch methods of fname?  We need that to
    ## look for possible methods as functions named fname.* ....
    ##
    ## Provided by LT with the following comments:
    ##
    ## This is tricky.  Figuring out what could possibly dispatch
    ## successfully some of the time is pretty much impossible given R's
    ## semantics.  Something containing a literal call to UseMethod is
    ## too broad in the sense that a UseMethod call in a local function
    ## doesn't produce a dispatch on the outer function ...
    ##
    ## If we use something like: a generic has to be
    ##      function(e) <UME>  # UME = UseMethod Expression
    ## with
    ##      <UME> = UseMethod(...) |
    ##             if (...) <UME> [else ...] |
    ##             if (...) ... else <UME>
    ##             { ... <UME> ... }
    ## then a recognizer for UME might be as follows.

    f <- suppressMessages(get(fname, envir = envir, inherits = FALSE))
    if(!is.function(f)) return(FALSE)

    ## Return the name dispatched on by the first UseMethod() call
    ## recognized in expression 'e', or "" if there is none.
    findUME <- function(e) {
        if(!(is.call(e) && (is.name(e[[1L]]) || is.character(e[[1L]]))))
            return("")
        switch(as.character(e[[1L]]),
               UseMethod = as.character(e[[2L]]),
               "{" = findUMEinBrace(e),
               "if" = findUMEinIf(e),
               "")
    }
    ## Scan the statements of a '{' block in order.
    findUMEinBrace <- function(e) {
        for(stmt in as.list(e)[-1L]) {
            hit <- findUME(stmt)
            if(nzchar(hit)) return(hit)
        }
        ""
    }
    ## For 'if': check the consequent, then (if present) the alternative.
    findUMEinIf <- function(e) {
        if(length(e) == 3L)
            return(findUME(e[[3L]]))
        hit <- findUME(e[[3L]])
        if(nzchar(hit)) hit else findUME(e[[4L]])
    }

    res <- findUME(body(f))
    if(mustMatch) res == fname else nzchar(res)
}

### ** .load_namespace_quietly

.load_namespace_quietly <-
function(package, lib.loc) {
    ## Load the package namespace via .try_quietly(); base is always
    ## loaded, so nothing to do for it.
    if(package == "base")
        return(invisible(NULL))
    .try_quietly(loadNamespace(package, lib.loc))
}

### ** .load_namespace_rather_quietly

.load_namespace_rather_quietly <-
function(package)
{
    ## Suppress messages and warnings from loading namespace
    ## dependencies.
    ## Warnings from loading 'package' itself are re-signalled;
    ## warnings raised while a *dependency* is being loaded are
    ## muffled.  The two cases are told apart by counting the
    ## loadNamespace() frames on the call stack: exactly one means the
    ## warning originates from loading 'package' itself.
    .whandler <- function(e) {
        calls <- sys.calls()
        if(sum(.call_names(calls) == "loadNamespace") == 1L)
            signalCondition(e)
        else
            tryInvokeRestart("muffleWarning")
    }
    ## Substitute the actual package name into the call so the loaded
    ## expression is loadNamespace("<package>") literally.
    expr <- substitute(loadNamespace(package), list(package = package))
    invisible(withCallingHandlers(suppressMessages(eval(expr)),
                                  warning = .whandler))
}

### ** .load_package_quietly

.load_package_quietly <-
function(package, lib.loc)
{
    ## Quietly ensure that package @code{package} is loaded and
    ## attached.
    ## If not yet loaded, look for the package in @code{lib.loc}.
    ## Otherwise, we do not attempt reloading: previously we tried at
    ## least when attached, but reloading namespaces invalidates DLLs
    ## and S3 registries, see e.g. PR#18130
    ## <https://bugs.r-project.org/show_bug.cgi?id=18130>.
    ## Hence if already loaded, we can neither ensure that the package
    ## came from @code{lib.loc}, nor that we used the currently
    ## installed versions.
    ## Don't do anything for base.
    ##
    ## All QC functions use this for loading packages because R CMD
    ## check interprets all output as indicating a problem.
    if(package == "base")
        return(invisible(NULL))
    .try_quietly({
        pos <- match(paste0("package:", package), search())
        if(!is.na(pos)) {
            ## Detach first if already attached.
            ## Presumably this should use
            ## <CODE>
            ##   detach(pos, force = TRUE)
            ## </CODE>
            ## to always detach?
            ## Or perhaps simply leave things as they are?
            detach(pos = pos)
        }
        library(package, lib.loc = lib.loc, character.only = TRUE,
                verbose = FALSE)
    })
}

### ** .make_file_exts

## <FIXME>
## Remove support for type "vignette" eventually ...
## </FIXME>

.make_file_exts <-
function(type = c("code", "data", "demo", "docs", "vignette"))
{
    ## Return a character vector with the possible/recognized file
    ## extensions for a given file type.
    ## Use match.arg() (as the other helpers here do, e.g.
    ## .get_requires_from_package_db): previously calling this with
    ## the default (no argument) made switch() fail with "EXPR must be
    ## a length 1 vector", and an unrecognized type silently returned
    ## NULL instead of erroring.
    type <- match.arg(type)
    switch(type,
           code = c("R", "r", "S", "s", "q"),
           ## Keep in sync with the order given in base's data.Rd.
           data = c("R", "r",
                    "RData", "rdata", "rda",
                    "tab", "txt", "TXT",
                    "tab.gz", "txt.gz",
                    "tab.bz2", "txt.bz2",
                    "tab.xz", "txt.xz",
                    "csv", "CSV",
                    "csv.gz", "csv.bz2", "csv.xz"),
           demo = c("R", "r"),
           docs = c("Rd", "rd"),
           vignette = c(outer(c("R", "r", "S", "s"), c("nw", "tex"),
                              paste0),
                        "Rmd"))
}

### ** .make_S3_group_generic_env

.make_S3_group_generic_env <-
function(parent = parent.frame())
{
    ## Create an environment with pseudo-definitions for the S3 group
    ## generics, each with the documented formal arguments.
    list2env(list(Math = function(x, ...) UseMethod("Math"),
                  Ops = function(e1, e2) UseMethod("Ops"),
                  matrixOps = function(x, y) UseMethod("matrixOps"),
                  Summary = function(..., na.rm = FALSE)
                      UseMethod("Summary"),
                  Complex = function(z) UseMethod("Complex")),
             parent = parent)
}

### ** .make_S3_primitive_generic_env

.make_S3_primitive_generic_env <-
function(parent = parent.frame(), fixup = FALSE)
{
    ## Create an environment with pseudo-definitions for the S3
    ## primitive generics, taken from base::.GenericArgsEnv.
    env <- list2env(as.list(base::.GenericArgsEnv, all.names=TRUE),
                    hash=TRUE, parent=parent)
    if(fixup) {
        ## Rename the operators' formals from (e1, e2) to (x, y).
        ops <- c('+', '-', '*', '/', '^', '%%', '%/%', '&', '|',
                 '==', '!=', '<', '<=', '>=', '>')
        for(op in ops) {
            def <- env[[op]]
            formals(def) <- alist(x=, y=)
            env[[op]] <- def
        }
    }
    env
}

### ** .make_S3_primitive_nongeneric_env

## why not just use  base::.ArgsEnv -- is the parent really important if(is_base)?
.make_S3_primitive_nongeneric_env <-
function(parent = parent.frame())
{
    ## Create an environment with pseudo-definitions
    ## for the S3 primitive non-generics
    list2env(as.list(base::.ArgsEnv, all.names=TRUE),
             hash=TRUE, parent=parent)
}

### ** .make_KaTeX_checker

## Memoizing factory for a KaTeX syntax checker: the first successful
## call builds a V8 context with R's shipped katex.js plus the config
## and check scripts, and caches both the context and the checker
## function via <<-.  Returns the checker (a function of a TeX string
## calling the JS checkTex()), or NULL if the V8 package is not
## available.
.make_KaTeX_checker <- local({
    fun <- NULL
    ctx <- NULL
    function() {
        ## Only build once; skip entirely when V8 cannot be loaded.
        if(is.null(fun) && requireNamespace("V8", quietly = TRUE)) {
            dir <- file.path(R.home("doc"), "html")
            ctx <<- V8::v8("window")
            ctx$source(file.path(dir, "katex", "katex.js"))
            ## Provides additional macros:
            ctx$source(file.path(dir, "katex-config.js"))
            ## Provides checkTex():
            ctx$source(file.path(dir, "katex-check.js"))
            fun <<- function(tex) ctx$call('checkTex', tex)
        }
        fun
    }
})

### ** .make_RFC4646_langtag_regexp

.make_RFC4646_langtag_regexp <-
function()
{
    ## See <https://www.ietf.org/rfc/rfc4646.html>.
    ## Language tags can be of the form (in ABNF, see
    ## <https://tools.ietf.org/rfc/rfc4234.txt>):
    ##   langtag / privateuse / grandfathered
    ## where
    ##   privateuse    = ("x"/"X") 1*("-" (1*8alphanum))
    ##   grandfathered = 1*3ALPHA 1*2("-" (2*8alphanum))
    ## We only allow langtag, for which in turn we have
    ##   (language
    ##    ["-" script]
    ##    ["-" region]
    ##    *(["-" variant])
    ##    *(["-" extension])
    ##    ["-" privateuse]
    ## where
    ##   language    = (2*3ALPHA [-extlang])  ; shortest ISO 639 code
    ##                  / 4ALPHA              ; reserved for future use
    ##                  / 5*8ALPHA            ; registered language subtag
    ##   extlang     = *3("-" 3*ALPHA)        ; reserved for future use
    ##   script      = 4ALPHA                 ; ISO 15924 code
    ##   region      = 2ALPHA                 ; ISO 3166 code
    ##                 / 3DIGIT               ; UN M.49 code
    ##   variant     = 5*8alphanum            ; registered variants
    ##                 / (DIGIT 3alphanum)
    ##   extension   = singleton 1*("-" (2*8alphanum))
    ##   singleton   = %x41-57 / %x59-5A / %x61-77 / %x79-7A / DIGIT
    ##               ; "a"-"w" / "y"-"z" / "A"-"W" / "Y"-"Z" / "0"-"9"
    ##   alphanum    = (ALPHA / DIGIT)        ; letters and numbers

    re_extlang <- "[[:alpha:]]{3}"
    re_language <-
        sprintf("[[:alpha:]]{2,3}(-%s){0,3}|[[:alpha:]]{4,8}", re_extlang)
    re_script <- "[[:alpha:]]{4}"
    ## For region and variant the leading "-" is distributed into each
    ## alternative: with the previous form "(-A|B)?" the hyphen bound
    ## only to the first alternative, so e.g. "en123" (no hyphen before
    ## the region) incorrectly matched.  Distributing keeps the number
    ## of capture groups unchanged.
    re_region <- "-[[:alpha:]]{2}|-[[:digit:]]{3}"
    re_variant <- "-[[:alnum:]]{5,8}|-[[:digit:]][[:alnum:]]{3}"
    re_singleton <- "[abcdefghijklmnopqrstuvwyzABCDEFGHIJKLMNOPQRSTUVWYZ0123456789]"
    re_extension <- sprintf("(%s)(-[[:alnum:]]{2,8}){1,}", re_singleton)

    sprintf("(%s)((-%s)?)((%s)?)((%s)*)((-%s)*)",
            re_language, re_script, re_region, re_variant, re_extension)
}
    
### ** nonS3methods [was .make_S3_methods_stop_list ]

nonS3methods <- function(package)
{
    ## Names of functions in 'package' which 'look' like S3 methods
    ## (contain a dot after a non-empty prefix) but are not methods.
    ## With package = NULL, all known examples are returned.
    exceptions <-
        list(base = c("all.equal", "all.names", "all.vars",
                      "as.data.frame.vector",
                      "format.info", "format.pval",
                      "max.col",
                      ## the next two only exist in *-defunct.Rd.
                      ## "print.atomic", "print.coefmat",
                      "qr.Q", "qr.R", "qr.X", "qr.coef", "qr.fitted",
                      "qr.qty", "qr.qy", "qr.resid", "qr.solve",
                      "rep.int", "seq.int", "sort.int", "sort.list"),
             AMORE = "sim.MLPnet",
             BSDA = "sign.test",
             BiocGenerics = "rep.int",
             ChemometricsWithR = "lda.loofun",
             ElectoGraph = "plot.wedding.cake",
             FrF2 = "all.2fis.clear.catlg",
             GLDEX = c("hist.su", "pretty.su"),
             Hmisc = c("abs.error.pred", "all.digits", "all.is.numeric",
                       "format.df", "format.pval", "t.test.cluster"),
             HyperbolicDist = "log.hist",
             MASS = c("frequency.polygon", "gamma.dispersion",
                      "gamma.shape", "hist.FD", "hist.scott"),
             LinearizedSVR = "sigma.est",
             ## FIXME: since these are already listed with 'base',
             ##        they should not need to be repeated here:
             Matrix = c("qr.Q", "qr.R", "qr.coef", "qr.fitted",
                        "qr.qty", "qr.qy", "qr.resid"),
             PerformanceAnalytics = c("mean.LCL", "mean.UCL",
                                      "mean.geometric", "mean.stderr"),
             RCurl = "merge.list",
             RNetCDF = c("close.nc", "dim.def.nc", "dim.inq.nc",
                         "dim.rename.nc", "open.nc", "print.nc"),
             Rmpfr = c("mpfr.is.0", "mpfr.is.integer"),
             SMPracticals = "exp.gibbs",
             SparseM = c("as.matrix.csc", "as.matrix.csr",
                         "as.matrix.ssc", "as.matrix.ssr",
                         "as.matrix.coo",
                         "is.matrix.csc", "is.matrix.csr",
                         "is.matrix.ssc", "is.matrix.ssr",
                         "is.matrix.coo"),
             TANOVA = "sigma.hat",
             TeachingDemos = "sigma.test",
             XML = "text.SAX",
             ape = "sort.index",
             arm = "sigma.hat", # lme4 has sigma()
             assist = "chol.new",
             boot = "exp.tilt",
             car = "scatterplot.matrix",
             calibrator = "t.fun",
             clusterfly = "ggobi.som",
             coda = "as.mcmc.list",
             crossdes = "all.combn",
             ctv = "update.views",
             deSolve = "plot.1D",
             effects = "all.effects", # already deprecated
             elliptic = "sigma.laurent",
             equivalence = "sign.boot",
             fields = c("qr.q2ty", "qr.yq2"),
             gbm = c("pretty.gbm.tree", "quantile.rug"),
             genetics = "diseq.ci",
             gpclib = "scale.poly",
             grDevices = "boxplot.stats",
             graphics = c("close.screen", "plot.design", "plot.new",
                          "plot.window", "plot.xy", "split.screen"),
             ic.infer = "all.R2",
             hier.part = "all.regs",
             lasso2 = "qr.rtr.inv",
             latticeExtra = "xyplot.list",
             locfit = c("density.lf", "plot.eval"),
             moments = c("all.cumulants", "all.moments"),
             mosaic = "t.test",
             mratios = c("t.test.ration", "t.test.ratio.default",
                         "t.test.ratio.formula"),
             ncdf = c("open.ncdf", "close.ncdf",
                      "dim.create.ncdf", "dim.def.ncdf",
                      "dim.inq.ncdf", "dim.same.ncdf"),
             plyr = c("rbind.fill", "rbind.fill.matrix"),
             quadprog = c("solve.QP", "solve.QP.compact"),
             reposTools = "update.packages2",
             reshape = "all.vars.character",
             rgeos = "scale.poly",
             rowr = "cbind.fill",
             sac = "cumsum.test",
             sfsmisc = "cumsum.test",
             sm = "print.graph",
             spatstat = "lengths.psp",
             splusTimeDate = "sort.list",
             splusTimeSeries = "sort.list",
             stats = c("anova.lmlist", "expand.model.frame",
                       "fitted.values", "influence.measures",
                       "lag.plot", "qr.influence", "t.test",
                       "plot.spec.phase", "plot.spec.coherency"),
             stremo = "sigma.hat",
             supclust = c("sign.change", "sign.flip"),
             tensorA = "chol.tensor",
             utils = c("close.socket", "flush.console",
                       "update.packages"),
             wavelets = "plot.dwt.multiple")
    if(is.null(package))
        return(unlist(exceptions))
    found <- exceptions[[package]]
    if(length(found)) found else character()
}

### ** .make_S3_methods_table_for_base

.make_S3_methods_table_for_base <-
function()
{
    ## Compute the two-column (generic, class) table of S3 methods
    ## defined in base.
    base_env <- baseenv()
    base_objects <- ls(base_env, all.names = TRUE)
    is_gen <- vapply(base_objects,
                     function(o) .is_S3_generic(o, base_env),
                     FALSE)
    generics <- sort(unique(c(base_objects[is_gen],
                              .get_S3_group_generics(),
                              .get_internal_S3_generics())))
    ## Put generics not starting with a letter first.
    starts_alpha <- grepl("^[[:alpha:]]", generics)
    generics <- c(generics[!starts_alpha], generics[starts_alpha])
    ## The foo.bar objects in base:
    candidates <- grep("[^.]+[.][[:alpha:]]", base_objects, value = TRUE)
    ## Make our lives easier ...
    candidates <- setdiff(candidates, nonS3methods("base"))
    ## Find the ones matching GENERIC.CLASS from the list of generics.
    methods <-
        lapply(generics,
               function(g)
                   candidates[startsWith(candidates, paste0(g, "."))])
    names(methods) <- generics
    ## Need to separate all from all.equal:
    methods$all <- methods$all[!startsWith(methods$all, "all.equal")]
    methods <- Filter(length, methods)
    classes <- Map(substring, methods, nchar(names(methods)) + 2L)

    cbind(generic = rep.int(names(classes), lengths(classes)),
          class = unlist(classes, use.names = FALSE))
}

.deparse_S3_methods_table_for_base <-
function()
{
    ## Deparse the base S3 methods table as lines of R source text
    ## suitable for pasting into code (a matrix() call).
    if(!identical("C", Sys.getlocale("LC_COLLATE")))
        warning("*not* using 'C' for LC_COLLATE locale")
    tab <- .make_S3_methods_table_for_base()
    nr <- nrow(tab)
    ## First row opens the matrix() call, last row closes the data part.
    prefix <- c("matrix(c(", rep.int("         ", nr - 1L))
    suffix <- c(rep.int(",", nr - 1L), "),")
    rows <- sprintf("%s\"%s\", \"%s\"%s",
                    prefix, tab[, 1L], tab[, 2L], suffix)
    c(rows,
      "       ncol = 2L, byrow = TRUE,",
      "       dimnames = list(NULL, c(\"generic\", \"class\")))")
}

### ** .package_apply

.package_apply <-
function(packages = NULL, FUN, ..., pattern = NULL, verbose = TRUE,
         Ncpus = getOption("Ncpus", 1L))
{
    ## Apply FUN (with extra '...' arguments) to each of the given
    ## packages, defaulting to all installed packages with high
    ## priority; optionally restrict to names matching 'pattern' and
    ## parallelize over 'Ncpus'.
    if(is.null(packages))
        packages <-
            unique(utils::installed.packages(priority = "high")[ , 1L])

    if(!is.null(pattern))
        packages <- grepv(pattern, packages)

    ## Keep in sync with .unpacked_source_repository_apply().
    ## <FIXME>
    ## Should we really catch errors?
    worker <- function(pkg) {
        if(verbose)
            message(sprintf("processing %s", pkg))
        tryCatch(FUN(pkg, ...), error = identity)
    }
    ## </FIXME>

    ## Would be good to have a common wrapper ...
    if(Ncpus > 1L) {
        if(.Platform$OS.type == "windows") {
            cl <- parallel::makeCluster(Ncpus)
            args <- list(FUN, ...)      # Eval promises.
            results <- parallel::parLapply(cl, packages, worker)
            parallel::stopCluster(cl)
        } else {
            results <- parallel::mclapply(packages, worker,
                                          mc.cores = Ncpus)
        }
    } else {
        results <- lapply(packages, worker)
    }

    names(results) <- packages
    results
}

### ** .package_code_using_R_4.x_syntax

.package_code_using_R_4.x_syntax <-
function(dir)
{
    ## Scan the R code files and the Rd example code of the package with
    ## sources in 'dir' for syntax only available in R >= 4.1.0 (the
    ## native pipe and the backslash lambda shorthand), R >= 4.2.0 (the
    ## pipe placeholder) or R >= 4.3.0 (the placeholder as the head of a
    ## chain of extractions), returning the offending tokens together
    ## with the R version they need, the surrounding text and the file.
    dir <- file_path_as_absolute(dir)
    ## Worker: parse file 'p' (reported under name 'f') and return a
    ## data frame with columns token/needs/text/file, or NULL if the
    ## file uses none of the new syntax.
    wrk <- function(p, f) {
        x <- utils::getParseData(parse(p, keep.source = TRUE))
        ## "PIPE" and "'\\\\'" tokens need R >= 4.1.0.
        i1 <- which(x$token %in% c("PIPE", "'\\\\'"))
        ## "PLACEHOLDER" tokens need R >= 4.2.0 (or 4.3.0, see below).
        i2 <- which(x$token == "PLACEHOLDER")
        if(length(i1) || length(i2)) {
            xi <- x$id
            xp <- x$parent
            n1 <- rep_len("4.1.0", length(i1))
            ## Detect experimental placeholder feature as the head of a
            ## chain of extractions by looking at the first child of the
            ## grandparent of the placeholder: if it is the placeholder
            ## expression then we have the 4.3.0 syntax.
            n2 <- ifelse(vapply(i2,
                                function(j) {
                                    u <- xp[j]
                                    v <- xp[xi %in% u]
                                    min(xi[xp %in% v]) == u
                                },
                                NA),
                         "4.3.0",
                         "4.2.0")
            i <- c(i1, i2)
            data.frame(token = x$token[i],
                       needs = c(n1, n2),
                       text = utils::getParseText(x, xp[i]),
                       file = rep_len(f, length(i)))
        } else
            NULL
    }

    files <- list_files_with_type(file.path(dir, "R"), "code",
                                  full.names = FALSE,
                                  OS_subdirs = c("unix", "windows"))
    ## As of 2025-03, packages
    ##   gmailr httr2 purrr
    ## use configure code to drop the pipe using examples for R < 4.1.
    db <- if(basename(dir) %in% c("gmailr", "httr2", "purrr"))
              list()
          else
              Rd_db(dir = dir)

    ## Run the worker over all code files and over the example code
    ## extracted from all Rd files, silently skipping anything which
    ## fails to parse, and combine the per-file results row-wise.
    do.call(rbind,
            c(Map(function(u, v) {
                      tryCatch({
                          wrk(u, v)
                      }, error = function(e) NULL)
                  },
                  file.path(dir, "R", files),
                  files,
                  USE.NAMES = FALSE),
              Map(function(u, v) {
                      tryCatch({
                          p <- tempfile()
                          on.exit(unlink(p))
                          ## Need to extract the code in the examples.
                          ## Rd2ex() does that and more, but provides no
                          ## output if there are no examples ...
                          Rd2ex(u, p)
                          if(file.exists(p))
                              wrk(p, v)
                      }, error = function(e) NULL)
                  },
                  db,
                  names(db),
                  USE.NAMES = FALSE)))
}

## ** .package_depends_on_R_at_least

.package_depends_on_R_at_least <-
function(dir, v)
{
    ## Does the package with sources in 'dir' declare a dependency on
    ## R (>= x) for some version x >= v?
    meta <- .get_package_metadata(dir)
    .package_metadata_has_depends_on_R_at_least(meta, v)
}

### ** .package_metadata_has_depends_on_R_at_least

.package_metadata_has_depends_on_R_at_least <-
function(meta, v)
{
    ## TRUE iff the package metadata 'meta' declare a versioned R
    ## dependency "R (>= x)" with x at least 'v'.
    rdeps <- .split_description(meta)$Rdepends2
    for(rdep in rdeps) {
        if((rdep$op == '>=') && (rdep$version >= v))
            return(TRUE)
    }
    FALSE
}
    
### ** .package_vignettes_via_call_to_R

.package_vignettes_via_call_to_R <-
function(dir, ..., libpaths = .libPaths()) {
    ## Run pkgVignettes() in a child R process.  pkgVignettes() needs to
    ## load the namespaces of the vignette builders in order to find the
    ## vignette engines, and cannot unload again, which may be
    ## undesirable (e.g., when calling from the master check process
    ## *before* installing the package checked).
    ## pkgVignettes() has a lib.loc argument but that is not passed
    ## through to loadVignetteBuilder(), so we use .libPaths() instead.
    worker <- function(dir, ..., libpaths) {
        .libPaths(libpaths)
        pkgVignettes(dir = dir, ...)
    }
    R(worker, list(dir, ..., libpaths = libpaths), "--vanilla")
}

### ** .pandoc_md_for_CRAN

.pandoc_md_for_CRAN <-
function(ifile, ofile)
{
    ## Convert markdown file 'ifile' to standalone output 'ofile' via
    ## pandoc, using mathjax and reference-style email obfuscation.
    pandoc_args <- paste(shQuote(normalizePath(ifile)),
                         "-s", "--mathjax",
                         "--email-obfuscation=references",
                         "-o", shQuote(ofile))
    .system_with_capture("pandoc", pandoc_args)
}

### ** .parse_code_file

.parse_code_file <-
function(file, encoding = NA, keep.source = getOption("keep.source"))
{
    ## Parse an R code file, re-encoding its contents to the native
    ## encoding when a usable 'encoding' is declared and the current
    ## locale can represent it; NULL for missing or empty files.
    if(!file.exists(file) || !file.size(file))
        return()
    reencode <- !is.na(encoding) &&
        (encoding != "unknown") &&
        (Sys.getlocale("LC_CTYPE") %notin% c("C", "POSIX"))
    suppressWarnings({
        if(reencode) {
            ## Previous use of con <- file(file, encoding = encoding)
            ## was intolerant, so do something similar to what
            ## .install_package_code_files() does.  Do not use a #line
            ## directive though as this will confuse getParseData().
            lines <- iconv(readLines(file, warn = FALSE),
                           from = encoding, to = "", sub = "byte")
            parse(text = lines, srcfile = srcfile(file),
                  keep.source = keep.source)
        } else
            parse(file, keep.source = keep.source)
    })
}

### ** .persons_from_metadata

.persons_from_metadata <- function(dir, meta = NULL) {
    ## Extract the Authors@R person object from package metadata,
    ## reading the metadata from 'dir' unless 'meta' is given.
    ## Returns NULL when there is no (valid) Authors@R field.
    if(is.null(meta))
        meta <- .get_package_metadata(dir)
    aar <- meta["Authors@R"]
    if(is.na(aar))
        return(NULL)
    persons <- tryCatch(utils:::.read_authors_at_R_field(aar),
                        error = identity)
    if(inherits(persons, "person")) persons else NULL
}

### ** .persons_from_citation

.persons_from_citation <- function(dir, installed = FALSE) {
    ## Extract the combined author person objects from a package's
    ## CITATION file (inst/CITATION for a source package).  Returns
    ## NULL when the file cannot be read or has no person information.
    meta <- .get_package_metadata(dir, installed = installed)
    rel <- if(installed) "CITATION" else file.path("inst", "CITATION")
    cinfo <- .read_citation_quietly(file.path(dir, rel), meta)
    if(inherits(cinfo, "error"))
        return(NULL)
    authors <- do.call(c, lapply(unclass(cinfo), `[[`, "author"))
    if(inherits(authors, "person")) authors else NULL
}

### ** .read_additional_repositories_field

.read_additional_repositories_field <-
function(txt)
{
    ## Split a comma-separated Additional_repositories field into the
    ## unique repository entries (whitespace after commas is dropped).
    parts <- strsplit(txt, ",[[:space:]]*")
    unique(unlist(parts))
}

### ** .read_citation_quietly

.read_citation_quietly <-
function(cfile, meta)
{
    ## Read the CITATION file 'cfile' with package metadata 'meta',
    ## silencing messages and warnings; on failure the error condition
    ## object is returned instead of being signalled.
    tryCatch(
        suppressMessages(
            suppressWarnings(
                utils::readCitationFile(cfile, meta))),
        error = identity)
}

### ** .read_collate_field

.read_collate_field <-
function(txt)
{
    ## Read Collate specifications in DESCRIPTION files.
    ## These consist of file paths relative to the R code directory,
    ## separated by white space, possibly quoted.  DCF entries may
    ## contain newlines but file names may not, so map newlines to
    ## spaces before scanning.
    scan(text = gsub("\n", " ", txt, fixed = TRUE),
         what = character(), strip.white = TRUE, quiet = TRUE)
}

### ** .read_description

## DESCRIPTION fields for which whitespace is preserved when reading
## via read.dcf() (and when writing back in .write_description()):
## line folding would lose meaningful formatting in these fields.
.keep_white_description_fields <-
    c("Description", "Authors@R", "Author", "Built", "Packaged")

.read_description <-
function(dfile, keep.white = .keep_white_description_fields)
{
    ## Try reading in package metadata from a DESCRIPTION file.
    ## (Never clear whether this should work on the path of the file
    ## itself, or on that of the directory containing it.)
    ## <NOTE>
    ## As we do not have character "frames", we return a named character
    ## vector.
    ## </NOTE>
    if(!file_test("-f", dfile))
        stop(gettextf("file '%s' does not exist", dfile), domain = NA)
    out <- tryCatch(read.dcf(dfile, keep.white = keep.white),
                    error = function(e)
                    stop(gettextf("file '%s' is not in valid DCF format",
                                  dfile),
                         domain = NA, call. = FALSE))
    ## A valid DESCRIPTION file contains exactly one DCF record.
    if (nrow(out) != 1L)
        stop("contains a blank line", call. = FALSE)
    out <- out[1L, ]
    ## Honour a declared Encoding: mark UTF-8 (replacing invalid bytes
    ## by their hex representation) and latin1 content, and convert
    ## everything else to the native encoding.
    if(!is.na(encoding <- out["Encoding"])) {
        ## could convert everything (valid) to UTF-8
        if(encoding == "UTF-8") {
            Encoding(out) <- "UTF-8"
            ind <- validUTF8(out)
            if(!all(ind)) {
                pos <- which(!ind)
                ## Be as nice as for the other cases ...
                ## Could also throw an error along the lines of
                ##   stop(sprintf(ngettext(length(pos),
                ##                         "field %s is not valid UTF-8",
                ##                         "fields %s are not valid UTF-8"),
                ##                paste(sQuote(names(out)[pos]),
                ##                             collapse = ", ")),
                ##        call. = FALSE, domain = NA)
                out[pos] <-
                    iconv(out[pos], "UTF-8", "UTF-8", sub = "byte")
            }
        }
        else if(encoding == "latin1")
            Encoding(out) <- "latin1"
        else
            out <- iconv(out, encoding, "", sub = "byte")
    }
    out
}

.write_description <-
function(x, dfile)
{
    ## Write package metadata 'x' (a named character vector as returned
    ## by .read_description()) to DESCRIPTION file 'dfile'.
    ## Invert how .read_description() handles package encodings.
    if(!is.na(encoding <- x["Encoding"])) {
        ## For UTF-8 or latin1 encodings, .read_description() would
        ## simply have marked the encoding.  But we might have added
        ## fields encoded differently ...
        ind <- is.na(match(Encoding(x), c(encoding, "unknown")))
        if(any(ind))
            x[ind] <- mapply(iconv, x[ind], Encoding(x)[ind], encoding,
                             sub = "byte")
    } else {
        ## If there is no declared encoding, we cannot have non-ASCII
        ## content.
        ## Cf. tools::showNonASCII():
        asc <- iconv(x, "latin1", "ASCII")
        ## fields might have been NA to start with, so use identical.
        if(!identical(asc, x)) {
            warning("Unknown encoding with non-ASCII data: converting to ASCII")
	    ind <- is.na(asc) | (asc != x)
            x[ind] <- iconv(x[ind], "latin1", "ASCII", sub = "byte")
        }
    }
    ## Avoid folding for fields where we keep whitespace when reading,
    ## plus two more fields where legacy code does not strip whitespace
    ## and so we should not wrap.
    ## Unfortunately, wrapping may destroy declared encodings: for the
    ## fields where we do not keep whitespace, write.dcf() calls
    ## formatDL() which in turn calls paste() on the results of
    ## strwrap(), and paste() may change the (common) encoding.
    ## In particular, pasting a latin1 string comes out in UTF-8 in a
    ## UTF-8 locale, and with unknown encoding in a C locale.
    ## Hence, when we have a declared non-UTF-8 encoding, we convert
    ## to UTF-8 before formatting, and convert back to the declared
    ## encoding when writing out.
    if(!is.na(encoding) && (encoding != "UTF-8")) {
        x <- iconv(x, from = encoding, to = "UTF-8")
        tfile <- tempfile()
        write.dcf(rbind(x), tfile,
                  keep.white = c(.keep_white_description_fields,
                                 "Maintainer", "BugReports"),
                  useBytes = TRUE)
        writeLines(iconv(readLines(tfile),
                         from = "UTF-8", to = encoding),
                   dfile, useBytes = TRUE)
    } else {
        write.dcf(rbind(x), dfile,
                  keep.white = c(.keep_white_description_fields,
                                 "Maintainer", "BugReports"),
                  useBytes = TRUE)
    }
}

.expand_package_description_db_R_fields <-
function(x)
{
    ## Compute Author and Maintainer fields from an Authors@R field for
    ## package metadata 'x' (a named character vector) where these are
    ## missing, and return the (possibly empty) named character vector
    ## with the generated fields.
    enc <- x["Encoding"]
    y <- character()
    if(!is.na(aar <- x["Authors@R"])) {
        aar <- utils:::.read_authors_at_R_field(aar)
        ## Compare values only: 'enc' carries the name "Encoding" from
        ## the subscripting, so comparing it with identical() against
        ## the unnamed string "latin1" could never be TRUE (and the
        ## latin1 re-encoding below would never happen).
        lat <- identical(unname(enc), "latin1")
        if(is.na(x["Author"])) {
            tmp <- utils:::.format_authors_at_R_field_for_author(aar)
            if(lat) tmp <- .enc2latin1(tmp)
            y["Author"] <- tmp
        }
        if(is.na(x["Maintainer"])) {
            tmp <- utils:::.format_authors_at_R_field_for_maintainer(aar)
            if(lat) tmp <- .enc2latin1(tmp)
            y["Maintainer"] <- tmp
        }
    }
    y
}

### ** .replace_chars_by_hex_subs

.replace_chars_by_hex_subs <-
function(x, re) {
    ## Replace every character in the strings 'x' matching the (perl)
    ## regexp 're' by its hex byte representation "<xx>" (one <xx> per
    ## byte of the character).
    hex_sub <- function(ch)
        paste0("<", charToRaw(ch), ">", collapse = "")
    one <- function(chars) {
        hits <- grep(re, chars, perl = TRUE)
        if(length(hits))
            chars[hits] <- vapply(chars[hits], hex_sub, "")
        paste(chars, collapse = "")
    }
    vapply(strsplit(x, ""), one, "")
}

### ** .source_assignments

.source_assignments <-
function(file, envir, enc = NA)
{
    ## Read and parse expressions from @code{file}, and then
    ## successively evaluate the top-level assignments in @code{envir}.
    ## Apart from only dealing with assignments, basically does the same
    ## as @code{sys.source(file, envir, keep.source = FALSE)}.
    ## Errors during evaluation of individual assignments are caught
    ## and ignored; 'enc', when given and the locale can represent it,
    ## is used as the input encoding.
    oop <- options(topLevelEnvironment = envir, keep.source = FALSE)
    on.exit(options(oop))

### <FIXME> for S4, setClass() .. are assignments, but must be called
    ##         with correct 'where = envir'!
    ## Possible solution: modified versions of these functions with changed
    ##                    'where = ...' (default arg) in formals(.)
    ## stopifnot(require(methods, quietly=TRUE))
    ## assignmentSymbols <- c(c("<-", "="),
    ##                        ls(pattern = "^set[A-Z]", pos = "package:methods"))
    assignmentSymbols <- c("<-", "=")
### </FIXME>
    ## Use an encoding-aware connection when possible (and make sure it
    ## gets closed again); otherwise parse the file path directly.
    con <- if(!is.na(enc) &&
              (Sys.getlocale("LC_CTYPE") %notin% c("C", "POSIX"))) {
               on.exit(close(con), add = TRUE)
               file(file, encoding = enc)
           } else file
    exprs <- parse(n = -1L, file = con)
    ## Drop empty expressions.
    exprs <- exprs[lengths(exprs) > 0L]
    for(e in exprs) {
	if(is.call(e) &&
           as.character(e[[1L]])[1L] %in% assignmentSymbols)
            tryCatch(eval(e, envir), error = identity)
    }
    invisible()
}

### .source_assignments_in_code_dir

.source_assignments_in_code_dir <-
function(dir, envir, meta = character())
{
    ## Combine all code files in @code{dir}, read and parse expressions,
    ## and successively evaluate the top-level assignments in @code{envir}.
    con <- tempfile("Rcode")
    on.exit(unlink(con))
    if(!file.create(con))
        stop("unable to create ", con)
    ## If the (DESCRIPTION) metadata contain a Collate specification,
    ## use this for determining the code files and their order.
    ## An OS-specific Collate field takes precedence over plain Collate.
    txt <- meta[c(paste0("Collate.", .OStype()), "Collate")]
    ind <- which(!is.na(txt))
    files <- if(any(ind))
        Filter(function(x) file_test("-f", x),
               file.path(dir, .read_collate_field(txt[ind[1L]])))
    else
        list_files_with_type(dir, "code")
    if(!all(.file_append_ensuring_LFs(con, files)))
        stop("unable to write code files")
    ## Make the package name available in the evaluation environment.
    if(!is.na(package <- meta["Package"]))
        envir$.packageName <- package
    tryCatch(.source_assignments(con, envir, enc = meta["Encoding"]),
             error = function(e)
                 stop("cannot source package code:\n", conditionMessage(e),
                      call. = FALSE))
}

### ** .split_dependencies

.split_dependencies <-
function(x)
{
    ## Given one or more Depends:/Suggests:-style fields from a
    ## DESCRIPTION file, return a named list of lists with components
    ## name and, where present, op and version.
    if(!length(x)) return(list())
    deps <- unlist(strsplit(x, ","))
    ## Some fields have had space before the ',':
    deps <- sub('[[:space:]]+$', '', deps)
    ## Strip leading whitespace and drop duplicates.
    deps <- unique(sub("^[[:space:]]*(.*)", "\\1", deps))
    ## Name each entry by its package name prefix.
    names(deps) <- sub("^([[:alnum:].]+).*$", "\\1", deps)
    lapply(deps, .split_op_version)
}

### ** .split_op_version

.split_op_version <-
function(x)
{
    ## Split a single dependency such as "foo (>= 1.2)" into a list
    ## with components name and, when a version qualifier is present,
    ## op and version.  NB: relies on trailing space having been
    ## removed already.
    full_pat <- "^([^\\([:space:]]+)[[:space:]]*\\(([^\\)]+)\\).*"
    dep_name <- sub(full_pat, "\\1", x)
    spec <- sub(full_pat, "\\2", x)
    if(spec == dep_name)
        return(list(name = dep_name))
    op_pat <- "[[:space:]]*([[<>=!]+)[[:space:]]+(.*)"
    version <- sub(op_pat, "\\2", spec)
    ## Version specs starting with "r" (svn revisions) stay strings.
    if (!startsWith(version, "r"))
        version <- package_version(version)
    list(name = dep_name, op = sub(op_pat, "\\1", spec),
         version = version)
}

### ** .system_with_capture

.system_with_capture <-
function(command, args = character(), env = character(),
         stdin = "", input = NULL, timeout = 0)
{
    ## Invoke a system command, capturing its exit status, stdout and
    ## stderr as separate components of the returned list.
    fout <- tempfile("xshell")
    ferr <- tempfile("xshell")
    on.exit(unlink(c(fout, ferr)))
    status <- system2(command, args, env = env,
                      stdout = fout, stderr = ferr,
                      stdin = stdin, input = input,
                      timeout = timeout)
    list(status = status,
         stdout = readLines(fout, warn = FALSE),
         stderr = readLines(ferr, warn = FALSE))
}

### ** .trim_common_leading_whitespace

.trim_common_leading_whitespace <-
function(x)
{
    ## Remove the longest common leading whitespace from the elements
    ## of the character vector 'x', expanding leading tabs to spaces
    ## using 8-column tab stops.  If any element has no leading
    ## whitespace, 'x' is returned unchanged.
    y <- sub("^([ \t]*).*", "\\1", x)
    n <- nchar(y)
    if(any(n == 0))
        return(x)
    i <- grep("\t", y, fixed = TRUE)
    if(length(i)) {
        ## Need to convert tabs to spaces.
        ## Ideally nchar(y, "width") would do things for us ...
        wids <- vapply(strsplit(y[i], ""),
                       function(e) {
                           p <- which(e == "\t")
                           d <- diff(c(0, p))
                           ## A segment of d - 1 non-tab characters
                           ## followed by a tab advances to the next
                           ## multiple of 8, i.e. has display width
                           ## 8 * ceiling(d / 8).  (The previous formula
                           ## d + 8 - d %% 8 overshot by 8 whenever d
                           ## was itself a multiple of 8.)  Spaces after
                           ## the last tab contribute their own width.
                           sum(8 * ceiling(d / 8)) + length(e) -
                               p[length(p)]
                       },
                       0)
        x[i] <- paste0(strrep(" ", wids), substring(x[i], n[i] + 1L))
        n[i] <- wids
    }
    substring(x, min(n) + 1L)
}

### ** .try_quietly

.try_quietly <-
function(expr)
{
    ## Try to run an expression, suppressing all 'output'.  In case of
    ## failure, stop with the error message and a "traceback" ...

    oop <- options(warn = 1)
    on.exit(options(oop))
    outConn <- file(open = "w+")         # anonymous tempfile
    ## Divert both regular output and messages into the anonymous file.
    sink(outConn, type = "output")
    sink(outConn, type = "message")
    ## On error, the calling handler transfers the condition and the
    ## call stack to the "grmbl" restart, whose handler rethrows the
    ## error with the deparsed call sequence appended; 'finally'
    ## removes the output diversions in any case.
    yy <- tryCatch(withRestarts(withCallingHandlers(expr, error = {
        function(e) invokeRestart("grmbl", e, sys.calls())
    }),
                                grmbl = function(e, calls) {
                                    n <- length(sys.calls())
                                    ## Chop things off as needed ...
                                    calls <- calls[-seq.int(length.out = n - 1L)]
                                    calls <- rev(calls)[-c(1L, 2L)]
                                    tb <- lapply(calls, deparse)
                                    stop(conditionMessage(e),
                                         "\nCall sequence:\n",
                                         paste(.eval_with_capture(traceback(tb))$output,
                                               collapse = "\n"),
                                         call. = FALSE)
                                }),
                   error = identity,
                   finally = {
                       sink(type = "message")
                       sink(type = "output")
                       close(outConn)
                   })
    ## Re-signal an error caught above (now including the call
    ## sequence in its message).
    if(inherits(yy, "error"))
        stop(yy)
    yy
}

### ** .unpacked_source_repository_apply

.unpacked_source_repository_apply <-
function(dir, FUN, ..., pattern = NULL, verbose = FALSE,
         Ncpus = getOption("Ncpus", 1L))
{
    ## Apply FUN (with extra '...' arguments) to each package source
    ## directory directly under 'dir' (identified by the presence of a
    ## DESCRIPTION file), optionally restricted to package directory
    ## names matching 'pattern' and parallelized over 'Ncpus'.
    dir <- file_path_as_absolute(dir)

    dfiles <- Sys.glob(file.path(dir, "*", "DESCRIPTION"))
    if(!is.null(pattern))
        dfiles <- dfiles[grepl(pattern, basename(dirname(dfiles)))]
    paths <- dirname(dfiles)

    ## Keep in sync with .package_apply().
    ## <FIXME>
    ## Should we really catch errors?
    worker <- function(path) {
        if(verbose)
            message(sprintf("processing %s", basename(path)))
        tryCatch(FUN(path, ...), error = identity)
    }
    ## </FIXME>

    ## Would be good to have a common wrapper ...
    if(Ncpus > 1L) {
        if(.Platform$OS.type == "windows") {
            cl <- parallel::makeCluster(Ncpus)
            args <- list(FUN, ...)      # Eval promises.
            results <- parallel::parLapply(cl, paths, worker)
            parallel::stopCluster(cl)
        } else {
            results <- parallel::mclapply(paths, worker,
                                          mc.cores = Ncpus)
        }
    } else {
        results <- lapply(paths, worker)
    }

    names(results) <- basename(paths)
    results
}

### ** .wrong_args

.wrong_args <-
function(args, msg)
{
    ## Format a message fragment of the form
    ##   argument 'a' <msg>            (one argument)
    ##   arguments 'a', ..., and 'b' <msg>   (several arguments)
    ## returning character() when 'args' is empty.
    k <- length(args)
    if(k == 0L)
        return(character())
    if(k == 1L)
        return(paste("argument", sQuote(args), msg))
    listed <- paste0(c(rep.int("", k - 1L), "and "),
                     sQuote(args),
                     c(rep.int(", ", k - 1L), ""),
                     collapse = "")
    paste("arguments", listed, msg)
}

### * Miscellania

### ** R

R <-
function(fun, args = list(), opts = "--no-save --no-restore",
         env = character(), arch = "", drop = TRUE, timeout = 0)
{
    ## Run function 'fun' with arguments 'args' in a child ("inferior")
    ## R process started with command line options 'opts' and
    ## environment settings 'env', and return its value (or, with
    ## drop = FALSE, a list also containing the captured process
    ## output).  Errors in the child are re-signalled here as classed
    ## "inferiorCallError" conditions.
    .safe_repositories <- function() {
        x <- getOption("repos")
        y <- .get_standard_repository_URLs()
        i <- which(names(x) == "CRAN")[1L]
        if(is.na(i) || x[i] == "@CRAN@")
            x[i] <- y["CRAN"]
        c(x, y[match(names(y), names(x), 0L) == 0L])
    }

    ## escape issue if we use backslashes in paths, hence convert to "/"
    tfi <- normalizePath(tempfile("runri"), winslash="/", mustWork=FALSE)
    tfo <- normalizePath(tempfile("runro"), winslash="/", mustWork=FALSE)

    ## Code run by the child process: read the call from 'tfi',
    ## evaluate it, and serialize the result (or error condition)
    ## to 'tfo'.
    wrk <- c(sprintf("x <- readRDS(\"%s\")", tfi),
             "options(repos = x$repos)",
             ## need quote = TRUE in case some of args are not self-evaluating
             ## could catch other conditions also
             "y <- tryCatch(list(do.call(x$fun, x$args, quote = TRUE)), error = identity)",
             sprintf("saveRDS(y, \"%s\")", tfo))
    saveRDS(list(fun = fun, args = args, repos = .safe_repositories()),
            tfi)
    cmd <- if(.Platform$OS.type == "windows") {
               if(nzchar(arch))
                   ## R.home("bin") might be better, but Windows
                   ## installation is monolithic
                   file.path(R.home(), "bin", arch, "Rterm.exe")
               else
                   file.path(R.home("bin"), "Rterm.exe")
           } else {
               if(nzchar(arch))
                   opts <- c(paste0("--arch=", arch), opts)
               file.path(R.home("bin"), "R")
           }
    res <- .system_with_capture(cmd, opts, env, input = wrk,
                                timeout = timeout)
    ## FIXME: what should the "value" be in case of error?
    if(file.exists(tfo)) {
        val <- readRDS(tfo)
        if (inherits(val, "condition")) {
            ## maybe wrap in a classed error and include some of res
            msg <- paste0("error in inferior call:\n  ", conditionMessage(val))
            stop(do.call(errorCondition,
                         c(list(message = msg, 
                                class = "inferiorCallError",
                                value = val),
                           res)))
        }
        else {
            val <- val[[1L]]
            if(drop)
                val
            else
                c(list(value = val), res)
        }
    }
    else
        ## again maybe wrap in a classed error  and include some of res
        ## might want to distinguish two errors by sub-classes
        ## NOTE(review): unlike the error branch above, 'res' is passed
        ## here as a single named element ("res = res") rather than
        ## spliced into the condition fields -- confirm this asymmetry
        ## is intended.
        stop(do.call(errorCondition,
                     c(list(message = "inferior call failed",
                            class = "inferiorCallError"),
                       res = res)))
}

### ** Rcmd

Rcmd <-
function(args, ...)
{
    ## Invoke "R CMD" with the given arguments via system2(), using
    ## Rcmd.exe on Windows and "R CMD" elsewhere.  Extra arguments
    ## '...' are passed on to system2().
    if(.Platform$OS.type != "windows")
        system2(file.path(R.home("bin"), "R"), c("CMD", args), ...)
    else
        system2(file.path(R.home("bin"), "Rcmd.exe"), args, ...)
}

### ** Sys.setenv1

##' Sys.setenv() *one* variable unless it's set (to non-empty) already - export/move to base?
##' Sys.setenv() *one* variable unless it's set (to non-empty) already - export/move to base?
Sys.setenv1 <- function(var, value) {
    current <- Sys.getenv(var)
    if(current == "")
        .Internal(Sys.setenv(var, as.character(value)))
}

### ** pskill

## Send signal 'signal' (default SIGTERM) to the processes with ids
## 'pid'; the result of the C-level call is returned invisibly.
pskill <-
function(pid, signal = SIGTERM)
    invisible(.Call(C_ps_kill, pid, signal))

### ** psnice

## Query (value = NA, the default) or set the scheduling priority of
## the processes with ids 'pid'.  When querying, the result is
## returned visibly; when setting, invisibly.
psnice <-
function(pid = Sys.getpid(), value = NA_integer_)
{
    res <- .Call(C_ps_priority, pid, value)
    if(is.na(value)) res else invisible(res)
}

### ** toTitleCase

## original version based on http://daringfireball.net/2008/05/title_case
## but much altered before release.
toTitleCase <-
function(text)
{
    ## Convert the strings in 'text' (e.g., package titles) to title
    ## case: words get capitalized, except listed short words (articles,
    ## prepositions, ...) in non-initial position, single characters,
    ## words already containing capitals, and known special terms.
    ## leave these alone: the internal caps rule would do that
    ## in some cases.  We could insist on this exact capitalization.
    alone <- c("2D", "3D", "AIC", "BayesX", "GoF", "HTML", "LaTeX",
               "MonetDB", "OpenBUGS", "TeX", "U.S.", "U.S.A.", "WinBUGS",
               "aka", "et", "al.", "ggplot2", "i.e.", "jar", "jars",
               "ncdf", "netCDF", "rgl", "rpart", "xls", "xlsx")
    ## These should be lower case except at the beginning (and after :)
    lpat <- "^(a|an|and|are|as|at|be|but|by|en|for|if|in|is|nor|not|of|on|or|per|so|the|to|v[.]?|via|vs[.]?|from|into|than|that|with)$"
    ## These we don't care about
    either <- c("all", "above", "after", "along", "also", "among",
                "any", "both", "can", "few", "it", "less", "log",
                "many", "may", "more", "over", "some", "their",
                "then", "this", "under", "until", "using", "von",
                "when", "where", "which", "will", "without",
                "yet", "you", "your")
    ## Title-case a single string.
    titleCase1 <- function(x) {
        ## A quote might be prepended.
        ## Capitalize the first (or, after a leading quote, the second)
        ## character of a word, lower-casing the rest.
        do1 <- function(x) {
            x1 <- substr(x, 1L, 1L)
            if(nchar(x) >= 3L && x1 %in% c("'", '"'))
                paste0(x1, toupper(substr(x, 2L, 2L)),
                       tolower(substring(x, 3L)))
            else paste0(toupper(x1), tolower(substring(x, 2L)))
        }
        if(is.na(x)) return(NA_character_)
        ## Split into words and single-character separators (which are
        ## kept as elements of the result).
        xx <- .Call(C_splitString, x, ' -/"()\n\t')
        ## for 'alone' we could insist on that exact capitalization
        alone <- xx %in% c(alone, either)
        alone <- alone | grepl("^'.*'$", xx)
        havecaps <- grepl("^[[:alpha:]].*[[:upper:]]+", xx)
        ## Words to force to lower case (never the first word).
        l <- grepl(lpat, xx, ignore.case = TRUE)
        l[1L] <- FALSE
        ## do not remove capitalization immediately after ": " or "- "
        ind <- grep("[-:]$", xx); ind <- ind[ind + 2L <= length(l)]
        ind <- ind[(xx[ind + 1L] == " ") & grepl("^['[:alnum:]]", xx[ind + 2L])]
        # don't capitalize lpat words after hyphenation
        ind <- ind[!(xx[ind] == "-" & grepl(lpat, xx[ind + 2L]))]
        l[ind + 2L] <- FALSE
        ## Also after " (e.g. "A Book Title")
        ind <- which(xx == '"'); ind <- ind[ind + 1L <= length(l)]
        l[ind + 1L] <- FALSE
        xx[l] <- tolower(xx[l])
        ## Everything not kept as-is gets re-capitalized via do1().
        keep <- havecaps | l | (nchar(xx) == 1L) | alone
        xx[!keep] <- vapply(xx[!keep], do1, "<chr>")
        paste(xx, collapse = "")
    }
    if(typeof(text) != "character")
        stop("'text' must be a character vector")
    vapply(text, titleCase1, "<chr>", USE.NAMES = FALSE)
}

### ** path_and_libPath

##' Typically the union of R_LIBS and current .libPaths(); may differ e.g. via R_PROFILE
path_and_libPath <-
function(...)
{
    ## Build a path-separator-collapsed union of the entries from
    ## env_path(...) and the current .libPaths() minus its final
    ## (system library) entry, normalized and de-duplicated.
    libs <- .libPaths()
    candidates <- c(strsplit(env_path(...), .Platform$path.sep, fixed = TRUE)[[1L]],
                    libs[-length(libs)])
    ## don't call normalizePath on paths which do not exist: allowed in R_LIBS!
    candidates <- candidates[dir.exists(candidates)]
    paste(unique(normalizePath(candidates)), collapse = .Platform$path.sep)
}

### ** str_parse_logic

##' @param otherwise: can be call, such as quote(errmesg(...))
str_parse_logic <-
function(ch, default = TRUE, otherwise = default, n = 1L)
{
    ## Parse 'ch' as a logical flag: NA means "unset" and yields
    ## 'default'; otherwise "1"/"yes"/"true" and "0"/"no"/"false"
    ## (case-insensitively) map to TRUE/FALSE.  Anything else evaluates
    ## 'otherwise' (which may be an unevaluated call, e.g.
    ## quote(errmsg(...))) n frames up via eval.parent().
    if (is.na(ch)) {
        default
    } else {
        key <- tolower(ch)
        if (key %in% c("1", "yes", "true"))
            TRUE
        else if (key %in% c("0", "no", "false"))
            FALSE
        else
            eval.parent(otherwise, n = n)
    }
}

### ** str_parse

str_parse <-
function(ch, default = TRUE, logical = TRUE, otherwise = default, n = 2L)
{
    ## In logical mode, delegate to str_parse_logic() (n = 2 so that
    ## 'otherwise' is evaluated in str_parse()'s caller).  Otherwise
    ## return the string itself, mapping NA to 'default'.
    if (logical)
        return(str_parse_logic(ch, default = default,
                               otherwise = otherwise, n = n))
    if (is.na(ch))
        default
    else
        ch
}

### ** namespace_loads_from_file_load

##' Determine which package namespaces are loaded as a *direct*
##' consequence of load()ing the serialized data file 'f'.
##'
##' The check runs in a separate R process (via R(), a sibling helper in
##' this file) so the current session's already-loaded namespaces do not
##' interfere.  base::loadNamespace() is trace()d; the tracer records
##' each loaded package together with sys.nframe() at load time, and
##' only the packages recorded at the minimal frame depth -- i.e. loads
##' triggered directly by load(), not transitively by other namespace
##' loads -- are returned.
namespace_loads_from_file_load <-
function(f, verbose = FALSE)
{
    if(verbose) message(sprintf("processing %s", f))

    fun <- local({
        ## Return a closure that accumulates (package, nframe) pairs in
        ## its enclosing environment; results are read back later via
        ## environment(tracer).
        make_namespace_load_tracer <- function() {
            local({
                .packages <- character()
                .nframes <- integer()
                function(p, n) {
                    .packages <<- c(.packages, p)
                    .nframes <<- c(.nframes, n)
                }
            })
        }
        ## Evaluate 'expr' (lazily, below) with loadNamespace traced.
        ## The trace callback runs inside loadNamespace's frame, so it
        ## reaches the tracer via dynGet() on this local variable.
        trace_namespace_loads <- function(expr, tracer) {
            ..namespace_load_tracer <- tracer
            suppressMessages({
                trace(base::loadNamespace,
                      function() {
                          pkg <- as.character(parent.frame()$package)
                          dynGet("..namespace_load_tracer")(pkg[[1L]],
                              sys.nframe())
                      },
                      print = FALSE)
            })
            on.exit(suppressMessages(untrace(base::loadNamespace)))
            expr  # force the promise: the load() runs under the trace
        }
        function(file) {
            tracer <- make_namespace_load_tracer()
            tmpenv <- new.env()
            trace_namespace_loads(load(file, tmpenv), tracer)
            ## Direct loads are those recorded at the smallest depth.
            with(environment(tracer),
                 .packages[.nframes == min(.nframes)])
        }
    })

    R(fun, list(f))
}

### Local variables: ***
### mode: outline-minor ***
### outline-regexp: "### [*]+" ***
### End: ***
#  File src/library/tools/R/Vignettes.R
#  Part of the R package, https://www.R-project.org
#
#  Copyright (C) 1995-2025 The R Core Team
#
#  This program is free software; you can redistribute it and/or modify
#  it under the terms of the GNU General Public License as published by
#  the Free Software Foundation; either version 2 of the License, or
#  (at your option) any later version.
#
#  This program is distributed in the hope that it will be useful,
#  but WITHOUT ANY WARRANTY; without even the implied warranty of
#  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
#  GNU General Public License for more details.
#
#  A copy of the GNU General Public License is available at
#  https://www.R-project.org/Licenses/

vignette_is_tex <- function(file, ...) {
    ## TRUE for files whose extension is ".tex", case-insensitively.
    ## Vectorized over 'file'; '...' is accepted and ignored.
    grepl("\\.tex$", file, ignore.case = TRUE)
}

# Infers the vignette type (PDF or HTML) from the filename of the
# final vignette product.
vignette_type <- function(file) {
    ## Map the product's file extension (case-insensitively) to its
    ## vignette type, "PDF" or "HTML"; error for anything else.
    known <- c(pdf = "PDF", html = "HTML")
    ext <- tolower(file_ext(file))
    type <- known[ext]
    if (is.na(type))
        ## List the *known* extensions in the message.  (Subscripting
        ## 'known' with an unknown extension yields an NA name, so the
        ## previous names(type) here printed "NA" instead of the list.)
        stop(gettextf("Vignette product %s does not have a known filename extension (%s)",
                      sQuote(file), paste(sQuote(names(known)), collapse=", ")),
             domain = NA)
    unname(type)
}

# Locates the vignette weave, tangle and texi2pdf product(s) based on the
# vignette name.   All such products must have the same name as their filename
# prefix (i.e. "^<name>").
# For weave, final = TRUE will look for <name>.pdf and <name>.html, whereas
# with final = FALSE it also looks for <name>.tex (if <name>.pdf is also
# found, it will be returned).  For tangle, main = TRUE will look for <name>.R,
# whereas main = FALSE will look for <name><anything>*.R.
# For texi2pdf, <name>.pdf is located.
## Locate the product file(s) of a vignette processing step in 'dir'.
## 'by' selects the step: "weave" (pdf/html, plus tex unless final),
## "tangle" (r/s) or "texi2pdf" (pdf); see the comment block above for
## the 'final' and 'main' semantics.  Returns the located path(s)
## (relative for dir == "."), or NULL when nothing matched (which the
## error checks below only allow for by = "tangle").
find_vignette_product <-
    function(name, by = c("weave", "tangle", "texi2pdf"),
             final = FALSE, main = TRUE, dir = ".", engine, ...)
{
    stopifnot(length(name) == 1L, dir.exists(dir))
    by <- match.arg(by)
    exts <- ## (lower case here):
        switch(by,
               "weave" = if (final) c("pdf", "html") else c("pdf", "html", "tex"),
               "tangle" = c("r", "s"),
               "texi2pdf" = "pdf")

    ## Accept all-lower or all-upper case extensions (not mixed case).
    exts <- c(exts, toupper(exts))
    pattern1 <- sprintf("^%s[.](%s)$", name, paste(exts, collapse = "|"))
    ## Candidate set: regular files in 'dir'.
    output0 <- list.files(path = dir, no.. = TRUE)
    output0 <- output0[file_test("-f", file.path(dir, output0))]
    output <- grep(pattern1, output0, value = TRUE)
    # If main is FALSE, we want to find all other files with related
    # names.  We make sure that the main file is in position 1.
    # FIXME: we should check a timestamp or something to see that
    #	      these were produced by tangling for the "name" vignette,
    #	      they aren't just coincidentally similar names.
    if (!main) {
        pattern2 <- sprintf("^%s.*[.](%s)$", name, paste(exts, collapse = "|"))
        output2 <- grep(pattern2, output0, value = TRUE)
        output <- c(output, setdiff(output2, output))
    }
    ## Error-message helper: quoted file names with their sizes.
    fmt_file_sizes <- function(files)
        paste(sprintf("%s (%g bytes)", sQuote(files), file.size(file.path(dir, files))),
              collapse=", ")
    if (by == "weave") {
        if (length(output) == 0L)
            stop(gettextf("Failed to locate %s output file %s or %s for vignette with name %s and engine %s. The following files exist in working directory %s: %s",
                          sQuote(by),
                          sQuote(paste0(name, ".pdf")), sQuote(paste0(name, ".html")),
                          sQuote(name),
                          sQuote(sprintf("%s::%s", engine$package, engine$name)),
                          sQuote(normalizePath(dir)),
                          fmt_file_sizes(output0)),
                 domain = NA)
        ## At most two products (tex + pdf) may coexist; 'final' demands
        ## exactly one.
        if (length(output) > 2L || (final && length(output) > 1L))
            stop(gettextf("Located more than one %s output file (by engine %s) for vignette with name %s: %s", sQuote(by),
                          sQuote(sprintf("%s::%s", engine$package, engine$name)),
                          sQuote(name),
                          fmt_file_sizes(output)),
                 domain  = NA)
	# If weave produced a TeX and then a PDF without cleaning out
	# the TeX, consider the newer one (PDF wins a tie) as the weave product
        if (length(output) == 2L) {
            ## Order by position in 'exts' so pdf sorts before tex.
            idxs <- match(tolower(file_ext(output)), exts)
            output <- output[order(idxs)]
            if (file_test("-nt", output[2L], output[1L])) output <- output[2L]
            else output <- output[1L]
        }
    } else if (by == "tangle") {
        if (main)
            stopifnot(length(output) <= 1L)
    } else if (by == "texi2pdf") {
        if (length(output) == 0L)
            stop(gettextf("Failed to locate %s output file %s for vignette with name %s and engine %s. The following files exist in working directory %s: %s",
                          sQuote(by),
                          sQuote(paste0(name, ".pdf")),
                          sQuote(name),
                          sQuote(sprintf("%s::%s", engine$package, engine$name)),
                          sQuote(normalizePath(dir)),
                          fmt_file_sizes(output0)),
                 domain = NA)
        if (length(output) > 1L)
            stop(gettextf("Located more than one %s output file (by engine %s) for vignette with name %s: %s",
                          sQuote(by),
                          sQuote(sprintf("%s::%s", engine$package, engine$name)),
                          sQuote(name),
                          fmt_file_sizes(output)),
                 domain = NA)
    }

    ## return :
    if (length(output) > 0L) {
        if (dir == ".")
            basename(output)
        else
            file.path(dir, output)
    } ## else NULL
}



### * checkVignettes
###
### Run a tangle+source and a weave on all vignettes of a package.

## Run tangle (and source the tangled code) plus weave (and optionally
## latex) on all vignettes of a package, collecting per-stage errors.
## Returns an object of class "checkVignettes" -- a list with components
## tangle/weave/source/latex, each a named list of error messages -- or
## NULL when the package has no vignettes.
checkVignettes <-
function(package, dir, lib.loc = NULL,
         tangle = TRUE, weave = TRUE, latex = FALSE,
         workdir = c("tmp", "src", "cur"),
         keepfiles = FALSE)
{
    vigns <- pkgVignettes(package = package, dir = dir, lib.loc = lib.loc)
    if(is.null(vigns)) return(NULL)

    workdir <- match.arg(workdir)
    wd <- getwd()
    if (is.null(wd))
        stop("current working directory cannot be ascertained")
    if(workdir == "tmp") {
        tmpd <- tempfile("Sweave")   ## <= Rename?
        if(!dir.create(tmpd))
            stop(gettextf("unable to create temp directory %s ", sQuote(tmpd)),
                 domain = NA)
        setwd(tmpd)
    }
    else {
        ## Working in "src" or "cur": never delete the user's files.
        keepfiles <- TRUE
        if(workdir == "src") setwd(vigns$dir)
    }

    on.exit({
        setwd(wd)
        ## 'tmpd' only exists for workdir == "tmp"; in the other cases
        ## keepfiles was forced TRUE above, so unlink() is not reached.
        if(!keepfiles) unlink(tmpd, recursive = TRUE)
    })

    ## Timestamp marker used below to tell tangle products apart from
    ## pre-existing files.
    file.create(".check.timestamp")
    result <- list(tangle = list(), weave = list(),
                   source = list(), latex = list())

    ## pkgVignettes has already done this
    loadVignetteBuilder(vigns$pkgdir)

    startdir <- getwd()
    for(i in seq_along(vigns$docs)) {
        path <- vigns$docs[i]
        file <- basename(path)
        name <- vigns$names[i]
        engine <- vignetteEngine(vigns$engines[i])
        enc <- vigns$encodings[i]
        if (enc == "non-ASCII")
            stop(gettextf("Vignette '%s' is non-ASCII but has no declared encoding", name),
                 domain = NA)
        ## Each stage stores either the located product path(s) or the
        ## error condition; errors are turned into messages later.
        if(tangle) {
            message("  Running ", sQuote(file))
            .eval_with_capture({
                result$tangle[[file]] <- tryCatch({
                    engine$tangle(path, quiet = TRUE, encoding = enc)
                    setwd(startdir) # in case a vignette changes the working dir
                    find_vignette_product(name, by = "tangle", main = FALSE, engine = engine)
                }, error = identity)
            })
        }
        if(weave) {
            setwd(startdir) # in case a vignette changes the working dir then errored out
            .eval_with_capture({
                result$weave[[file]] <- tryCatch({
                    engine$weave(path, quiet = TRUE, encoding = enc)
                    setwd(startdir)
                    find_vignette_product(name, by = "weave", engine = engine)
                }, error = identity)
            })
        }
        setwd(startdir) # in case a vignette changes the working dir then errored out
    }

    # Assert that output files were not overwritten
    ## Pairwise comparison of each vignette's products against every
    ## later vignette's products for the same stage.
    for (name in c("weave", "tangle")) {
        resultsT <- result[[name]]
        if (length(resultsT) <= 1L)
            next

        for (i in 1L:(length(resultsT)-1L)) {
            outputsI <- resultsT[[i]]
            if (inherits(outputsI, "error"))
                next;
            outputsI <- normalizePath(outputsI)

            for (j in (i+1L):length(resultsT)) {
                 outputsJ <- resultsT[[j]]
                 if (inherits(outputsJ, "error"))
                     next;
                 outputsJ <- normalizePath(outputsJ)
                 bad <- intersect(outputsJ, outputsI)
                 if (length(bad) > 0L) {
                     stop(gettextf("Vignette %s overwrites the following %s output by vignette %s: %s",
                                   sQuote(basename(names(resultsT)[j])),
                                   sQuote(name),
                                   sQuote(basename(names(resultsT)[i])),
                                   paste(basename(bad), collapse=", ")),
                          domain = NA)
                 }
            }
        }
    }

    if(tangle) {
        ## Tangling can create several source files if splitting is on,
        ## and these can be .R or .S (at least).  However, there is
        ## no guarantee that running them in alphabetical order in a
        ## session will work -- with named chunks it normally will not.
        cwd <- getwd()
        if (is.null(cwd))
            stop("current working directory cannot be ascertained")
        for(i in seq_along(result$tangle)) {
            sources <- result$tangle[[i]]
            if (inherits(sources, "error"))
                next
            ## Only run files newer than the timestamp, i.e. those that
            ## were actually produced by tangling above.
            sources <- sources[file_test("-nt", sources, ".check.timestamp")]
            for(file in sources) {
                .eval_with_capture({
                    result$source[[file]] <- tryCatch({
                        source(file)
                    }, error = identity)
                })
                setwd(startdir)
            }
        }
    }

    if(weave && latex) {
        if("Makefile" %notin% list.files(vigns$dir)) {
            ## <NOTE>
            ## This used to run texi2pdf on *all* vignettes, including
            ## the ones already known from the above to give trouble.
            ## In addition, texi2pdf errors were not caught, so that in
            ## particular the results of the previous QC analysis were
            ## *not* returned in case of such errors ...
            ## Hence, let us
            ## * Only run texi2pdf() on previously unproblematic vignettes
            ## * Catch texi2pdf() errors similar to the above.
            ## * Do *not* immediately show texi2pdf() output as part of
            ##   running checkVignettes().
            ## (For the future, maybe keep this output and provide it as
            ## additional diagnostics ...)
            ## </NOTE>
            for (i in seq_along(result$weave)) {
                file <- names(result$weave)[i]
                output <- result$weave[[i]]
                if (inherits(output, "error"))
                    next
                if (!vignette_is_tex(output))
                    next
                ## Ensure that the vignette dir is in TEX/BIBINPUTS.
                ## This will often fail, however, when checking from an
                ## installed 'package', as bib files are usually not installed
                ## NOTE(review): 'engine' here is left over from the
                ## main loop above (the last vignette's engine) -- it is
                ## only passed through for error messages; confirm.
                .eval_with_capture({
                    result$latex[[file]] <- tryCatch({
                       texi2pdf(file = output, clean = FALSE, quiet = TRUE,
                                texinputs = vigns$dir)
                       find_vignette_product(file_path_sans_ext(output),
                                             by = "texi2pdf", engine = engine)
                    }, error = identity)
                })
            }
        }
    }

    # Cleanup results
    ## Keep only the stages that failed, reduced to their messages.
    for (name in c("tangle", "weave", "source", "latex")) {
        resultsT <- result[[name]]
        resultsT <- lapply(resultsT, FUN = function(res) {
          if (inherits(res, "error"))
              conditionMessage(res)
          else
              NULL
        })
        resultsT <- resultsT[!vapply(resultsT, is.null, NA)]
        result[[name]] <- resultsT
    }

    file.remove(".check.timestamp")
    class(result) <- "checkVignettes"
    result
}

format.checkVignettes <-
function(x, ...)
{
    ## Render one stage's errors: a blank-line-padded header followed by
    ## "File <name>:" and the message lines for each failing file.
    ## Empty stages yield NULL and so drop out of the final c().
    render <- function(errs, header) {
        if (!length(errs))
            return(NULL)
        body <- unlist(Map(c,
                           paste0("File ", names(errs), ":"),
                           lapply(errs, as.character)),
                       use.names = FALSE)
        paste(c(paste0("\n", header, "\n"), body), collapse = "\n")
    }
    c(character(),
      render(x$tangle, "*** Tangle Errors ***"),
      render(x$source, "*** Source Errors ***"),
      render(x$weave,  "*** Weave Errors ***"),
      render(x$latex,  "*** PDFLaTeX Errors ***"))
}

### get the engine from a file

getVignetteEngine <- function(filename, lines = readLines(filename, warn=FALSE)) {
    ## Extract the \VignetteEngine metadata from the vignette source,
    ## defaulting to "utils::Sweave" when none is declared.
    engine <- .get_vignette_metadata(lines, "Engine")
    if (length(engine)) engine[1L] else "utils::Sweave"
}

### * engineMatches
###
### does the engine from a vignette match one of the registered ones?
###
engineMatches <- function(regengine, vigengine) {
    ## Compare a registered engine name against the one declared in a
    ## vignette.  An unqualified vignette engine (no "pkg::" prefix) is
    ## matched against the registered name with its prefix stripped.
    qualified <- grepl("::", vigengine, fixed = TRUE)
    if (!qualified)
        regengine <- sub("^.*::", "", regengine)
    regengine == vigengine
}

### * pkgVignettes
###
### Get an object of class pkgVignettes which contains a list of
### vignette source files, the registered vignette engine for
### each of them, and the name of the directory which contains them.

### A vector of 'subdirs' is allowed for historical reasons but the
### first which exists is used.

## Collect the vignette sources of an installed package ('package') or a
## source directory ('dir'): for each vignette its path, name, engine,
## filename pattern and encoding, as an object of class "pkgVignettes".
## Returns NULL when there is no vignette directory.  With output = TRUE
## the woven products are located too; with source = TRUE the tangled
## sources.  With check = TRUE, files that look like vignettes but have
## no recognized engine are reported via the 'msg' component.
pkgVignettes <-
function(package, dir, subdirs = NULL, lib.loc = NULL, output = FALSE,
         source = FALSE, check = FALSE)
{
    ## Argument handling.
    if(!missing(package)) {
        if(length(package) != 1L)
            stop("argument 'package' must be of length 1")
        dir <- find.package(package, lib.loc)
    }
    if(missing(dir))
        stop("you must specify 'package' or 'dir'")
    ## Using sources from directory @code{dir} ...
    if(!dir.exists(dir))
        stop(gettextf("directory '%s' does not exist", dir), domain = NA)
    else {
        dir <- file_path_as_absolute(dir)
        if (is.null(subdirs))
            subdirs <- if (missing(package)) "vignettes" else "doc"
        ## The first existing subdir wins; if none exists, 'docdir' is
        ## left at the last candidate and the test below returns NULL.
        for (subdir in subdirs) {
            docdir <- file.path(dir, subdir)
            if(dir.exists(docdir))
                break
        }
    }

    if(!dir.exists(docdir)) return(NULL)

    # Locate all vignette files
    buildPkgs <- loadVignetteBuilder(dir, mustwork = FALSE, lib.loc = lib.loc)
    engineList <- vignetteEngine(package = buildPkgs) # could be character()

    docs <- names <- engines <- patterns <- character()
    allFiles <- list.files(docdir, all.files = FALSE, full.names = TRUE)
    ## Drop files excluded by .Rbuildignore (paths relative to 'dir').
    exclude <- inRbuildignore(sub(paste0(dir, "/"), "", allFiles, fixed = TRUE), dir)
    allFiles <- allFiles[!exclude]

    ## 'matchedPattern' tracks, in parallel to 'allFiles', which files
    ## matched some engine's filename pattern (used for 'check' below).
    matchedPattern <- rep.int(FALSE, length(allFiles))
    msg <- character()
    if (length(allFiles) > 0L) {
        for (name in names(engineList)) {
            engine <- engineList[[name]]
            for (pattern in engine$pattern) {
                idxs <- grep(pattern, allFiles)
                matchedPattern[idxs] <- TRUE
                ## Keep only files whose declared \VignetteEngine
                ## actually matches this registered engine.
                keep <- vapply(allFiles[idxs], function(.d.)
                               engineMatches(name, getVignetteEngine(.d.)), NA)
                if (any(keep)) {
                    idxs <- idxs[keep]
                    ## Engines without a weave function are claimed but
                    ## produce no docs entries.
                    if (is.function(engine$weave)) {
                        docsT <- allFiles[idxs]
                        docs <- c(docs, docsT)
                        names <- c(names, gsub(pattern, "", basename(docsT)))
                        engines  <- c(engines,  rep.int(name,    length(idxs)))
                        patterns <- c(patterns, rep.int(pattern, length(idxs)))
                    }
                    ## Remove claimed files, keeping the mask aligned
                    ## with 'allFiles'.
                    matchedPattern <- matchedPattern[-idxs]
                    allFiles <- allFiles[-idxs]
                    if (length(allFiles) == 0L)
                        break
                }
            }
        }
        ## Leftover files that matched a pattern but no engine.
        if (check && any(matchedPattern)) {
            files <- substring(allFiles[matchedPattern], nchar(dir) + 2)
            msg <- c("Files named as vignettes but with no recognized vignette engine:",
                     paste("  ", sQuote(files)),
                     "(Is a VignetteBuilder field missing?)")
        }
    }

    # Assert
    stopifnot(length(names)    == length(docs),
              length(engines)  == length(docs),
              length(patterns) == length(docs), !anyDuplicated(docs))

    defaultEncoding <- .get_package_metadata(dir)["Encoding"]
    encodings <- vapply(docs, getVignetteEncoding, "", default = defaultEncoding)

    z <- list(docs = docs, names = names, engines = engines,
              patterns = patterns, encodings = encodings,
              dir = docdir, pkgdir = dir, msg = msg)

    if (output) {
        ## One woven product (pdf/html) per vignette.
        outputs <- character(length(docs))
        for (i in seq_along(docs)) {
            file <- docs[i]
            name <- names[i]
            engine <- vignetteEngine(engines[i])
            outputI <- find_vignette_product(name, by = "weave", dir = docdir, engine = engine)
            outputs[i] <- outputI
        }
        z$outputs <- outputs
    }

    if (source) {
        ## Possibly several tangled sources per vignette.
        sources <- list()
        for (i in seq_along(docs)) {
            file <- docs[i]
            name <- names[i]
            engine <- vignetteEngine(engines[i])
            sourcesI <- find_vignette_product(name, by = "tangle", main = FALSE, dir = docdir, engine = engine)
            sources[[file]] <- sourcesI
        }
        ## If a package has vignettes 'foo.Rnw' and 'foo-xxx.Rnw' with
        ## extracted sources 'foo.R' and 'foo-xxx.R', the above will
        ## give both .R files as sources for 'foo.Rnw', as tangling
        ## could split into several files and so matching file name
        ## roots (without extensions) cannot look for exact matches
        ## only.  However, if there were multiple matches but all
        ## vignettes sources have an exact match, then we can drop the
        ## non-exact matches from the multiple matches.
        ## Ideally, we would teach R CMD build to process one vignette
        ## source at a time and record the vignette products, and have
        ## pkgVignettes() use the recorded info if available.
        if(any(ind <- (lengths(sources) > 1L))) {
            rootify <- function(s) sub("[.][^.]+$", "", basename(s))
            dnm <- rootify(names(sources))
            snm <- lapply(sources, rootify)
            if(!any(is.na(match(dnm, unlist(snm))))) {
                for(i in which(ind)) {
                    sources[[i]] <-
                        sources[[i]][is.na(match(snm[[i]], dnm[-i]))]
                }
            }
        }
        z$sources <- sources
    }

    class(z) <- "pkgVignettes"
    z
}


### * buildVignettes
###
### Run a weave and pdflatex on all vignettes of a package
### (except for those named in 'skip', where TRUE means to skip vignettes
### with unavailable \VignetteDepends, as used by R CMD check)
### and try to remove all temporary files that were created.
### Exported version, used in R CMD build/check
buildVignettes <-
    function(package, dir, lib.loc = NULL, quiet = TRUE, clean = TRUE,
             tangle = FALSE, skip = NULL, ser_elibs = NULL)
{
    ## ser_elibs: path to a serialized library-path vector; when given,
    ## each vignette is rebuilt in a separate R process.
    separate <- !is.null(ser_elibs)
    if (separate) elibs <- readRDS(ser_elibs)
    ## This has side effects, including loading vignette-buider pkgs
    vigns <- pkgVignettes(package = package, dir = dir, lib.loc = lib.loc,
                          check = TRUE)
    if (is.null(vigns)) return(invisible())
    ## A single vignette is not worth a subprocess.
    if (length(vigns$docs) <= 1L) separate <- FALSE
    if (length(vigns$msg))
        warning(paste(vigns$msg, collapse = "\n"), domain = NA)

    ## Check that duplicated vignette names do not exist, e.g.
    ## 'vig' and 'vig' from 'vig.Rnw' and 'vig.Snw'.
    dups <- duplicated(vigns$names)
    if (any(dups)) {
        names <- unique(vigns$names[dups])
        docs <- sort(basename(vigns$docs[vigns$names %in% names]))
        stop(gettextf("Detected vignette source files (%s) with shared names (%s) and therefore risking overwriting each other's output files",
                      paste(sQuote(docs), collapse = ", "),
                      paste(sQuote(names), collapse = ", ")),
             domain = NA)
    }

    ## 'skip' may be TRUE (skip vignettes with unavailable
    ## \VignetteDepends), FALSE/NULL (skip nothing), or a character
    ## vector of vignette names to skip.
    if (isTRUE(skip)) { # look for unavailable \VignetteDepends
        installed <- rownames(utils::installed.packages())
    } else if (!is.null(skip)) {
        if (isFALSE(skip)) skip <- NULL else stopifnot(is.character(skip))
    }

    ## unset SWEAVE_STYLEPATH_DEFAULT here to avoid problems
    Sys.unsetenv("SWEAVE_STYLEPATH_DEFAULT")

    op <- options(warn = 1) # we may run vignettes in this process
    wd <- getwd()
    if (is.null(wd))
        stop("current working directory cannot be ascertained")
    on.exit({
        setwd(wd)
        options(op)
    })

    setwd(vigns$dir)

    ## FIXME: should this recurse into subdirs?
    origfiles <- list.files(all.files = TRUE)

    ## Note, as from 2.13.0, only this case
    have.makefile <- "Makefile" %in% origfiles

    ## Marker used below to identify files created during the build.
    file.create(".build.timestamp")

    ## pkgVignettes has already done this
    ## loadVignetteBuilder(vigns$pkgdir)
    outputs <- character()
    sourceList <- list()
    startdir <- getwd()
    skipped <- character()
    fails <- character()
    for(i in seq_along(vigns$docs)) {
        thisOK <- TRUE
        file <- basename(vigns$docs[i])
        name <- vigns$names[i]
        ## %w/o% is this package's set-difference operator.
        thisSKIP <- if (isTRUE(skip)) {
                        vinfo <- vignetteInfo(file)
                        length(missdeps <- vinfo$depends %w/o% installed) > 0
                    } else name %in% skip
        if (thisSKIP) {
            msg <- if (isTRUE(skip)) .pretty_format2(
                    sprintf("Note: skipping %s due to unavailable dependencies:",
                            sQuote(file)),  # grepped in check
                    missdeps)
                else gettextf("Note: skipping %s", sQuote(file))
            message(paste0(c(msg, ""), collapse = "\n"),
                    domain = NA)
            skipped <- c(skipped, file)
            next
        }
        enc <- vigns$encodings[i]
        if (enc == "non-ASCII") {
            message(gettextf("Error: Vignette '%s' is non-ASCII but has no declared encoding",
                             file))
            fails <- c(fails, file)
            next
        }
        engine <- vignetteEngine(vigns$engines[i])

        if (separate) {  # --- run in separate process
            ## Pass the engine to the subprocess via a temporary RDS
            ## file (forward slashes for use inside an R string).
            tf2 <- gsub("\\", "/", tempfile(fileext = ".rds"), fixed = TRUE)
            saveRDS(engine, tf2)
            Rcmd <- sprintf('tools:::.buildOneVignette("%s", "%s", %s, %s, "%s", "%s", "%s")',
                            file, vigns$pkgdir, quiet, have.makefile,
                            name, enc, tf2)
            tlim <- get_timeout(Sys.getenv("_R_CHECK_ONE_VIGNETTE_ELAPSED_TIMEOUT_",
                                           Sys.getenv("_R_CHECK_ELAPSED_TIMEOUT_")))
            tf <- tempfile()
            status <- R_runR(Rcmd, "--vanilla --no-echo", elibs,
                             stdout = tf, stderr = tf, timeout = tlim)
            unlink(tf2)
            ##print(status)
            if (!status) {
                ## The subprocess reports product paths on lines marked
                ## with a "+-+" prefix; everything else is diagnostics.
                this <- readLines(tf)
                patt <- "^[+]-[+]"
                l <- grepl(patt, this)
                output <- gsub(patt, "", this[l])
                outputs <- c(outputs, output)
                cat(this[!l], sep = "\n")
            } else {
                fails <- c(fails, file)
                cat(readLines(tf), sep = "\n")
            }
            unlink(tf)
        } else {  # --- run in this process
            message(gettextf("--- re-building %s using %s",
                             sQuote(file), engine$name))
            tryCatch({
                engine$weave(file, quiet = quiet, encoding = enc)
                setwd(startdir) # In case weave/vignette changed it
                output <- find_vignette_product(name, by = "weave", engine = engine)
                if (!have.makefile && vignette_is_tex(output)) {
                    ## This can fail if run in a directory whose path contains spaces.
                    texi2pdf(file = output, clean = FALSE, quiet = quiet)
                    output <- find_vignette_product(name, by = "texi2pdf",
                                                    engine = engine)
                }
                outputs <- c(outputs, output)
            }, error = function(e) {
                thisOK <<- FALSE
                fails <<- c(fails, file)
                message(gettextf("Error: processing vignette '%s' failed with diagnostics:\n%s",
                                 file, conditionMessage(e)))
            })
        }         # end if (separate)

        if (tangle && !separate) {  # This is set for all engines as of 3.0.2
            ## It is unlikely that weave succeeds but tangle fails,
            ## so we don't bother to report tangle failures specifically.
            output <- tryCatch({
                engine$tangle(file, quiet = quiet, encoding = enc)
                setwd(startdir) # In case tangle/vignette changed it
                find_vignette_product(name, by = "tangle", main = FALSE, engine = engine)
            }, error = function(e) {
                thisOK <<- FALSE
                fails <<- c(fails, file)
                message(gettextf("Error: tangling vignette '%s' failed with diagnostics:\n%s",
                     file, conditionMessage(e)))
            })
            sourceList[[file]] <- output
        }
        if (!separate) {
            if (thisOK)
                message(gettextf("--- finished re-building %s\n", sQuote(file)))
            else
                message(gettextf("--- failed re-building %s\n", sQuote(file)))
        }
    } # end loop over vignettes

    if (have.makefile) {
        WINDOWS <- .Platform$OS.type == "windows"
        if (WINDOWS) {
            ## Some people have *assumed* that R_HOME uses / in Makefiles
            ## Spaces in paths might still cause trouble.
            rhome <- chartr("\\", "/", R.home())
            Sys.setenv(R_HOME = rhome)
        }
        make <- Sys.getenv("MAKE", "make")
        if (!nzchar(make)) make <- "make"
        yy <- system(make)
        if (yy > 0) stop("running 'make' failed")
        ## See if Makefile has a clean: target, and if so run it.
        if (clean &&
           any(startsWith(readLines("Makefile", warn = FALSE), "clean:")))
            system(paste(make, "clean"))
    } else {
        ## Badly-written vignettes open a pdf() device on Rplots.pdf and
        ## fail to close it.
        grDevices::graphics.off()

        ## Remove build by-products: first anything newer than the
        ## timestamp, then anything not present before the build --
        ## keeping the final outputs and tangled sources in both passes.
        keep <- c(outputs, unlist(sourceList))
        if (clean) {
            f <- setdiff(list.files(all.files = TRUE, no.. = TRUE), keep)
            newer <- file_test("-nt", f, ".build.timestamp")
            ## some packages, e.g. SOAR, create directories
            unlink(f[newer], recursive = TRUE)
            f <- setdiff(list.files(all.files = TRUE, no.. = TRUE),
                         c(keep, origfiles))
            f <- f[file_test("-f", f)]
            file.remove(f)
        }
    }

    if (file.exists(".build.timestamp")) file.remove(".build.timestamp")
    ## Might have been in origfiles ...

    if (length(fails)) {
        message(ngettext(length(fails),
                         "SUMMARY: processing the following file failed:",
                         "SUMMARY: processing the following files failed:"))
        message(paste(.pretty_format(fails), collapse = "\n"))
        message()
    }

    ## Assert
    ## Every non-skipped vignette must have produced exactly one output.
    if (length(fails) || (length(outputs) != (length(vigns$docs) - length(skipped)))) {
        msg <- "Vignette re-building failed."
        stop(msg, domain = NA, call. = FALSE)
    }

    vigns$outputs <- outputs
    vigns$sources <- sourceList

    invisible(vigns) ## not documented on the help page.
}

### * buildVignette
###
### Run a weave and/or tangle on one vignette and try to
### remove all temporary files that were created.
### Also called from 'R CMD Sweave' via .Sweave() in ../../utils/R/Sweave.R
## Weave and/or tangle a single vignette and clean up afterwards.
##
## file     : path to the vignette source file.
## dir      : directory to process it in (we setwd() there temporarily).
## weave    : run the engine's weave step?
## latex    : if the weave produced (La)TeX, also run texi2pdf on it?
## tangle   : run the engine's tangle step?
## quiet    : passed on to the engine's weave/tangle functions.
## clean    : TRUE to remove files newly created during processing;
##            NA to additionally keep the intermediate weave output
##            (e.g. the .tex or .md file); FALSE to clean nothing.
## keep     : extra file names never to be removed.
## engine   : vignette engine (or its name); inferred from the file
##            contents when NULL.
## buildPkg : package(s) whose namespaces provide the engine.
## encoding : encoding of the vignette source.
##
## Returns the unique names of the files produced and kept.
buildVignette <-
    function(file, dir = ".", weave = TRUE, latex = TRUE, tangle = TRUE,
             quiet = TRUE, clean = TRUE, keep = character(),
             engine = NULL, buildPkg = NULL,
	     encoding = getVignetteEncoding(file), ...)
{
    if (!file_test("-f", file))
	stop(gettextf("file '%s' not found", file), domain = NA)
    if (!dir.exists(dir))
	stop(gettextf("directory '%s' does not exist", dir), domain = NA)

    ## Make sure any packages supplying the engine are loaded.
    if (!is.null(buildPkg))
	for (pkg in buildPkg)
	    suppressPackageStartupMessages(loadNamespace(pkg))

    if (is.null(engine))
    # Infer vignette engine from vignette content
	engine <- getVignetteEngine(file)

    # Get the vignette engine
    if (is.character(engine))
	engine <- vignetteEngine(engine, package = buildPkg)

    # Infer the vignette name by stripping one of the engine's filename
    # patterns from 'file'
    names <- sapply(engine$pattern, FUN = sub, "", file)
    name <- basename(names[(names != file)][1L])

    # A non-matching filename?
    if (is.na(name))
	stop(gettextf("vignette filename '%s' does not match any of the '%s' filename patterns",
		file, paste(engine$package, engine$name, sep="::")),
		domain = NA)

    if (encoding == "non-ASCII")
    	stop(gettextf("Vignette '%s' is non-ASCII but has no declared encoding", name))

    # Set output directory temporarily
    file <- file_path_as_absolute(file)
    olddir <- setwd(dir)
    if (!is.null(olddir)) on.exit(setwd(olddir))

    # Record existing files (not to be cleaned)
    if (is.na(clean) || clean)
        origfiles <- list.files(all.files = TRUE)

    tdir <- getwd()# if 'dir' was relative, resetting to tdir will work
    output <- NULL

    # Weave: 'output' is the direct weave product, 'final' the
    # deliverable (PDF if texi2pdf was run, otherwise the same).
    final <- if (weave) {
	engine$weave(file, quiet = quiet, encoding = encoding, ...)
	setwd(tdir)  # In case weave/vignette changed it
	output <- find_vignette_product(name, by = "weave", engine = engine)

	# Compile TeX to PDF?
	if(latex && vignette_is_tex(output)) {
	    texi2pdf(file = output, clean = FALSE, quiet = quiet)
	    find_vignette_product(name, by = "texi2pdf", engine = engine)
	} else
	    output
    } # else NULL

    # Tangle: extract the R code file(s), if requested.
    sources <- if (tangle) {
	engine$tangle(file, quiet = quiet, encoding = encoding, ...)
	setwd(tdir)  # In case tangle changed it
	find_vignette_product(name, by = "tangle", main = FALSE, engine = engine)
    } # else NULL

    ## Cleanup newly created files unless those in 'keep'
    keep <- c(sources, final, keep)
    if (is.na(clean)) {  # Use NA to signal we want .tex (or .md) files kept.
	keep <- c(keep, output)
	clean <- TRUE
    }
    if (clean) {
	f <- setdiff(list.files(all.files = TRUE, no.. = TRUE),
                     c(keep, origfiles))
	## some packages create directories
	unlink(f, recursive = TRUE)
    }

    unique(keep)
}

### * .buildOneVignette

## helper to be run in a separate process
## Re-build one vignette; designed to be run in a separate R process.
##
## file          : vignette source file name (relative to the cwd).
## pkgdir        : package directory, used to load the vignette builder.
## quiet         : passed to the engine's weave function.
## have.makefile : if TRUE a Makefile drives the TeX->PDF step, so
##                 texi2pdf is not run here.
## name          : vignette name, used to locate the weave product.
## enc           : encoding passed to the weave function.
## ser_engine    : path to an RDS file holding the serialized engine.
##
## Exits the process with status 9 on failure; otherwise emits the
## output file name on a final message line prefixed "+-+" (presumably
## parsed by the calling process -- see the caller) and returns it
## invisibly.
.buildOneVignette <-
    function(file, pkgdir, quiet = TRUE, have.makefile = FALSE,
             name, enc, ser_engine)
{
    op <- options(warn = 1)      # we run vignettes in this process
    ## The engine is passed serialized from the parent process.
    engine <- readRDS(ser_engine)
    ## This is more than we need, but we cannot be sure that 'engine'
    ## has references to all the namespaces required.  But packages
    ## typically only have one engine, at most two.
    loadVignetteBuilder(pkgdir)
    OK <- TRUE
    startdir <- getwd()
    output <- character()

    message(gettextf("--- re-building %s using %s",
                     sQuote(file), engine$name))
    tryCatch({
        engine$weave(file, quiet = quiet, encoding = enc)
        setwd(startdir)  # In case weave/vignette changed it
        output <- find_vignette_product(name, by = "weave", engine = engine)
        ## Compile (La)TeX output to PDF unless a Makefile will do it.
        if(!have.makefile && vignette_is_tex(output)) {
            texi2pdf(file = output, clean = FALSE, quiet = quiet)
            output <- find_vignette_product(name, by = "texi2pdf",
                                            engine = engine)
        }
    }, error = function(e) {
        OK <<- FALSE
        message(gettextf("Error: processing vignette '%s' failed with diagnostics:\n%s",
                         file, conditionMessage(e)))
    })

    if (OK)
        message(gettextf("--- finished re-building %s\n", sQuote(file)))
    else {
        message(gettextf("--- failed re-building %s\n", sQuote(file)))
        ## Non-zero exit status signals failure to the parent process.
        q("no", status = 9L)
    }

    message("+-+", output)
    invisible(output)
}


### * getVignetteEncoding

## Determine the declared encoding of the vignette source 'file'.
## Reads the file and delegates to .getVignetteEncoding(); '...' is
## passed on (e.g. 'default').
getVignetteEncoding <- function(file, ...)
{
    .getVignetteEncoding(readLines(file, warn = FALSE), ...)
}

## Determine the encoding of a vignette from its source 'lines'.
## Precedence:
##   1. an explicit %\VignetteEncoding{...} declaration;
##   2. a %\SweaveUTF8 marker (implies "UTF-8");
##   3. a \usepackage[<enc>]{inputenc}/{inputenx} line in the LaTeX
##      preamble, mapped to an iconv-style name ("unknown" if the
##      option is not recognized);
##   4. the 'default' argument, if not NA;
##   5. "" for pure ASCII content, otherwise "non-ASCII".
.getVignetteEncoding <- function(lines, default = NA)
{
    declared <- .get_vignette_metadata(lines, "Encoding")[1L]
    if (!is.na(declared))
        return(declared)

    if (length(grep("^[[:space:]]*%+[[:space:]]*\\\\SweaveUTF8[[:space:]]*$",
                    lines, useBytes = TRUE)))
        return("UTF-8")

    ## Look for input enc lines using inputenc or inputenx.
    ## Note, multiple encodings are excluded.
    cand <- grep("^[[:space:]]*\\\\usepackage\\[([[:alnum:]]+)\\]\\{inputen[cx]\\}",
                 lines, useBytes = TRUE)
    ## Check it is in the preamble.
    begin <- grep("^[[:space:]]*\\\\begin\\{document\\}",
                  lines, useBytes = TRUE)
    if (length(begin))
        cand <- cand[cand < begin[1L]]

    if (length(cand)) {
        ## Strip a trailing comment, which may be non-ASCII; the
        ## remainder of the line should be ASCII.
        line <- gsub("%.*", "", lines[cand[1L]], useBytes = TRUE)
        opt <- gsub("^[[:space:]]*\\\\usepackage\\[([[:alnum:]]+)\\].*", "\\1",
                    line)
        ## see Rd2latex.R.
        ## Currently utf8, utf8x, latin1, latin9 and ansinew are in use.
        return(switch(opt,
                      "utf8" =, "utf8x" = "UTF-8",
                      "latin1" =, "iso-8859-1" = "latin1",
                      "latin2" =, "iso-8859-2" = "latin2",
                      "latin9" =, "iso-8859-15" = "latin-9", # only form known to GNU libiconv
                      "latin10" =, "iso-8859-16" = "latin10",
                      "cyrillic" =, "iso-8859-5" =  "ISO-8859-5", # inputenx
                      "koi8-r" =  "KOI8-R", # inputenx
                      "arabic" = "ISO-8859-6", # Not clear next 3 are known to latex
                      "greek" =, "iso-8859-7" = "ISO-8859-7",
                      "hebrew" =, "iso-8859-8" = "ISO-8859-8",
                      "ansinew" = "CP1252",
                      "applemac" = "macroman",
                      ## assume these only get used on Windows
                      "cp1250" = "CP1250",
                      "cp1252" = "CP1252",
                      "cp1257" = "CP1257",
                      "unknown"))
    }

    if (!is.na(default))
        return(default)

    ## Nothing else has indicated an encoding: maybe it's just ASCII.
    ascii <- iconv(lines, "latin1", "ASCII")
    if (anyNA(ascii) || any(ascii != lines)) "non-ASCII" else "" # or "ASCII"
}

### * .build_vignette_index

## Extract the value(s) of %\Vignette<tag>{...} metadata declarations
## from the 'lines' of a vignette source file.  Returns a (possibly
## empty) character vector of whitespace-trimmed values.
.get_vignette_metadata <-
function(lines, tag)
{
    ## <FIXME>
    ## Why don't we anchor this to the beginning of a line?
    re <- sprintf("[[:space:]]*%%+[[:space:]]*\\\\Vignette%s\\{([^}]*(\\{[^}]*\\})*[^}]*)\\}.*",
                  tag)
    ## </FIXME>
    hits <- grep(re, lines, value = TRUE, useBytes = TRUE)
    trimws(gsub(re, "\\1", hits))
}

## Read the \Vignette* metadata (index entry, dependencies, keywords,
## engine) from a single vignette source file.  Returns
## list(file, title, depends, keywords, engine); the file is reduced
## to its basename since this is called on package installation.
vignetteInfo <- function(file)
{
    lines <- readLines(file, warn = FALSE)

    ## <FIXME>
    ## Can only proceed with lines which are valid in the current locale.
    ## Unfortunately, vignette encodings are a mess: package encodings
    ## might apply, but be overridden by \inputencoding commands.
    ## For now, assume that vignette metadata occur in all ASCII lines.
    ## (Could also iconv() using sub = "byte".)
    lines[is.na(nchar(lines, "c", TRUE))] <- ""
    ## </FIXME>

    ## \VignetteIndexEntry ("" when absent)
    title <- c(.get_vignette_metadata(lines, "IndexEntry"), "")[1L]

    ## \VignetteDepends: comma-separated list in the first declaration
    depends <- .get_vignette_metadata(lines, "Depends")
    if (length(depends))
        depends <- unlist(strsplit(depends[1L], ", *"))

    ## Old-style comma-separated \VignetteKeywords takes precedence
    ## over one-per-declaration \VignetteKeyword entries.
    old_style <- .get_vignette_metadata(lines, "Keywords")
    keywords <- if (length(old_style))
        unlist(strsplit(old_style[1L], ", *"))
    else
        .get_vignette_metadata(lines, "Keyword")

    list(file = basename(file), title = title, depends = depends,
         keywords = keywords, engine = getVignetteEngine(lines = lines))
}

## builds vignette indices from 'vigns', a pkgVignettes() result
## Build a vignette index (one row per vignette) from 'vigns', a
## pkgVignettes() result.  Columns: File, Title, PDF (the built output
## -- not necessarily PDF), R (tangled code file, "" if none), plus
## list-columns Depends and Keywords.
.build_vignette_index <- function(vigns)
{
    stopifnot(inherits(vigns, "pkgVignettes"))

    files <- vigns$docs
    names <- vigns$names
    dir <- vigns$dir
    sources <- vigns$sources

    if(!dir.exists(dir))
        stop(gettextf("directory '%s' does not exist", dir), domain = NA)

    nvigns <- length(files)
    ## No vignettes: return an empty index with the full column set.
    if(nvigns == 0L) {
        out <- data.frame(File = character(),
                          Title = character(),
                          PDF = character(),
			  R = character(),
                          stringsAsFactors = FALSE)
        out$Depends <- list()
        out$Keywords <- list()
        return(out)
    }

    # Check for duplicated vignette names
    if (any(dups <- duplicated(names))) {
    	dupname <- names[dups][1L]
    	dup <- basename(files[dups][1L])
    	orig <- basename(files[ names == dupname ][1L])
    	stop(gettextf("In '%s' vignettes '%s' and '%s' have the same vignette name",
    		      basename(dirname(dir)), orig, dup),
             domain = NA)
    }

    # Read vignette annotation from vignette source files.
    # 'contents' is a list shaped as an nvigns x 5 matrix; row i holds
    # the vignetteInfo() fields of vignette i.
    contents <- vector("list", length = nvigns * 5L)
    dim(contents) <- c(nvigns, 5L)
    for(i in seq_along(files))
        contents[i, ] <- vignetteInfo(files[i])
    colnames(contents) <- c("File", "Title", "Depends", "Keywords", "Engine")

    ## This is to cover a temporary package installation
    ## by 'R CMD build' (via 'R CMD INSTALL -l <lib>)
    ## which in case vignettes have not been built.
    outputs <- vigns$outputs
    outputs <- if(!is.null(outputs)) basename(outputs) else character(nvigns)

    out <- data.frame(File = unlist(contents[, "File"]),
                      Title = unlist(contents[, "Title"]),
                      PDF = outputs,	# Not necessarily PDF, but name it that for back compatibility
		      R = "",		# May or may not be present
                      row.names = NULL, # avoid trying to compute row
                                        # names
                      stringsAsFactors = FALSE)
    # Optional: record the (first) tangled source file per vignette
    for (i in seq_along(sources))
	if (length(s <- sources[[i]]))
	    out$R[which(names(sources)[i] == files)] <- basename(s[1L])
    out$Depends <- contents[, "Depends"]
    out$Keywords <- contents[, "Keywords"]

    stopifnot(NROW(out) == nvigns)

    out
}

### * .check_vignette_index

## Check that every vignette in 'vignetteDir' (below 'pkgdir') has a
## non-empty \VignetteIndexEntry.  Returns the offending files as an
## object of class "check_vignette_index".
.check_vignette_index <-
function(vignetteDir, pkgdir = ".")
{
    dir <- file.path(pkgdir, vignetteDir)
    if (!dir.exists(dir))
        stop(gettextf("directory '%s' does not exist", dir), domain = NA)

    ## vignette subdirectory relative to the package root
    subdir <- gsub(pkgdir, "", dir, fixed = TRUE)
    index <- .build_vignette_index(pkgVignettes(dir = pkgdir,
                                                subdirs = subdir))
    badEntries <- index[grep("^[[:space:]]*$", index[, "Title"]), "File"]
    class(badEntries) <- "check_vignette_index"
    badEntries
}

## Print method for .check_vignette_index() results: lists the
## vignette files whose \VignetteIndexEntry is missing or empty.
print.check_vignette_index <-
function(x, ...)
{
    if(length(x) > 0L) {
        writeLines("Vignettes with missing or empty \\VignetteIndexEntry:")
        writeLines(paste0("  ", basename(unclass(x))))
    }
    invisible(x)
}


### * .writeVignetteHtmlIndex

## Write (to connection 'con') the HTML index page listing the
## vignettes and other files in the 'doc' directory of the installed
## package 'pkg'.  'vignetteIndex' is an index data frame as built by
## .build_vignette_index(), or NULL when there is none.
.writeVignetteHtmlIndex <-
function(pkg, con, vignetteIndex = NULL)
{
    html <- c(HTMLheader("Vignettes and other documentation",
                         up = "../html/00Index.html",
                         css = "../html/R.css", # installed since R 2.13.0
                         ## relative paths to 'top' and 'logo' will only work
                         ## for the (site-)library in RHOME (or dynamic help)
                         Rhome = "../../.."),
              paste0("<h2>Vignettes from package '", pkg,"'</h2>"),
              if(NROW(vignetteIndex) == 0L) ## NROW(NULL) = 0
                  "The package contains no vignette meta-information."
              else {
                  vignetteIndex <- cbind(Package = pkg,
                                         as.matrix(vignetteIndex[, c("File", "Title", "PDF", "R")]))
                  makeVignetteTable(vignetteIndex, depth = NULL)
              })
    ## Anything in doc/ which is not a vignette source, output or
    ## tangled code file (nor the index itself) is listed separately.
    otherfiles <- list.files(system.file("doc", package = pkg))
    if(NROW(vignetteIndex))
        otherfiles <- setdiff(otherfiles,
                              c(vignetteIndex[, c("PDF", "File", "R")], "index.html"))
    if (length(otherfiles)) {
        ## mark subdirectories with a trailing slash
    	otherfiles <- ifelse(dir.exists(system.file(file.path("doc", otherfiles), package = pkg)),
			     paste0(otherfiles, "/"),
			     otherfiles)
	urls <- paste0('<a href="', otherfiles, '">', otherfiles, '</a>')
        html <- c(html, '<h2>Other files in the <span class="samp">doc</span> directory</h2>',
                  '<table style="width: 100%;">',
		  '<col style="width: 24%;">',
		  '<col style="width: 50%;">',
		  '<col style="width: 24%;">',
                  paste0('<tr><td></td><td><span class="samp">',
                         iconv(urls, "", "UTF-8"), "</span></td></tr>"),
                  "</table>")
    }
    html <- c(html, "</div></body></html>")
    writeLines(html, con=con)
}

## Parse vignette dependency declarations (as from \VignetteDepends)
## into a character matrix with one row per dependency and columns
## [ package, operator-or-NA, version-or-NA ]: "bar (>= 1.0)" gives
## c("bar", ">=", "1.0"), a plain "foo" gives c("foo", NA, NA).
## Returns NA when there are no dependencies.
## (Taken almost directly out of 'package.dependencies'.)
getVigDepMtrx <-
function(vigDeps)
{
    if (!length(vigDeps))
        return(NA)
    ## Split on commas and strip surrounding whitespace.  (The former
    ## sub("(.*)[[:space:]]*$", "\\1", z) was a no-op: the greedy '.*'
    ## consumed trailing spaces too, so names could keep them.)
    z <- trimws(unlist(strsplit(vigDeps, ",", fixed = TRUE)))
    pat <- "^([^\\([:space:]]+)[[:space:]]*\\(([^\\)]+)\\).*"
    depMtrx <- cbind(sub(pat, "\\1", z),  # package name
                     sub(pat, "\\2", z),  # "op version", or the whole
                                          # entry when no parens match
                     NA)
    ## Without a version spec both columns hold the full entry.
    noversion <- depMtrx[, 1L] == depMtrx[, 2L]
    depMtrx[noversion, 2L] <- NA
    ## Split "op version" into comparison operator and version number.
    pat <- "[[:space:]]*([[<>=]+)[[:space:]]+(.*)"
    depMtrx[!noversion, 2:3] <-
        c(sub(pat, "\\1", depMtrx[!noversion, 2L]),
          sub(pat, "\\2", depMtrx[!noversion, 2L]))
    depMtrx
}

### * .run_one_vignette
### helper for R CMD check

## Helper for R CMD check: tangle one vignette and source() the
## resulting R code, working in a temporary copy of the package's
## vignette directory.
##
## vig_name : base name of the vignette source file.
## docDir   : directory containing the vignette sources.
## encoding : encoding passed to the engine's tangle function.
## pkgdir   : package directory, for pkgVignettes() and the builder.
##
## NB: changes the working directory to the temporary copy and does
## not restore it -- intended to be run in a throwaway process.
.run_one_vignette <-
function(vig_name, docDir, encoding = "", pkgdir)
{
    ## The idea about encodings here is that Stangle reads the
    ## file, converts on read and outputs in the current encoding.
    ## Then source() can assume the current encoding.
    td <- tempfile()
    dir.create(td)
    file.copy(docDir, td, recursive = TRUE)
    setwd(file.path(td, basename(docDir)))

    ## vignette subdirectory relative to the package root
    subdir <- gsub(pkgdir, "", docDir, fixed=TRUE)
    vigns <- pkgVignettes(dir=pkgdir, subdirs=subdir)
    if (is.null(vigns)) {
       cat("\n  When running vignette ", sQuote(vig_name), ":\n", sep="")
       stop("No vignettes available", call. = FALSE, domain = NA)
    }

    ## Locate the requested vignette among the package's vignettes.
    i <- which(basename(vigns$docs) == vig_name)
    if (length(i) == 0L) {
       cat("\n  When running vignette ", sQuote(vig_name), ":\n", sep="")
       stop("No such vignette ", sQuote(vig_name), call. = FALSE, domain = NA)
    }
    stopifnot(length(i) == 1L)

    loadVignetteBuilder(pkgdir)
    file <- basename(vigns$docs[i])
    name <- vigns$names[i]
    engine <- vignetteEngine(vigns$engines[i])

    ## Tangle the vignette into an R source file.
    output <- tryCatch({
        engine$tangle(file, quiet = TRUE, encoding = encoding)
        find_vignette_product(name, by = "tangle", engine = engine)
    }, error = function(e) {
        cat("\n  When tangling ", sQuote(file), ":\n", sep="")
        stop(conditionMessage(e), call. = FALSE, domain = NA)
    })

    ## Run the tangled code (only when there is a single output file).
    if(length(output) == 1L) {
        tryCatch({
            source(output, echo = TRUE)
        }, error = function(e) {
            cat("\n  When sourcing ", sQuote(output), ":\n", sep="")
            stop(conditionMessage(e), call. = FALSE, domain = NA)
        })
    }

    cat("\n *** Run successfully completed ***\n")
}

## Getter/setter for vignette engines, implemented as a closure over a
## private registry environment.  Engines are stored under
## "package::name" keys; "utils::Sweave" is pre-registered below.
## Calling vignetteEngine(name, ...) without 'weave' looks an engine
## up; supplying 'weave' (and 'tangle') registers one.
vignetteEngine <- local({
    registry <- new.env(parent = emptyenv())

    ## Normalize an engine name to c(package, engine).  A bare name
    ## (no "::") requires 'package' to qualify it.
    engineKey <- function(name, package) {
        key <- strsplit(name, split = "::", fixed = TRUE)[[1L]]
        if (length(key) == 1L) {
	    if (missing(package))
		stop("Vignette engine package not specified", call.=FALSE)
            key[2L] <- key[1L]
            key[1L] <- package
        } else if (length(key) != 2L) {
            stop("Unsupported engine name ", sQuote(name))
        }
        key
    }

    ## Look up one engine by name, or -- with 'name' missing -- list
    ## all registered engines, optionally restricted to (and ordered
    ## by) the packages in 'package'.
    ## FIXME: return a character vector, not stop here.
    getEngine <- function(name, package) {
        if (missing(name)) {
            result <- as.list(registry)
            if (length(result) > 0L && !is.null(package)) {
               package <- unique(package)
               pkgs <- sapply(result, function(engine) engine$package)
               keep <- is.element(pkgs, package)
               if (!any(keep)) {
                   ## was stop() in R 4.4.0
                   msg <-ngettext(length(package),
                                  "Package %s does not have a registered vignette engine",
                                  "None of packages %s have registered vignette engines")
                   warning(sprintf(msg, paste(sQuote(package), collapse = ", ")),
                           domain = NA, call. = FALSE)
                   ## return character() below
               }
               result <- result[keep]
               pkgs <- pkgs[keep]
               if (length(package) > 1L) {
                 ## preserve the caller's package preference order
                 result <- result[order(match(pkgs, package))]
               }
            }
        } else {
            result <- NULL
            if (is.null(package)) {
                ## bare "Sweave" is special-cased to utils::Sweave
                if (name == "Sweave") {
                    key <- engineKey(name, package = "utils")
                } else {
                    key <- engineKey(name)
                }
		suppressPackageStartupMessages(loadNamespace(key[1]))
                name <- paste(key, collapse = "::")
                result <- registry[[name]]
                if (is.null(result))
                    stop(gettextf("Vignette engine %s is not registered",
                                  sQuote(name)), domain = NA)
            } else {
                ## Try each candidate package in turn; loading its
                ## namespace may perform the registration as a side
                ## effect, so failures to load are ignored here.
                for (pkg in package) {
                    key <- engineKey(name, pkg)
		    try(suppressPackageStartupMessages(loadNamespace(key[1])),
                        silent = TRUE)
                    nameT <- paste(key, collapse = "::")
                    result <- registry[[nameT]]
                    if (!is.null(result))
                        break
                }
                if (is.null(result))
                    stop(gettextf("Vignette engine %s is not registered by any of the packages %s",
                                  sQuote(name),
                                  paste(sQuote(package), collapse = ", ")),
                         domain = NA)
            }

            ## Double-check the found engine really belongs to one of
            ## the requested packages.
            if (!is.null(package) && !is.element(result$package, package))
                stop(gettextf("Vignette engine %s is not registered by any of the packages %s",
                              sQuote(name),
                              paste(sQuote(package), collapse = ", ")),
                     domain = NA)
        }
        result
    }

    ## Register an engine (weave is a function), deregister one
    ## (weave is NULL), or record an engine marker without functions
    ## (weave is NA).  Returns the registered engine list, or NULL.
    setEngine <- function(name, package, pattern, weave, tangle,
                          aspell = list()) {
        key <- engineKey(name, package)
        if (!is.null(package) && key[1L] != package)
            stop(gettextf("Engine name %s and package %s do not match",
                          sQuote(name), sQuote(package)), domain = NA)


        rname <- paste(key, collapse = "::")
        if (is.null(weave)) {
            ## weave = NULL removes an existing registration.
            result <- NULL
            if (exists(rname, envir = registry))
                rm(list = rname, envir = registry)
        } else {
            if (!is.function(weave) && is.na(weave)) {
                if (missing(tangle))
                    tangle <- NA
            } else {
                if (!is.function(weave))
                    stop(gettextf("Argument %s must be a function and not %s",
                                  sQuote("weave"), sQuote(class(weave)[1L])),
                         domain = NA)
                if (!is.function(tangle))
                    stop(gettextf("Argument %s must be a function and not %s",
                                  sQuote("tangle"), sQuote(class(tangle)[1L])),
                         domain = NA)
            }
            ## default filename pattern: .Rnw/.Snw/.Rtex/.Stex etc.
            if (is.null(pattern))
                pattern <- "[.][rRsS](nw|tex)$"
            else if (!is.character(pattern))
                stop(gettextf("Argument %s must be a character vector or NULL and not %s",
                              sQuote("pattern"), sQuote(class(pattern)[1L])),
                     domain = NA)

            result <-
                list(name = key[2L], package = key[1L], pattern = pattern,
                     weave = weave, tangle = tangle, aspell = aspell)
            assign(rname, result, registry)
        }

        result
    }

    ## The one engine available out of the box.
    setEngine(name = "Sweave", package = "utils", pattern = NULL,
              weave = function(...) utils::Sweave(...),
              tangle = function(...) utils::Stangle(...),
              aspell = list(filter = "Sweave", control = "-t"))

    ## The public interface: get when 'weave' is missing, set otherwise.
    function(name, weave, tangle, pattern = NULL, package = NULL,
             aspell = list()) {
        if (missing(weave)) { # we're getting the engine
            getEngine(name, package)
        } else { # we're setting a new engine
            if (is.element(name, c("Sweave", "utils::Sweave"))) {
                stop(gettextf("Cannot change the %s engine or use an engine of that name",
                              sQuote("Sweave")), domain = NA)
            }
            if (missing(package))
                package <- utils::packageName(parent.frame())
            result <-
                setEngine(name, package, pattern = pattern,
                          weave = weave, tangle = tangle, aspell = aspell)
            invisible(result)
        }
    }
})

## Load the namespaces of all vignette builder packages declared in
## the package's VignetteBuilder field, always including "utils".
## With 'mustwork', failure to load any builder is an error.
## Returns the character vector of builder package names.
loadVignetteBuilder <-
function(pkgdir, mustwork = TRUE, lib.loc = NULL)
{
    builders <- .get_package_metadata(pkgdir)["VignetteBuilder"]
    if (is.na(builders))
        builders <- NULL
    else if (length(builders)) {
        ## comma-separated field; strip all whitespace
        builders <- unlist(strsplit(builders, ","))
        builders <- gsub('[[:space:]]', '', builders)
    }
    builders <- unique(c(builders, "utils"))

    for (b in builders) {
        res <- tryCatch(
            suppressPackageStartupMessages(loadNamespace(b,
                                                         lib.loc = lib.loc)),
            error = identity)
        if (mustwork && inherits(res, "error"))
            stop(gettextf("vignette builder '%s' not found", b), domain = NA)
    }
    builders
}

# This gets the info for installed packages

## Collect vignette information for installed packages by reading each
## package's Meta/vignette.rds index.  Returns a character matrix with
## columns Package, Dir, Topic, File, Title, R, PDF (possibly with
## zero rows).
getVignetteInfo <- function(package = NULL, lib.loc = NULL, all = TRUE)
{
    ## Resolve the package directories to inspect.
    paths <- if (is.null(package)) {
        pkgs <- .packages(all.available = all, lib.loc)
        ## allow for misnamed dirs
        find.package(pkgs, lib.loc, quiet = TRUE)
    } else
        find.package(package, lib.loc)

    ## Only packages with a 'doc' subdirectory can contain vignettes.
    paths <- paths[dir.exists(file.path(paths, "doc"))]

    empty <- cbind(Package = character(0),
                   Dir = character(0),
                   Topic = character(0),
                   File = character(0),
                   Title = character(0),
                   R = character(0),
                   PDF = character(0))

    getVinfo <- function(dir) {
        meta <- file.path(dir, "Meta", "vignette.rds")
        entries <- if (file.exists(meta)) readRDS(meta) else NULL
        if (NROW(entries) == 0)
            return(empty)
        # FIXME:  this test is unnecessary?
        R <- entries$R %||% rep.int("", NROW(entries))
        vfile <- basename(entries$File)
        pdf <- entries$PDF
        ## Topic: tangled R file if present, else output, else source.
        topic <- file_path_sans_ext(ifelse(R == "",
                                           ifelse(pdf == "", vfile, pdf),
                                           R))
        cbind(Package = basename(dir),
              Dir = dir,
              Topic = topic,
              File = vfile,
              Title = entries$Title,
              R = R,
              PDF = pdf)[order(entries$Title), , drop = FALSE]
    }

    if (length(paths) > 0L)
        do.call(rbind, lapply(paths, getVinfo))
    else
        empty
}

### Local variables: ***
### mode: outline-minor ***
### outline-regexp: "### [*]+" ***
### End: ***
#  File src/library/tools/R/xgettext.R
#  Part of the R package, https://www.R-project.org
#
#  Copyright (C) 1995-2023 The R Core Team
#
#  This program is free software; you can redistribute it and/or modify
#  it under the terms of the GNU General Public License as published by
#  the Free Software Foundation; either version 2 of the License, or
#  (at your option) any later version.
#
#  This program is distributed in the hope that it will be useful,
#  but WITHOUT ANY WARRANTY; without even the implied warranty of
#  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
#  GNU General Public License for more details.
#
#  A copy of the GNU General Public License is available at
#  https://www.R-project.org/Licenses/

## Extract unique translatable strings from the R source files of the
## package rooted at 'dir'.
##
## dir     : package root directory; sources are taken from dir/R and
##           its unix/ and windows/ subdirectories (plus, for package
##           'base', the loader files in ../share/R).
## verbose : report each file as it is parsed.
## asCall  : if TRUE, record the (deparsed) arguments of the message
##           calls themselves; if FALSE, descend into the arguments
##           and record only the string constants found there.
##
## ngettext() calls are deliberately skipped here; they are collected
## by xngettext() instead.
##
## Returns a named list -- one element, of class "xgettext", per file
## that yielded at least one string.
xgettext <-
function(dir, verbose = FALSE, asCall = TRUE)
{
    dir <- file_path_as_absolute(dir)
    bn <- basename(dir)
    dir <- file.path(dir, "R")
    exts <- .make_file_exts("code")
    R_files <- list_files_with_exts(dir, exts)
    for(d in c("unix", "windows")) {
        OSdir <- file.path(dir, d)
        if(dir.exists(OSdir))
            R_files <- c(R_files, list_files_with_exts(OSdir, exts))
    }
    if(bn == "base") {
        ## include loader files in R_HOME/share/R
        shdir <- file.path(dir, "../../../../share/R")
        R_files <- c(R_files, list_files_with_exts(shdir, exts))
    }
    out <- vector("list", length = length(R_files))
    names(out) <- R_files

    ## Walk one top-level expression, appending found strings to the
    ## per-file accumulator 'strings' in the enclosing frame.
    find_strings <- function(e) {
        ## Recursively collect string constants inside a message call;
        ## 'suppress' is TRUE inside domain = NA calls, whose strings
        ## are intentionally not translated.
        find_strings2 <- function(e, suppress) {
            if(is.character(e)) {
                if(!suppress) strings <<- c(strings, e)
            } else if(is.call(e)) {
                if(is.name(e[[1L]])) {
                    fname <- as.character(e[[1L]])
                    if(fname %in% c("warningCondition", "errorCondition")) {
                        e <- match.call(baseenv()[[fname]], e)
                        e <- e["message"] # ignore condition class etc
                    } else if(fname %in% c("gettext", "gettextf")) {
                        ## a literal domain = NA suppresses translation
                        domain <- e[["domain"]]
                        suppress <- !is.null(domain) && !is.name(domain) && is.na(domain)
                        if(fname == "gettextf") {
                            e <- match.call(gettextf, e)
                            e <- e["fmt"] # just look at fmt arg
                        } else if(fname == "gettext" &&
                                  !is.null(names(e))) {
                            e <- e[!(names(e) == "domain")] # remove domain arg
                        }
                    } else if(fname == "ngettext")
                        return()  # handled by xngettext()
                }
                for(i in seq_along(e)) find_strings2(e[[i]], suppress)
            }
        }
        if(is.call(e)
           && is.name(e[[1L]])
           ## FIXME: this skips `base::`-prefixed calls
           && (as.character(e[[1L]])
               %in% c("warning", "stop", "message", "packageStartupMessage",
                      "gettext", "gettextf"))) {
             ## a literal domain = NA suppresses translation
             domain <- e[["domain"]]
             suppress <- !is.null(domain) && !is.name(domain) && is.na(domain)
             ## remove named args
             if(!is.null(names(e)))
                 e <- e[names(e) %notin% c("call.", "immediate.", "domain")]
             if(asCall) {
                 if(!suppress) strings <<- c(strings, as.character(e)[-1L])
             } else {
                 if(as.character(e[[1L]]) == "gettextf") {
                     e <- match.call(gettextf, e)
                     e <- e["fmt"] # just look at fmt arg
                 }
                 for(i in seq_along(e)) find_strings2(e[[i]], suppress)
             }
        } else if(is.recursive(e))
            for(i in seq_along(e)) Recall(e[[i]])
    }

    for(f in R_files) {
        if(verbose) message(gettextf("parsing '%s'", f), domain = NA)
        strings <- character()
        for(e in parse(file = f)) find_strings(e)
        ## strip leading and trailing white space
        strings <- sub("^[ \t\n]*", "", strings)
        strings <- sub("[ \t\n]*$", "", strings)
        out[[f]] <- structure(unique(strings), class="xgettext")
    }

    ## drop files which yielded no strings
    out[lengths(out) > 0L]
}

## Print method for "xgettext" results: one encoded string per line.
print.xgettext <- function(x, ...)
{
    encoded <- encodeString(x)
    cat(encoded, sep = "\n")
    invisible(x)
}

## Print method for "xngettext" results: each element is a
## c(msg1, msg2) pair, shown as a msgid/msgid_plural block.
print.xngettext <- function(x, ...)
{
    for (pair in x) {
        enc <- encodeString(pair)
        cat("\nmsgid        = ", enc[1L],
            "\nmsgid_plural = ", enc[2L], "\n", sep = "")
    }
    invisible(x)
}

## Collect the (msg1, msg2) string pairs of all ngettext() calls in
## the R source files of the package rooted at 'dir'.  Calls with a
## literal domain = NA, or with non-constant messages, are skipped.
## Returns a named list -- one "xngettext"-classed list per file that
## yielded at least one pair.
xngettext <-
function(dir, verbose = FALSE)
{
    dir <- file_path_as_absolute(dir)
    dir <- file.path(dir, "R")
    exts <- .make_file_exts("code")
    R_files <- list_files_with_exts(dir, exts)
    for (os in c("unix", "windows", "aqua")) {
        osdir <- file.path(dir, os)
        if (dir.exists(osdir))
            R_files <- c(R_files, list_files_with_exts(osdir, exts))
    }
    out <- vector("list", length = length(R_files))
    names(out) <- R_files

    ## Recursively harvest ngettext() calls from expression 'e',
    ## appending pairs to 'strings' in the enclosing frame.
    harvest <- function(e) {
        if (is.call(e) && is.name(e[[1L]]) &&
            as.character(e[[1L]]) %in% "ngettext") {
            e <- match.call(ngettext, e)
            domain <- e[["domain"]]
            suppressed <- !is.null(domain) && !is.name(domain) &&
                is.na(domain)
            if (!suppressed &&
                is.character(e[["msg1"]]) && is.character(e[["msg2"]]))
                strings <<- c(strings,
                              list(c(msg1 = e[["msg1"]],
                                     msg2 = e[["msg2"]])))
        } else if (is.recursive(e))
            for (i in seq_along(e)) harvest(e[[i]])
    }

    for (f in R_files) {
        if (verbose) message(gettextf("parsing '%s'", f), domain = NA)
        strings <- list()
        for (e in parse(file = f)) harvest(e)
        out[[f]] <- structure(strings, class = "xngettext")
    }

    ## drop files which yielded no pairs
    out[lengths(out) > 0L]
}

## Create a PO template (.pot) file for the R-level message strings of
## the package rooted at 'dir'.
##
## dir     : package root directory (scanned via xgettext/xngettext).
## potFile : output file; defaults to R-<pkg>.pot.
## name    : Project-Id-Version name ("R" by default).
## version : project version; defaults to the running R version.
## bugs    : Report-Msgid-Bugs-To address.
xgettext2pot <-
function(dir, potFile, name = "R", version, bugs)
{
    dir <- file_path_as_absolute(dir)
    if(missing(potFile))
        potFile <- paste0("R-", basename(dir), ".pot")
    ## Singular messages, quoted for the PO format.
    msgid <- unique(unlist(xgettext(dir, asCall = FALSE)))
    msgid <- msgid[nzchar(msgid)]
    if(length(msgid) > 0L)
	msgid <- shQuote(encodeString(msgid), type="cmd")  # need to quote \n, \t etc
    ## Plural messages: lists of c(msg1, msg2) pairs, per file.
    msgid_plural <- xngettext(dir)
    un <- unique(unlist(msgid_plural))

    con <- file(potFile, "wt")
    on.exit(close(con))
    if(missing(version))
        version <- paste(R.version$major, R.version$minor, sep = ".")
    if(missing(bugs)) bugs <- "bugs.r-project.org"
    ## Standard PO header.
    writeLines(con = con,
               c('msgid ""',
                 'msgstr ""',
                 sprintf('"Project-Id-Version: %s %s\\n"', name, version),
                 sprintf('"Report-Msgid-Bugs-To: %s\\n"', bugs),
                 paste0('"POT-Creation-Date: ',
                        format(Sys.time(), "%Y-%m-%d %H:%M"), # %z is not portable
                        '\\n"'),
                 '"PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\\n"',
                 '"Last-Translator: FULL NAME <EMAIL@ADDRESS>\\n"',
                 '"Language-Team: LANGUAGE <LL@li.org>\\n"',
                 '"Language: \\n"',
                 '"MIME-Version: 1.0\\n"',
                 '"Content-Type: text/plain; charset=CHARSET\\n"',
                 '"Content-Transfer-Encoding: 8bit\\n"',
                 if (length(un)) '"Plural-Forms: nplurals=INTEGER; plural=EXPRESSION;\\n"'))
    for(e in msgid)
        writeLines(con=con, c('', paste('msgid', e), 'msgstr ""'))
    for(ee in msgid_plural)
        for(e in ee)
            if(e[1L] %in% un) {
                writeLines(
                    con=con,
                    c('',
                      paste('msgid       ', shQuote(encodeString(e[1L]), type="cmd")),
                      paste('msgid_plural', shQuote(encodeString(e[2L]), type="cmd")),
                      'msgstr[0]    ""',
                      'msgstr[1]    ""')
                )
                ## Drop both strings of the pair so each is emitted at
                ## most once.  (Formerly 'un <- un[-match(e, un)]',
                ## which fails with "only 0's may be mixed with
                ## negative subscripts" when one of the two strings was
                ## already removed via an earlier pair, as match() then
                ## returns NA.)
                un <- setdiff(un, e)
            }
}


checkPoFile <- function(f, strictPlural = FALSE)
{
    ## Check the single .po file 'f' for mismatches between the C-style
    ## format directives (%s, %d, ...) of each msgid and those of its
    ## translation(s).  Returns a 5-column character matrix of class
    ## "check_po_files": location ("file:line"), "#:" source reference,
    ## description of the difference, msgid, msgstr.  With
    ## 'strictPlural', msgstr[n] entries must match the singular msgid
    ## exactly instead of being allowed to match msgid_plural too.
    getfmts <- function(s) .Call(C_getfmts, s)

    lines <- readLines(f, encoding = "bytes")
    i <- 0
    noCformat <- FALSE   # current entry flagged "#, no-c-format"?
    f1_plural <- NULL    # format directives of msgid_plural, if seen
    ref <- NA            # "#:" source reference for the current entry
    fuzzy <- FALSE       # fuzzy entries are checked but not reported

    result <- matrix(character(), ncol = 5L, nrow = 0L)
    while (i < length(lines)) {
	i <- i + 1L

	if (startsWith(lines[i], "#,")) { # useBytes=TRUE (speedup ?)
	    noCformat <- noCformat || grepl("no-c-format", lines[i], useBytes = TRUE)
	    fuzzy <- fuzzy || grepl("fuzzy", lines[i], useBytes = TRUE)
	} else if (startsWith(lines[i], "#:")) {
	    ## Only the first reference is kept verbatim; further ones
	    ## are summarized as "etc.".
	    if (!is.na(ref))
		ref <- paste(ref, "etc.")
	    else
		ref <- sub("^#:[[:blank:]]*", "", lines[i])
	} else if (startsWith(lines[i], "msgid ")) {
	    ## Assemble the (possibly multi-line) msgid string ...
	    s1 <- sub('^msgid[[:blank:]]+["](.*)["][[:blank:]]*$', "\\1", lines[i])
	    while (startsWith(lines[i+1L], '"')) {
		i <- i + 1L
		s1 <- paste0(s1, sub('^["](.*)["][[:blank:]]*$', "\\1", lines[i]))
	    }
	    ## ... and extract its format directives.
	    f1 <- tryCatch(getfmts(s1), error = identity)
	    j <- i + 1L

	    ## Skip entries explicitly marked non-C-format and msgids
	    ## that are not valid format strings themselves.
	    if (noCformat || inherits(f1, "error")) {
		noCformat <- FALSE
		next
	    }

	    ## Scan the msgid_plural / msgstr / msgstr[n] lines that
	    ## belong to this msgid.
	    while (j <= length(lines)) {
		if (grepl("^msgid_plural[[:blank:]]", lines[j], useBytes = TRUE))
		    statement <- "msgid_plural"
		else if (grepl("^msgstr[[:blank:]]", lines[j], useBytes = TRUE))
		    statement <- "msgstr"
		else if (grepl("^msgstr\\[[[:digit:]]+\\][[:blank:]]", lines[j], useBytes = TRUE))
		    statement <- sub("^(msgstr)\\[([[:digit:]]+)\\].*$", "\\1\\\\[\\2\\\\]", lines[j])
		else
		    break

		## Assemble the (possibly multi-line) translation.
		s2 <- sub( paste0("^", statement, "[[:blank:]]+[\"](.*)[\"][[:blank:]]*$"),
		                 "\\1", lines[j])
		while (!is.na(lines[j+1L]) && startsWith(lines[j+1L], '"')) {
		    j <- j+1L
		    s2 <- paste0(s2, sub('^["](.*)["][[:blank:]]*$', "\\1", lines[j]))
		}

		if (s1 == "") { # The header entry: re-decode the whole
                                # file using its declared charset.
		    encoding <- sub(".*Content-Type:[^\\]*charset=([^\\[:space:]]*)[[:space:]]*\\\\n.*", "\\1", s2)
		    lines <- iconv(lines, encoding, "UTF-8")
		    break
		}

		f2 <- tryCatch(getfmts(s2), error = identity)

		if (statement == "msgid_plural") {
		    ## Unless strict, remember the plural's directives:
		    ## a later msgstr[n] may match either msgid or
		    ## msgid_plural.
		    if (!strictPlural) {
			f1_plural <- f2
			j <- j+1L
			next
		    }
		}

		if (nzchar(s2) &&
		     !(identical(f1, f2) || identical(f1_plural, f2))) {
		    location <- paste0(f, ":", j)
		    if (inherits(f2, "error"))
			diff <- conditionMessage(f2)
		    else {
			## Build a human-readable description of how the
			## two directive lists differ.
		    	if (length(f1) < length(f2)) {
			    diff <- "too many entries"
			    length(f2) <- length(f1)
		    	} else if (length(f1) > length(f2)) {
			    diff <- "too few entries"
			    length(f1) <- length(f2)
			} else
			    diff <- ""
			diffs <- which(f1 != f2)
			if (length(diffs)) {
			    if (nzchar(diff))
			    	diff <- paste0(diff, ", ")
			    if (length(diffs) > 1)
				diff <- paste(paste0(diff, "differences in entries"),
			                      paste(diffs, collapse = ","))
			    else
				diff <- paste(paste0(diff, "difference in entry"),
				              diffs)
			}
			## Flag percent-sign lookalikes, which silently
			## break C format directives at runtime.
			if (grepl("\u066A", s2, fixed=TRUE))
			    diff <- paste0(diff, ", translation contains arabic percent sign U+066A")
			if (grepl("\uFE6A", s2, fixed=TRUE))
			    diff <- paste0(diff, ", translation contains small percent sign U+FE6A")
			if (grepl("\uFF05", s2, fixed=TRUE))
			    diff <- paste0(diff, ", translation contains wide percent sign U+FF05")
		    }
                    if (!fuzzy)
                        result <- rbind(result, c(location, ref, diff, s1, s2))
		}
		j <- j+1L
	    }
	    ## Reset per-entry state before scanning for the next msgid.
	    i <- j-1L
	    noCformat <- FALSE
	    f1_plural <- NULL
	    ref <- NA
            fuzzy <- FALSE
	}
    }
    structure(result, class = "check_po_files")
}

checkPoFiles <- function(language, dir=".")
{
    ## Run checkPoFile() over every "<language>.po" file found below
    ## 'dir'.  Files named "R-<language>.po" (R-level message catalogs)
    ## get the strict plural check.  All problems are stacked into one
    ## matrix of class "check_po_files".
    po_files <- list.files(path = dir,
                           pattern = paste0(language, "[.]po$"),
                           full.names = TRUE, recursive = TRUE)
    empty <- matrix(character(), ncol = 5L, nrow = 0L)
    per_file <- lapply(po_files, function(po)
        checkPoFile(po, strictPlural = startsWith(basename(po), "R-")))
    ## Keep only files that reported problems, then row-bind them.
    per_file <- per_file[vapply(per_file, nrow, 0L) > 0L]
    structure(do.call(rbind, c(list(empty), per_file)),
              class = "check_po_files")
}

print.check_po_files <- function(x, ...)
{
    ## Print method for the problem matrix produced by checkPoFile() /
    ## checkPoFiles().  Each row is (location, reference, difference,
    ## msgid, msgstr); the reference column is omitted when it is NA.
    if (nrow(x) == 0L)
	cat("No errors\n")
    else
	for (i in seq_len(nrow(x))) {
	    cols <- if(is.na(x[i, 2L])) c(1L, 3:5) else 1:5
	    cat(x[i, cols], sep = "\n")
	    cat("\n")
	}
    ## Return the object invisibly, as print methods conventionally do
    ## (the original implicitly returned cat()'s NULL).
    invisible(x)
}
#  File src/library/tools/R/zzz.R
#  Part of the R package, https://www.R-project.org
#
#  Copyright (C) 1995-2013 The R Core Team
#
#  This program is free software; you can redistribute it and/or modify
#  it under the terms of the GNU General Public License as published by
#  the Free Software Foundation; either version 2 of the License, or
#  (at your option) any later version.
#
#  This program is distributed in the hope that it will be useful,
#  but WITHOUT ANY WARRANTY; without even the implied warranty of
#  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
#  GNU General Public License for more details.
#
#  A copy of the GNU General Public License is available at
#  https://www.R-project.org/Licenses/

## Tell the namespace machinery this package defines no S4 generics,
## which speeds up loading.
.noGenerics <- TRUE

.onUnload <-
function(libpath)
{
    ## Release the package's compiled code when the namespace unloads.
    library.dynam.unload("tools", libpath)
}

## Load the package DLL at install time so registered routines can be
## looked up and cached in the namespace below.
library.dynam("tools", "tools", .Library)
## Registered .Call routine mapping a fixed index to the platform's
## actual signal number.
PS_sigs <- getDLLRegisteredRoutines("tools")[[c(".Call", "ps_sigs")]]

## These are created at install time: the numbers are hard-coded in signals.c
## They happen to be the BSD ones as this started in multicore
SIGHUP <- .Call(PS_sigs, 1L)
SIGINT <- .Call(PS_sigs, 2L)
SIGQUIT <- .Call(PS_sigs, 3L)
SIGKILL <- .Call(PS_sigs, 9L)
SIGTERM <- .Call(PS_sigs, 15L)
SIGSTOP <- .Call(PS_sigs, 17L)
SIGTSTP <- .Call(PS_sigs, 18L)
SIGCONT <- .Call(PS_sigs, 19L)
SIGCHLD <- .Call(PS_sigs, 20L)
SIGUSR1 <- .Call(PS_sigs, 30L)
SIGUSR2 <- .Call(PS_sigs, 31L)

## Only needed while the constants above are being created.
rm(PS_sigs)

## This calls C code in the package too
C_parseLatex <-
    getDLLRegisteredRoutines("tools")[[c(".External", "parseLatex")]]
latexArgCount <- integer()              # The next line modifies this
latexTable <- makeLatexTable(utf8table)  # FIXME: Should latexTable be hardcoded instead?
rm(C_parseLatex)

.onLoad <- function(libname, pkgname) {
    ## Enable the Unicode bullet (U+2022) for Rd-to-text rendering only
    ## where the current locale can display it: on Windows that means
    ## selected single-byte code pages (not CJK), elsewhere any locale
    ## whose encoding can represent the character.
    use_bullet <- if (.Platform$OS.type == "windows") {
	cp <- l10n_info()$codepage
	cp > 0 && (cp == 874L || (cp >= 1250L && cp <= 1258L))
    } else
	!is.na(iconv("\u2022", "UTF-8", ""))
    if (use_bullet)
	Rd2txt_options(itemBullet = "\u2022 ")
}
