The simplest way to write to a workbook is write.xlsx(). By default, write.xlsx() calls writeData(); if asTable is TRUE, write.xlsx() will write x as an Excel table.
## write to working directory
library(openxlsx)
write.xlsx(iris, file = "writeXLSX1.xlsx")
write.xlsx(iris, file = "writeXLSXTable1.xlsx", asTable = TRUE)
## write a list of data.frames to individual worksheets using list names as
## worksheet names
l <- list(IRIS = iris, MTCARS = mtcars)
write.xlsx(l, file = "writeXLSX2.xlsx")
write.xlsx(l, file = "writeXLSXTable2.xlsx", asTable = TRUE)
options(openxlsx.borderColour = "#4F80BD")
options(openxlsx.borderStyle = "thin")
options(openxlsx.dateFormat = "mm/dd/yyyy")
options(openxlsx.datetimeFormat = "yyyy-mm-dd hh:mm:ss")
options(openxlsx.numFmt = NULL) ## For default style rounding of numeric columns
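Instead of NULL, openxlsx.numFmt can be set to an Excel number-format string to apply one format to all numeric columns. A minimal sketch (the format string is only an example); the option is reset to NULL so the examples below keep the default behaviour:
options(openxlsx.numFmt = "#,##0.00") ## e.g. thousands separator with 2 decimal places
options(openxlsx.numFmt = NULL)       ## reset to the default used below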
df <- data.frame(Date = Sys.Date() - 0:19, LogicalT = TRUE,
                 Time = Sys.time() - 0:19 * 60 * 60,
                 Cash = paste("$", 1:20), Cash2 = 31:50, hLink = "https://CRAN.R-project.org/",
                 Percentage = seq(0, 1, length.out = 20),
                 TinyNumbers = runif(20)/1e+09, stringsAsFactors = FALSE)
class(df$Cash) <- "currency"
class(df$Cash2) <- "accounting"
class(df$hLink) <- "hyperlink"
class(df$Percentage) <- "percentage"
class(df$TinyNumbers) <- "scientific"
write.xlsx(df, "writeXLSX3.xlsx")
write.xlsx(df, file = "writeXLSXTable3.xlsx", asTable = TRUE)
hs <- createStyle(fontColour = "#ffffff", fgFill = "#4F80BD", halign = "center",
                  valign = "center", textDecoration = "Bold",
                  border = "TopBottomLeftRight", textRotation = 45)
write.xlsx(iris, file = "writeXLSX4.xlsx", borders = "rows", headerStyle = hs)
write.xlsx(iris, file = "writeXLSX5.xlsx", borders = "columns", headerStyle = hs)
write.xlsx(iris, "writeXLSXTable4.xlsx", asTable = TRUE, headerStyle = createStyle(textRotation = 45))
l <- list(IRIS = iris, colClasses = df)
write.xlsx(l, file = "writeXLSX6.xlsx", borders = "columns", headerStyle = hs)
write.xlsx(l, file = "writeXLSXTable5.xlsx", asTable = TRUE, tableStyle = "TableStyleLight2")
openXL("writeXLSX6.xlsx")
openXL("writeXLSXTable5.xlsx")
wb <- write.xlsx(iris, "writeXLSX6.xlsx")
setColWidths(wb, sheet = 1, cols = 1:5, widths = 20)
saveWorkbook(wb, "writeXLSX6.xlsx", overwrite = TRUE)
require(ggplot2)
wb <- createWorkbook()
options(openxlsx.borderColour = "#4F80BD")
options(openxlsx.borderStyle = "thin")
modifyBaseFont(wb, fontSize = 10, fontName = "Arial Narrow")
addWorksheet(wb, sheetName = "Motor Trend Car Road Tests", gridLines = FALSE)
addWorksheet(wb, sheetName = "Iris", gridLines = FALSE)
freezePane(wb, sheet = 1, firstRow = TRUE, firstCol = TRUE) ## freeze first row and column
writeDataTable(wb, sheet = 1, x = mtcars, colNames = TRUE, rowNames = TRUE, tableStyle = "TableStyleLight9")
setColWidths(wb, sheet = 1, cols = "A", widths = 18)
The iris data.frame is added as an Excel table on sheet 2.
writeDataTable(wb, sheet = 2, iris, startCol = "K", startRow = 2)
qplot(data = iris, x = Sepal.Length, y = Sepal.Width, colour = Species)
insertPlot(wb, 2, xy = c("B", 16)) ## insert plot at cell B16
means <- aggregate(x = iris[, -5], by = list(iris$Species), FUN = mean)
vars <- aggregate(x = iris[, -5], by = list(iris$Species), FUN = var)
headSty <- createStyle(fgFill = "#DCE6F1", halign = "center", border = "TopBottomLeftRight")
writeData(wb, 2, x = "Iris dataset group means", startCol = 2, startRow = 2)
writeData(wb, 2, x = means, startCol = "B", startRow = 3, borders = "rows", headerStyle = headSty)
writeData(wb, 2, x = "Iris dataset group variances", startCol = 2, startRow = 9)
writeData(wb, 2, x = vars, startCol = "B", startRow = 10, borders = "columns", headerStyle = headSty)
setColWidths(wb, 2, cols = 2:6, widths = 12) ## width is recycled for each col
setColWidths(wb, 2, cols = 11:15, widths = 15)
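Column widths can also be sized to fit their contents; a minimal sketch, assuming the installed version supports widths = "auto" as documented for setColWidths() (shown commented out so the fixed widths above are kept):
## alternatively, auto-size instead of fixed widths, e.g.
## setColWidths(wb, 2, cols = 2:6, widths = "auto")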
s1 <- createStyle(fontSize = 14, textDecoration = c("bold", "italic"))
addStyle(wb, 2, style = s1, rows = c(2, 9), cols = c(2, 2))
saveWorkbook(wb, "basics.xlsx", overwrite = TRUE) ## save to working directory
## inspired by xtable gallery
#https://CRAN.R-project.org/package=xtable/vignettes/xtableGallery.pdf
## Create a new workbook
wb <- createWorkbook()
data(tli, package = "xtable")
## data.frame
<- "data.frame"
test.n <- tli[1:10, ]
my.df addWorksheet(wb = wb, sheetName = test.n)
writeData(wb = wb, sheet = test.n, x = my.df, borders = "n")
## matrix
<- "matrix"
test.n <- model.matrix(~ sex * grade, data = my.df)
design.matrix addWorksheet(wb = wb, sheetName = test.n)
writeData(wb = wb, sheet = test.n, x = design.matrix)
## aov
<- "aov"
test.n <- aov(tlimth ~ sex + ethnicty + grade + disadvg, data = tli)
fm1 addWorksheet(wb = wb, sheetName = test.n)
writeData(wb = wb, sheet = test.n, x = fm1)
## lm
<- "lm"
test.n <- lm(tlimth ~ sex*ethnicty, data = tli)
fm2 addWorksheet(wb = wb, sheetName = test.n)
writeData(wb = wb, sheet = test.n, x = fm2)
## anova 1
<- "anova"
test.n <- anova(fm2)
my.anova addWorksheet(wb = wb, sheetName = test.n)
writeData(wb = wb, sheet = test.n, x = my.anova)
## anova 2
<- "anova2"
test.n <- lm(tlimth ~ ethnicty, data = tli)
fm2b <- anova(fm2b, fm2)
my.anova2 addWorksheet(wb = wb, sheetName = test.n)
writeData(wb = wb, sheet = test.n, x = my.anova2)
## glm
<- "glm"
test.n <- glm(disadvg ~ ethnicty*grade, data = tli, family = binomial())
fm3 addWorksheet(wb = wb, sheetName = test.n)
writeData(wb = wb, sheet = test.n, x = fm3)
## prcomp
<- "prcomp"
test.n <- prcomp(USArrests)
pr1 addWorksheet(wb = wb, sheetName = test.n)
writeData(wb = wb, sheet = test.n, x = pr1)
## summary.prcomp
<- "summary.prcomp"
test.n addWorksheet(wb = wb, sheetName = test.n)
writeData(wb = wb, sheet = test.n, x = summary(pr1))
## simple table
<- "table"
test.n data(airquality)
$OzoneG80 <- factor(airquality$Ozone > 80,
airqualitylevels = c(FALSE, TRUE),
labels = c("Oz <= 80", "Oz > 80"))
$Month <- factor(airquality$Month,
airqualitylevels = 5:9,
labels = month.abb[5:9])
<- with(airquality, table(OzoneG80,Month) )
my.table addWorksheet(wb = wb, sheetName = test.n)
writeData(wb = wb, sheet = test.n, x = my.table)
## survdiff 1
library(survival)
<- "survdiff1"
test.n addWorksheet(wb = wb, sheetName = test.n)
<- survdiff(Surv(futime, fustat) ~ rx, data = ovarian)
x writeData(wb = wb, sheet = test.n, x = x)
## survdiff 2
<- "survdiff2"
test.n addWorksheet(wb = wb, sheetName = test.n)
<- survexp(futime ~ ratetable(age=(accept.dt - birth.dt),
expect sex=1,year=accept.dt,race="white"), jasa, cohort=FALSE,
ratetable=survexp.usr)
<- survdiff(Surv(jasa$futime, jasa$fustat) ~ offset(expect))
x writeData(wb = wb, sheet = test.n, x = x)
## coxph 1
<- "coxph1"
test.n addWorksheet(wb = wb, sheetName = test.n)
$rx <- factor(bladder$rx, labels = c("Pla","Thi"))
bladder<- coxph(Surv(stop,event) ~ rx, data = bladder)
x writeData(wb = wb, sheet = test.n, x = x)
## coxph 2
<- "coxph2"
test.n addWorksheet(wb = wb, sheetName = test.n)
<- coxph(Surv(stop,event) ~ rx + cluster(id), data = bladder)
x writeData(wb = wb, sheet = test.n, x = x)
## cox.zph
<- "cox.zph"
test.n addWorksheet(wb = wb, sheetName = test.n)
<- cox.zph(coxph(Surv(futime, fustat) ~ age + ecog.ps, data=ovarian))
x writeData(wb = wb, sheet = test.n, x = x)
## summary.coxph 1
<- "summary.coxph1"
test.n addWorksheet(wb = wb, sheetName = test.n)
<- summary(coxph(Surv(stop,event) ~ rx, data = bladder))
x writeData(wb = wb, sheet = test.n, x = x)
## summary.coxph 2
<- "summary.coxph2"
test.n addWorksheet(wb = wb, sheetName = test.n)
<- summary(coxph(Surv(stop,event) ~ rx + cluster(id), data = bladder))
x writeData(wb = wb, sheet = test.n, x = x)
## view without saving
openXL(wb)
require(ggplot2)
wb <- createWorkbook()
## read historical prices from yahoo finance
<- "CBA.AX"
ticker <- paste0("https://query1.finance.yahoo.com/v7/finance/download/", ticker,
csv.url "?period1=1597597610&period2=1629133610&interval=1d&events=history&includeAdjustedClose=true")
<- read.csv(url(csv.url), as.is = TRUE)
prices $Date <- as.Date(prices$Date)
prices<- prices$Close
close $logReturns = c(0, log(close[2:length(close)]/close[1:(length(close) - 1)]))
prices
## Create plot of price series and add to worksheet
ggplot(data = prices, aes(as.Date(Date), as.numeric(Close))) + geom_line(colour = "royalblue2") +
labs(x = "Date", y = "Price", title = ticker) + geom_area(fill = "royalblue1",
alpha = 0.3) + coord_cartesian(ylim = c(min(prices$Close) - 1.5, max(prices$Close) +
1.5))
## Add worksheet and write plot to sheet
addWorksheet(wb, sheetName = "CBA")
insertPlot(wb, sheet = 1, xy = c("J", 3))
## Histogram of log returns
ggplot(data = prices, aes(x = logReturns)) + geom_histogram(binwidth = 0.0025) + labs(title = "Histogram of log returns")
## currency
class(prices$Close) <- "currency" ## styles as currency in workbook
## write historical data and histogram of returns
writeDataTable(wb, sheet = "CBA", x = prices)
insertPlot(wb, sheet = 1, startRow = 25, startCol = "J")
## Add conditional formatting to show where logReturn > 0.01 using default
## style
conditionalFormat(wb, sheet = 1, cols = 1:ncol(prices), rows = 2:(nrow(prices) +
1), rule = "$H2 > 0.01")
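A style created with createStyle() can also be supplied so matching cells are highlighted differently; note that conditional-formatting styles use bgFill rather than fgFill. A minimal sketch, assuming the installed version's conditionalFormat() accepts a style argument (newer versions provide conditionalFormatting()); the rule, colours, and the name negStyle are only illustrative:
negStyle <- createStyle(fontColour = "#9C0006", bgFill = "#FFC7CE")
conditionalFormat(wb, sheet = 1, cols = 1:ncol(prices), rows = 2:(nrow(prices) + 1),
                  rule = "$H2 < -0.01", style = negStyle)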
## style log return col as a percentage
logRetStyle <- createStyle(numFmt = "percentage")
addStyle(wb, 1, style = logRetStyle, rows = 2:(nrow(prices) + 1), cols = "H", gridExpand = TRUE)
setColWidths(wb, sheet = 1, cols = c("A", "F", "G", "H"), widths = 15)
## save workbook to working directory
saveWorkbook(wb, "stockPrice.xlsx", overwrite = TRUE)
openXL("stockPrice.xlsx")
require(openxlsx)
require(jpeg)
require(ggplot2)
plotFn <- function(x, ...) {
  colvec <- grey(x)
  colmat <- array(match(colvec, unique(colvec)), dim = dim(x)[1:2])
  image(x = 0:(dim(colmat)[2]), y = 0:(dim(colmat)[1]), z = t(colmat[nrow(colmat):1, ]),
        col = unique(colvec), xlab = "", ylab = "", axes = FALSE, asp = 1,
        bty = "n", frame.plot = FALSE, ann = FALSE)
}
## Create workbook and add a worksheet, hide gridlines
<- createWorkbook("Einstein")
wb addWorksheet(wb, "Original Image", gridLines = FALSE)
<- readJPEG(file.path(path.package("openxlsx"), "einstein.jpg"))
A <- nrow(A); width <- ncol(A)
height
## write "Original Image" to cell B2
writeData(wb, 1, "Original Image", xy = c(2,2))
## write Object size to cell B3
writeData(wb, 1, sprintf("Image object size: %s bytes",
format(object.size(A+0)[[1]], big.mark=',')),
xy = c(2,3)) ## equivalent to startCol = 2, startRow = 3
## Plot image
par(mar=rep(0, 4), xpd = NA); plotFn(A)
## insert plot currently showing in plot window
insertPlot(wb, 1, width, height, units="px", startRow= 5, startCol = 2)
## SVD of covariance matrix
rMeans <- rowMeans(A)
rowMeans <- do.call("cbind", lapply(1:ncol(A), function(X) rMeans))
A <- A - rowMeans
E <- svd(A %*% t(A) / (ncol(A) - 1)) # SVD on covariance matrix of A
pve <- data.frame("Eigenvalues" = E$d,
                  "PVE" = E$d / sum(E$d),
                  "Cumulative PVE" = cumsum(E$d / sum(E$d)))
## write eigenvalues to worksheet
addWorksheet(wb, "Principal Component Analysis")
hs <- createStyle(fontColour = "#ffffff", fgFill = "#4F80BD",
                  halign = "CENTER", textDecoration = "Bold",
                  border = "TopBottomLeftRight", borderColour = "#4F81BD")
writeData(wb, 2, x = "Proportions of variance explained by Eigenvector", startRow = 2)
mergeCells(wb, sheet=2, cols=1:4, rows=2)
setColWidths(wb, 2, cols = 1:3, widths = c(14, 12, 15))
writeData(wb, 2, x=pve, startRow = 3, startCol = 1, borders="rows", headerStyle=hs)
## Plots
pve <- cbind(pve, "Ind" = 1:nrow(pve))
ggplot(data = pve[1:20, ], aes(x = Ind, y = 100 * PVE)) +
  geom_bar(stat = "identity", position = "dodge") +
  xlab("Principal Component Index") + ylab("Proportion of Variance Explained") +
  geom_line(size = 1, col = "blue") + geom_point(size = 3, col = "blue")
## Write plot to worksheet 2
insertPlot(wb, 2, width = 5, height = 4, startCol = "E", startRow = 2)
## Plot of cumulative explained variance
ggplot(data = pve[1:50,], aes(x = Ind, y = 100*Cumulative.PVE)) +
geom_point(size=2.5) + geom_line(size=1) + xlab("Number of PCs") +
ylab("Cumulative Proportion of Variance Explained")
insertPlot(wb, 2, width = 5, height = 4, xy= c("M", 2))
## Reconstruct image using increasing number of PCs
nPCs <- c(5, 7, 12, 20, 50, 200)
startRow <- rep(c(2, 24), each = 3)
startCol <- rep(c("B", "H", "N"), 2)
## create a worksheet to save reconstructed images to
addWorksheet(wb, "Reconstructed Images", zoom = 90)
for (i in 1:length(nPCs)) {
  V <- E$v[, 1:nPCs[i]]
  imgHat <- t(V) %*% A ## project img data on to PCs
  imgSize <- object.size(V) + object.size(imgHat) + object.size(rMeans)

  imgHat <- V %*% imgHat + rowMeans ## reconstruct from PCs and add back row means
  imgHat <- round((imgHat - min(imgHat)) / (max(imgHat) - min(imgHat)) * 255) # scale
  plotFn(imgHat / 255)

  ## write strings to worksheet 3
  writeData(wb, "Reconstructed Images",
            sprintf("Number of principal components used: %s", nPCs[[i]]),
            startCol[i], startRow[i])
  writeData(wb, "Reconstructed Images",
            sprintf("Sum of component object sizes: %s bytes",
                    format(as.numeric(imgSize), big.mark = ",")),
            startCol[i], startRow[i] + 1)

  ## write reconstructed image
  insertPlot(wb, "Reconstructed Images", width, height, units = "px",
             xy = c(startCol[i], startRow[i] + 3))
}
# hide grid lines
showGridLines(wb, sheet = 3, showGridLines = FALSE)
## Make text above images BOLD
boldStyle <- createStyle(textDecoration = "BOLD")
## only want to apply style to specified cells (not all combinations of rows & cols)
addStyle(wb, "Reconstructed Images", style=boldStyle,
rows = c(startRow, startRow+1), cols = rep(startCol, 2),
gridExpand = FALSE)
## save workbook to working directory
saveWorkbook(wb, "Image dimensionality reduction.xlsx", overwrite = TRUE)
## remove example files for cran test
if (identical(Sys.getenv("NOT_CRAN", unset = "true"), "false")) {
  fl <- list.files(pattern = "\\.xlsx", recursive = TRUE)
  file_list <- fl[!grepl("inst/extdata", fl) & !grepl("man/", fl)]
  if (length(file_list) > 0) {
    file.remove(file_list)
  }
}