This seems to work (but I assume there is a quicker SQL way to do this):
# Open a file connection for each input file; sqldf reads tables
# directly from these connections.
sql.l <- lapply(filelist , file)
# NOTE(review): sqldf matches the table name inside the SQL string ("i")
# against the variable `i` in the calling environment, so each connection
# is loaded as a table named `i`. Renaming the lambda argument would
# silently break the query — confirm before refactoring.
# dbname = tempfile() stages each load in an on-disk SQLite database
# instead of memory; file.format describes the CSV layout being read.
df_list2 <- lapply(sql.l, function(i) sqldf("select * from i" ,
dbname = tempfile(), file.format = list(header = TRUE, row.names = FALSE)))
A look at speeds — the test setup is partially taken from mnel's post, "Quickly reading very large tables as dataframes in R":
library(data.table)
library(sqldf)
# Generate a 1e6-row test table mixing integer, numeric, and character columns.
n <- 1e6
DT <- data.table(
  a = sample(1:1000, n, replace = TRUE),
  b = sample(1:1000, n, replace = TRUE),
  c = rnorm(n),
  d = sample(c("foo", "bar", "baz", "qux", "quux"), n, replace = TRUE),
  e = rnorm(n),
  f = sample(1:1000, n, replace = TRUE)
)
# Write five identical CSV copies (test1.dat .. test5.dat) to benchmark reads.
lapply(seq_len(5), function(file_num) {
  write.table(DT, paste0("test", file_num, ".dat"),
              sep = ",", row.names = FALSE, quote = FALSE)
})
Read with data.table::fread:
filelist <- list.files(pattern = "*.dat")
# Time reading every file with data.table::fread (one table per file).
system.time(df_list <- lapply(filelist, fread))
# Timings recorded on the author's machine:
# user system elapsed
# 5.244 0.200 5.457
Read with sqldf:
# Open a file connection per input file for sqldf to read from.
sql.l <- lapply(filelist , file)
# Time loading each connection through sqldf. NOTE(review): the table name
# "i" in the SQL string is resolved against the lambda argument `i` in the
# calling environment — renaming it would break the query; confirm before
# refactoring. dbname = tempfile() uses an on-disk SQLite database.
system.time(df_list2 <- lapply(sql.l, function(i) sqldf("select * from i" ,
dbname = tempfile(), file.format = list(header = TRUE, row.names = FALSE))))
# Timings recorded on the author's machine (~7x slower than fread here):
# user system elapsed
# 35.594 1.432 37.357
Check the results — the values match; only the attributes differ (fread and sqldf return differently classed objects):
all.equal(df_list , df_list2)