################################################################################
# This file is released under the GNU General Public License, Version 3, GPL-3 #
# Copyright (C) 2020 Yohann Demont #
# #
# It is part of IFC package, please cite: #
# -IFC: An R Package for Imaging Flow Cytometry #
# -YEAR: 2020 #
# -COPYRIGHT HOLDERS: Yohann Demont, Gautier Stoll, Guido Kroemer, #
# Jean-Pierre Marolleau, Loïc Garçon, #
# INSERM, UPD, CHU Amiens #
# #
# DISCLAIMER: #
# -You are using this package at your own risk! #
# -We do not guarantee privacy nor confidentiality. #
# -This program is distributed in the hope that it will be useful, but WITHOUT #
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or #
# FITNESS FOR A PARTICULAR PURPOSE. In no event shall the copyright holders or #
# contributors be liable for any direct, indirect, incidental, special, #
# exemplary, or consequential damages (including, but not limited to, #
# procurement of substitute goods or services; loss of use, data, or profits; #
# or business interruption) however caused and on any theory of liability, #
# whether in contract, strict liability, or tort (including negligence or #
# otherwise) arising in any way out of the use of this software, even if #
# advised of the possibility of such damage. #
# #
# You should have received a copy of the GNU General Public License #
# along with IFC. If not, see <http://www.gnu.org/licenses/>. #
################################################################################
#' @title DAF File Writer
#' @description
#' Writes a new DAF file based on another one and exports new region(s), pop(s), feature(s), graph(s) and / or mask(s).
#' @param fileName path of file to read data from.
#' @param write_to pattern used to export file.
#' Placeholders, like "\%d/\%s_fromR.\%e", will be substituted:\cr
#' -\%d: with full path directory of 'fileName'\cr
#' -\%p: with first parent directory of 'fileName'\cr
#' -\%e: with extension of 'fileName' (without leading .)\cr
#' -\%s: with shortname from 'fileName' (i.e. basename without extension).\cr
#' Exported file extension will be deduced from this pattern. Note that it has to be a .daf.
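#' For example (hypothetical path), if 'fileName' is "/data/run1/sample.daf", the pattern "\%d/\%s_fromR.\%e" resolves to "/data/run1/sample_fromR.daf".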
#' @param pops list of population(s) to export. Will be coerced to exportable format by buildPopulation.
#' @param regions list of region(s) to export. Will be coerced to exportable format by buildRegion.
#' @param features list of feature(s) to export.
#' @param graphs list of graph(s) to export. Not yet implemented.
#' @param masks list of mask(s) to export. Not yet implemented.
#' @param viewing_pop Character string. Allows the user to change the displayed population. Default is 'All'.
#' @param endianness The endianness ("big" or "little") of the target system for the file. Default is .Platform$endian.\cr
#' Endianness describes the byte order in which data is stored within the file. This parameter should not be modified.
#' @param verbose whether to display information (used for debugging purposes). Default is FALSE.
#' @param overwrite whether to overwrite file or not. Default is FALSE.
#' Note that if TRUE, the exported file will only be overwritten when the path of 'fileName' and the path deduced from 'write_to' are different;
#' otherwise, you will get an error saying that overwriting the source file is not allowed.\cr
#' Note also that an original file, i.e. one generated by IDEAS(R) or INSPIRE(R), will never be overwritten;
#' in that case, you will get an error saying that overwriting an original file is not allowed.
#' @param fullname whether to export the daf file with the full path of its corresponding cif file, if found. Default is TRUE.
#' If the cif file can't be found, the daf file will be exported with the original cif file name.
#' @param cifdir the path of the directory to initially look for the cif file. Default is dirname(fileName). Only applies when 'fullname' is set to TRUE.
#' @param ntry number of times \code{\link{ExportToDAF}} will be allowed to ask for the corresponding cif file. Default is +Inf. Only applies when 'fullname' is set to TRUE.
#' @param ... other arguments to be passed.
#' @examples
#' if(requireNamespace("IFCdata", quietly = TRUE)) {
#'   ## use a daf file
#'   file_daf <- system.file("extdata", "example.daf", package = "IFCdata")
#'   tmp <- tempdir(check = TRUE)
#'   ## create a tagged population named test with 1st object
#'   pop <- buildPopulation(name = "test", type = "T", obj = 0)
#'   ExportToDAF(file_daf, write_to = paste0(tmp, "\\test.daf"),
#'               overwrite = TRUE, pops = list(pop))
#' } else {
#'   message(sprintf('Please run `install.packages("IFCdata", repos = "%s", type = "source")` %s',
#'                   'https://gitdemont.github.io/IFCdata/',
#'                   'to install extra files required to run this example.'))
#' }
#' @return It invisibly returns the full path of the exported file.
#' @export
ExportToDAF <- function(fileName, write_to, pops = list(), regions = list(), features = list(), graphs = list(), masks = list(),
                        viewing_pop = "All", endianness = .Platform$endian, verbose = FALSE, overwrite = FALSE,
                        fullname = TRUE, cifdir = dirname(fileName), ntry = +Inf, ...) {
dots = list(...)
# change locale
locale_back <- setloc(c("LC_ALL" = "en_US.UTF-8"))
enc_back <- options("encoding" = "UTF-8")
on.exit(suspendInterrupts({setloc(locale_back); options(enc_back)}), add = TRUE)
# check mandatory param
if(missing(fileName)) stop("'fileName' can't be missing")
tmp = duplicated(fileName)
if(any(tmp)) {
warning(paste0("duplicated files have been removed from 'fileName': ","\n-", paste0(fileName[tmp],collapse="\n-")))
fileName = fileName[!tmp]
}
if(length(fileName) != 1) stop("'fileName' should be of length 1")
if(!file.exists(fileName)) stop(paste("can't find",fileName,sep=" "))
if(getFileExt(fileName)!="daf") stop("'fileName' should be a .daf file")
if(missing(write_to)) stop("'write_to' can't be missing")
assert(write_to, len = 1, typ = "character")
assert(viewing_pop, typ = "character", len = 1)
assert(verbose, len=1, alw=c(TRUE, FALSE))
assert(endianness, len=1, alw=c("little", "big"))
assert(overwrite, len=1, alw=c(TRUE, FALSE))
cifdir = na.omit(as.character(cifdir)); assert(cifdir, len = 1, typ = "character")
ntry = na.omit(as.numeric(ntry)); assert(ntry, len = 1, typ = "numeric")
if(ntry < 0) ntry = 0
assert(fullname, len=1, alw=c(TRUE, FALSE))
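# resolve 'write_to': substitute the %d/%p/%s/%e placeholders with the corresponding parts of 'fileName' and check that the deduced extension is .daf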
fileName = enc2native(normalizePath(fileName, winslash = "/", mustWork = FALSE))
splitf_obj = splitf(fileName)
splitp_obj = splitp(write_to)
write_to = formatn(splitp_obj, splitf_obj)
file_extension = getFileExt(write_to)
assert(file_extension, len = 1, alw = "daf")
if(any(splitp_obj$channel > 0)) message("'write_to' has %c argument but channel information can't be retrieved with ExportToDAF()")
if(any(splitp_obj$object > 0)) message("'write_to' has %o argument but object information can't be retrieved with ExportToDAF()")
overwritten = FALSE
if(file.exists(write_to)) {
write_to = enc2native(normalizePath(write_to, winslash = "/"))
if(!overwrite) stop("file ",write_to," already exists")
if(tolower(fileName) == tolower(write_to)) stop("you are trying to overwrite source file which is not allowed")
xmlEND_export = cpp_scanFirst(write_to, charToRaw('</Assay>'), start = 0, end = 0)
if(xmlEND_export > 0) {
xml_export = read_xml(readBin(con = write_to, what = "raw", n = xmlEND_export + nchar("</Assay>") - 1), options=c("HUGE","RECOVER","NOENT","NOBLANKS","NSCLEAN"))
tryCatch({
is_fromR = as.character(na.omit(xml_attr(xml_find_first(xml_export, "//Assay"), attr = "IFC_version")))
}, finally = rm(xml_export))
if(length(is_fromR)==0) stop("you are trying to overwrite an original file which is not allowed")
} else {
stop(write_to, "\ndoes not seem to be well formatted: </Assay> not found")
}
tmp_file = tempfile()
overwritten = TRUE
}
dir_name = dirname(write_to)
if(!dir.exists(dir_name)) if(!dir.create(dir_name, recursive = TRUE, showWarnings = FALSE)) stop("can't create\n", dir_name)
file_w = ifelse(overwritten, tmp_file, write_to)
xmlEND = cpp_scanFirst(fileName, charToRaw('</Assay>'), start = 0, end = 0)
if(xmlEND == 0) stop(fileName, "\ndoes not seem to be well formatted: </Assay> not found")
xmlEND = xmlEND + nchar("</Assay>") - 1
xml_tmp = read_xml(readBin(con = fileName, what = "raw", n = xmlEND), options=c("HUGE","RECOVER","NOENT","NOBLANKS","NSCLEAN"))
tryCatch(expr = {
# collects important information in original daf file
masks_daf = c(xml_attr(xml_find_all(xml_tmp, "//mask"), attr = "name"),
unlist(strsplit(xml_attr(xml_find_first(xml_tmp, "//mask[@name='MC']"), attr = "def"), "|Or|", useBytes = TRUE, fixed=TRUE),
recursive = FALSE, use.names = FALSE))
images_daf = c(xml_attr(xml_find_all(xml_tmp, "//Images/image"), attr = "name"))
pops_daf = xml_attr(xml_find_all(xml_tmp, "//Pop"), attr = "name")
if(length(pops_daf)==0) stop("No population found in ", fileName) # should not happen, at least 'All' population should be there
regions_daf_label = xml_attr(xml_find_all(xml_tmp, "//Region"), attr = "label") # what happens if empty ?
regions_daf_type = xml_attr(xml_find_all(xml_tmp, "//Region"), attr = "type") # what happens if empty ?
features_daf = xml_attr(xml_find_all(xml_tmp, "//UDF"), attr = "name")
fid = length(features_daf)
channels_daf = xml_attr(xml_find_all(xml_tmp, "//image"), attr = "name")
if(length(channels_daf)==0) stop("No channel found in ", fileName) # should not happen, at least one channel should be there
obj_number = as.numeric(na.omit(xml_attr(xml_find_first(xml_tmp, "//SOD"), attr = "objcount")))
# checksum = as.numeric(na.omit(xml_attr(xml_find_first(xml_tmp, "//SOD"), attr = "checksum")))
if(length(obj_number)==0) stop("No object found in ", fileName) # should not happen, at least one object should be there
is_binary = as.logical(na.omit(xml_attr(xml_find_first(xml_tmp, "//Assay"), attr = "binaryfeatures")))
if(length(is_binary)==0) {is_binary=FALSE}
# try to coerce inputs to compatible daf format
pops = lapply(pops, keep_attributes, what=buildPopulation)
names(pops) = sapply(pops, FUN=function(x) x$name)
regions = lapply(regions, keep_attributes, what=buildRegion)
names(regions) = sapply(regions, FUN=function(x) x$label)
features = lapply(features, FUN=function(x) do.call(what=buildFeature, args=x))
names(features) = sapply(features, FUN=function(x) x$name)
# defines available parameters
operators_pop = c("And","Or","Not","(",")")
comb_operators = c("+", "-", "*", "/", "(", ")", "ABS", "COS", "SIN", "SQR", "SQRT")
extr_operators = c("true", "false", "True", "False")
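# NB: 'comb_operators' and 'extr_operators' are the tokens accepted in feature definitions, while 'operators_pop' lists the tokens accepted in combined population definitions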
# collects important information from new nodes
masks_new = unlist(sapply(masks, FUN=function(x) x$name))
# removes duplicated inputs
tmp = duplicated(masks_new)
if(any(tmp)) {
warning("duplicated masks automatically removed:\n\t-", paste0(masks_new[tmp],collapse="\n\t-"), immediate. = TRUE, call. = FALSE)
masks = masks[!tmp]
}
tmp = duplicated(names(pops))
if(any(tmp)) {
warning("duplicated pops automatically removed:\n\t-", paste0(names(pops)[tmp],collapse="\n\t-"), immediate. = TRUE, call. = FALSE)
pops = pops[!tmp]
}
tmp = duplicated(names(regions))
if(any(tmp)) {
warning("duplicated regions automatically removed:\n\t-", paste0(names(regions)[tmp],collapse="\n\t-"), immediate. = TRUE, call. = FALSE)
regions = regions[!tmp]
}
tmp = duplicated(names(features))
if(any(tmp)) {
warning("duplicated features automatically removed:\n\t-", paste0(names(features)[tmp],collapse="\n\t-"), immediate. = TRUE, call. = FALSE)
features = features[!tmp]
}
# finds offsets in existing daf
toskip=c("masks"=cpp_scanFirst(fileName, charToRaw('</masks>'), start = 0, end = xmlEND),
"features_def"=cpp_scanFirst(fileName, charToRaw('</DefinedFeatures>'), start = 0, end = xmlEND),
"pops"=cpp_scanFirst(fileName, charToRaw('</Pops>'), start = 0, end = xmlEND),
"regions"=cpp_scanFirst(fileName, charToRaw('</Regions>'), start = 0, end = xmlEND),
"graphs"=cpp_scanFirst(fileName, charToRaw('</Displays>'), start = 0, end = xmlEND))-1
if(toskip["graphs"] < 0) toskip["graphs"] = cpp_scanFirst(fileName, charToRaw('<Displays'), start = 0, end = xmlEND)-1 # when there is no graphs <Displays node is closed with /> and not </Displays>
tmp = (toskip <= 0)
if(any(tmp)) stop(paste0(fileName, "\ndoes not seem to be well formatted: [",paste0(names(toskip)[tmp], collapse="|"),"] not found"))
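# note on offsets (mirrors the writing code further below): in binary mode each feature block stored after </Assay>
# is a 4-byte feature id followed by obj_number doubles of 8 bytes, hence the fid*(obj_number*8+4) span;
# in text mode feature values are kept inside the XML <FeatureValues> node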
if(is_binary) {
toskip=c(toskip,"feat_count"=xmlEND+7)
toskip=c(toskip,"features"=xmlEND+(fid)*(obj_number*8+4)+15)
} else {
toskip=c(toskip,"features"=cpp_scanFirst(fileName, charToRaw('</FeatureValues>'), start = 0, end = xmlEND)-1)
}
toskip=toskip[order(toskip)]
}, error = function(e) {
stop(paste0(write_to, "\ncan't be ",ifelse(overwritten,"overwritten.\nFile was not modified.","created"),"\n", e$message), call. = FALSE)
}, finally = {
rm(xml_tmp) # no need to keep object in memory
})
# opens connections for reading and writing
toread = file(description = fileName, open = "rb")
finfo = file.info(fileName)
tryCatch(suppressWarnings({
towrite = file(description = file_w, open = "wb")
}), error = function(e) {
close(toread)
stop(paste0(ifelse(overwritten,"temp ","'write_to' "), "file: ", file_w, "\ncan't be created: check name ?"))
})
write_to = normalizePath(write_to, winslash = "/", mustWork = FALSE)
raw_3e = as.raw(0x3e)
raw_00 = as.raw(0x00)
raw_2020 = as.raw(c(0x20,0x20))
tryCatch(expr = {
# extracts extra characters separating nodes and initializes nodes
collapse = lapply(names(toskip), FUN=function(x) as.raw(c()))
names(collapse) = names(toskip)
new_nodes = collapse
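# for each closing tag, walk backwards to the previous '>' and collect the separating bytes so that new nodes reuse the same spacing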
for(i in 1:length(toskip)) {
if(!(is_binary & names(toskip[i])%in%c("features","feat_count"))) {
k = toskip[i]-1
seek(toread, k)
B = readBin(toread, what="raw", n=1)
while(B != raw_3e) {
if(B != raw_00) collapse[[i]]=c(B,collapse[[i]])
k=k-1
seek(toread, k)
B = readBin(toread, what="raw", n=1)
}
}
}
if(length(features) != 0) {
all_names_feat = c(features_daf, names(features))
split_feat(features_def = features,
all_names = all_names_feat,
m_names = c(masks_daf, masks_new),
i_names = images_daf,
comb_operators = comb_operators,
extr_operators = extr_operators,
split = "|",
force = FALSE)
for(i in seq_along(features)) {
feat = features[[i]]
if(verbose) cat(paste0("creating feature: ", feat$name, "\n"))
if(feat$name%in%features_daf) {
warning(paste0(feat$name, ", not exported: trying to export an already defined feature"), immediate. = TRUE, call. = FALSE)
next
}
if(length(feat$val) != obj_number) stop(feat$name, "\nbad feature value length, expected: ", obj_number, ", but is: ", length(feat$val)) # TODO add some lines to allow function to automatically compute feat$val when missing
new_node_features_def = sprintf('<UDF name="%s" type="%s" userfeaturetype="%s" def="%s" />', feat[["name"]], feat[["type"]], feat[["userfeaturetype"]], feat[["def"]])
if(is_binary) {
# TODO maybe change endianness reading / writing
new_nodes$features = c(new_nodes$features, cpp_uint32_to_raw(fid),
sapply(feat$val, FUN=function(x) writeBin(object=as.double(x), con=raw(), size = 8, endian = endianness, useBytes = TRUE)))
} else {
new_nodes$features = c(new_nodes$features,
raw_2020,
charToRaw(sprintf('<UDFValues fid="%s" fv="%s" />', num_to_string(fid), paste0(num_to_string(feat$val), collapse = "|"))),
raw_2020,
collapse$features)
}
fid=fid+1
new_nodes$features_def = c(new_nodes$features_def,
raw_2020,
charToRaw(new_node_features_def),
raw_2020,
collapse$features_def)
}
}
if(length(regions)!=0) for(i in 1:length(regions)) {
reg = regions[[i]]
A = attr(reg, "sync")
reg = reg[sapply(reg, length) != 0]
if(verbose) cat(paste0("creating region: ", reg$label, "\n"))
if(reg$label%in%regions_daf_label) {
warning(paste0(reg$label, ", not exported: trying to export an already defined region"), immediate. = TRUE, call. = FALSE)
next
}
if(length(A) == 1) reg$sync = A
new_nodes$regions = c(new_nodes$regions,
raw_2020,
charToRaw(to_xml_list(reg[-which(names(reg)%in%c("x","y"))], name = "Region", escape = rawToChar(c(collapse$regions,raw_2020)),
kids = lapply(1:length(reg$x), FUN=function(k) to_xml_list(x=list("x"=num_to_string(reg$x[k]), "y"=num_to_string(reg$y[k])), name="axy")))),
raw_2020,
collapse$regions)
}
pops_alw = c()
if(length(pops) != 0) {
for(i in 1:length(pops)) {
pop = pops[[i]]
pop = pop[sapply(pop, length) != 0]
if(verbose) cat(paste0("creating population: ", pop$name, "\n"))
if(pop$name%in%pops_daf) {
warning(pop$name, ", not exported: trying to export an already defined population", immediate. = TRUE, call. = FALSE)
next
}
if(pop$type=="G") {
tmp1 = which(regions_daf_label%in%pop$region)
tmp2 = which(names(regions)%in%pop$region)
if(length(tmp1)==0 & length(tmp2)==0) stop(pop$name, ', trying to export a graphical population with a non-defined region: ["', pop$region, '"]', call. = FALSE)
if(length(tmp1)!=0) reg = list("label"=regions_daf_label[tmp1], "type"=regions_daf_type[tmp1])
if(length(tmp2)!=0) reg = regions[[tmp2[1]]]
if(!pop$fx%in%c(features_daf, names(features))) stop(pop$name, ', trying to export a graphical population with an unknown fx ["', pop$fx, '"]', call. = FALSE)
if(length(pop$fy)!=0 & reg$type=="line") {
pop = pop[-which(names(pop)=="fy")]
warning(pop$name, ", trying to export a graphical population based on a region of type 'line' with a fy feature; exported but fy has been automatically removed", immediate. = TRUE, call. = FALSE)
}
if(reg$type!="line") if(!(pop$fy%in%c(features_daf, names(features)))) stop(pop$name, ', trying to export a graphical population with an unknown fy ["', pop$fy, '"]', call. = FALSE)
new_node_pop = to_xml_list(pop, name = "Pop")
}
if(pop$type=="C") {
new_node_pop = to_xml_list(pop, name = "Pop")
}
if(pop$type=="T") {
if(anyNA(pop$obj)) stop(pop$name, ", trying to export a tagged population containing NA/NaN")
K = typeof(pop$obj)
if(length(pop$obj)==0) {
warning(pop$name, ", not exported: trying to export a tagged population of length = 0", immediate. = TRUE, call. = FALSE)
next
}
if(K%in%"logical") {
if(sum(pop$obj)==0) {
warning(paste0(pop$name, ", not exported: trying to export a tagged population with no object tagged"), immediate. = TRUE, call. = FALSE)
next
}
if(obj_number != length(pop$obj)) stop(pop$name, ", trying to export a tagged population with more element(s) than total number of objects acquired")
new_node_pop = to_xml_list(pop[-which(names(pop)%in%c("obj"))], name = "Pop", escape = rawToChar(c(collapse$pops,raw_2020)),
kids = lapply(num_to_string(which(pop$obj)-1), FUN=function(ob) to_xml_list(name="ob", x=list("O"=ob))))
}
if(K%in% c("double","integer")) {
if((obj_number <= max(pop$obj)) | (min(pop$obj) < 0) | any(duplicated(pop$obj))) stop(pop$name, ", trying to export a tagged population with duplicated element(s) or element(s) outside of objects acquired")
new_node_pop = to_xml_list(pop[-which(names(pop)%in%c("obj"))], name = "Pop", escape = rawToChar(c(collapse$pops,raw_2020)),
kids = lapply(num_to_string(pop$obj), FUN=function(ob) to_xml_list(name="ob", x=list("O"=ob))))
}
}
pops_alw = c(pops_alw, i)
new_nodes$pops = c(new_nodes$pops,
raw_2020,
charToRaw(new_node_pop),
raw_2020,
collapse$pops)
}
pops = pops[pops_alw]
names(pops) = sapply(pops, FUN=function(p) p$name)
all_names = c(pops_daf, names(pops))
##### final check to ensure that remaining pops do not depend on a pop that has been removed
if(any(sapply(pops, FUN = function(pop) {
if(!(pop$base%in%all_names)) {
stop(pop$name, ', trying to export a population with unknown base ["', pop$base, '"]')
}
pop$type == "C"
}))) {
alt_names = gen_altnames(all_names)
lapply(pops, FUN = function(pop) {
if("C" %in% pop$type) {
tmp3 = try(splitn(definition = pop$definition, all_names = all_names, alt_names = alt_names, operators = operators_pop), silent = TRUE)
if(inherits(tmp3, "try-error")) stop(pop$name, ', trying to export a population with unknown definition ["', pop$definition, '"]', call. = FALSE)
}
return(NULL)
})
}
}
pops_alw = c(pops_daf, names(pops))
# TODO add graph export
offset = NULL
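# rewrite the <Assay> header: inject the IFC_version attribute (marks the file as generated by IFC),
# substitute the file= attribute with the resolved cif name and, when requested, the displayed population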
seek(toread, 0)
beg = readBin(toread, what="raw", n=toskip[1], endian = endianness)
ass_beg = cpp_scanFirst(fileName, charToRaw('<Assay'), start = 0, end = xmlEND)+nchar("<Assay")
date_beg = cpp_scanFirst(fileName, charToRaw('date='), start = ass_beg, end = xmlEND)-1
name_beg = cpp_scanFirst(fileName, charToRaw('file='), start = date_beg, end = xmlEND)+nchar("file=")
name_end = cpp_scanFirst(fileName, charToRaw(' creation='), start = name_beg , end = xmlEND)-1
cname = rawToChar(beg[(name_beg+1):(name_end-1)])
if(fullname) {
found = FALSE
# checksum = attr(ExtractFromDAF(fileName, extract_offsets = TRUE, extract_features = FALSE, extract_images = TRUE, extract_stats = FALSE)$offsets, "checksum")
checksum = checksumDAF(fileName = fileName)
cif_name = file.path(cifdir, basename(cname)) # look in cifdir 1st
if(file.exists(cif_name)) {
if(checksumXIF(cif_name) == checksum) found = TRUE
} else {
cif_name = cname
}
if((!found)&& file.exists(cif_name)) {
if(checksumXIF(cif_name) == checksum) found = TRUE
}
while((interactive() && (ntry > 0) && (!found))) {
message(paste0("daf file does not refer to: ", cif_name))
old_wd = getwd()
on.exit(setwd(old_wd), add= TRUE)
setwd(dirname(fileName))
if(.Platform$OS.type == "windows") {
cif_name = choose.files(caption = paste0("Looking for: ", basename(cname)), multi = FALSE, filters = cbind("Compensated Image File (*.cif)", "*.cif"))
} else {
cif_name = file.choose()
}
if(file.exists(cif_name)) if(getFileExt(cif_name)=="cif") if(checksumXIF(cif_name) == checksum) {
found = TRUE
break;
}
ntry = ntry - 1
}
cif_name = normalizePath(cif_name, winslash = "/", mustWork = FALSE) # /!\ ask AMNIS using full path produces error while trying to retrieve compensation
} else {
cif_name = basename(cname)
}
# adds pkg version attribute in XML <ASSAY> node /!\ mandatory to prevent overwriting original file
pkg_ver = paste0(unlist(packageVersion("IFC")), collapse = ".")
pkg_ver = charToRaw(paste0("IFC_version=\"",pkg_ver,"\""))
if(viewing_pop %in% pops_alw) {
pop_beg = cpp_scanFirst(fileName, charToRaw('population='), start = name_end , end = xmlEND)+nchar("population=")
pop_end = cpp_scanFirst(fileName, charToRaw(' showMasks='), start = pop_beg , end = xmlEND)-1
writeBin(object = c(beg[1:ass_beg], pkg_ver, beg[date_beg:name_beg], charToRaw(cif_name),beg[name_end:pop_beg]), con=towrite, endian = endianness)
writeBin(object = c(charToRaw(viewing_pop),beg[(pop_end):length(beg)]), con=towrite, endian = endianness)
} else {
warning(paste0("can't find: ", viewing_pop, " in population names. Displayed population was not changed."), immediate. = TRUE, call. = FALSE)
writeBin(object = c(beg[1:ass_beg], pkg_ver, beg[date_beg:name_beg], charToRaw(cif_name),beg[name_end:length(beg)]), con=towrite, endian = endianness)
}
writeBin(object = new_nodes[[1]], con=towrite , endian = endianness, useBytes = TRUE)
for(i in 2:length(toskip)) {
writeBin(object = readBin(toread, what="raw", n=toskip[i]-toskip[i-1]), con=towrite, endian = endianness, useBytes = TRUE)
if(names(new_nodes)[i]=="feat_count") offset = seek(towrite)
writeBin(object = new_nodes[[i]], con=towrite, endian = endianness, useBytes = TRUE)
}
writeBin(object = readBin(toread, what="raw", n=finfo$size-toskip[i], endian = endianness), con=towrite, endian = endianness, useBytes = TRUE)
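# if new binary features were appended, go back and patch the 4-byte feature count at the recorded 'feat_count' offset with the updated fid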
if(!is.null(offset)) {
seek(towrite, offset)
# TODO change endianness reading / writing
writeBin(object = cpp_uint32_to_raw(fid), con=towrite, endian = endianness, useBytes = TRUE)
}
}, error = function(e) {
close(toread)
close(towrite)
stop(paste0("Can't create 'write_to' file.\n", write_to,
ifelse(overwritten,"\nFile was not modified.\n","\n"),
"See pre-file @\n", normalizePath(file_w, winslash = "/"), "\n",
e$message), call. = FALSE)
})
close(toread)
close(towrite)
if(overwritten) {
mess = paste0("\n######################\n", write_to, "\nhas been successfully overwritten\n")
if(!suppressWarnings(file.rename(to = write_to, from = file_w))) { # try file renaming which is faster
if(!file.copy(to = write_to, from = file_w, overwrite = TRUE)) { # try file.copy if renaming is not possible
stop(paste0("Can't copy temp file@\n", normalizePath(file_w, winslash = "/"), "\n",
"Can't create 'write_to' file.\n", write_to,
"\nFile was not modified.\n"), call. = FALSE)
} else {
suppressWarnings(file.remove(file_w))
}
}
} else {
mess = paste0("\n######################\n", write_to, "\nhas been successfully exported\n")
}
message(mess)
return(invisible(write_to))
}