#' Quality Control
#'
#' Subfunction for performing QC. Intended to be called from ContDataQC().
#' Requires the zoo package.
#
# Sourced Routine
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# Quality Control (auto)
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# make user script smaller and easier to understand
# not a true function, needs defined variables in calling script
# if change variable names in either file have to update the other
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# Erik.Leppo@tetratech.com (EWL)
# 20150921 (20151021, make into self standing function)
# 20151112, combine Auto and Manual QC
# 20170323, added 3 parameters (Cond, DO, and pH)
# 20170324, added 2 more parameters (Turbidity and Chlorophyll a)
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# assumes use of CSV. If using TXT, have to modify list.files(pattern)
# , read.csv(), and write.csv() (see the sketch below)
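# For reference, a hedged sketch of the TXT (tab-delimited) equivalents,
# using the same object names as the CSV code below:
# files2process <- list.files(path = myDir.data.import, pattern = "\\.txt$")
# data.import   <- utils::read.delim(file.path(myDir.data.import, strFile)
#                                    , as.is = TRUE, na.strings = c("", "NA"))
# utils::write.table(data.import
#                    , file = file.path(myDir.data.export, strFile.Out)
#                    , sep = "\t", quote = FALSE, row.names = FALSE)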
#
# Basic Operations:
# load all files in data directory
# perform QC
# write QC report
# save QCed data file
#
# 20160208
# SensorDepth - Gross is only negative, Flat = remove
# 20160303
# Rolling SD. Use "zoo" and rollapply(); a loop is too slow for large/dense
# data sets (fails with fewer than 5 records, so added a "stop")
#
# library (load any required helper functions)
#library(zoo)
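# For reference, a minimal sketch (not run here) of a rolling SD via
# zoo::rollapply, assuming a numeric vector x and a 5-record window:
# x <- c(10.1, 10.2, 10.4, 10.3, 10.5, 10.6, 10.8)
# if (length(x) >= 5) {
#   sd_roll <- zoo::rollapply(x, width = 5, FUN = sd, align = "right", fill = NA)
# }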
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
#' @param fun.myData.SiteID Station/SiteID.
#' @param fun.myData.Type Data type; default is "QC".
#' @param fun.myData.DateRange.Start Start date for requested data.
#' Format = YYYY-MM-DD.
#' @param fun.myData.DateRange.End End date for requested data.
#' Format = YYYY-MM-DD.
#' @param fun.myDir.import Directory for import data.
#' Default is current working directory.
#' @param fun.myDir.export Directory for export data.
#' Default is current working directory.
#' @param fun.myReport.format Report format (docx or html).
#' Default is specified in config.R (docx). Can be customized in config.R;
#' ContData.env$myReport.Format.
#' @param fun.myReport.Dir Report (rmd) template folder.
#' Default is the package rmd folder. Can be customized in config.R;
#' ContData.env$myReport.Dir.
#' @param fun.CreateReport Boolean parameter to create reports or not.
#' Default = TRUE.
#' @param fun.AddDeployCol Boolean for adding a logger deployment column.
#' Default = FALSE. Column name can be customized in config.R;
#' ContData.env$myName.LoggerDeployment.
#'
#' @return Returns a CSV file with QC flags to the specified directory.
#'
#' @examples
#' # Not intended to be called independently of ContDataQC().
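#' # Hypothetical direct call (not run); paths and values are placeholders.
#' \dontrun{
#' fun.QC(fun.myData.SiteID = "test2"
#'        , fun.myData.Type = "QC"
#'        , fun.myData.DateRange.Start = "2013-01-01"
#'        , fun.myData.DateRange.End = "2014-12-31"
#'        , fun.myDir.import = file.path(getwd(), "Data1_RAW")
#'        , fun.myDir.export = file.path(getwd(), "Data2_QC")
#'        , fun.myReport.format = "html"
#'        , fun.myReport.Dir = system.file("rmd", package = "ContDataQC")
#'        , fun.CreateReport = FALSE)
#' }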
#
#' @export
fun.QC <- function(fun.myData.SiteID
, fun.myData.Type = "QC"
, fun.myData.DateRange.Start
, fun.myData.DateRange.End
, fun.myDir.import = ""
, fun.myDir.export = ""
, fun.myReport.format
, fun.myReport.Dir
, fun.CreateReport = TRUE
, fun.AddDeployCol = FALSE) {##FUN.fun.QC.START
#
boo_DEBUG <- FALSE
# A. Data Prep ####
# Convert Data Type to proper case
fun.myData.Type <- paste(toupper(substring(fun.myData.Type, 1, 1))
, tolower(substring(fun.myData.Type, 2
, nchar(fun.myData.Type)))
, sep = "")
#
# data directories
# myDir.data.import <- paste(fun.myDir.BASE,ifelse(fun.myDir.SUB.import==""
#,"",paste("/",fun.myDir.SUB.import,sep="")),sep="")
# myDir.data.export <- paste(fun.myDir.BASE,ifelse(fun.myDir.SUB.export==""
#,"",paste("/",fun.myDir.SUB.export,sep="")),sep="")
myDir.data.import <- fun.myDir.import
myDir.data.export <- fun.myDir.export
#
myDate <- format(Sys.Date(),"%Y%m%d")
myTime <- format(Sys.time(),"%H%M%S")
#
# Verify input dates, if blank, NA, or null use all data
# if DateRange.Start is null or "" then assign it 1900-01-01
if (is.na(fun.myData.DateRange.Start)==TRUE ||
fun.myData.DateRange.Start=="") {
fun.myData.DateRange.Start <- ContData.env$DateRange.Start.Default
}
# if DateRange.End is null or "" then assign it today
if (is.na(fun.myData.DateRange.End)==TRUE || fun.myData.DateRange.End=="") {
fun.myData.DateRange.End <- ContData.env$DateRange.End.Default
}
# Read in list of files to work on, uses all files matching pattern
# ("\\.csv$")
# ## if change formats will have to make modifications
# (pattern, import, export)
files2process <- list.files(path=myDir.data.import, pattern="\\.csv$")
utils::head(files2process)
#
# Define Counters for the Loop
intCounter <- 0
intCounter.Stop <- length(files2process)
intItems.Total <- intCounter.Stop
print(paste("Total files to process = ",intItems.Total,sep=""))
utils::flush.console()
myItems.Complete <- 0
myItems.Skipped <- 0
myFileTypeNum.Air <- 0
myFileTypeNum.Water <- 0
#
# Create Log file
## List of all items (files)
myItems.ALL <- as.vector(unique(files2process))
# create log file for processing results of items
#myItems.Log <- data.frame(cbind(myItems.ALL,NA),stringsAsFactors=FALSE)
myItems.Log <- data.frame(ItemID = seq_len(intItems.Total)
, Status = NA
, ItemName = myItems.ALL)
#
# Error if no files to process or no files in dir
# Start Time (used to determine run time at end)
myTime.Start <- Sys.time()
#
# B. While Loop ####
# Perform a data manipulation on the data as a new file
# Could use for (n in files2process) but prefer the control of a counter
while (intCounter < intCounter.Stop)
{##while.START
#
# B.0. Increase the Counter
intCounter <- intCounter+1
#
# B.1.0. File Name, Define
strFile <- files2process[intCounter]
# 1.1. File Name, Parse
# QC Check - delimiter for strsplit
if(ContData.env$myDelim==".") {##IF.myDelim.START
# special case for regex check to follow (20170531)
myDelim.strsplit <- "\\."
} else {
myDelim.strsplit <- ContData.env$myDelim
}##IF.myDelim.END
strFile.Base <- substr(strFile, 1, nchar(strFile) - nchar(".csv"))
strFile.parts <- strsplit(strFile.Base, myDelim.strsplit)
strFile.SiteID <- strFile.parts[[1]][1]
strFile.DataType <- strFile.parts[[1]][2]
# Convert Data Type to proper case
strFile.DataType <- paste(toupper(substring(strFile.DataType,1,1))
,tolower(substring(strFile.DataType, 2
, nchar(strFile.DataType)))
,sep="")
strFile.Date.Start <- as.Date(strFile.parts[[1]][3], "%Y%m%d")
strFile.Date.End <- as.Date(strFile.parts[[1]][4], "%Y%m%d")
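# e.g. (hypothetical), "test2_Water_20130101_20141231.csv" with myDelim "_"
# parses to SiteID = "test2", DataType = "Water"
# , Date.Start = 2013-01-01, and Date.End = 2014-12-31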
#
# B.2. Check File and skip if doesn't match user defined parameters
# B.2.1. Check File Size
#if(file.info(paste(myDir.data.import,"/",strFile,sep=""))$size==0){
if(file.info(file.path(myDir.data.import,strFile))$size==0){
# inform user of progress and update LOG
myMsg <- "SKIPPED (file blank)"
myItems.Skipped <- myItems.Skipped + 1
myItems.Log[intCounter,2] <- myMsg
fun.write.log(myItems.Log, myDate, myTime)
fun.Msg.Status(myMsg, intCounter, intItems.Total, strFile)
utils::flush.console()
# go to next Item
next
}
# B.2.2. Check SiteID
# if not in provided site list then skip
if(strFile.SiteID %in% fun.myData.SiteID == FALSE) {
# inform user of progress and update LOG
myMsg <- "SKIPPED (Non-Match, SiteID)"
myItems.Skipped <- myItems.Skipped + 1
myItems.Log[intCounter,2] <- myMsg
fun.write.log(myItems.Log,myDate,myTime)
fun.Msg.Status(myMsg, intCounter, intItems.Total, strFile)
utils::flush.console()
# go to next Item
next
}
# B.2.3. Check DataType
# if not equal go to next file (handles both Air and Water)
if (strFile.DataType %in% fun.myData.Type == FALSE){
# inform user of progress and update LOG
myMsg <- "SKIPPED (Non-Match, DataType)"
myItems.Skipped <- myItems.Skipped + 1
myItems.Log[intCounter,2] <- myMsg
fun.write.log(myItems.Log,myDate,myTime)
fun.Msg.Status(myMsg, intCounter, intItems.Total, strFile)
utils::flush.console()
# go to next Item
next
}
# B.2.4. Check Dates
# B.2.4.2.1. Check File.Date.Start (if file end < my Start then next)
if(strFile.Date.End<fun.myData.DateRange.Start) {
# inform user of progress and update LOG
myMsg <- "SKIPPED (Non-Match, Start Date)"
myItems.Skipped <- myItems.Skipped + 1
myItems.Log[intCounter,2] <- myMsg
fun.write.log(myItems.Log,myDate,myTime)
fun.Msg.Status(myMsg, intCounter, intItems.Total, strFile)
utils::flush.console()
# go to next Item
next
}
# B.2.4.2.2. Check File.Date.End (if file Start > my End then next)
if(strFile.Date.Start>fun.myData.DateRange.End) {
# inform user of progress and update LOG
myMsg <- "SKIPPED (Non-Match, End Date)"
myItems.Skipped <- myItems.Skipped + 1
myItems.Log[intCounter,2] <- myMsg
fun.write.log(myItems.Log,myDate,myTime)
fun.Msg.Status(myMsg, intCounter, intItems.Total, strFile)
utils::flush.console()
# go to next Item
next
}
#
# B.3.0. Import the data
#data.import=read.table(strFile,header=F,varSep)
#varSep = "\t" (use read.delim instead of read.table)
# as.is = T so dates come in as text rather than factor
#data.import <- utils::read.delim(strFile,as.is=TRUE,na.strings="")
# data.import <- utils::read.csv(paste(myDir.data.import,strFile,sep="/")
# ,as.is=TRUE,na.strings=c("","NA"))
data.import <- utils::read.csv(file.path(myDir.data.import,strFile)
,as.is=TRUE
,na.strings=c("","NA"))
#
# QC required fields: SiteID & (DateTime | (Date & Time))
fun.QC.ReqFlds(names(data.import), file.path(myDir.data.import, strFile))
#
# B.4.0. Columns
# Kick out if missing minimum of fields
#
# Check for and add any missing columns (but not for missing data fields)
# B.4.1. Date, Time, DateTime
# list
strCol.DT <- c(ContData.env$myName.Date
,ContData.env$myName.Time
,ContData.env$myName.DateTime)
# check for missing
strCol.DT.Missing <- strCol.DT[strCol.DT %in% colnames(data.import)==FALSE]
# go to next item if no date, time, or date/time field
if(length(strCol.DT.Missing)==3) {
myMsg <- "SKIPPED (Missing Fields, Date/Time)"
myItems.Skipped <- myItems.Skipped + 1
myItems.Log[intCounter,2] <- myMsg
fun.write.log(myItems.Log,myDate,myTime)
fun.Msg.Status(myMsg, intCounter, intItems.Total, strFile)
utils::flush.console()
# go to next Item
next
}
# go to next item if no (date or time) AND no date/time field
# (i.e., only 1 of date or time)
if(length(strCol.DT.Missing)==2 & ContData.env$myName.DateTime %in%
strCol.DT.Missing==TRUE) {
myMsg <- "SKIPPED (Missing Fields, 'Date.Time' and one of 'Date' or
'Time')"
myItems.Skipped <- myItems.Skipped + 1
myItems.Log[intCounter,2] <- myMsg
fun.write.log(myItems.Log,myDate,myTime)
fun.Msg.Status(myMsg, intCounter, intItems.Total, strFile)
utils::flush.console()
# go to next Item
next
}
#
# add to df
data.import[,strCol.DT.Missing] <- NA
#
# B.4.2. Check for columns present and reorder columns
# check for columns present
strCol.Present <- ContData.env$myNames.Order[ContData.env$myNames.Order %in%
colnames(data.import)==TRUE]
#
myNames.DataFields.Present <- ContData.env$myNames.DataFields[
ContData.env$myNames.DataFields %in% colnames(data.import)==TRUE]
# kick out if no data fields
if(length(myNames.DataFields.Present)==0){
myMsg <- "SKIPPED (Missing Fields, DATA)"
myItems.Skipped <- myItems.Skipped + 1
myItems.Log[intCounter,2] <- myMsg
fun.write.log(myItems.Log,myDate,myTime)
fun.Msg.Status(myMsg, intCounter, intItems.Total, strFile)
utils::flush.console()
# go to next Item
next
}
#
# QC, LoggerDeploy, Add Column ----
# if TRUE add default
# if FALSE do nothing
# for TRUE and character add the name to strCol.Present
if (fun.AddDeployCol == TRUE) {
data.import[, ContData.env$myName.LoggerDeployment] <- NA
strCol.Present <- c(strCol.Present, ContData.env$myName.LoggerDeployment)
# populate first and last row with start and end
## first and last non-NA value
### was "1" and nrow(data.import)
DT_nonNA <- which(!is.na(data.import[, ContData.env$myName.DateTime]))
DT_nonNA_first_rowid <- DT_nonNA[1]
DT_nonNA_last_rowid <- DT_nonNA[length(DT_nonNA)]
## Add
data.import[DT_nonNA_first_rowid, ContData.env$myName.LoggerDeployment] <-
ContData.env$myName.LoggerDeployment.start
data.import[DT_nonNA_last_rowid, ContData.env$myName.LoggerDeployment] <-
ContData.env$myName.LoggerDeployment.end
} ## IF ~ fun.AddDeployCol ~ END
# reorder Columns (and drop extra columns)
data.import <- data.import[,strCol.Present]
# B.4.3. Add FLAGS
strCol.Flags <- ContData.env$myNames.Flags[ContData.env$myNames.Cols4Flags
%in% colnames(data.import)==TRUE]
data.import[,strCol.Flags] <- ""
#
#
# data columns for flags that are present (need for later)
#myNames.Cols4Flags.Present <- myNames.Cols4Flags[myNames.Cols4Flags %in%
# colnames(data.import)==TRUE]
#
#
#
# B.5. QC Date and Time fields ####
#
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# may have to tinker with for NA fields
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# get format - if all data NA then get an error
#
# backfill first?
#
# may have to add date and time (data) from above when adding the missing
# field; if the field does not exist then add both the field and the data.
#
# if entire field is NA then fill from other fields
# Date
myField <- ContData.env$myName.Date
data.import[,myField][all(is.na(data.import[,myField]))] <- data.import[
,ContData.env$myName.DateTime]
# Time
myField <- ContData.env$myName.Time
data.import[,myField][all(is.na(data.import[,myField]))] <- data.import[
,ContData.env$myName.DateTime]
# DateTime
#myField <- myName.DateTime
# can't fill from the other fields without knowing the format
#
# get current file date/time records so can set format
# Function below gets date or time format and returns R format
# date_time is split and then pasted together.
# if no AM/PM then 24hr time is assumed
format.Date <- fun.DateTimeFormat(data.import[
,ContData.env$myName.Date],"Date")
format.Time <- fun.DateTimeFormat(data.import[
,ContData.env$myName.Time],"Time")
#format.DateTime <- fun.DateTimeFormat(data.import[
# ,ContData.env$myName.DateTime],"DateTime")
# get error if field is NA, need to fix
# same for section below
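# e.g. (hypothetical), "2013-01-01" would be expected to map to "%Y-%m-%d"
# and "1/1/2013 1:30:00 PM" to something like "%m/%d/%Y %I:%M:%S %p"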
#
# 20160322, new section, check for NA and fill if needed
if (length(stats::na.omit(data.import[,ContData.env$myName.DateTime]))==0) {
# move 5.2.1 up here
myField <- ContData.env$myName.DateTime
myFormat <- ContData.env$myFormat.DateTime #"%Y-%m-%d %H:%M:%S"
# data.import[,myField][data.import[,myField]==""] <- strftime(paste(data.import[,myName.Date][data.import[,myField]==""]
# ,data.import[,myName.Time][data.import[,myField]==""],sep="")
# ,format=myFormat,usetz=FALSE)
data.import[,myField][is.na(data.import[,myField])] <- strftime(paste(data.import[,ContData.env$myName.Date][is.na(data.import[,myField])]
,data.import[,ContData.env$myName.Time][is.na(data.import[,myField])]
,sep=" ")
,format=myFormat,usetz=FALSE)
}##IF.DateTime==NA.START
format.DateTime <- fun.DateTimeFormat(data.import[,ContData.env$myName.DateTime],"DateTime")
#
# QC
# # format.Date <- "%Y-%m-%d"
# format.Time <- "%H:%M:%S"
# format.DateTime <- "%Y-%m-%d %H:%M"
#
# B.5. QC Date and Time
# 5.1. Convert all Date_Time, Date, and Time formats to expected format (ISO 8601)
# Should allow for users to use different time and date formats in original data
# almost worked
#data.import[!(is.na(data.import[,myName.DateTime])),][myName.DateTime] <- strftime(data.import[!(is.na(data.import[,myName.DateTime])),][myName.DateTime]
# ,format="%Y-%m-%d")
# have to do where is NOT NA because will fail if the first item is NA
# assume all records have the same format.
#
# B.5.1.1. Update Date to "%Y-%m-%d" (equivalent to %F)
myField <- ContData.env$myName.Date
myFormat.In <- format.Date #"%Y-%m-%d"
myFormat.Out <- ContData.env$myFormat.Date #"%Y-%m-%d"
data.import[,myField][!is.na(data.import[,myField])] <- format(strptime(data.import[,myField][!is.na(data.import[,myField])],format=myFormat.In)
,format=myFormat.Out)
# B.5.1.2. Update Time to "%H:%M:%S" (equivalent to %T) (uses different function)
myField <- ContData.env$myName.Time
myFormat.In <- format.Time #"%H:%M:%S"
myFormat.Out <- ContData.env$myFormat.Time #"%H:%M:%S"
data.import[,myField][!is.na(data.import[,myField])] <- format(as.POSIXct(data.import[,myField][!is.na(data.import[,myField])],format=myFormat.In)
,format=myFormat.Out)
# B.5.1.3. Update DateTime to "%Y-%m-%d %H:%M:%S" (equivalent to %F %T)
myField <- ContData.env$myName.DateTime
myFormat.In <- format.DateTime #"%Y-%m-%d %H:%M:%S"
myFormat.Out <- ContData.env$myFormat.DateTime #"%Y-%m-%d %H:%M:%S"
data.import[,myField][!is.na(data.import[,myField])] <- format(strptime(data.import[,myField][!is.na(data.import[,myField])],format=myFormat.In)
,format=myFormat.Out)
# # strptime adds the time zone but drops it when added back to data.import (using format)
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# # doesn't work anymore, worked when first line was NA
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# data.import <- y
# x<-data.import[,myField][!is.na(data.import[,myField])]
# (z<-x[2])
# (a <- strptime(z,format=myFormat.In))
# (b <- strptime(x,format=myFormat.In))
# # works on single record but fails on vector with strftime
# # strptime works but adds time zone (don't like but it works)
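# For reference, a worked example of the B.5.1.3 conversion above
# (hypothetical input format):
# format(strptime("9/21/2015 14:30", format = "%m/%d/%Y %H:%M")
#        , format = "%Y-%m-%d %H:%M:%S")
# # returns "2015-09-21 14:30:00"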
#
#
# B.5.2. Update DateTime, Date, and Time if NA based on other fields
# B.5.2.1. Update Date_Time if NA (use Date and Time)
myField <- ContData.env$myName.DateTime
myFormat <- ContData.env$myFormat.DateTime #"%Y-%m-%d %H:%M:%S"
# data.import[,myField][data.import[,myField]==""] <- strftime(paste(data.import[,myName.Date][data.import[,myField]==""]
# ,data.import[,myName.Time][data.import[,myField]==""],sep="")
# ,format=myFormat,usetz=FALSE)
data.import[,myField][is.na(data.import[,myField])] <- strftime(paste(data.import[,ContData.env$myName.Date][is.na(data.import[,myField])]
,data.import[,ContData.env$myName.Time][is.na(data.import[,myField])]
,sep=" ")
,format=myFormat,usetz=FALSE)
# B.5.2.2. Update Date if NA (use Date_Time)
myField <- ContData.env$myName.Date
myFormat <- ContData.env$myFormat.Date #"%Y-%m-%d"
# data.import[,myField][data.import[,myField]==""] <- strftime(data.import[,myName.DateTime][data.import[,myName.Date]==""]
# ,format=myFormat,usetz=FALSE)
data.import[,myField][is.na(data.import[,myField])] <- strftime(data.import[,ContData.env$myName.DateTime][is.na(data.import[,myField])]
,format=myFormat,usetz=FALSE)
# B.5.2.3. Update Time if NA (use Date_Time)
myField <- ContData.env$myName.Time
myFormat <- ContData.env$myFormat.Time #"%H:%M:%S"
# data.import[,myField][data.import[,myField]==""] <- strftime(data.import[,myName.DateTime][data.import[,myName.Time]==""]
# ,format=myFormat,usetz=FALSE)
data.import[,myField][is.na(data.import[,myField])] <- strftime(data.import[,ContData.env$myName.DateTime][is.na(data.import[,myField])]
,format=myFormat,usetz=FALSE)
#
# old code just for reference
# B.5.5. Force Date and Time format
# data.import[,myName.Date] <- strftime(data.import[,myName.Date],format="%Y-%m-%d")
# data.import[,myName.Time] <- as.POSIXct(data.import[,myName.Time],format="%H:%M:%S")
# data.import[,myName.DateTime] <- strftime(data.import[,myName.DateTime],format="%Y-%m-%d %H:%M:%S")
#
#
# Create Month and Day Fields
# month
# myField <- "month"
# data.import[,myField] <- data.import[,myName.Date]
# myFormat <- "%m"
# data.import[,myField][!is.na(data.import[,myName.Date])] <- strftime(data.import[,myName.Date][!is.na(data.import[,myName.DateTime])]
# ,format=myFormat,usetz=FALSE)
data.import[,ContData.env$myName.Mo] <- as.POSIXlt(data.import[,ContData.env$myName.Date])$mon+1
# day
# myField <- "day"
# data.import[,myField] <- data.import[,myName.Date]
# myFormat.In <- myFormat.Date #"%Y-%m-%d"
# myFormat.Out <- "%d"
# data.import[,myField][!is.na(data.import[,myField])] <- format(strptime(data.import[,myField][!is.na(data.import[,myField])],format=myFormat.In)
# ,format=myFormat.Out)
data.import[,ContData.env$myName.Day] <- as.POSIXlt(data.import[,ContData.env$myName.Date])$mday
# year
data.import[,ContData.env$myName.Yr] <- as.POSIXlt(data.import[,ContData.env$myName.Date])$year+1900
# MonthDay (2021-01-27)
data.import[,ContData.env$myName.MoDa] <- paste0(
sprintf("%02d", data.import[,ContData.env$myName.Mo])
, sprintf("%02d", data.import[,ContData.env$myName.Day]))
#
# # example of classes for POSIXlt
# Sys.time()
# unclass(as.POSIXlt(Sys.time()))
# ?DateTimeClasses
# 2020-12-21, Keep only 1st entry on duplicate date.time
# e.g., the fall-back transition for Daylight Saving Time
data.import <- fun.DateTime.GroupBy.First(data.import)
# QC, LoggerDeploy, Recheck last row ----
# DST fix above can remove it
# 2021-01-21
if (fun.AddDeployCol == TRUE) {
## first and last non-NA value
### was "1" and nrow(data.import)
DT_nonNA <- which(!is.na(data.import[, ContData.env$myName.DateTime]))
DT_nonNA_first_rowid <- DT_nonNA[1]
DT_nonNA_last_rowid <- DT_nonNA[length(DT_nonNA)]
# Add
data.import[DT_nonNA_last_rowid, ContData.env$myName.LoggerDeployment] <-
ContData.env$myName.LoggerDeployment.end
} ## IF ~ fun.AddDeployCol ~ END
# B.6. QC for each Data Type present ####
# sub routine adds QC Calcs, QC Test Flags, Assigns overall Flag, and removes QC Calc Fields
# cycle through each data type (manually coded)
#
# skip if not present
# 20170512, move message inside of IF so user doesn't see it.
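# For a field that is present (e.g., the default config.R name Water.Temp.C),
# the expected added columns follow the pattern used in the "no data"
# branches below: Flag.Water.Temp.C plus the test flags Flag.Gross.*,
# Flag.Spike.*, Flag.RoC.*, and Flag.Flat.*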
#
# _B.6.01. WaterTemp ----
myField <- ContData.env$myName.WaterTemp
if(myField %in% myNames.DataFields.Present==TRUE){##IF.myField.START
#
myMsg.data <- "WaterTemp"
myMsg <- paste("WORKING (QC Tests and Flags - ",myMsg.data,")",sep="")
myItems.Complete <- myItems.Complete + 1
myItems.Log[intCounter,2] <- myMsg
fun.Msg.Status(myMsg, intCounter, intItems.Total, strFile)
utils::flush.console()
#
if(all(is.na(data.import[, myField]))) {
# Flag Field
data.import[, paste(ContData.env$myName.Flag, myField, sep = ".")] <- ContData.env$myFlagVal.NoData
data.import[, paste(ContData.env$myName.Flag, "Gross", myField, sep = ".")] <- ContData.env$myFlagVal.NoData
data.import[, paste(ContData.env$myName.Flag, "Spike", myField, sep = ".")] <- ContData.env$myFlagVal.NoData
data.import[, paste(ContData.env$myName.Flag, "RoC", myField, sep = ".")] <- ContData.env$myFlagVal.NoData
data.import[, paste(ContData.env$myName.Flag, "Flat", myField, sep = ".")] <- ContData.env$myFlagVal.NoData
} else {
# Calc Stats
data.import <- fun.CalcQCStats(data.import
,myField
,ContData.env$myThresh.Gross.Fail.Hi.WaterTemp
,ContData.env$myThresh.Gross.Fail.Lo.WaterTemp
,ContData.env$myThresh.Gross.Suspect.Hi.WaterTemp
,ContData.env$myThresh.Gross.Suspect.Lo.WaterTemp
,ContData.env$myThresh.Spike.Hi.WaterTemp
,ContData.env$myThresh.Spike.Lo.WaterTemp
,ContData.env$myThresh.RoC.SD.period.WaterTemp
,ContData.env$myThresh.RoC.SD.number.WaterTemp
,ContData.env$myThresh.Flat.Hi.WaterTemp
,ContData.env$myThresh.Flat.Lo.WaterTemp
,ContData.env$myThresh.Flat.Tolerance.WaterTemp)
}## IF ~ all(is.na())
#
}##IF.myField.END
#
# _B.6.02. AirTemp----
myField <- ContData.env$myName.AirTemp
if(myField %in% myNames.DataFields.Present==TRUE){##IF.myField.START
#
myMsg.data <- "AirTemp"
myMsg <- paste("WORKING (QC Tests and Flags - ",myMsg.data,")",sep="")
myItems.Complete <- myItems.Complete + 1
myItems.Log[intCounter,2] <- myMsg
fun.Msg.Status(myMsg, intCounter, intItems.Total, strFile)
utils::flush.console()
#
if(all(is.na(data.import[, myField]))) {
# Flag Field
data.import[, paste(ContData.env$myName.Flag, myField, sep = ".")] <- ContData.env$myFlagVal.NoData
data.import[, paste(ContData.env$myName.Flag, "Gross", myField, sep = ".")] <- ContData.env$myFlagVal.NoData
data.import[, paste(ContData.env$myName.Flag, "Spike", myField, sep = ".")] <- ContData.env$myFlagVal.NoData
data.import[, paste(ContData.env$myName.Flag, "RoC", myField, sep = ".")] <- ContData.env$myFlagVal.NoData
data.import[, paste(ContData.env$myName.Flag, "Flat", myField, sep = ".")] <- ContData.env$myFlagVal.NoData
} else {
# Calc Stats
data.import <- fun.CalcQCStats(data.import
,myField
,ContData.env$myThresh.Gross.Fail.Hi.AirTemp
,ContData.env$myThresh.Gross.Fail.Lo.AirTemp
,ContData.env$myThresh.Gross.Suspect.Hi.AirTemp
,ContData.env$myThresh.Gross.Suspect.Lo.AirTemp
,ContData.env$myThresh.Spike.Hi.AirTemp
,ContData.env$myThresh.Spike.Lo.AirTemp
,ContData.env$myThresh.RoC.SD.period.AirTemp
,ContData.env$myThresh.RoC.SD.number.AirTemp
,ContData.env$myThresh.Flat.Hi.AirTemp
,ContData.env$myThresh.Flat.Lo.AirTemp
,ContData.env$myThresh.Flat.Tolerance.AirTemp)
}## IF ~ all(is.na())
#
}##IF.myField.END
#
# _B.6.03. WaterP-----
myField <- ContData.env$myName.WaterP
if(myField %in% myNames.DataFields.Present==TRUE){##IF.myField.START
#
myMsg.data <- "WaterP"
myMsg <- paste("WORKING (QC Tests and Flags - ",myMsg.data,")",sep="")
myItems.Complete <- myItems.Complete + 1
myItems.Log[intCounter,2] <- myMsg
fun.Msg.Status(myMsg, intCounter, intItems.Total, strFile)
utils::flush.console()
#
if(all(is.na(data.import[, myField]))) {
# Flag Field
data.import[, paste(ContData.env$myName.Flag, myField, sep = ".")] <- ContData.env$myFlagVal.NoData
data.import[, paste(ContData.env$myName.Flag, "Gross", myField, sep = ".")] <- ContData.env$myFlagVal.NoData
data.import[, paste(ContData.env$myName.Flag, "Spike", myField, sep = ".")] <- ContData.env$myFlagVal.NoData
data.import[, paste(ContData.env$myName.Flag, "RoC", myField, sep = ".")] <- ContData.env$myFlagVal.NoData
data.import[, paste(ContData.env$myName.Flag, "Flat", myField, sep = ".")] <- ContData.env$myFlagVal.NoData
} else {
# Calc Stats
data.import <- fun.CalcQCStats(data.import
,myField
,ContData.env$myThresh.Gross.Fail.Hi.WaterP
,ContData.env$myThresh.Gross.Fail.Lo.WaterP
,ContData.env$myThresh.Gross.Suspect.Hi.WaterP
,ContData.env$myThresh.Gross.Suspect.Lo.WaterP
,ContData.env$myThresh.Spike.Hi.WaterP
,ContData.env$myThresh.Spike.Lo.WaterP
,ContData.env$myThresh.RoC.SD.period.WaterP
,ContData.env$myThresh.RoC.SD.number.WaterP
,ContData.env$myThresh.Flat.Hi.WaterP
,ContData.env$myThresh.Flat.Lo.WaterP
,ContData.env$myThresh.Flat.Tolerance.WaterP)
}## IF ~ all(is.na())
#
}##IF.myField.END
#
# _B.6.04. AirP----
myField <- ContData.env$myName.AirBP
if(myField %in% myNames.DataFields.Present==TRUE){##IF.myField.START
#
myMsg.data <- "AirP"
myMsg <- paste("WORKING (QC Tests and Flags - ",myMsg.data,")",sep="")
myItems.Complete <- myItems.Complete + 1
myItems.Log[intCounter,2] <- myMsg
fun.Msg.Status(myMsg, intCounter, intItems.Total, strFile)
utils::flush.console()
#
if(all(is.na(data.import[, myField]))) {
# Flag Field
data.import[, paste(ContData.env$myName.Flag, myField, sep = ".")] <- ContData.env$myFlagVal.NoData
data.import[, paste(ContData.env$myName.Flag, "Gross", myField, sep = ".")] <- ContData.env$myFlagVal.NoData
data.import[, paste(ContData.env$myName.Flag, "Spike", myField, sep = ".")] <- ContData.env$myFlagVal.NoData
data.import[, paste(ContData.env$myName.Flag, "RoC", myField, sep = ".")] <- ContData.env$myFlagVal.NoData
data.import[, paste(ContData.env$myName.Flag, "Flat", myField, sep = ".")] <- ContData.env$myFlagVal.NoData
} else {
# Calc Stats
data.import <- fun.CalcQCStats(data.import
,myField
,ContData.env$myThresh.Gross.Fail.Hi.AirBP
,ContData.env$myThresh.Gross.Fail.Lo.AirBP
,ContData.env$myThresh.Gross.Suspect.Hi.AirBP
,ContData.env$myThresh.Gross.Suspect.Lo.AirBP
,ContData.env$myThresh.Spike.Hi.AirBP
,ContData.env$myThresh.Spike.Lo.AirBP
,ContData.env$myThresh.RoC.SD.period.AirBP
,ContData.env$myThresh.RoC.SD.number.AirBP
,ContData.env$myThresh.Flat.Hi.AirBP
,ContData.env$myThresh.Flat.Lo.AirBP
,ContData.env$myThresh.Flat.Tolerance.AirBP)
}## IF ~ all(is.na())
#
}##IF.myField.END
#
# _B.6.05. SensorDepth----
myField <- ContData.env$myName.SensorDepth
if(myField %in% myNames.DataFields.Present==TRUE){##IF.myField.START
#
myMsg.data <- "SensorDepth"
myMsg <- paste("WORKING (QC Tests and Flags - ",myMsg.data,")",sep="")
myItems.Complete <- myItems.Complete + 1
myItems.Log[intCounter,2] <- myMsg
fun.Msg.Status(myMsg, intCounter, intItems.Total, strFile)
utils::flush.console()
#
if(all(is.na(data.import[, myField]))) {
# Flag Field
data.import[, paste(ContData.env$myName.Flag, myField, sep = ".")] <- ContData.env$myFlagVal.NoData
data.import[, paste(ContData.env$myName.Flag, "Gross", myField, sep = ".")] <- ContData.env$myFlagVal.NoData
data.import[, paste(ContData.env$myName.Flag, "Spike", myField, sep = ".")] <- ContData.env$myFlagVal.NoData
data.import[, paste(ContData.env$myName.Flag, "RoC", myField, sep = ".")] <- ContData.env$myFlagVal.NoData
data.import[, paste(ContData.env$myName.Flag, "Flat", myField, sep = ".")] <- ContData.env$myFlagVal.NoData
} else {
# Calc Stats
data.import <- fun.CalcQCStats(data.import
,myField
,ContData.env$myThresh.Gross.Fail.Hi.SensorDepth
,ContData.env$myThresh.Gross.Fail.Lo.SensorDepth
,ContData.env$myThresh.Gross.Suspect.Hi.SensorDepth
,ContData.env$myThresh.Gross.Suspect.Lo.SensorDepth
,ContData.env$myThresh.Spike.Hi.SensorDepth
,ContData.env$myThresh.Spike.Lo.SensorDepth
,ContData.env$myThresh.RoC.SD.period.SensorDepth
,ContData.env$myThresh.RoC.SD.number.SensorDepth
,ContData.env$myThresh.Flat.Hi.SensorDepth
,ContData.env$myThresh.Flat.Lo.SensorDepth
,ContData.env$myThresh.Flat.Tolerance.SensorDepth)
}## IF ~ all(is.na())
#
}##IF.myField.END
#
# _B.6.06. Discharge----
myField <- ContData.env$myName.Discharge
if(myField %in% myNames.DataFields.Present==TRUE){##IF.myField.START
#
myMsg.data <- "Discharge"
myMsg <- paste("WORKING (QC Tests and Flags - ",myMsg.data,")",sep="")
myItems.Complete <- myItems.Complete + 1
myItems.Log[intCounter,2] <- myMsg
fun.Msg.Status(myMsg, intCounter, intItems.Total, strFile)
utils::flush.console()
#
if(all(is.na(data.import[, myField]))) {
# Flag Field
data.import[, paste(ContData.env$myName.Flag, myField, sep = ".")] <- ContData.env$myFlagVal.NoData
data.import[, paste(ContData.env$myName.Flag, "Gross", myField, sep = ".")] <- ContData.env$myFlagVal.NoData
data.import[, paste(ContData.env$myName.Flag, "Spike", myField, sep = ".")] <- ContData.env$myFlagVal.NoData
data.import[, paste(ContData.env$myName.Flag, "RoC", myField, sep = ".")] <- ContData.env$myFlagVal.NoData
data.import[, paste(ContData.env$myName.Flag, "Flat", myField, sep = ".")] <- ContData.env$myFlagVal.NoData
} else {
# Calc Stats
data.import <- fun.CalcQCStats(data.import
,myField
,ContData.env$myThresh.Gross.Fail.Hi.Discharge
,ContData.env$myThresh.Gross.Fail.Lo.Discharge
,ContData.env$myThresh.Gross.Suspect.Hi.Discharge
,ContData.env$myThresh.Gross.Suspect.Lo.Discharge
,ContData.env$myThresh.Spike.Hi.Discharge
,ContData.env$myThresh.Spike.Lo.Discharge
,ContData.env$myThresh.RoC.SD.period.Discharge
,ContData.env$myThresh.RoC.SD.number.Discharge
,ContData.env$myThresh.Flat.Hi.Discharge
,ContData.env$myThresh.Flat.Lo.Discharge
,ContData.env$myThresh.Flat.Tolerance.Discharge)
}## IF ~ all(is.na())
}##IF.myField.END
#
# _B.6.07. Conductivity----
myField <- ContData.env$myName.Cond
if(myField %in% myNames.DataFields.Present==TRUE){##IF.myField.START
#
myMsg.data <- "Cond"
myMsg <- paste("WORKING (QC Tests and Flags - ",myMsg.data,")",sep="")
myItems.Complete <- myItems.Complete + 1
myItems.Log[intCounter,2] <- myMsg
fun.Msg.Status(myMsg, intCounter, intItems.Total, strFile)
utils::flush.console()
#
if(all(is.na(data.import[, myField]))) {
# Flag Field
data.import[, paste(ContData.env$myName.Flag, myField, sep = ".")] <- ContData.env$myFlagVal.NoData
data.import[, paste(ContData.env$myName.Flag, "Gross", myField, sep = ".")] <- ContData.env$myFlagVal.NoData
data.import[, paste(ContData.env$myName.Flag, "Spike", myField, sep = ".")] <- ContData.env$myFlagVal.NoData
data.import[, paste(ContData.env$myName.Flag, "RoC", myField, sep = ".")] <- ContData.env$myFlagVal.NoData
data.import[, paste(ContData.env$myName.Flag, "Flat", myField, sep = ".")] <- ContData.env$myFlagVal.NoData
} else {
# Calc Stats
data.import <- fun.CalcQCStats(data.import
,myField
,ContData.env$myThresh.Gross.Fail.Hi.Cond
,ContData.env$myThresh.Gross.Fail.Lo.Cond
,ContData.env$myThresh.Gross.Suspect.Hi.Cond
,ContData.env$myThresh.Gross.Suspect.Lo.Cond
,ContData.env$myThresh.Spike.Hi.Cond
,ContData.env$myThresh.Spike.Lo.Cond
,ContData.env$myThresh.RoC.SD.period.Cond
,ContData.env$myThresh.RoC.SD.number.Cond
,ContData.env$myThresh.Flat.Hi.Cond
,ContData.env$myThresh.Flat.Lo.Cond
,ContData.env$myThresh.Flat.Tolerance.Cond)
}## IF ~ all(is.na())
}##IF.myField.END
#
# _B.6.08. Dissolved Oxygen, mg/L ----
myField <- ContData.env$myName.DO
if(myField %in% myNames.DataFields.Present==TRUE){##IF.myField.START
#
myMsg.data <- "DO, mg/L"
myMsg <- paste("WORKING (QC Tests and Flags - ",myMsg.data,")",sep="")
myItems.Complete <- myItems.Complete + 1
myItems.Log[intCounter,2] <- myMsg
fun.Msg.Status(myMsg, intCounter, intItems.Total, strFile)
utils::flush.console()
#
if(all(is.na(data.import[, myField]))) {
# Flag Field
data.import[, paste(ContData.env$myName.Flag, myField, sep = ".")] <- ContData.env$myFlagVal.NoData
data.import[, paste(ContData.env$myName.Flag, "Gross", myField, sep = ".")] <- ContData.env$myFlagVal.NoData
data.import[, paste(ContData.env$myName.Flag, "Spike", myField, sep = ".")] <- ContData.env$myFlagVal.NoData
data.import[, paste(ContData.env$myName.Flag, "RoC", myField, sep = ".")] <- ContData.env$myFlagVal.NoData
data.import[, paste(ContData.env$myName.Flag, "Flat", myField, sep = ".")] <- ContData.env$myFlagVal.NoData
} else {
# Calc Stats
data.import <- fun.CalcQCStats(data.import
,myField
,ContData.env$myThresh.Gross.Fail.Hi.DO
,ContData.env$myThresh.Gross.Fail.Lo.DO
,ContData.env$myThresh.Gross.Suspect.Hi.DO
,ContData.env$myThresh.Gross.Suspect.Lo.DO
,ContData.env$myThresh.Spike.Hi.DO
,ContData.env$myThresh.Spike.Lo.DO
,ContData.env$myThresh.RoC.SD.period.DO
,ContData.env$myThresh.RoC.SD.number.DO
,ContData.env$myThresh.Flat.Hi.DO
,ContData.env$myThresh.Flat.Lo.DO
,ContData.env$myThresh.Flat.Tolerance.DO)
}## IF ~ all(is.na())
}##IF.myField.END
#
# _B.6.09. Dissolved Oxygen, adjusted----
myField <- ContData.env$myName.DO.adj
if(myField %in% myNames.DataFields.Present==TRUE){
#
myMsg.data <- "DO, adj"
myMsg <- paste("WORKING (QC Tests and Flags - ",myMsg.data,")",sep="")
myItems.Complete <- myItems.Complete + 1
myItems.Log[intCounter,2] <- myMsg
fun.Msg.Status(myMsg, intCounter, intItems.Total, strFile)
utils::flush.console()
#
if(all(is.na(data.import[, myField]))) {
# Flag Field
data.import[, paste(ContData.env$myName.Flag, myField, sep = ".")] <- ContData.env$myFlagVal.NoData
data.import[, paste(ContData.env$myName.Flag, "Gross", myField, sep = ".")] <- ContData.env$myFlagVal.NoData
data.import[, paste(ContData.env$myName.Flag, "Spike", myField, sep = ".")] <- ContData.env$myFlagVal.NoData
data.import[, paste(ContData.env$myName.Flag, "RoC", myField, sep = ".")] <- ContData.env$myFlagVal.NoData
data.import[, paste(ContData.env$myName.Flag, "Flat", myField, sep = ".")] <- ContData.env$myFlagVal.NoData
} else {
# Calc Stats
data.import <- fun.CalcQCStats(data.import
,myField
,ContData.env$myThresh.Gross.Fail.Hi.DO.adj
,ContData.env$myThresh.Gross.Fail.Lo.DO.adj
,ContData.env$myThresh.Gross.Suspect.Hi.DO.adj
,ContData.env$myThresh.Gross.Suspect.Lo.DO.adj
,ContData.env$myThresh.Spike.Hi.DO.adj
,ContData.env$myThresh.Spike.Lo.DO.adj
,ContData.env$myThresh.RoC.SD.period.DO.adj
,ContData.env$myThresh.RoC.SD.number.DO.adj
,ContData.env$myThresh.Flat.Hi.DO.adj
,ContData.env$myThresh.Flat.Lo.DO.adj
,ContData.env$myThresh.Flat.Tolerance.DO.adj)
}## IF ~ all(is.na())
}##IF.myField.END
#
# _B.6.10. Dissolved Oxygen, pct sat----
myField <- ContData.env$myName.DO.pctsat
if(myField %in% myNames.DataFields.Present==TRUE){
#
myMsg.data <- "DO, pct sat"
myMsg <- paste("WORKING (QC Tests and Flags - ",myMsg.data,")",sep="")
myItems.Complete <- myItems.Complete + 1
myItems.Log[intCounter,2] <- myMsg
fun.Msg.Status(myMsg, intCounter, intItems.Total, strFile)
utils::flush.console()
#
if(all(is.na(data.import[, myField]))) {
# Flag Field
data.import[, paste(ContData.env$myName.Flag, myField, sep = ".")] <- ContData.env$myFlagVal.NoData
data.import[, paste(ContData.env$myName.Flag, "Gross", myField, sep = ".")] <- ContData.env$myFlagVal.NoData
data.import[, paste(ContData.env$myName.Flag, "Spike", myField, sep = ".")] <- ContData.env$myFlagVal.NoData
data.import[, paste(ContData.env$myName.Flag, "RoC", myField, sep = ".")] <- ContData.env$myFlagVal.NoData
data.import[, paste(ContData.env$myName.Flag, "Flat", myField, sep = ".")] <- ContData.env$myFlagVal.NoData
} else {
# Calc Stats
data.import <- fun.CalcQCStats(data.import
,myField
,ContData.env$myThresh.Gross.Fail.Hi.DO.pctsat
,ContData.env$myThresh.Gross.Fail.Lo.DO.pctsat
,ContData.env$myThresh.Gross.Suspect.Hi.DO.pctsat
,ContData.env$myThresh.Gross.Suspect.Lo.DO.pctsat
,ContData.env$myThresh.Spike.Hi.DO.pctsat
,ContData.env$myThresh.Spike.Lo.DO.pctsat
,ContData.env$myThresh.RoC.SD.period.DO.pctsat
,ContData.env$myThresh.RoC.SD.number.DO.pctsat
,ContData.env$myThresh.Flat.Hi.DO.pctsat
,ContData.env$myThresh.Flat.Lo.DO.pctsat
,ContData.env$myThresh.Flat.Tolerance.DO.pctsat)
}## IF ~ all(is.na())
}##IF.myField.END
#
# _B.6.11. pH----
myField <- ContData.env$myName.pH
if(myField %in% myNames.DataFields.Present==TRUE){##IF.myField.START
#
myMsg.data <- "pH"
myMsg <- paste("WORKING (QC Tests and Flags - ",myMsg.data,")",sep="")
myItems.Complete <- myItems.Complete + 1
myItems.Log[intCounter,2] <- myMsg
fun.Msg.Status(myMsg, intCounter, intItems.Total, strFile)
utils::flush.console()
#
if(all(is.na(data.import[, myField]))) {
# Flag Field
data.import[, paste(ContData.env$myName.Flag, myField, sep = ".")] <- ContData.env$myFlagVal.NoData
data.import[, paste(ContData.env$myName.Flag, "Gross", myField, sep = ".")] <- ContData.env$myFlagVal.NoData
data.import[, paste(ContData.env$myName.Flag, "Spike", myField, sep = ".")] <- ContData.env$myFlagVal.NoData
data.import[, paste(ContData.env$myName.Flag, "RoC", myField, sep = ".")] <- ContData.env$myFlagVal.NoData
data.import[, paste(ContData.env$myName.Flag, "Flat", myField, sep = ".")] <- ContData.env$myFlagVal.NoData
} else {
# Calc Stats
data.import <- fun.CalcQCStats(data.import
,myField
,ContData.env$myThresh.Gross.Fail.Hi.pH
,ContData.env$myThresh.Gross.Fail.Lo.pH
,ContData.env$myThresh.Gross.Suspect.Hi.pH
,ContData.env$myThresh.Gross.Suspect.Lo.pH
,ContData.env$myThresh.Spike.Hi.pH
,ContData.env$myThresh.Spike.Lo.pH
,ContData.env$myThresh.RoC.SD.period.pH
,ContData.env$myThresh.RoC.SD.number.pH
,ContData.env$myThresh.Flat.Hi.pH
,ContData.env$myThresh.Flat.Lo.pH
,ContData.env$myThresh.Flat.Tolerance.pH)
}## IF ~ all(is.na())
}##IF.myField.END
#
# _B.6.12. Turbidity----
myField <- ContData.env$myName.Turbidity
if(myField %in% myNames.DataFields.Present==TRUE){##IF.myField.START
#
myMsg.data <- "Turbidity"
myMsg <- paste("WORKING (QC Tests and Flags - ",myMsg.data,")",sep="")
myItems.Complete <- myItems.Complete + 1
myItems.Log[intCounter,2] <- myMsg
fun.Msg.Status(myMsg, intCounter, intItems.Total, strFile)
utils::flush.console()
#
if(all(is.na(data.import[, myField]))) {
# Flag Field
data.import[, paste(ContData.env$myName.Flag, myField, sep = ".")] <- ContData.env$myFlagVal.NoData
data.import[, paste(ContData.env$myName.Flag, "Gross", myField, sep = ".")] <- ContData.env$myFlagVal.NoData
data.import[, paste(ContData.env$myName.Flag, "Spike", myField, sep = ".")] <- ContData.env$myFlagVal.NoData
data.import[, paste(ContData.env$myName.Flag, "RoC", myField, sep = ".")] <- ContData.env$myFlagVal.NoData
data.import[, paste(ContData.env$myName.Flag, "Flat", myField, sep = ".")] <- ContData.env$myFlagVal.NoData
} else {
# Calc Stats
data.import <- fun.CalcQCStats(data.import
,myField
,ContData.env$myThresh.Gross.Fail.Hi.Turbidity
,ContData.env$myThresh.Gross.Fail.Lo.Turbidity
,ContData.env$myThresh.Gross.Suspect.Hi.Turbidity
,ContData.env$myThresh.Gross.Suspect.Lo.Turbidity
,ContData.env$myThresh.Spike.Hi.Turbidity
,ContData.env$myThresh.Spike.Lo.Turbidity
,ContData.env$myThresh.RoC.SD.period.Turbidity
,ContData.env$myThresh.RoC.SD.number.Turbidity
,ContData.env$myThresh.Flat.Hi.Turbidity
,ContData.env$myThresh.Flat.Lo.Turbidity
,ContData.env$myThresh.Flat.Tolerance.Turbidity)
}## IF ~ all(is.na())
}##IF.myField.END
#
# _B.6.13. Chlorophyll a----
myField <- ContData.env$myName.Chlorophylla
if(myField %in% myNames.DataFields.Present==TRUE){##IF.myField.START
#
myMsg.data <- "Chlorophylla"
myMsg <- paste("WORKING (QC Tests and Flags - ",myMsg.data,")",sep="")
myItems.Complete <- myItems.Complete + 1
myItems.Log[intCounter,2] <- myMsg
fun.Msg.Status(myMsg, intCounter, intItems.Total, strFile)
utils::flush.console()
#
if(all(is.na(data.import[, myField]))) {
# Flag Field
data.import[, paste(ContData.env$myName.Flag, myField, sep = ".")] <- ContData.env$myFlagVal.NoData
data.import[, paste(ContData.env$myName.Flag, "Gross", myField, sep = ".")] <- ContData.env$myFlagVal.NoData
data.import[, paste(ContData.env$myName.Flag, "Spike", myField, sep = ".")] <- ContData.env$myFlagVal.NoData
data.import[, paste(ContData.env$myName.Flag, "RoC", myField, sep = ".")] <- ContData.env$myFlagVal.NoData
data.import[, paste(ContData.env$myName.Flag, "Flat", myField, sep = ".")] <- ContData.env$myFlagVal.NoData
} else {
# Calc Stats
data.import <- fun.CalcQCStats(data.import
,myField
,ContData.env$myThresh.Gross.Fail.Hi.Chlorophylla
,ContData.env$myThresh.Gross.Fail.Lo.Chlorophylla
,ContData.env$myThresh.Gross.Suspect.Hi.Chlorophylla
,ContData.env$myThresh.Gross.Suspect.Lo.Chlorophylla
,ContData.env$myThresh.Spike.Hi.Chlorophylla
,ContData.env$myThresh.Spike.Lo.Chlorophylla
,ContData.env$myThresh.RoC.SD.period.Chlorophylla
,ContData.env$myThresh.RoC.SD.number.Chlorophylla
,ContData.env$myThresh.Flat.Hi.Chlorophylla
,ContData.env$myThresh.Flat.Lo.Chlorophylla
,ContData.env$myThresh.Flat.Tolerance.Chlorophylla)
}## IF ~ all(is.na())
}##IF.myField.END
#
# _B.6.14. Water Level----
myField <- ContData.env$myName.WaterLevel
if(myField %in% myNames.DataFields.Present==TRUE){##IF.myField.START
#
myMsg.data <- "WaterLevel"
myMsg <- paste("WORKING (QC Tests and Flags - ",myMsg.data,")",sep="")
myItems.Complete <- myItems.Complete + 1
myItems.Log[intCounter,2] <- myMsg
fun.Msg.Status(myMsg, intCounter, intItems.Total, strFile)
utils::flush.console()
#
if(all(is.na(data.import[, myField]))) {
# Flag Field
data.import[, paste(ContData.env$myName.Flag, myField, sep = ".")] <- ContData.env$myFlagVal.NoData
data.import[, paste(ContData.env$myName.Flag, "Gross", myField, sep = ".")] <- ContData.env$myFlagVal.NoData
data.import[, paste(ContData.env$myName.Flag, "Spike", myField, sep = ".")] <- ContData.env$myFlagVal.NoData
data.import[, paste(ContData.env$myName.Flag, "RoC", myField, sep = ".")] <- ContData.env$myFlagVal.NoData
data.import[, paste(ContData.env$myName.Flag, "Flat", myField, sep = ".")] <- ContData.env$myFlagVal.NoData
} else {
# Calc Stats
data.import <- fun.CalcQCStats(data.import
,myField
,ContData.env$myThresh.Gross.Fail.Hi.WaterLevel
,ContData.env$myThresh.Gross.Fail.Lo.WaterLevel
,ContData.env$myThresh.Gross.Suspect.Hi.WaterLevel
,ContData.env$myThresh.Gross.Suspect.Lo.WaterLevel
,ContData.env$myThresh.Spike.Hi.WaterLevel
,ContData.env$myThresh.Spike.Lo.WaterLevel
,ContData.env$myThresh.RoC.SD.period.WaterLevel
,ContData.env$myThresh.RoC.SD.number.WaterLevel
,ContData.env$myThresh.Flat.Hi.WaterLevel
,ContData.env$myThresh.Flat.Lo.WaterLevel
,ContData.env$myThresh.Flat.Tolerance.WaterLevel)
}## IF ~ all(is.na())
}##IF.myField.END
#
# _B.6.15. Salinity----
myField <- ContData.env$myName.Salinity
if(myField %in% myNames.DataFields.Present==TRUE){
#
myMsg.data <- "Salinity"
myMsg <- paste("WORKING (QC Tests and Flags - ", myMsg.data, ")", sep="")
myItems.Complete <- myItems.Complete + 1
myItems.Log[intCounter, 2] <- myMsg
fun.Msg.Status(myMsg, intCounter, intItems.Total, strFile)
utils::flush.console()
#
if(all(is.na(data.import[, myField]))) {
# Flag Field
data.import[, paste(ContData.env$myName.Flag, myField, sep = ".")] <- ContData.env$myFlagVal.NoData
data.import[, paste(ContData.env$myName.Flag, "Gross", myField, sep = ".")] <- ContData.env$myFlagVal.NoData
data.import[, paste(ContData.env$myName.Flag, "Spike", myField, sep = ".")] <- ContData.env$myFlagVal.NoData
data.import[, paste(ContData.env$myName.Flag, "RoC", myField, sep = ".")] <- ContData.env$myFlagVal.NoData
data.import[, paste(ContData.env$myName.Flag, "Flat", myField, sep = ".")] <- ContData.env$myFlagVal.NoData
} else {
# Calc Stats
data.import <- fun.CalcQCStats(data.import
,myField
,ContData.env$myThresh.Gross.Fail.Hi.Salinity
,ContData.env$myThresh.Gross.Fail.Lo.Salinity
,ContData.env$myThresh.Gross.Suspect.Hi.Salinity
,ContData.env$myThresh.Gross.Suspect.Lo.Salinity
,ContData.env$myThresh.Spike.Hi.Salinity
,ContData.env$myThresh.Spike.Lo.Salinity
,ContData.env$myThresh.RoC.SD.period.Salinity
,ContData.env$myThresh.RoC.SD.number.Salinity
,ContData.env$myThresh.Flat.Hi.Salinity
,ContData.env$myThresh.Flat.Lo.Salinity
,ContData.env$myThresh.Flat.Tolerance.Salinity)
}## IF ~ all(is.na())
}##IF.myField.END
#
#
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# Names of columns for QC Calculations and Tests with Flags for each data column present
# combine so can check for and remove later.
myNames.DataFields.Present.QCCalcs <- as.vector(t(outer(myNames.DataFields.Present,ContData.env$myNames.QCCalcs,paste,sep = ".")))
myNames.Flags.QCTests <- paste("Flag.",as.vector(t(outer(ContData.env$myNames.QCTests,myNames.DataFields.Present,paste,sep = "."))),sep = "")
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# not sure if need this little bit anymore
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
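# e.g., as.vector(t(outer(c("A", "B"), c("x", "y"), paste, sep = ".")))
# returns "A.x" "A.y" "B.x" "B.y" (each data field crossed with each suffix)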
#
#
# B.7. QC Tests
# incorporated into subroutine in step 6
#
# B.8. Generate QC File
# incorporated into subroutine in step 6
#
# B.9. Generate Log File
# incorporated into subroutine in step 6
#
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# save file then run QC Report in a separate Script
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# # 10.0. Output file (only works if DataType is Air OR Water *not* both)
# # 10.1. Set Name
# #File.Date.Start <- format(as.Date(myData.DateRange.Start,myFormat.Date),"%Y%m%d")
# #File.Date.End <- format(as.Date(myData.DateRange.End,myFormat.Date),"%Y%m%d")
# strFile.Out <- paste("QCauto",strFile,sep="_")
# # 10.2. Save to File the data (overwrites any existing file).
# #print(paste("Saving output of file ",intCounter," of ",intCounter.Stop," files complete.",sep=""))
# #utils::flush.console()
# write.csv(data.import,file=paste(myDir.data.export,"/",strFile.Out,sep=""),quote=FALSE,row.names=FALSE)
# #
# 2020-12-21, dup row check
# DST can create issues with duplicate lines
# if data is offset can create extra rows in above QC checks
data.import <- unique(data.import)
#*********************
# START QC manual stuff
#************************
#data.import <- read.csv(paste(myDir.data.import,strFile,sep="/"),as.is=TRUE
# ,na.strings=c("","NA"))
#
# B.4.0. Columns
# B.4.1. Check for DataFields (may have already been done)
colsDataFields_NoDiscrete <- ContData.env$myNames.DataFields[!grepl(
paste0("^", ContData.env$myPrefix.Discrete)
, ContData.env$myNames.DataFields
, perl = TRUE)]
cols2check <- c(colsDataFields_NoDiscrete
, ContData.env$myNames.Flags)
myNames.DataFields.Present <- cols2check[cols2check %in%
colnames(data.import) == TRUE]
# add Date.Time to names for modification
myNames.DataFields2Mod <- c(ContData.env$myName.DateTime
, myNames.DataFields.Present)
#
# B.5.0. Add "RAW" and "Comment.MOD" fields ----
# default values
myName.Raw <- "RAW"
myName.Comment.Mod <- "Comment.MOD"
# 5.1. Cycle each present field
for (j in myNames.DataFields2Mod) {##FOR.j.START
#
# A. Add comment field and leave blank
data.import[, paste(myName.Comment.Mod, j, sep = ".")] <- ""
# B. Add data.RAW and populate with original data
data.import[, paste(myName.Raw, j, sep = ".")] <- data.import[, j]
#
}##FOR.j.END
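# e.g. (hypothetical field name), for Water.Temp.C the loop above adds
# "Comment.MOD.Water.Temp.C" (blank) and "RAW.Water.Temp.C" (copy of the
# original values)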
#
# Remove "Comment.MOD.Flag.*" columns (2021-04-19), Issue #123
## easier to remove than to mod code to not add
data.import <- data.import[, !grepl("^Comment\\.MOD\\.Flag\\."
, names(data.import))]
# # leave as a loop so get RAW and Comment together
# j <- myNames.DataFields2Mod
# # A. Add comment field and leave blank
# data.import[,paste(myName.Comment.Mod,j,sep=".")] <- ""
# # B. Add data.RAW and populate with original data
# data.import[,paste(myName.Raw,j,sep=".")] <- data.import[,j]
#
# 6-9 #not here
# B.5.1. Move "Comment.MOD" and "Flag" ----
## Next to measurement (but not Date.Time)
# 2024-01-22
boo_move_CM <- TRUE
if (boo_move_CM) {
# boo_DEBUG <- FALSE
patt_flag_env <- ContData.env$myName.Flag
patt_flag <- paste0("^", patt_flag_env, ".")
patt_cm_env <- "Comment.MOD"
patt_cm <- paste0("^", patt_cm_env, ".")
patt_datetime <- c(ContData.env$myName.Date
, ContData.env$myName.Time
, ContData.env$myName.DateTime)
patt_flag_datetime <- paste(patt_flag_env, patt_datetime, sep = ".")
patt_cm_datetime <- paste(patt_cm_env, patt_datetime, sep = ".")
## Names, all
names_all <- names(data.import)
if (boo_DEBUG) {
names_all
}## IF ~ boo_DEBUG
## Names, Flag
names_flag <- names(data.import)[grepl(patt_flag, names(data.import))]
if (boo_DEBUG) {
names_flag
}## IF ~ boo_DEBUG
# Remove Discrete
patt_discrete <- "Discrete."
names_flag <- names_flag[!grepl(patt_discrete, names_flag)]
## Names, Comment.MOD
names_cm <- names(data.import)[grepl(patt_cm, names(data.import))]
# remove date time
names_cm <- names_cm[!names_cm %in% patt_cm_datetime]
if (boo_DEBUG) {
names_cm
}## IF ~ boo_DEBUG
#ContData.env$myNames.QCTests <- c("Gross","Spike","RoC","Flat")
myNames.QCTests <- c("Gross","Spike","RoC","Flat")
patt_QCTests <- paste(myNames.QCTests, collapse = "|")
# ONLY QC Test flags
names_flag_qctests <- names_flag[grepl(patt_QCTests, names_flag)]
# Non QC Test flags
names_flag_overall <- names_flag[!grepl(patt_QCTests, names_flag)]
# Non Discrete
patt_discrete <- "Discrete."
names_flag_overall <- names_flag_overall[!grepl(patt_discrete, names_flag_overall)]
## remove Date.Time
names_flag_overall <- names_flag_overall[!names_flag_overall %in% patt_flag_datetime]
#IF len not > 0 then quit
# Names, Measurements
names_flag_measurements <- sub(patt_flag, "", names_flag_overall)
names_cm_measurements <- sub(patt_cm, "", names_cm)
# remove Date.Time
names_flag_measurements <- names_flag_measurements[!names_flag_measurements %in% patt_datetime]
names_cm_measurements <- names_cm_measurements[!names_cm_measurements %in% patt_datetime]
if (boo_DEBUG) {
names_flag_qctests
names_flag_overall
names_flag_measurements
names_cm_measurements
names_cm
}## IF ~ boo_DEBUG
# names position
colnums_flag_qctests <- match(names_flag_qctests, names(data.import))
colnums_flag_overall <- match(names_flag_overall, names(data.import))
colnums_cm <- match(names_cm, names(data.import))
colnums_remove <- sort(c(colnums_flag_overall, colnums_flag_qctests, colnums_cm))
colnums_orig <- seq_len(ncol(data.import))
colnums_remove_all <- colnums_orig[-c(colnums_remove )]
# measurements should be in the same order for flags and cm but not guaranteed!
# at position of each names_flag_overall create new order
# p <- names_flag_measurements[1] # testing
for (p in names_flag_measurements) {
if (boo_DEBUG) {
print(p)
print(names_flag_overall)
print(colnums_flag_overall)
print(colnums_cm)
}## IF ~ boo_DEBUG
# number for iterations
p_num <- match(p, names_flag_measurements)
if (boo_DEBUG) {
p_num
}## IF ~ boo_DEBUG
# flag measure
p_flag_msr <- p
if (boo_DEBUG) {
print(p_flag_msr)
}## IF ~ boo_DEBUG
# match flag measure with cm measure
p_flag_msr_match_cm_msr <- match(p_flag_msr, names_cm_measurements)
if (boo_DEBUG) {
print(p_flag_msr_match_cm_msr)
}## IF ~ boo_DEBUG
# cm
p_cm <- names_cm[p_flag_msr_match_cm_msr]
if (boo_DEBUG) {
print(p_cm)
}## IF ~ boo_DEBUG
# flag_overall
p_flag_overall <- names_flag_overall[grepl(p, names_flag_overall)]
if (boo_DEBUG) {
print(p_flag_overall)
}## IF ~ boo_DEBUG
# flag_qctests
p_flag_qctests <- names_flag_qctests[grepl(p, names_flag_qctests)]
if (boo_DEBUG) {
print(p_flag_qctests)
}## IF ~ boo_DEBUG
# Col Num, all
if (p_num == 1) {
colnums_mod <- colnums_remove_all
}## IF ~ p_num == 1
if (boo_DEBUG) {
print(colnums_mod)
}## IF ~ boo_DEBUG
# 0. find measurement colnum
# insert after it using append
# 1. Flag overall
# 2. Comment.MOD
# 3. Other flags
# Col Num, p (measurement)
colnum_p_orig <- match(p, names_all)
if (boo_DEBUG) {
print(colnum_p_orig)
}## IF ~ boo_DEBUG
# Col Num, p_flag_overall
colnum_p_flag_overall_orig <- match(p_flag_overall, names_all)
if (boo_DEBUG) {
print(colnum_p_flag_overall_orig)
}## IF ~ boo_DEBUG
# Col Num, p_cm
colnum_p_cm_orig <- match(p_cm, names_all)
# MIGHT CHANGE, could be out of order
if (boo_DEBUG) {
print(colnum_p_cm_orig)
}## IF ~ boo_DEBUG
# Col Num, p_flag_qctests
colnum_p_flag_qctests_orig <- match(p_flag_qctests, names_all)
# MIGHT CHANGE, could be out of order
if (boo_DEBUG) {
print(colnum_p_flag_qctests_orig)
}## IF ~ boo_DEBUG
# Col Num, insert
colnum_p_insert <- c(colnum_p_flag_overall_orig
, colnum_p_cm_orig
, colnum_p_flag_qctests_orig)
# new position in modified data frame
colnum_p_new <- match(colnum_p_orig, colnums_mod)
if (boo_DEBUG) {
print(colnum_p_new)
}## IF ~ boo_DEBUG
# insert CM after Flag
colnums_mod <- append(colnums_mod
, values = colnum_p_insert
, after = colnum_p_new)
}## FOR ~ p ~ names_flag_measurements
# resort data frame on new columns
data.import <- data.import[, colnums_mod]
}## IF ~ boo_move_CM
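# For reference, a minimal sketch of the append()-based reordering above,
# using hypothetical column names and positions:
# cols <- c("SiteID", "Water.Temp.C", "Sensor.Depth.ft"
#           , "Flag.Water.Temp.C", "Comment.MOD.Water.Temp.C")
# keep <- c(1, 2, 3)                  # columns left in place
# ins  <- c(4, 5)                     # flag/comment columns to relocate
# cols[append(keep, values = ins, after = match(2, keep))]
# # "SiteID" "Water.Temp.C" "Flag.Water.Temp.C" "Comment.MOD.Water.Temp.C"
# # "Sensor.Depth.ft"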
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# save file then run QC Report in a separate Script
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# B.10.0. Output file
# B.10.1. Set Name
File.Date.Start <- format(as.Date(strFile.Date.Start
,ContData.env$myFormat.Date)
,"%Y%m%d")
File.Date.End <- format(as.Date(strFile.Date.End
,ContData.env$myFormat.Date)
,"%Y%m%d")
strFile.Out.Prefix <- "QC"
strFile.Out <- paste(paste(strFile.Out.Prefix
,strFile.SiteID
,strFile.DataType
,File.Date.Start
,File.Date.End
,sep = ContData.env$myDelim)
, "csv"
, sep = ".")
# 10.2. Save to File the data (overwrites any existing file).
#print(paste("Saving output of file ",intCounter," of ",intCounter.Stop," files complete.",sep=""))
#utils::flush.console()
#write.csv(data.import,file=paste(myDir.data.export,"/",strFile.Out,sep=""),quote=FALSE,row.names=FALSE)
utils::write.csv(data.import,file = file.path(myDir.data.export, strFile.Out)
,quote = FALSE
,row.names = FALSE)
#
# # B.11. Clean up
# # B.11.1. Inform user of progress and update LOG
# myMsg <- "COMPLETE"
# myItems.Complete <- myItems.Complete + 1
# myItems.Log[intCounter,2] <- myMsg
# fun.write.log(myItems.Log,myDate,myTime)
# fun.Msg.Status(myMsg, intCounter, intItems.Total, strFile)
# utils::flush.console()
# # B.11.2. Remove data (import)
# rm(data.import)
#*********************
# end QC manual stuff
#************************
# Report ####
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# insert QC Report so runs without user intervention
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# DEBUG, REPORT ####
if (boo_DEBUG == TRUE) {
fun.myData.SiteID <- strFile.SiteID
fun.myData.Type <- strFile.DataType
# fun.myData.DateRange.Start and fun.myData.DateRange.End already defined
# the older fun.myDir.BASE / fun.myDir.SUB.* arguments no longer exist;
# use fun.myDir.import / fun.myDir.export instead
fun.myFile.Prefix <- strFile.Out.Prefix
}##IF~boo_DEBUG~END
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# run with same import and export directory
###
# B.10.3. Report ####
if (fun.CreateReport == TRUE) {
fun.Report(strFile.SiteID
, strFile.DataType
, strFile.Date.Start
, strFile.Date.End
, fun.myDir.export
, fun.myDir.export
, strFile.Out.Prefix
, fun.myReport.format
, fun.myReport.Dir
)
}##IF.CreateReport.END
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# B.11. Clean up
# B.11.1. Inform user of progress and update LOG
myMsg <- "COMPLETE"
myItems.Complete <- myItems.Complete + 1
myItems.Log[intCounter,2] <- myMsg
fun.write.log(myItems.Log,myDate,myTime)
fun.Msg.Status(myMsg, intCounter, intItems.Total, strFile)
utils::flush.console()
# 11.2. Remove data (import)
rm(data.import)
#
}##while.END
#
# C. Return ####
myTime.End <- Sys.time()
print(paste("Task COMPLETE; "
, round(difftime(myTime.End,myTime.Start, units = "mins"), 2)
," min.", sep = ""))
utils::flush.console()
#
# return data table
#return(data.import) # don't want to return since are saving in the loop (would only be last one anyway)
}##FUN.fun.QC.END
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# # #
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# # # # QC
# fun.data.import <- data.import
# fun.myField.Data <- myName.SensorDepth
# fun.myThresh.Gross.Fail.Hi <- myThresh.Gross.Fail.Hi.SensorDepth
# fun.myThresh.Gross.Fail.Lo <- myThresh.Gross.Fail.Lo.SensorDepth
# fun.myThresh.Gross.Suspect.Hi <- myThresh.Gross.Suspect.Hi.SensorDepth
# fun.myThresh.Gross.Suspect.Lo <- myThresh.Gross.Suspect.Lo.SensorDepth
# fun.myThresh.Spike.Hi <- myThresh.Spike.Hi.SensorDepth
# fun.myThresh.Spike.Lo <- myThresh.Spike.Lo.SensorDepth
# fun.myThresh.RoC.SD.period <- myThresh.RoC.SD.period.SensorDepth
# fun.myThresh.RoC.SD.number <- myThresh.RoC.SD.number.SensorDepth
# fun.myThresh.Flat.Hi <- myThresh.Flat.Hi.SensorDepth
# fun.myThresh.Flat.Lo <- myThresh.Flat.Lo.SensorDepth
# fun.myThresh.Flat.Tolerance <- myThresh.Flat.Tolerance.SensorDepth
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# 20181205, move fun.CalcQCStats to fun.Helper.R
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# Floating point math error in some cases (e.g., 0.15 != 0.15)
# http://stackoverflow.com/questions/9508518/why-are-these-numbers-not-equal
# instead of <= may have to use isTRUE(all.equal(a,b)) where a<-0.1+0.05 and b<-0.15
# a <- 0.1 + 0.05
# b <- 0.15
# a==b
# a<=b
# b>=a
# isTRUE(all.equal(a,b))
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# leave code "as is"
# Not removing data but flagging.
# Had found some cases with SensorDepth equal to 0.1 and not getting the correct T/F.