R/webservices.R

#' @title Summary of webservice accessors
#' @description
#' Provides a list of all webservice accessors that are currently supported.
#' @details
#' \code{\link{webservices}} lists the accessor functions with
#' their arguments, whereas \code{\link{services}}
#' lists the names of the associated web services at
#' IRIS.
#' @docType package
#' @name irisws-webservices
#' @aliases ws webservice
#' @references \url{http://service.iris.edu/irisws}
#' 
#' @seealso \code{\link{irisws-package}}
#' @family WebServices
#' 
#' @examples
#' # list all the webservice accessors:
#' webservices()
#' 
#' # just the names of webservices, as set by the IRIS API:
#' services()
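#' 
#' # services() works by evaluating the default choices of the 'service'
#' # argument of the query constructor; the same trick can be illustrated
#' # with a throwaway function:
#' f <- function(service=c("timeseries","distaz")) match.arg(service)
#' eval(formals(f)$service)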
NULL

#' @rdname irisws-webservices
#' @export
webservices <- function(){
    lsf.str("package:irisws", pattern="^ws.")
}

#' @rdname irisws-webservices
#' @export
services <- function(){
    # list all the 'service' strings
    fmls <- formals(constructor)
    eval(fmls$service)
}

#' Access query data from webservice result
#' @export
#' @param x a webservice query result (e.g., the list returned by \code{\link{ws.timeseries}})
#' @param ... additional parameters (currently unused)
#' @examples
#' \dontrun{
#' querydata(flinnengdahl.ws(10,10))
#' # the result is also saved in the package environment as 'querydata'
#' envList()
#' }
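#' 
#' # The caching pattern used by these helpers, sketched with a throwaway
#' # environment (the package itself stores results in an internal environment):
#' e <- new.env()
#' assign("querydata", pi, envir=e)
#' ls(envir=e, all.names=TRUE)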
querydata <- function(x, ...){
  qd <- x$querydata
  assign("querydata", qd, envir=.iriswsEnv)
  return(qd)
}

#' @rdname querydata
#' @export
envList <- function(){
  message(paste("Contents of", .iriswsEnvName))
  ls(envir=.iriswsEnv, all.names=TRUE)
}

#' Access to the 'timeseries' Web Service for obtaining continuous data
#' 
#' @description
#' From [1]: \emph{The ws-timeseries service provides access to individual
#'  channels of time series data for a specified time range. Requested 
#'  segments may be up to 30 days in length and optional signal processing 
#'  may be applied to return data. The time series may be returned in a 
#'  variety of formats.}
#' 
#' @details
#' The query is generated with \code{\link{constructor2}},
#' and executed with \code{\link{query.iris}}, which throws
#' errors based on \code{\link{check.query}}.
#' 
#' \subsection{Output format options (\code{output=})}{
#'  \itemize{
#'      \item{\code{'miniseed'}}: miniSEED 
#'      \item{\code{'sac.asc'}}: SAC Alpha (ASCII)
#'      \item{\code{'sac.bin'}}: SAC binary (either big or little endian)
#'      \item{\code{'audio'}}: Audio (WAV format)
#'      \item{\code{'plot'}}: PNG plot
#'      \item{\code{'ascii.values'}}: ASCII (values only)
#'      \item{\code{'ascii'}}: ASCII (values, and datetimes)
#'  }
#' The SAC formats are read in through \code{\link{read.sac}}.
#' }
#' 
#' \subsection{Filename options (\code{filename=})}{
#'  \itemize{
#'      \item{\code{NA}}: An auto-generated file.
#'      \item{\code{NULL}}: A temporary file.
#'      \item{<character string>}: Any desired name.
#'  }
#' Autogenerated files are of the form:
#'
#' \code{iriswsQ.<network>.<station>.<location>.<channel>.<starttime>.<end>.<extension>}
#'  where \code{end} is either '\code{endtime}', or '\code{<duration>s}'
#'  and \code{extension} will depend on \code{output}.
#'
#' Temporary files are generated within \code{\link{query.iris}}.
#' }
#' 
#' \subsection{Signal processing options}{
#'   \itemize{
#'      \item{high, low and band-pass filter (\code{hpfilter=}, \code{lpfilter=}, \code{bpfilter=})}
#'      \item{remove mean value (\code{demean=})}
#'      \item{scaling by constant value (\code{scale=}, \code{divscale=})}
#'      \item{deconvolution of instrument response (with frequency limits and unit conversion) (\code{correct=}, \code{freqlimits=}, \code{units=})}
#'      \item{differentiation and integration (\code{diff=}, \code{int=})}
#'      \item{decimation to lower sample rates (\code{decimate=})}
#'  }
#'  These can be supplied as additional query parameters through \code{...}
#'  (see the \strong{Examples}).
#' }
#' 
#' @author AJ Barbour
#' @name timeseries
#' 
#' @param network character; the network code
#' @param station character; the station code
#' @param location character; the location code
#' @param channel character; the channel code
#' @param starttime character; the beginning of the record
#' @param duration numeric; the length of the record, in seconds.
#' @param endtime character; the end of the record.  This is ignored, with a
#' warning, if \code{duration} is also specified.
#' @param output character; the type of file to output to. See \strong{Details}
#' @param filename \code{NA} for an auto-generated filename based on the
#' inputs; \code{NULL} for a temporary filename; or, a character string
#' of the user's choosing.  See \strong{Details} for details about
#' the auto-generated name.
#' @param endianness character; specify the endianness of binary SAC output.
#' \code{'auto'} uses the platform value, or \code{'little'} and \code{'big'}
#' can be used to force a specific structure.
#' @param load.results logical; should the program try to load the results into R?
#' Currently the following formats can be loaded:
#' \code{'sac.bin'},
#' \code{'ascii'},
#' \code{'plot'},
#' \code{'ascii.values'},
#' \code{'sac.asc'}
# \code{'miniseed'},
# \code{'audio'}
#' @param verbose logical; should messages be given?
#' @param curl.verbose logical; should messages from \code{\link{curlPerform}} be given?
#' @param opts list; additional query parameters.  These are currently
#' stored with the result but not added to the query; pass extra
#' parameters through \code{...} instead.
#' @param ... additional query parameters passed to \code{\link{constructor2}};
#' because \code{\link{constructor2}} is used, any bogus options are ignored.
#' 
#' @return A list (invisibly) with the query string, and data from the result
#' 
#' @references [1] \url{http://service.iris.edu/irisws/timeseries/1/}
#' @references [2] \url{http://www.iris.edu/dms/nodes/dmc/data/formats/simple-ascii/}
#' 
#' @seealso 
#' \code{\link{timestring}} to make properly formatted time strings
#' 
#' \code{\link{read.sac}} for SAC data reader
#' 
#' @family WebServices
#' 
#' @examples
#' \dontrun{
#' #
#' # El Mayor Cucapah earthquake (M7.2 April 4, 2010)
#' #
#' net <- "PB"
#' sta <- "B084"
#' loc <- "--"
#' cha <- "LDD"  # pore fluid pressure
#' elmayor <- "2010.094T22:00:00.000000" # or "2010-04-04T00:00:00"
#' dur <- 7200
#' ws.timeseries(net, sta, loc, cha, elmayor, dur, output="plot")
#' #
#' # or use the string builder
#' elmayor <- timestring(2010,94,22,0,0)
#' ws.timeseries(net, sta, loc, cha, elmayor, dur, output="plot")
#' sacd <- ws.timeseries(net, sta, loc, cha, elmayor, dur, output="sac.bin")
#' print(str(sacd))
#' plot(ts(sacd$querydata$amp, deltat=sacd$querydata$dt))
#' #
#' #
#' # SAC FORMAT: ASCII
#' xa <- ws.timeseries(net, sta, loc, cha, elmayor, dur, output="sac.asc")
#' # SAC binary
#' xb <- ws.timeseries(net, sta, loc, cha, elmayor, dur, output="sac.bin")
#' plot(xa$querydata$amp)
#' lines(xb$querydata$amp, col="red")
#' #
#' #
#' # REGULAR ASCII
#' #  ascii with a datetime string and values
#' #  (the datetime string is converted to POSIXct with lubridate)
#' xa <- ws.timeseries(net, sta, loc, cha, elmayor, dur, output="ascii")
#' plot(xa$querydata, type="s")
#' #
#' #  ASCII, again, but only values are returned (and metadata)
#' xa <- ws.timeseries(net, sta, loc, cha, elmayor, dur, output="ascii.values")
#' dat <- xa$querydata$value
#' plot(dat, type="s")
#' #
#' # we can use the header metadata to align in time
#' hdr <- attr(xa$querydata,"header")
#' require(lubridate)
#' tst <- ymd_hms(hdr[7])
#' npt <- as.numeric(hdr[3])
#' sps <- as.numeric(hdr[5])
#' datDatetime <- seq(from=tst, by=1/sps, length.out=npt)
#' plot(datDatetime, dat, type="s")
#' #
#' #
#' # PNG plot
#' require(png)
#' xp <- ws.timeseries(net, sta, loc, cha, elmayor, dur, output="plot")
#' plot(1:2)
#' rasterImage(xp$querydata, 1, 1, 2, 2)
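#' #
#' #
#' # Signal processing options (see Details) can be passed through '...';
#' # for example, removing the mean and low-pass filtering at 0.01 Hz
#' # (hypothetical values; unrecognized options are simply dropped):
#' xf <- ws.timeseries(net, sta, loc, cha, elmayor, dur, output="ascii",
#'                     demean="true", lpfilter=0.01)
#' plot(xf$querydata, type="s")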
#' }
NULL

#' @rdname timeseries
#' @export
ws.timeseries <- function(network, station, location, channel, 
                          starttime, duration, endtime=NULL, 
                          output=c('sac.bin','ascii','plot','ascii.values','sac.asc','miniseed','audio'), 
                          filename=NA,
                          endianness=c("auto","little","big"),
                          load.results=TRUE,
                          verbose=TRUE, curl.verbose=FALSE,
                          opts=list(),
                          ...){
    outpo <- outp <- match.arg(output)
    if (outp=="sac.bin"){
        endi <- match.arg(endianness)
        if (endi=="auto"){
            endi <- .Platform[["endian"]]
        }
        # needs 'sacbl' or 'sacbb'
        outp <- paste0("sacb",substr(endi,1,1))
    } else if (outp=="sac.asc"){
        outp <- 'saca'
    } else if (outp=="ascii.values"){
        outp <- 'ascii1'
    } 
    # /query? (channel-options) (date-range-options) (filter-options) [plot-options] [audio-options] (output-options)
    # where:
    #   channel-options      ::  (net=<network> & sta=<station> & loc=<location> & cha=<channel>)
    #   date-range-options   ::  (starttime=<time>) & ([endtime=<time>] | [duration=<seconds>])
    #   filter-options       ::  [taper=WIDTH,TYPE] [envelope] [lpfilter=FREQ] [hpfilter=FREQ]
    #                            [bpfilter=HIFREQ-LOFREQ] [demean=<true|false>] 
    #                            [diff=<true|false>] [int=<true|false>] [scale=number|AUTO] 
    #                            [divscale=number] [correct=<true|false>] [freqlimits=F1-F2-F3-F4] 
    #                            [autolimits=lowerdBdown-upperdBdown] 
    #                            [units=<DEF|DIS|VEL|ACC>] [decimate=SAMPLERATE]
    #   plot-options         ::  [antialiasplot=<true|false>]
    #   audio-options        ::  [audiocompress=<true|false>] [audiosamplerate=<playback-rate-hz>]
    #   output-options       ::  (output=<miniseed|saca|sacbb|sacbl|plot|ascii|ascii1|ascii2|audio>)
    #   
    #   (..) required
    #   [..] optional
    network <- toupper(network) 
    station <- toupper(station)
    location <- toupper(location)
    channel <- toupper(channel)
    #
    #opts <- list(...) # add these [ ]
    #print(opts)
    #
    durmiss <- missing(duration)
    etnull <- is.null(endtime)
    #
    if (durmiss & etnull){
        stop("must specify either 'duration', or 'endtime'")
    } else if (!durmiss & !etnull){
        endtime <- NULL
        warning("'endtime' was ignored in favor of 'duration'")
    }
    #
    Q <- constructor2(net=network, 
                      sta=station,
                      loc=location,
                      cha=channel, 
                      starttime=starttime, 
                      duration=duration, 
                      endtime=endtime, 
                      output=outp,
                      ...,
                      service="timeseries")
    #
    if (verbose) message(paste("Query: ", Q))
    # Filename
    if (!is.null(filename)){
        if (is.na(filename)) {
            #
            # automatic generation
            #
            if (durmiss){
                fiendtime <- endtime
            } else {
                fiendtime <- paste0(duration,"s")
            }
            # extension
            ext <- switch(outpo, 
                          sac.bin='sac', sac.asc='txt',
                          plot='png', audio='wav',
                          ascii='txt', ascii.values='txt',  
                          miniseed='mseed')
            filename <- paste("iriswsQ", network, station, location, 
                              channel, starttime, fiendtime, ext, sep=".")
        }
        filename <- as.character(filename)
    }
    # Execute query
    res <- query.iris(Q, filename=filename, verbose=curl.verbose)
    success <- res$success
    #
    # load data
    #
    fi <- res[["file"]]
    if (verbose & success) message(paste(" Success.\n","File: ", fi))
    #
    if (load.results & success){
        if (verbose) message(paste(" loading... "))
        #
        if (outpo=="sac.bin" | outpo=="sac.asc"){
            isbin <- switch(outpo, sac.bin=TRUE, sac.asc=FALSE)
            dat <- sync(read.sac(fi, is.binary=isbin, endianness=endi))[[1]]
        } else if (outpo == "ascii" | outpo == "ascii.values") {
            nms <- switch(outpo, 
                          ascii=c("Datetime","value"),
                          ascii.values=c("value"))
            hdr <- read.csv(fi, header=FALSE, nrows=1)
            #TIMESERIES PB_B084__LDD_M, 100 samples, 1 sps, 
            # 2013-10-01T00:00:00.810000, TSPAIR, INTEGER, COUNTS
            hdrv <- as.vector(unlist(apply(as.matrix(hdr), 2, function(X) strsplit(X," "))))
            hdrv <- hdrv[sapply(hdrv,nchar)>0]
            #[1] "TIMESERIES"                 "PB_B084__LDD_M"
            #[3] "100"                        "samples"
            #[5] "1"                          "sps"
            #[7] "2013-10-01T00:00:00.810000" "TSPAIR"
            #[9] "INTEGER"
            #
            dat <- read.table(fi, header=FALSE, skip=1, stringsAsFactors=FALSE)
            #
            if (outp=="ascii"){
                names(dat) <- c("sacDatetime","value")
                op <- options(digits.secs=6)
                Datetime <- NULL
                dat <- within(dat, expr={
                    sacDatetime <- lubridate::ymd_hms(sacDatetime)
                })
                on.exit(options(op))
            } else {
                names(dat) <- c("value")
            }
            attr(dat, "header") <- hdrv
        } else if (outpo == "miniseed") {
            .NotYetImplemented()
        } else if (outpo == "audio") {
            .NotYetImplemented()
        } else if (outpo == "plot") {
            dat <-  png::readPNG(fi, native=TRUE)
        } else {
            #
        }
    } else {
        dat <- NA
    }
    #
    toret <- res
    toret$opts <- opts
    toret$querydata <- dat 
    return(invisible(toret))
}

#' @rdname timeseries
#' @export
timeseries.ws <- ws.timeseries

#' Access the 'distaz' Web Service for distance/azimuth computations
#' 
#' @description
#' From [1]: 
#' \emph{The distance-azimuth service will calculate the great-circle 
#' angular distance, azimuth, and backazimuth between two geographic 
#' coordinate pairs. All results are reported in degrees, with azimuth 
#' and backazimuth measured clockwise from North.
#' The azimuth is the angle from the station to the event, 
#' while the backazimuth is the angle from the event to the station.
#' Latitudes are converted to geocentric latitudes using the WGS84 spheroid 
#' to correct for ellipticity.}
#' 
#' @details
#' The query is generated with \code{\link{constructor2}},
#' and executed with \code{\link{query.iris}}, which throws
#' errors based on \code{\link{check.query}}.
#' 
#' \code{\link{distaz.ws}} is simply a pointer to \code{\link{ws.distaz}}
#' 
#' \subsection{Filename options (\code{filename=})}{
#'  \itemize{
#'      \item{\code{NA}}: An auto-generated file.
#'      \item{\code{NULL}}: A temporary file.
#'      \item{<character string>}: Any desired name.
#'  }
#' Autogenerated files are of the form:
#'
#' \code{iriswsQ.distaz}
#'
#' Temporary files are generated within \code{\link{query.iris}}.
#' }
#' 
#' @author AJ Barbour
#' @name distaz
#' @aliases Distance-Azimuth
#' 
#' @param station.latlon numeric; the decimal latitude and longitude of the station
#' @param event.latlon numeric; the decimal latitude and longitude of the event (the 'source')
#' @param filename \code{NA} for an auto-generated filename based on the
#' inputs; \code{NULL} for a temporary filename; or, a character string
#' of the user's choosing.  See \strong{Details} for details about
#' the auto-generated name.
#' @param verbose logical; should messages be given?
#' 
#' @return A list (invisibly) with the query string, and data from the result
#' 
#' @references [1] \url{http://service.iris.edu/irisws/distaz/1/}
#' 
#' @family WebServices
#' 
#' @examples
#' \dontrun{
#' 
#' # exact same point -- should give zero
#' qr <- ws.distaz()
#' querydata(qr)
#' 
#' # two different locations -- note the coordinates
#' # must be in the order of latitude, longitude
#' qr <- ws.distaz(c(0.,0.),c(30.,-100.))
#' querydata(qr)
#' # should give
#' #   azimuth backAzimuth distance
#' #1 84.98686    300.2136 98.66374
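#' #
#' # rough spherical sanity check of the distance (this ignores the
#' # geocentric-latitude correction applied by the service):
#' la1 <- 0 * pi/180; la2 <- 30 * pi/180; dlo <- -100 * pi/180
#' acos(sin(la1)*sin(la2) + cos(la1)*cos(la2)*cos(dlo)) * 180/pi  # ~98.7 deg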
#' 
#' }
NULL

#' @rdname distaz
#' @export
ws.distaz <- function(station.latlon=c(0.,0.), event.latlon=c(0.,0.), filename=NULL, verbose=TRUE){
    #
    sta <- as.numeric(station.latlon)
    stopifnot(length(sta) == 2)
    evt <- as.numeric(event.latlon)
    stopifnot(length(evt) == 2)
    #
    Q <- constructor2(stalat=sta[1], stalon=sta[2], evtlat=evt[1], evtlon=evt[2], service="distaz")
    #
    # deal with filename
    if (!is.null(filename)){
        if (is.na(filename)){
            # auto
            filename <- paste("iriswsQ", "distaz", sep=".")
        }
        filename <- as.character(filename)
    }
    #
    toret <- res <- query.iris(Q, filename=filename, verbose=verbose)
    #
    xmlfi <- res[["file"]]
    #
    # sink in case of cat
    if (!verbose) sink(file=tempfile())
    xdat <- tryCatch(XML::xmlToDataFrame(xmlfi), error=function(e) data.frame(rep(NA,3)))
    if (!verbose) sink()
    #          text
    #   1 202.45809  <-- azimuth
    #   2   21.2588  <-- back azimuth
    #   3   4.30128  <-- distance
    if (nrow(xdat)!=3) warning("query results may be bogus")
    dat <- data.frame(azimuth=xdat[1,],  backAzimuth=xdat[2,],  distance=xdat[3,])
    #
    toret$querydata <- list(station.latlon=sta, event.latlon=evt, distaz.data=dat)
    return(invisible(toret))
}

#' @rdname distaz
#' @export
distaz.ws <- ws.distaz

#' Access the 'traveltime' Web Service for traveltime computations
#' 
#' @description
#' From [1]: \emph{The traveltime webservice calculates travel-times for 
#' seismic phases using a 1-D spherical earth model.}
#' There are two ways to perform this computation: 
#' (1) \code{\link{ws.ttDistances}} for epicentral distances (in decimal
#' degrees, or kilometers), and
#' (2) \code{\link{ws.ttStaSrc}} for station/source pairs of latitudes
#' and longitudes.
#' 
#' @details
#' For distance calculations \code{\link{ws.ttDistances}} is the primary
#' function; both \code{\link{ws.ttDeg}} and \code{\link{ws.ttKm}} are
#' wrapper functions for calculations using degrees and kilometers, respectively. 
#' 
#' For station/source pair calculations \code{\link{ws.ttStaSrc}} is
#' the primary function.
#'   
#' Note that \code{\link{ttDeg.ws}} is simply a pointer to \code{\link{ws.ttDeg}} and
#' similarly for \code{\link{ttKm.ws}} and \code{\link{ttStaSrc.ws}}.
#' 
#' The query is generated with \code{\link{constructor2}},
#' and executed with \code{\link{query.iris}}, which throws
#' errors based on \code{\link{check.query}}.
#' 
#' \subsection{Filename options (\code{filename=})}{
#'  \itemize{
#'      \item{\code{NA}}: An auto-generated file.
#'      \item{\code{NULL}}: A temporary file.
#'      \item{<character string>}: Any desired name.
#'  }
#' Autogenerated files are of the form:
#'
#' \code{iriswsQ.<calculation type>.<model>.tt}
#'  where \code{calculation type} depends on the type of calculation:
#'  \code{Dist} if the calculation was based on distances, or 
#'  \code{StaSrc} if based on station-source pairs.
#'
#' Temporary files are generated within \code{\link{query.iris}}.
#' }
#' 
#' \subsection{Utility functions}{
#'   \code{PS_time} functions:
#'   return P- and S-wave data and
#'   the difference between the S and P information.
#'   More specifically,
#'   \code{\link{PS_time.distances}} does this using 
#'   \code{\link{ws.ttDistances}} and \code{phases=c("P","S")}; hence, distance 
#'   units are in decimal degrees (by default) but can be modified through the
#'   additional parameters \code{...}.
#' }
#'
#' \subsection{Notes}{
#'   From [1]:
#'   \enumerate{
#'     \item{\emph{Valid arbitrary phases can be made up 
#'     e.g. sSKJKP, but invalid phases will be ignored.}}
#'     \item{\emph{Travel times are produced 
#'     in ascending time order regardless of the order in 
#'     which the phases are specified.}}
#'   }
#'   
#'   Also note that one can specify a specific phase velocity
#'   by adding \code{'kmps'} after the desired velocity (in
#'   km/s) [3].  For example \code{'4.4kmps'} will return the
#'   principal G-phases.
#'   
#' }
#' 
#' \subsection{Warnings}{
#'   \itemize{
#'     \item{It is advisable \emph{not} to turn on \code{traveltime.only=TRUE}
#'     or \code{rayparam.only=TRUE} \emph{unless} a vector of \code{phases}
#'     is given; this is because the IRIS WS does not return the phase list
#'     if these options are enabled, so the numbers returned
#'     will be essentially meaningless (that is, if \code{phases} is not set).
#'     }
#'   }
#' }
#' 
#' @name traveltime
#' @aliases ws.traveltime traveltimes tt ws.tt
#' 
#' @author AJ Barbour
#' 
#' @param distances numeric; great-circle distance from source to station.
#' Specify multiple distances as a vector (e.g., \code{c(1,20,30)})
#' @param distance.units character; the units of \code{distances}, either
#' in decimal degrees or kilometers.
#' @param depth numeric; the depth of the event, in kilometers. 
#' \emph{Note that currently only one depth can be specified.}
#' @param model character; Name of 1-D earth velocity model to be used. 
#' Available models include: 
#' \code{iasp91} by the Int'l Assoc of Seismology and Physics of the 
#' Earth's Interior,
#' \code{prem}, the Preliminary Reference Earth Model, and 
#' \code{ak135} by 
#' Kennett B.L.N., Engdahl E.R. and Buland R. (1995). See [2].
#' @param phases character; Comma-separated list of phases, defaulting to 
#' \code{p,s,P,S}, 
#' \code{Pn,Sn,PcP,ScS},
#' \code{Pdiff,Sdiff,PKP},
#' \code{SKS,PKiKP,SKiKS},
#' \code{PKIKP,SKIKS}. See Note (1).
#' @param filename \code{NA} for an auto-generated filename based on the
#' inputs; \code{NULL} for a temporary filename; or, a character string
#' of the user's choosing.  See \strong{Details} for details about
#' the auto-generated name.
#' @param no.header logical; suppresses header from the resulting table
#' @param traveltime.only logical; returns a space-separated list of 
#' travel times, in seconds. See Note (2).
#' @param rayparam.only logical; will return a space-separated list of ray 
#' parameters, in sec/deg.
#' @param mintime.only logical; will only retrieve the first arrival of 
#' each phase for each distance
#' @param verbose logical; should messages be given?
#' @param ... additional parameters to \code{\link{ws.ttDistances}}
#' @param event.latlon numeric; the lat/lon of the earthquake epicenter
#' @param station.latlons numeric; the lat(s)/lon(s) of the stations.  See \code{X}.
#' @param agg.fun function for data aggregation: i.e. \code{\link{mean}}, \code{\link{min}}, or \code{\link{max}}
#' @param X numeric; the lat(s)/lon(s) of the stations; these
#' can be specified as a single vector with latitudes and longitudes concatenated;
#' or (preferably) a list or data.frame.
#' \code{\link{.llpair}} is used to form the lat/lon pairs in the format
#' acceptable to IRIS WS
#' 
#' @return A list (invisibly) with the query string, and data from the result
#' 
#' @references [1] \url{http://service.iris.edu/irisws/traveltime/1/}
#' @references [2] \url{http://www.iris.edu/dms/products/emc-referencemodels/} 
#' describes the spherical-Earth reference models.
#' @references [3] \url{http://www.seis.sc.edu/TauP/} is the engine used for
#' the computations.
#' 
#' @family WebServices
#' 
#' @examples
#' \dontrun{
#' #
#' # In epicentral degrees
#' wsdeg1 <- ws.ttDistances(c(0,10,20,30,40), verbose=TRUE)
#' wsdeg2 <- ws.ttDeg(c(0,10,20,30,40), verbose=TRUE)
#' all.equal(wsdeg1, wsdeg2)
#' #
#' # In kilometers
#' ws.ttKm(c(0,10,20,30,40), verbose=TRUE)
#' #
#' # Get P/S wave times for distances
#' PS_time.distances(1:20)
#' PS_time.distances(1:20, distance.units="kilometers")
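#' #
#' # A specific phase velocity can be requested (see Notes); '4.4kmps'
#' # returns the principal G phases:
#' ws.ttDeg(c(20,40,60), phases=c("P","S","4.4kmps"))
#' #
#' # As cautioned in Warnings, give 'phases' explicitly when requesting
#' # travel times only:
#' ws.ttDeg(c(20,40,60), phases=c("P","S"), traveltime.only=TRUE)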
#' #
#' # Stations pairs:
#' # (fake some Lats/lons)
#' (ws.ttStaSrc(c(1,2),c(20,10,1:10), verbose=FALSE))
#' (ws.ttStaSrc(c(1,2),list(lats=1:10, lons=11:20), verbose=FALSE))
#' (ws.ttStaSrc(c(1,2), data.frame(lats=1:10, lons=11:20), verbose=FALSE))
#' #
#' PS_time.stations(c(10,20),1:10)
#' #
#' # Here's how the lat lon pairs are combined...
#' .llpair(1:10)
#' try(.llpair(1:11)) # success, but only because of value recycling
#' .llpair(data.frame(x=1:5,y=6:10))
#' .llpair(list(x=1:5,y=6:10))
#' .llpair(matrix(1:12,ncol=3))
#' try(.llpair(list(x=1:5,y=6:11))) # failure
#' }
NULL

#' @rdname traveltime
#' @export
ws.ttDistances <- function(distances, 
                           distance.units=c("degrees","kilometers"),
                           depth=0, 
                           model=c('iasp91','prem','ak135'), phases=NULL, 
                           filename=NULL,
                           no.header=FALSE, traveltime.only=FALSE, rayparam.only=FALSE, mintime.only=FALSE, 
                           verbose=TRUE){
    # /query? (distdeg=<degrees>) [evdepth=<km>] [model=<iasp91|prem|ak135>] [phases=<phaselist>] [output-params]
    # where:
    #   output-params     ::    [noheader=<true|false>] 
    #                           [traveltimeonly=<true|false>]
    #                           [rayparamonly=<true|false>]
    #                           [mintimeonly=<true|false>]
    #   
    #   (..) required
    #   [..] optional
    model <- match.arg(model)
    #
    distance.units <- match.arg(distance.units)
    distlist <- paste(as.character(as.numeric(distances)),collapse=",")
    #
    if (!is.null(phases)){
        phaselist <- paste(as.character(phases),collapse=",")
    } else {
        phaselist <- phases
    }
    #
    IE <- function(x) ifelse(x, "true", "false")
    NH <- IE(no.header)
    TTO <- IE(traveltime.only)
    RPO <- IE(rayparam.only)
    MTO <- IE(mintime.only)
    #
    if (distance.units=="degrees"){
        deg.flag <- TRUE
        Q <- constructor2(distdeg=distlist, evdepth=depth, model=model, phases=phaselist, 
                          noheader=NH, traveltimeonly=TTO, rayparamonly=RPO, mintimeonly=MTO,
                          service="tt.deg")
    } else if (distance.units=="kilometers"){
        deg.flag <- FALSE
        Q <- constructor2(distkm=distlist, evdepth=depth, model=model, phases=phaselist, 
                          noheader=NH, traveltimeonly=TTO, rayparamonly=RPO, mintimeonly=MTO,
                          service="tt.km")
    }
    stopifnot(exists("Q"))
    #
    # deal with filename
    if (!is.null(filename)){
        if (is.na(filename)){
            # auto
            filename <- paste("iriswsQ", "Dist", model, "tt", sep=".")
        }
        filename <- as.character(filename)
    }
    #
    res <- query.iris(Q, filename=filename, verbose=verbose)
    fi <- res[["file"]]
    if (verbose) system(paste("cat",fi))
    #
    # (note, tt has precedence)
    if (traveltime.only | rayparam.only){
        dat <- scan(fi, nlines=1, quiet=!verbose)
    } else {
        nskip <- ifelse(no.header,0,4)
        distu <- ifelse(deg.flag, "deg", "km")
        timeu <- "s"
        dist <- paste0("dist.",distu)
        dat <- read.table(fi, header=FALSE,
            col.names=c(dist, 
                        paste0("depth.",distu), 
                        "phase",
                        paste0("traveltime.",timeu),
                        paste0("slowness.",timeu,"_per_",distu),
                        "takeoffAng.deg","incidentAng.deg",
                        "distance.purist","xxx","phase.purist"),
            skip=nskip)
        xxx <- NULL
        dat <- subset(dat,select=-c(xxx))
        # distances can be equal, so we
        # can't simply use unique
        dists <- dat[,dist]
        phs <- dat[,'phase']
        isP <- phs=="P"
        di <- c(2, diff(isP))
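        # a new distance block begins wherever the phase sequence returns to "P"
        # (the leading 2 forces the first row to start a block); e.g.,
        #   phs: P  S  ScS  P  S   ->  di > 0 at rows 1 and 4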
        #print(rbind(di,phs))
        distances <- dists[di > 0]
        distances <- data.frame(distance=distances, number=seq_along(distances))
        names(distances)[1] <- dist
    }
    #
    toret <- res
    toret$querydata <- list(phases=phaselist, traveltime.data=dat, distance.data=distances)
    return(invisible(toret))
}
#' @rdname traveltime
#' @export
PS_time.distances <- function(distances, agg.fun=mean, ...){
    #
    x <- ws.ttDistances(distances, phases=c("P","S"), verbose=FALSE, ...)
    dat <- x$querydata
    ttdat <- dat$traveltime.data
    distdat <- dat$distance.data
    #
    distttdat <- merge(distdat, ttdat)
    #print(ttdat)
    mltdat <- distttdat[ , c(1,3:7) + 1] # Assumed: distance, phase, tt, slow, takeoff, incidence
    names(mltdat)[1] <- 'distance'
    # melt the data down...
    mlt <- reshape2::melt(mltdat, id.vars=c('phase', "distance"))
    # and recast three times, taking averages of results (to account for triplicates)
    cst1 <- cbind(phase="P", reshape2::dcast(mlt, variable ~ distance, fun.aggregate=function(tt){agg.fun(tt[1], na.rm=TRUE)}))
    cst2 <- cbind(phase="S", reshape2::dcast(mlt, variable ~ distance, fun.aggregate=function(tt){agg.fun(tt[2], na.rm=TRUE)}))
    cst3 <- cbind(phase="S.minus.P", reshape2::dcast(mlt, variable ~ distance, fun.aggregate=function(tt){agg.fun(tt[2], na.rm=TRUE) - agg.fun(tt[1], na.rm=TRUE)}))
    return(list(pstimes=rbind(cst1,cst2,cst3), distances=distdat))
}

#' @rdname traveltime
#' @export
ws.ttDeg <- function(distances, ...) ws.ttDistances(distances, distance.units="degrees", ...)

#' @rdname traveltime
#' @export
ttDeg.ws <- ws.ttDeg

#' @rdname traveltime
#' @export
ws.ttKm <- function(distances, ...) ws.ttDistances(distances, distance.units="kilometers", ...)

#' @rdname traveltime
#' @export
ttKm.ws <- ws.ttKm

#' @rdname traveltime
#' @export
ws.ttStaSrc <- function(event.latlon, station.latlons,
                        depth=0, 
                        model=c('iasp91','prem','ak135'), 
                        phases=NULL, 
                        filename=NULL,
                        no.header=FALSE, traveltime.only=FALSE, rayparam.only=FALSE, mintime.only=FALSE, 
                        verbose=TRUE){
    model <- match.arg(model)
    #
    if (!is.null(phases)){
        phaselist <- paste(as.character(phases),collapse=",")
    } else {
        phaselist <- phases
    }
    #
    IE <- function(x) ifelse(x, "true", "false")
    NH <- IE(no.header)
    TTO <- IE(traveltime.only)
    RPO <- IE(rayparam.only)
    MTO <- IE(mintime.only)
    #
    evt <- .llpair(event.latlon, verbose=verbose)
    stas <- .llpair(station.latlons, verbose=verbose)
    #
    Q <- constructor2(evloc=evt, 
                      evdepth=depth, 
                      staloc=stas,
                      model=model, phases=phaselist, 
                      noheader=NH, traveltimeonly=TTO, rayparamonly=RPO, mintimeonly=MTO,
                      service="tt.llpairs")
    #return(Q)
    stopifnot(exists("Q"))
    #
    # deal with filename
    if (!is.null(filename)){
        if (is.na(filename)){
            # auto
            filename <- paste("iriswsQ", "StaSrc", model, "tt", sep=".")
        }
        filename <- as.character(filename)
    }
    #
    res <- query.iris(Q, filename=filename, verbose=verbose)
    fi <- res[["file"]]
    if (verbose) system(paste("cat",fi))
    #
    if (traveltime.only | rayparam.only){
        dat <- scan(fi, nlines=1, quiet=!verbose)
    } else {
        nskip <- ifelse(no.header,0,4)
        distu <- "deg"
        timeu <- "s"
        dist <- paste0("dist.",distu)
        dat <- read.table(fi, header=FALSE,
                          col.names=c(dist, 
                                      paste0("depth.",distu), 
                                      "phase",
                                      paste0("traveltime.",timeu),
                                      paste0("slowness.",timeu,"_per_",distu),
                                      "takeoffAng.deg","incidentAng.deg",
                                      "distance.purist","xxx","phase.purist"),
                          skip=nskip)
        xxx <- NULL
        dat <- subset(dat, select=-c(xxx))
        # distances can be equal, so we
        # can't simply use unique
        dists <- dat[,dist]
        phs <- dat[,'phase']
        isP <- phs=="P"
        di <- c(2, diff(isP))
        #print(rbind(di,phs))
        distances <- dists[di > 0]
        stations <- data.frame(distance=distances, number=seq_along(distances))
        names(stations)[1] <- dist
    }
    #
    toret <- res
    toret$querydata <- list(phases=phaselist, traveltime.data=dat, station.data=stations)
    return(invisible(toret))
}
#' @rdname traveltime
#' @export
ttStaSrc.ws <- ws.ttStaSrc
#' @rdname traveltime
#' @export
PS_time.stations <- function(event.latlon, station.latlons, agg.fun=mean, ...){
    #
    x <- ws.ttStaSrc(event.latlon=event.latlon, station.latlons=station.latlons, 
                     phases=c("P","S"), verbose=FALSE, ...)
    dat <- x$querydata
    ttdat <- dat$traveltime.data
    stadat <- dat$station.data
    stattdat <- merge(stadat, ttdat)
    #print(ttdat)
    mltdat <- stattdat[ ,c(1,3:7) + 1] # Assumed: station num, phase, tt, slow, takeoff, incidence
    names(mltdat)[1] <- 'station'
    #
    mlt <- reshape2::melt(mltdat, id.vars=c('phase', 'station'))
    #
    #
    cst1 <- cbind(phase="P", reshape2::dcast(mlt, variable ~ station, fun.aggregate=function(tt) {agg.fun(tt[1], na.rm=TRUE)}))
    cst2 <- cbind(phase="S", reshape2::dcast(mlt, variable ~ station, fun.aggregate=function(tt) {agg.fun(tt[2], na.rm=TRUE)}))
    cst3 <- cbind(phase="S.minus.P", reshape2::dcast(mlt, variable ~ station, fun.aggregate=function(tt) {agg.fun(tt[2], na.rm=TRUE) - agg.fun(tt[1], na.rm=TRUE)}))
    #
    return(list(pstimes=rbind(cst1,cst2,cst3), stations=stadat))
}

#' @rdname traveltime
#' @export
.llpair <- function(X, verbose=TRUE){
    Xm <- matrix(as.matrix(as.data.frame(X)), ncol=2)
    colnames(Xm) <- c("lat","lon")
    if (verbose) print(Xm)
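    # e.g., a single c(lat, lon) pair such as c(45, -120) becomes the string
    # "[45.000000,-120.000000]"; multiple rows are collapsed with commas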
    lbrk <- "[" #"%5B"
    rbrk <- "]" #"%5D"
    paste(apply(Xm, 1, function(x) sprintf("%s%f,%f%s", lbrk, x[1], x[2], rbrk)), collapse=",")
}

#' Access the 'flinnengdahl' Web Service for Flinn-Engdahl region information
#' 
#' @description From [1]:
#' \emph{The Flinn-Engdahl webservice returns either the Flinn-Engdahl 
#' region code or region name for a specified latitude and longitude.}
#' 
#' \emph{Returned values are text strings, e.g. region code `014' 
#' which corresponds to region name `Kenai Peninsula, Alaska'.}
#' 
#' @details
#' 
#' \subsection{Filename options (\code{filename=})}{
#'  \itemize{
#'      \item{\code{NA}}: An auto-generated file.
#'      \item{\code{NULL}}: A temporary file.
#'      \item{<character string>}: Any desired name.
#'  }
#' Autogenerated files are of the form:
#'
#' \code{iriswsQ.flinnengdahl}
#'
#' Temporary files are generated within \code{\link{query.iris}}.
#' }
#' 
#' @name flinnengdahl
#' @aliases flinn
#' @author AJ Barbour
#' 
#' @param lat numeric; North latitude, in decimal degrees.
#' Must be within the range [-90.0, 90.0]
#' @param lon numeric; East longitude, in decimal degrees.
#' Must be within the range [-180.0, 180.0]
#' @param output character; 
#' determines which Flinn-Engdahl representation is retrieved:
#' \code{code} retrieves the numeric code (e.g., 32), and
#' \code{region} retrieves the region description (e.g., OREGON)
#' @param filename \code{NA} for an auto-generated filename based on the
#' inputs; \code{NULL} for a temporary filename; or, a character string
#' of the user's choosing.  See \strong{Details} for details about
#' the auto-generated name.
#' @param verbose logical; should messages be given?
#' 
#' @return A list (invisibly) with the query string, and data from the result
#' 
#' @references [1] \url{http://service.iris.edu/irisws/flinnengdahl/2/}
#' 
#' @family WebServices
#'
#' @examples
#' \dontrun{
#' querydata(flinnengdahl.ws(10,10)) # '755'
#' querydata(flinnengdahl.ws(10,10,output="region")) # 'NIGERIA'
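#' 
#' # the accessor takes a single point at a time; several points can be
#' # looked up with, e.g., sapply (hypothetical coordinates):
#' sapply(c(-125, -100, 10), function(lon) querydata(flinnengdahl.ws(35, lon)))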
#' }
NULL

#' @rdname flinnengdahl
#' @export
ws.flinnengdahl <- function(lat, lon, output=c('code','region'), filename=NULL, verbose=FALSE){
    #
    stopifnot(length(lat) == 1)
    nlat <- as.numeric(lat)
    stopifnot(nlat<=90 & nlat>=-90)
    #
    stopifnot(length(lon) == 1)
    elon <- as.numeric(lon)
    stopifnot(elon<=180 & elon>=-180)
    #
    outp <- match.arg(output)
    #
    Q <- constructor2(lat=nlat, lon=elon, output=outp, service="flinnengdahl", ws.version="2")
    stopifnot(exists("Q"))
    #
    # deal with filename
    if (!is.null(filename)){
        if (is.na(filename)){
            # auto
            filename <- "iriswsQ.flinnengdahl"
        }
        filename <- as.character(filename)
    }
    #
    res <- query.iris(Q, filename=filename, verbose=verbose)
    #
    fi <- res[["file"]]
    if (verbose) system(paste("cat",fi))
    dat <- scan(fi, nlines=1, what=character(), quiet=!verbose)
    #
    toret <- res
    toret$querydata <- dat
    return(invisible(toret))
}
#' @rdname flinnengdahl
#' @export
flinnengdahl.ws <- ws.flinnengdahl

#' Access the 'resp' web service for instrument response information
#' 
#' @description From [1]: \emph{The RESP webservice gives access to 
#' instrument response information. The response information is 
#' returned in the RESP format [2]. The service 
#' can return responses for a channel, one or more channels under a 
#' location, or one or more channels under a station.}
#' 
#' @details
#' \subsection{Filename options (\code{filename=})}{
#'  \itemize{
#'      \item{\code{NA}}: An auto-generated file.
#'      \item{\code{NULL}}: A temporary file.
#'      \item{<character string>}: Any desired name.
#'  }
#' Autogenerated files are of the form:
#'
#' \code{iriswsQ.<network>.<station>.<location>.<channel>.resp}
#'
#' Temporary files are generated within \code{\link{query.iris}}.
#' }
#' 
#' @name resp
#' @author AJ Barbour
#'
#' @param network character; Regular network (e.g., \code{'IU'}) 
#' or virtual network (e.g., \code{'_FDSN'})
#' @param station character; Station code
#' @param location character; Location code. Use \code{--} instead of spaces. 
#' Wildcards are accepted.
#' @param channel character; Channel code. Wildcards are accepted.
#' @param time character; Find the response for the given time. \code{time} 
#' cannot be used with \code{starttime} or \code{endtime} (note the
#' \code{try} calls in the \strong{Examples}).
#' @param starttime character; find response information from this point on
#' @param endtime character; find response information up until this point
#' @param filename \code{NA} for an auto-generated filename based on the
#' inputs; \code{NULL} for a temporary filename; or, a character string
#' of the user's choosing.  See \strong{Details} for details about
#' the auto-generated name.
#' @param verbose logical; should messages be given?
#'
#' @return A list (invisibly) with the query string, and data from the result
#' 
#' @references [1] \url{http://service.iris.edu/irisws/resp/1/}
#' 
#' @references [2] \url{http://www.iris.edu/KB/questions/69/What+is+a+RESP+file?}
#' 
#' @family WebServices
#' 
#' @examples
#' ws.resp("PB","B084","--","LDD")
#' try(ws.resp("PB","B084","--","LDD", time=timestring(2012,10,0,0,0)))
#' try(ws.resp("PB","B084","--","LDD", starttime=timestring(2012,10,0,0,0)))
NULL

#' @rdname resp
#' @export
ws.resp <- function(network,
                    station,
                    location,
                    channel, 
                    time="",
                    starttime="",
                    endtime="",
                    filename=NA,
                    verbose=FALSE){
    #
    network <- toupper(network) 
    station <- toupper(station)
    location <- toupper(location)
    channel <- toupper(channel)
    #
    # only let 'time' take precedence when it has actually been specified
    if (!is.null(time) && nzchar(time)){
        starttime <- endtime <- NULL
    }
    #
    Q <- constructor2(net=network, 
                      sta=station,
                      loc=location,
                      cha=channel, 
                      time=time,
                      starttime=starttime, 
                      endtime=endtime,
                      service="resp")
    stopifnot(exists("Q"))
    #
    # deal with filename
    if (!is.null(filename)){
        if (is.na(filename)){
            # auto
            filename <- paste("iriswsQ", network, station, location, channel, "resp", sep=".")
        }
        filename <- as.character(filename)
    }
    #
    res <- query.iris(Q, filename=filename, verbose=verbose)
    #
    fi <- res[["file"]]
    if (verbose) system(paste("cat",fi))
    dat <- NA
    #
    toret <- res
    toret$querydata <- dat
    return(invisible(toret))
}
#' @rdname resp
#' @export
resp.ws <- ws.resp