knitr::opts_chunk$set( collapse = TRUE, comment = "#>" )
library(ncdf4)
library(chron)
library(raster)
library(lattice)
library(RColorBrewer)
library(ggplot2)
# check and raise the memory limit (Windows only)
memory.limit()
memory.limit(size = 100000)
# Enter path to data below
ppath <- "C:/Users/aandi/Desktop/Fall_2021/GEOG_399/Datasets/"

# High Cloud Area Fraction
ncpath <- ppath
ncname <- "hcdc.mon.mean"
ncfname <- paste(ncpath, ncname, ".nc", sep = "")
ncfname2 <- paste0(ncpath, ncname, ".nc")   # paste0() builds the same file name
dname <- "hcdc"

# open the netCDF file
ncin <- nc_open(ncfname)
print(ncin)
# get longitude and latitude
lon <- ncvar_get(ncin, "lon")    # might be "lon" or "longitude"; check your print(ncin)
nlon <- dim(lon)
head(lon)                        # view the first few entries
lon_units <- ncatt_get(ncin, "lon", "units")
lon_units                        # 'hasatt' means 'has attribute'
lat <- ncvar_get(ncin, "lat")    # might be "lat" or "latitude"; check your print(ncin)
nlat <- dim(lat)
head(lat)                        # starts at the North Pole and heads south
lat_units <- ncatt_get(ncin, "lat", "units")
lat_units
print(c(nlon, nlat))
dim(lat)
# get time
time <- ncvar_get(ncin, "time")
time
tunits <- ncatt_get(ncin, "time", "units")
nt <- dim(time)
nt
tunits
# get the high cloud cover data ('hasatt' is a logical flag, 'value' holds the actual attribute)
hcdc_array <- ncvar_get(ncin, dname)   # this can take a long time with a big file
dlname <- ncatt_get(ncin, dname, "long_name")
dunits <- ncatt_get(ncin, dname, "units")
fillvalue <- ncatt_get(ncin, dname, "_FillValue")
dim(hcdc_array)   # if 4 dimensions, use [,,,1] in brackets below; if 3 dimensions, use [,,1]
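# A sketch (an addition, not in the original): for very large files you can read
# just a subset of the variable with the start/count arguments of ncvar_get(),
# e.g. only the first 12 time steps over the full grid. Add a fourth entry to
# start and count if the variable has a level dimension.
hcdc_first_year <- ncvar_get(ncin, dname,
                             start = c(1, 1, 1),
                             count = c(nlon, nlat, 12))
dim(hcdc_first_year)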
# get global attributes (not all of these will exist; check the original nc file)
title <- ncatt_get(ncin, 0, "title")
institution <- ncatt_get(ncin, 0, "institution")
datasource <- ncatt_get(ncin, 0, "source")
references <- ncatt_get(ncin, 0, "references")
history <- ncatt_get(ncin, 0, "history")
Conventions <- ncatt_get(ncin, 0, "Conventions")
ls()
# replace netCDF fill values with NAs
hcdc_array[hcdc_array == fillvalue$value] <- NA

# double check it worked:
head(as.vector(hcdc_array[,,1]))
length(na.omit(as.vector(hcdc_array[,,1])))   # total number of non-NA grid cells; depending on dimensions could be 2 or 3 commas
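# Quick sanity check (an added sketch, not in the original): the fraction of
# missing cells and the value range of the first slice (compare with dunits).
slice1 <- hcdc_array[,,1]        # use [,,1,1] if the array has 4 dimensions
mean(is.na(slice1))              # proportion of NA grid cells
range(slice1, na.rm = TRUE)      # range of the remaining values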
# convert the time variable (from the website example code):
tustr <- strsplit(tunits$value, " ")
tdstr <- strsplit(unlist(tustr)[3], "-")
tmonth <- as.integer(unlist(tdstr)[2])
tday <- as.integer(unlist(tdstr)[3])
tyear <- as.integer(unlist(tdstr)[1])
chron(time, origin = c(tmonth, tday, tyear))
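# Note (an added sketch, not from the original): chron() interprets the time
# values as days. If tunits$value reads "hours since ..." (common for NCEP
# Reanalysis files), divide by 24 first:
if (grepl("hours", tunits$value)) {
  print(chron(time / 24, origin = c(tmonth, tday, tyear)))
}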
m <- 1
hcdc_slice <- hcdc_array[,,m]
dim(hcdc_slice)
dim(hcdc_array)
plot(hcdc_slice)

# quick map of the slice
image(lon, lat, hcdc_slice, col = rev(brewer.pal(10, "RdBu")))
# Convert time (my version)
var.nc <- brick(ncfname, varname = dname, layer = "time")   # reopen the netCDF file as a RasterBrick for the TIME variable
var.nc
# TIME: remove H M S from the time format
TIME <- as.POSIXct(substr(var.nc@data@names, start = 2, stop = 20), format = "%Y.%m.%d")
df <- data.frame(INDEX = 1:length(TIME), TIME = TIME)
head(TIME)
tail(TIME)
head(df)
# get a single slice or layer (January)
m <- 1
hcdc_slice <- hcdc_array[,,m]
dim(hcdc_slice)
plot(hcdc_slice)
# levelplot of the slice
# Create a title for the plot: take the TIME[m] string; keep 7 characters for year-month or 10 for the full date
ttl <- paste(dname, " ", substr(TIME[m], 1, 7), sep = "")
grid <- expand.grid(lon = lon, lat = lat)
cutpts <- seq(0, 100, by = 10)   # breaks for the color legend (hcdc is a cloud fraction in percent; for temperature in degrees C you might use -50 to 50)
levelplot(hcdc_slice ~ lon * lat, data = grid, at = cutpts, cuts = 11, pretty = TRUE,
          main = ttl, col.regions = rev(brewer.pal(10, "RdBu")))
seq(as.Date("2000/1/1"), by = "month", length.out = 12)
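# A sketch (an assumption, not in the original): build a Date for every layer in
# the file. The start date below must match the first time step of your data
# (check the chron() or TIME output above); here it is assumed to be January 1979.
dates <- seq(as.Date("1979/1/1"), by = "month", length.out = nt)
head(dates)
tail(dates)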
writeRaster(r, "hcdc1979.tif", "GTiff", overwrite=TRUE)
library(maptools)       # for map2SpatialPolygons
library(maps)           # for world maps
library(latticeExtra)   # for layer(), used to add polygons to spplot
map("world2",fill=FALSE,plot=TRUE) World2 <- map("world2",fill=TRUE,plot=FALSE) World2sp <- map2SpatialPolygons(World2, IDs = sub(":.*", "", World2$names), proj4string=CRS("+proj=longlat +datum=WGS84"))
# grab time slices from a raster brick instead of the array:
var.nc2 <- brick(ncfname, varname = dname)
var.nc2
plot(var.nc2[[1:12]])   # plot the first 12 maps
m <- 200 #which time slice do we want to view (can use this to create a LOOP later)
tmp_slice_r <- subset(var.nc2, m)   # extract one layer; for temperature data in Kelvin you would subtract 273.15 here, but hcdc is already a percentage
dim(tmp_slice_r)
plot(tmp_slice_r)
# adjust dpi for saved figures
dpi <- 300
cutpts <- seq(0, 100, by = 10)   # color breaks (hcdc is a percentage; use e.g. -50 to 50 for temperature in degrees C)
# create color palettes:
precip.palette <- colorRampPalette(brewer.pal(11, "BrBG"))(100)
temp.palette <- rev(colorRampPalette(c("darkred", "red", "orange", "lightyellow", "white",
                                       "azure", "cyan", "blue", "darkblue"))(100))
# Create a title for the plot: take the TIME[m] string; keep 7 characters for year-month or 10 for the full date
ttl <- paste(dname, " ", substr(TIME[m], 1, 7), sep = "")
spplot(tmp_slice_r, main = ttl, col.regions=temp.palette) + layer(sp.polygons(World2sp, col='black'))
if (dname=="sst"){ x1 <- crop(tmp_slice_r, extent(-180, 0, -90, 90)) x2 <- crop(tmp_slice_r, extent(0, 180, -90, 90)) extent(x1) <- c(180, 360, -90, 90) tmp_slice_r <- merge(x1, x2) #try spplot again, it should work! }
# SAVE PLOT as a high quality PNG file
imgpath <- file.path("C:", "AbbyF_working", "ClimateData", paste0(ttl, ".png"))
png(file = imgpath, width = 9 * dpi, height = 5 * dpi, res = dpi)
p <- spplot(tmp_slice_r, at = cutpts, main = ttl, col.regions = temp.palette) +
  layer(sp.polygons(World2sp, col = "black"))
print(p)
dev.off()
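# A sketch (an assumption, not from the original): the same steps can be looped
# to write one PNG per layer, e.g. for the first 12 months.
for (mm in 1:12) {
  slice_mm <- subset(var.nc2, mm)
  ttl_mm <- paste0(dname, " ", substr(TIME[mm], 1, 7))
  png(file = file.path("C:", "AbbyF_working", "ClimateData", paste0(ttl_mm, ".png")),
      width = 9 * dpi, height = 5 * dpi, res = dpi)
  p <- spplot(slice_mm, main = ttl_mm, col.regions = temp.palette) +
    layer(sp.polygons(World2sp, col = "black"))
  print(p)
  dev.off()
}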
new.ex<-extent(50,260,-5,30) #xmin, xmax (180+80), ymin, ymax
tmp_slice_r_crop <- crop(tmp_slice_r, new.ex, snap = "out")   # crop to change the extent
tmp_slice_r_crop   # check the extent
spplot(tmp_slice_r_crop, at=cutpts, main = ttl, col.regions=temp.palette) + layer(sp.polygons(World2sp, col='black'))
new.ex.hi <- extent(195, 210, 15, 26)   # 165W (dateline 180+15) to 150W (180+30), 15N to 26N
tmp_slice_r_crop.hi <- crop(tmp_slice_r, new.ex.hi, snap = "out")   # crop to change the extent
cutpts.hi <- c(10,12,15,17,20,22,25,27,30) #for color breaks
spplot(tmp_slice_r_crop.hi, at=cutpts.hi, main = ttl, col.regions=temp.palette) + layer(sp.polygons(World2sp, col='black'))
ROI <- extent(0, 20, 45, 65)   # new extent: North Sea region
r.crop <- crop(tmp_slice_r, ROI)
plot(r.crop)
# take a transect along 55N and overlay the points
lon.pts <- seq(0, 20, by = 0.5)
lat.pts <- rep(55, length(lon.pts))
plot(r.crop)
points(lon.pts, lat.pts, pch = 4, col = "red")
extract.pts <- cbind(lon.pts, lat.pts)
ext <- extract(r.crop, extract.pts, method = "bilinear")
ext
# see how it looks:
plot(lon.pts, ext, type = "b", pch = 2, xlab = "Longitude", ylab = dname)
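# Sketch (an addition, not in the original): compare the bilinear values with
# the nearest-cell values returned by method = "simple".
ext.simple <- extract(r.crop, extract.pts, method = "simple")
plot(lon.pts, ext, type = "b", pch = 2, xlab = "Longitude", ylab = dname)
lines(lon.pts, ext.simple, type = "b", pch = 1, col = "blue")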
# now extract from the brick, multiple layers at the same time:
b <- var.nc2[[1:12]]   # make a smaller brick, layers 1-12
ann.extract <- extract(b, extract.pts, method = "bilinear")
head(ann.extract)
matplot(lon.pts, ann.extract, type = "l", xlab = "Longitude", ylab = dname)   # matplot: plot the columns of a matrix
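# A sketch (an assumption, not in the original): reshape the point-by-layer
# matrix into a long data frame so the 12 monthly transects can be plotted with ggplot2.
ann.df <- data.frame(lon = rep(lon.pts, times = ncol(ann.extract)),
                     month = rep(1:ncol(ann.extract), each = length(lon.pts)),
                     value = as.vector(ann.extract))
ggplot(ann.df, aes(x = lon, y = value, colour = factor(month))) +
  geom_line() +
  labs(x = "Longitude", y = dname, colour = "Month")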
# try a Hawaii example
new.ex.hi <- extent(195, 210, 15, 26)   # 165W (dateline 180+15) to 150W (180+30), 15N to 26N
tmp_slice_r_crop.hi <- crop(tmp_slice_r, new.ex.hi, snap = "out")   # crop to change the extent
plot(tmp_slice_r_crop.hi)
# take a transect at 19.5N and overlay the points
lon.pts2 <- seq(195, 210, by = 0.5)
lat.pts2 <- rep(19.5, length(lon.pts2))
plot(tmp_slice_r_crop.hi)
points(lon.pts2, lat.pts2, pch = 4, col = "red")
extract.pts2 <- cbind(lon.pts2, lat.pts2)
ext <- extract(tmp_slice_r_crop.hi, extract.pts2, method = "bilinear")
ext
# see how it looks:
plot(lon.pts2, ext, type = "b", pch = 2, xlab = "Longitude", ylab = dname)
################################################################################
# Now let's create annual average files from the monthly data:
# if the year is the same, average the months.

# set the output folder (can be a folder that doesn't exist yet)
out_folder <- "C:/AbbyF_working/ClimateData/t2m_YearlyAvgs_example"
dir.create(out_folder)
# Function: mean or sum?
func <- mean
df <- data.frame(INDEX = 1:length(TIME), TIME = TIME)
head(df)
years <- unique(format(TIME, "%Y"))
head(years)
YEAR <- years[1]
# remember var.nc is the RasterBrick we created earlier:
# var.nc <- brick(ncfname, varname = dname, layer = "time")
# LOOP over the years
for (YEAR in years) {
  subset <- df[format(df$TIME, "%Y") == YEAR, ]   # grab all the files in that year
  sub.var <- var.nc[[subset$INDEX]]               # create a raster stack subset for the files in that year
  print(paste("Executing Average for Year: ", YEAR))
  av.var <- calc(sub.var, fun = func,
                 filename = paste0(out_folder, "/", dname, "_Year", YEAR, "Avg.tif"),
                 overwrite = TRUE)
  print(paste("Raster for Year ", YEAR, " Ready in the Output Folder"))
}
# open one to see what it looks like:
setwd(out_folder)
YEAR <- 2020
outname <- paste0(dname, "_Year", YEAR, "Avg.tif")
r <- raster(outname)
plot(r, main = outname)   # (for temperature in Kelvin you would plot r - 273.15 here)
# the same loop can produce daily or monthly averages; just build the matching index:
days <- unique(format(TIME, "%d"))
months <- unique(format(TIME, "%Y.%m"))
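# An alternative sketch (an assumption, not the author's loop): raster::stackApply()
# computes the same per-year means without an explicit loop, using an index
# vector that maps each layer to its year.
year.index <- as.numeric(factor(format(TIME, "%Y")))
yearly.avg <- stackApply(var.nc, indices = year.index, fun = mean)
names(yearly.avg) <- paste0("Y", unique(format(TIME, "%Y")))
yearly.avg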
################################################################################
# Now let's create a data frame from a time slice: turn all grid cells (lat-lon) into a long vector

# create dataframe -- reshape the data
# matrix (nlon*nlat rows by 2 cols) of lons and lats
lonlat <- as.matrix(expand.grid(lon, lat))
dim(lonlat)
# head(lonlat)
# the slice must be a 2-D lon x lat matrix; if the original array had 4 dimensions
# to start, you would need to reduce it one more time (e.g. hcdc_array[,,1,1])
tmp_slice2 <- hcdc_slice
# vector of data values
tmp_vec <- as.vector(tmp_slice2)
length(tmp_vec)
# create the dataframe and add names
tmp_df01 <- data.frame(cbind(lonlat, tmp_vec))
names(tmp_df01) <- c("lon", "lat", paste(dname, as.character(m), sep = "_"))
head(na.omit(tmp_df01), 10)
# here you can write the output table to CSV if you want, using na.omit(); create a path & filename "csvfile" first
# write.table(na.omit(tmp_df01), csvfile, row.names = FALSE, sep = ",")
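# A sketch (an assumption, not in the original): the same reshaping can be applied
# to several time steps at once, giving one row per grid cell per month.
nm <- 12   # number of months to include
long_df <- data.frame(lon = rep(lonlat[, 1], times = nm),
                      lat = rep(lonlat[, 2], times = nm),
                      month = rep(1:nm, each = nrow(lonlat)),
                      value = as.vector(hcdc_array[, , 1:nm]))   # add a 4th index if the array has 4 dimensions
head(na.omit(long_df), 10)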