#-------------Manual for obtaining and applying -------------#
#-----hydrometeorological variables from global databases----#
#-----------------for drought management --------------------#
#------------Scripts to obtain variables----------------#
#Ronnie Araneda Cabrera
#Universidade da Coruña
#ronnie.aranedac@udc.es
#13/april/2021
#---------------------------------------------------------#
#Options to increase the range of visibility of matrices in the console
options(max.print=99999999)#Maximum number of entries that may be printed to the console
options(scipen=999)#To avoid scientific notation of numbers
#Options to increase the memory available to make use of the script.
#NOTE(review): memory.limit()/memory.size() only ever worked on Windows and are
#defunct from R 4.2 onwards (they warn and return Inf), so they are attempted
#only on Windows; on other platforms the script simply continues.
if (.Platform$OS.type == "windows") {
  memory.limit()#The available/usable memory limit is displayed
  memory.size()#Current memory usage is displayed
  memory.size(max=60000)#Memory usage size is changed (MB)
}
#The packages to be used are installed (only if missing) and loaded.
#NOTE(review): the original listed and loaded maptools twice and re-ran
#install.packages() on every execution; the list below is de-duplicated and
#packages are only installed when they are not already available.
pkgs <- c(
  "maptools",     #Package intended to use shapefiles
  "tmap",         #Package for using and graphing geographic information
  "lattice",      #Package intended to use spatial graphics
  "RColorBrewer", #Package for colouring graphics
  "ggplot2",      #Package for graphs
  "ncdf4",        #Package intended to use .nc files
  "raster",       #Package intended for use with raster files
  "dplyr",        #Package for matrix manipulation
  "sf"            #Package intended to manipulate geographic information
)
missing_pkgs <- pkgs[!vapply(pkgs, requireNamespace, logical(1), quietly = TRUE)]
if (length(missing_pkgs) > 0) install.packages(missing_pkgs)
#The packages to be used are loaded
for (p in pkgs) library(p, character.only = TRUE)
#----------------------------Section 10.Precipitation from CHIRPS----------------------------#
#The necessary geographical information is read from the folders where they are stored.
#NOTE(review): maptools::readShapePoly() is defunct (maptools was retired in 2023);
#sf::st_read() is used instead and the result is coerced to the sp class
#(SpatialPolygonsDataFrame) that the rest of the script expects.
mz<-as(st_read("E:/GIS_Mozambique/Layer_Mozambique/mozambique.shp"), "Spatial")#Mozambique shapefile is read
#The composition of the .nc files is shown
chi <- nc_open("E:/19_MANUAL_data_global/CHIRPS/chirps-v2.0.monthly.nc")#The CHIRPS file is read
print(chi)#The file and its composition are displayed
#A vector of dates is extracted; the time variable is interpreted as days since
#1980-01-01 (assumed from the chosen origin - confirm against the units shown by print(chi))
time<-data.frame(as.Date(ncvar_get(chi, "time", verbose = F),origin="1980-01-01"))
years<-data.frame(as.numeric(unlist(format(time, format = "%Y"))))#Vector of years
months<-data.frame(as.numeric(unlist(format(time, format = "%m"))))#Vector of months
dim(time)#The dimension of the vector is shown: number of rows x number of columns
nc_close(chi)#The .nc file is closed as it uses a large amount of memory while open
#Precipitation information is extracted from the CHIRPS netCDF as a multi-layer raster
precip<-brick("E:/19_MANUAL_data_global/CHIRPS/chirps-v2.0.monthly.nc", varname = "precip")#Precipitation is extracted
class(precip)#It shows what kind of variable it is (a RasterBrick is expected)
mode(precip)#It is shown whether the variable information is numeric or text
dim(precip)#The dimension of the raster is displayed number of rows x number of columns x number of months
#A raster is extracted from month 1
precip_1 <-subset(precip,1) #1 is the order of the number of months, comparable with the time step of the variable time
class(precip_1);mode(precip_1)#It is checked that the variable maintains raster characteristics
dim(precip_1)#It is verified that we now have the raster for a single time step (1 in this case)
#Verification of geographical information
plot(precip_1)#The raster is plotted to verify the information
plot(mz,add=T)#Plot the raster and check if the shapefile of interest (mz) is located where it should be
#Precipitation information is extracted within the shapefile of interest (mz)
step1 <- crop(precip_1, mz) #The raster is cropped to the bounding box of the area of interest
step2 <- rasterize(mz, step1) #The shapefile is transformed into a raster (NA outside the polygon)
#NOTE(review): the original used step1*step2, which multiplies precipitation by the
#feature ID written by rasterize(); that is only correct when the shapefile holds a
#single feature (ID 1). mask() keeps the values unchanged inside the polygon and sets
#NA outside, regardless of the number of features.
final <- mask(step1, step2) # The final product is created
plot(final)#The raster of interest is plotted
plot(mz, add=TRUE)
#The coordinates are extracted according to CHIRPS within the area of interest
aux <-extract(final, mz, cellnumbers=T)#From the final product we extract the order of the coordinates
#within the main raster and the associated precipitation value
coordinates<-data.frame(xyFromCell(final, data.frame(aux)[,1]))#The coordinates of what is inside the
#shape mz are obtained
colnames(coordinates)<-c("long","lat")#Each column of x and y data is named as Longitude and Latitude
#It can be modified
dim(coordinates)#Look at the dimension of the vector which equals the number of cells within the area of interest
plot(coordinates[,1],coordinates[,2])#The coordinates are plotted to check once again that they are correct
head(coordinates)#The first rows of the coordinates are looked at to check that the matrix is correct
#The coordinates are saved as a .txt file
write.table(coordinates,"E:/19_MANUAL_data_global/CHIRPS/coordinate_CHIRPS.txt",col.names = T,row.names = F)
#Change the desired location
#Information is extracted for each time step in a loop (can take 20-30 minutes or more).
#BUG FIX(review): the original labelled every row with time[1,]/years[1,]/months[1,],
#so all 468 rows carried the date of the first month; the loop index i is used now.
#Rows are collected in a pre-allocated list and bound once at the end, instead of
#rbind-ing inside the loop (which copies the growing matrix at every step).
n_months <- 468 #468 are the months of information
rows <- vector("list", n_months)
for (i in 1:n_months){
  precip_i <-subset(precip,i)#Raster for month i
  step1 <- crop(precip_i, mz) #The raster is intersected with the area of interest
  step2 <- rasterize(mz, step1) #The shapefile is converted to raster
  final <- mask(step1, step2) #Final product: precipitation inside mz, NA outside
  rows[[i]] <- data.frame(date=time[i,],year=years[i,],month=months[i,],
                          t(data.frame(extract(final, mz, cellnumbers=T))[,2]))
  print(paste(i,"de",n_months))#Loop progress indicator
}
#aux is the result matrix containing the time series of information for each cell
aux <- do.call(rbind, rows)
dim(aux)#The number of columns of the aux matrix must be equal to the number of cells +3 (3 columns of date, year and month)
#The number of rows equals the number of months
head(aux[,1:10])#The upper left part of the matrix is visible
#The information is saved as an .RDS file which is used to store large information
#associated with the R environment. Using .txt files may take more time
saveRDS(aux,"E:/19_MANUAL_data_global/CHIRPS/data_CHIRPS.rds")#Change to the desired location
#Monthly data are aggregated to annual data
#Monthly information is read in RDS format
dat_month<-readRDS("E:/19_MANUAL_data_global/CHIRPS/data_CHIRPS.rds")#File location is used
head(dat_month[,1:10])#The upper left part of the matrix is visible
class(dat_month)#Expected to be a data.frame (date, year, month, then one column per cell)
mode(dat_month)#Storage mode of the object (a data.frame reports "list")
#Loop aggregating monthly data to annual totals: one row per year, one column per cell
for(g in 1981:2019){# g runs over every year with available information
  year_rows<-dat_month[dat_month[,2]==g,]#Rows of the monthly matrix belonging to year g
  year_total<-data.frame(year=g,t(colSums(year_rows[,4:ncol(year_rows)])))#The 12 monthly values of every cell are summed
  if(g==1981){
    dat_anual<-year_total#The first year starts the annual matrix
  }else{
    dat_anual<-rbind(dat_anual,year_total)#Following years are appended below
  }
  print(paste(g,"de",2019))#Loop progress indicator
}
dim(dat_anual)#The dimension of the matrix is checked (number of years x number of cells +1 corresponding to the year)
head(dat_anual[,1:10])#The upper left part of the matrix is checked
#The information is saved as an .RDS file
saveRDS(dat_anual,"E:/19_MANUAL_data_global/CHIRPS/data_anual_CHIRPS.rds")#Change to the desired location
#Annual precipitation data are averaged per cell over all years.
#NOTE(review): `coordinates` must still hold the CHIRPS coordinate matrix built
#earlier in this section (it is overwritten by later sections) - run this section in order.
dat_anual_average<-data.frame(coordinates,colMeans(dat_anual[,-1]))#A matrix containing the coordinates of each pixel
# and its corresponding annual average precipitation
#value is formed. Name can be changed
colnames(dat_anual_average)<-c("Long","lat","Pre_CHIRPS")#The columns of the matrix are named
head(dat_anual_average)#The composition of the matrix is reviewed
#The information is saved as an .RDS file
saveRDS(dat_anual_average,"E:/19_MANUAL_data_global/CHIRPS/data_anual_average_CHIRPS.rds")#Change the desired location
#END
#----------------------------Section 11.Vegetation indices from NOAA STAR----------------------------#
#The necessary geographical information is read from the folders where they are stored.
#NOTE(review): maptools::readShapePoly() is defunct; sf::st_read() + coercion to the
#sp classes used by the rest of this section replaces it.
mz<-as(st_read("E:/GIS_Mozambique/Layer_Mozambique/mozambique.shp"), "Spatial")#Mozambique shapefile is read
#Vector with VH filenames is read
vh<-read.table("E:/data_NOAASTAR/VH_extract.txt")#File with names of downloaded files
dim(vh)#Dimension (number of weeks of data) is revised
#The composition of the files is checked, one is taken as an example
i=1 #i is the row number of the file with names vh
aux <- nc_open(paste0("E:/data_NOAASTAR/doc_pag_web/",vh[i,]))#The first downloaded file is read
print(aux)#The file is displayed and its composition is shown
#A variable is extracted to analyse its composition
vci <- ncvar_get(aux,"VCI",verbose=F)#The VCI is extracted
#BUG FIX(review): ncdf4 objects are closed with nc_close(); close() has no method
#for the "ncdf4" class and fails.
nc_close(aux)#The aux file is closed as it may use too much memory
class(vci)#The class of the file is shown
mode(vci)#The mode of the file is shown
dim(vci)#The dimension of the file corresponding to the number of cells with data in the world is displayed
plot(raster(vci))#The variable VCI is plotted
#Vectors containing the possible longitudes and latitudes are created.
#The global VH grid has a 360/10000 = 0.036 degree step; the cell-centre
#coordinates are computed with vectorised arithmetic instead of element-by-element loops.
dlong <- 360/10000; dlat <- 360/10000
lon <- matrix(-180 + ((0:9999) + 0.5) * dlong, ncol = 1)#Matrix of possible longitude centres
lat <- matrix(75.024 - ((0:3615) + 0.5) * dlat, ncol = 1)#Matrix of possible latitude centres
dim(lon)#Number of longitudinal divisions
dim(lat)#Number of latitudinal divisions
head(lon)
head(lat)
#Limited to a square encompassing Mozambique
long1<-30 #Left longitudinal limit
long2<-41 #Right longitudinal limit
lat1<-(-27) #Lower latitudinal limit
lat2<-(-10) #Upper latitudinal limit
long_a<-lon[lon<=long2]
lon_b<-data.frame(long_a[long_a>=long1])#Longitudes are trimmed within the limits
lat_a<-lat[lat<=lat2]
lat_b<-data.frame(lat_a[lat_a>=lat1])#Latitudes are cut off within the boundaries
#A matrix is created with ordered pairs of all coordinates within the box of interest.
#expand.grid() with latitude varying fastest reproduces the original nested-loop order
#(for each longitude, every latitude) without growing the matrix row by row.
pairs_box <- expand.grid(latitude = lat_b[, 1], longitude = lon_b[, 1])
coor <- cbind(longitude = pairs_box$longitude, latitude = pairs_box$latitude)#coor is the matrix with the ordered pairs of coordinates
dim(coor)#Dimension of the matrix is checked
head(coor)#First rows of the matrix are observed
#Check that the extracted coordinates are well located
plot(coor) #The points are plotted
plot(mz,add=T,col="blue")#Plot the Mozambique shapefile and check that the
#points are in the same place and cover the whole polygon
#The coordinates within the area of interest are obtained
#The coordinate matrix is transformed into a spatial points object (geographic information)
coor_gis <- SpatialPointsDataFrame(coords = as.data.frame(coor),
data = as.data.frame(coor),
proj4string = CRS("+proj=longlat +datum=WGS84 +no_defs"))#The projection (WGS84 lon/lat) is defined
proj4string(mz) <- proj4string(coor_gis)#The same projection is assigned to mz so that over() can compare both layers
coor_mz<-coor_gis[complete.cases(over(coor_gis, mz)),]#The coordinates intersecting the shapefile mz are extracted
class(mz);class(coor_gis);class(coor_mz)#The type of variable is checked, they must be spatial.
dim(coor_mz)#The dimension is reviewed
#It is drawn to check that they are those points
plot(coor_mz)
coordinates<-as.data.frame(coor_mz)[,1:2]#The spatial object is transformed into a matrix and saved as text
write.table(coordinates,"E:/19_MANUAL_data_global/VCI_TCI_VHI/coordinates_extract.txt",
col.names = T,row.names = F)#Change the folder
#Coordinates are read back from disk
coordinates<-read.table("E:/19_MANUAL_data_global/VCI_TCI_VHI/coordinates_extract.txt",header = T)
dim(coordinates)
#Round the decimal values of the coordinates so they can be compared exactly below
r1<-round(coordinates[,1:2],3)#Mz coordinates are rounded to 3 decimal places
r2<-round(lon,3)#All longitude values are rounded to 3 decimals
r3<-round(lat,3)#All latitude values are rounded to 3 decimal places
#A matrix is created that locates, for every mz coordinate, its (column, row)
#position within the "VCI, TCI and VHI" files.
#IMPROVEMENT(review): match() returns the first position of each rounded coordinate
#in the rounded global grids, which is equivalent to the original which()-loop for a
#strictly monotone grid, but vectorised. A coordinate with no exact match now stops
#with an error instead of being silently dropped (the original quietly produced a
#misaligned position matrix in that case).
lo1 <- cbind(match(r1[, 1], r2[, 1]), match(r1[, 2], r3[, 1]))
if (anyNA(lo1)) stop("Some mz coordinates were not found in the NOAA STAR grid")
#lo1 is the variable containing the positions within the files to extract for each coordinate
colnames(lo1)<-c("longitude", "latitude")#Name the position columns
dim(lo1)#Check dimension, must be equal to that of coordinates
head(lo1)#Matrix organisation is checked, must be equal to that of coordinates
#The weekly series of each index are extracted in a matrix of dimension number of weeks x number of cells.
#BUG FIXES(review): the loop bound and counter used nrow(sm), but no object `sm` exists
#anywhere in this script - the list of files is `vh`, so nrow(vh) is used. Likewise the
#accumulated matrices are vci2/tci2/vhi2; the original checked and saved vci3/tci3/vh3,
#which were never created.
for (i in 1:nrow(vh)){ #The loop opens and extracts the values of each index in each weekly file
  indvarios <- nc_open(paste0("E:/data_NOAASTAR/doc_pag_web/",vh[i,]))#Each weekly file is read
  vci <- ncvar_get(indvarios,"VCI",verbose=F)#Global VCI information is extracted
  tci <- ncvar_get(indvarios,"TCI",verbose=F)#Global TCI information is extracted
  vhi <-ncvar_get(indvarios,"VHI",verbose=F)#Global VHI information is extracted
  #The dates of each file are extracted from which we derive the year, month and week of the file
  year <- as.data.frame(as.numeric(unlist(ncatt_get(indvarios,0, "YEAR",verbose=F))))[-1,]
  week <- as.matrix(as.numeric(unlist(ncatt_get(indvarios,0, "PERIOD_OF_YEAR"))))[-1,]
  month <- as.numeric(format((as.Date(paste(year,week,"1",sep = "-"),'%Y-%W-%u')),"%m"))
  #The loop that extracts the information from each cell is launched.
  #It can take several hours, depending on the number of cells
  for (j in 1:nrow(lo1)){#The number of steps in the loop is the number of cells we will extract values from
    if (j==1){
      vci1<- data.frame(year,month,week,vci[lo1[j,1],lo1[j,2]])
      tci1<- data.frame(year,month,week,tci[lo1[j,1],lo1[j,2]])
      vhi1<- data.frame(year,month,week,vhi[lo1[j,1],lo1[j,2]])
    }
    if (j!=1){
      vci1<- cbind(vci1,vci[lo1[j,1],lo1[j,2]])#The vector contains the VCI data of the corresponding week for each cell
      tci1<- cbind(tci1,tci[lo1[j,1],lo1[j,2]])#The vector contains the TCI data of the corresponding week for each cell
      vhi1<- cbind(vhi1,vhi[lo1[j,1],lo1[j,2]])#The vector contains the VHI data of the corresponding week for each cell
    }
    print(paste0(j,"de",nrow(lo1)))#Step counter
  }
  colnames(vci1)<-c("year","month","week",(1:(ncol(vci1)-3)))#The columns are named
  colnames(tci1)<-c("year","month","week",(1:(ncol(tci1)-3)))#The columns are named
  colnames(vhi1)<-c("year","month","week",(1:(ncol(vhi1)-3)))#The columns are named
  if (i==1){
    vci2<-vci1
    tci2<-tci1
    vhi2<-vhi1
  }
  if (i!=1){
    vci2<-rbind(vci2,vci1)#The matrix accumulates the corresponding weekly VCI series for each cell
    tci2<-rbind(tci2,tci1)#The matrix accumulates the corresponding weekly TCI series for each cell
    vhi2<-rbind(vhi2,vhi1)#The matrix accumulates the corresponding weekly VHI series for each cell
  }
  nc_close(indvarios)#The file is closed as keeping it open takes up a lot of internal memory
  print(paste(i,"de",nrow(vh)))#Step counter
} # Close loop with list of vh files
dim(vci2)#Check dimension of matrix, must be number of weeks x number of cells +3 (year, month and week)
dim(tci2)#Check dimension of the matrix, must be number of weeks x number of cells +3 (year, month and week)
dim(vhi2)#Check dimension of the matrix, must be number of weeks x number of cells +3 (year, month and week)
#Weekly data matrices are saved as .RDS files
saveRDS(vci2,"E:/19_MANUAL_data_global/VCI_TCI_VHI/vci_weekly_data.rds")
saveRDS(tci2,"E:/19_MANUAL_data_global/VCI_TCI_VHI/tci_weekly_data.rds")
saveRDS(vhi2,"E:/19_MANUAL_data_global/VCI_TCI_VHI/vhi_weekly_data.rds")
#Weekly matrices are added to monthly matrices
#The files containing the VCI, TCI and VHI index matrices are read back from disk
vci3<-readRDS("E:/19_MANUAL_data_global/VCI_TCI_VHI/vci_weekly_data.rds")
tci3<-readRDS("E:/19_MANUAL_data_global/VCI_TCI_VHI/tci_weekly_data.rds")
vhi3<-readRDS("E:/19_MANUAL_data_global/VCI_TCI_VHI/vhi_weekly_data.rds")
#BUG FIX(review): the checks and the auxiliary matrices below operated on
#vci2/tci2/vhi2 (the in-memory objects from the extraction loop) instead of the
#vci3/tci3/vhi3 objects just read, so this part could not be run on its own.
#The dimensions of the matrices are reviewed
dim(vci3)
dim(tci3)
dim(vhi3)
head(vhi3[,1:10])#The composition of the matrices is reviewed
#NOTE(review): x[is.na(x)]<-NA only normalises NaN entries to NA; it does NOT remove
#outliers as the original comments claimed. If sentinel values (e.g. -999) or values
#outside the valid index range must also be removed, an explicit filter is still
#needed - confirm against the NOAA STAR VH documentation.
auxil2<-vci3 #New name of the matrix containing the VCI values
auxil2[is.na(auxil2)]<-NA#NaN entries are normalised to NA
dim(auxil2)#The composition of the new matrix is revised
auxil3<-tci3#New name of the matrix containing the TCI values
auxil3[is.na(auxil3)]<-NA#NaN entries are normalised to NA
dim(auxil3)#The composition of the new matrix is revised
auxil4<-vhi3#New name of the matrix containing the VHI values
auxil4[is.na(auxil4)]<-NA#NaN entries are normalised to NA
dim(auxil4)#The composition of the new matrix is revised
#Weekly matrices are aggregated to monthly matrices.
#BUG FIX(review): colMeans() was applied to the whole filtered matrix, so the averaged
#year/month/week columns were carried along as three extra data columns. Downstream
#code (Section 13) treats every column after the first two as a cell, which misaligned
#the coordinate-to-column mapping by three. Only the cell columns (from column 4
#onwards) are averaged now, so the "+2 (year and month)" dimension comments hold.
for (i in 1981:2019){#The loop varies for the years available in the extracted data
  for (j in 1:12){#The weeks of each month in the year in which the loop is found are filtered out
    fir2<-filter(auxil2,auxil2[,1]==i & auxil2[,2]==j)#Weekly VCI values for year i, month j
    fir3<-filter(auxil3,auxil3[,1]==i & auxil3[,2]==j)#Weekly TCI values for year i, month j
    fir4<-filter(auxil4,auxil4[,1]==i & auxil4[,2]==j)#Weekly VHI values for year i, month j
    if (j==1){
      gir2<-data.frame(year=i,month=j,vci=t(data.frame(colMeans(fir2[,-(1:3),drop=FALSE],na.rm = T))))
      gir3<-data.frame(year=i,month=j,tci=t(data.frame(colMeans(fir3[,-(1:3),drop=FALSE],na.rm = T))))
      gir4<-data.frame(year=i,month=j,vhi=t(data.frame(colMeans(fir4[,-(1:3),drop=FALSE],na.rm = T))))
    }
    if (j!=1){
      gir2<-rbind(gir2,data.frame(year=i,month=j,vci=t(data.frame(colMeans(fir2[,-(1:3),drop=FALSE],na.rm = T)))))#A monthly VCI matrix is created for each year of the loop
      gir3<-rbind(gir3,data.frame(year=i,month=j,tci=t(data.frame(colMeans(fir3[,-(1:3),drop=FALSE],na.rm = T)))))#A monthly TCI matrix is created for each year of the loop
      gir4<-rbind(gir4,data.frame(year=i,month=j,vhi=t(data.frame(colMeans(fir4[,-(1:3),drop=FALSE],na.rm = T)))))#A monthly VHI matrix is created for each year of the loop
    }
  }
  if (i==1981){
    vci_m<-gir2
    tci_m<-gir3
    vhi_m<-gir4
  }
  if (i!=1981){
    vci_m<-rbind(vci_m,gir2)#Monthly VCI matrices are collected at each step of the loop
    tci_m<-rbind(tci_m,gir3)#Monthly TCI matrices are collected at each step of the loop
    vhi_m<-rbind(vhi_m,gir4)#Monthly VHI matrices are collected at each step of the loop
  }
  print(paste(i,"de","2019"))#Loop step counter
}
dim(vci_m)#Dimension of the monthly VCI matrix, must be num. of months x num. of cells +2 (year and month)
dim(tci_m)#Dimension of the monthly TCI matrix, must be num. of months x num. of cells +2 (year and month)
dim(vhi_m)#Dimension of the monthly VHI matrix, must be num. of months x num. of cells +2 (year and month)
head(vci_m[,1:8])#Matrix composition is checked
head(tci_m[,1:8])#Matrix composition checked
head(vhi_m[,1:8])#Matrix composition checked
#Months whose weeks are all NA produce NaN from colMeans(na.rm = T);
#those NaN entries are normalised to NA
vci_m[is.na(vci_m)]<-NA
tci_m[is.na(tci_m)]<-NA
vhi_m[is.na(vhi_m)]<-NA
#Monthly data matrices are saved as .RDS files
saveRDS(vci_m,"E:/19_MANUAL_data_global/VCI_TCI_VHI/vci_monthly_data.rds")
saveRDS(tci_m,"E:/19_MANUAL_data_global/VCI_TCI_VHI/tci_monthly_data.rds")
saveRDS(vhi_m,"E:/19_MANUAL_data_global/VCI_TCI_VHI/vhi_monthly_data.rds")
#END
#------------------------------------Section 12. Various: TerraClimate-------------------------------------#
#The necessary geographic information is read from the folders where it is stored.
#NOTE(review): maptools::readShapePoly() is defunct; sf::st_read() + coercion to the
#sp classes used by the rest of this section replaces it.
cl<-as(st_read("E:/GIS_Mozambique/Layer_Mozambique/basin_Licungo.shp"), "Spatial")#The shapefile of the Licungo basin is read
plot(cl)
#The composition of the files is analysed, one is taken as an example
aux <- nc_open("E:/data_TerraClimate/ppt_bruto/TerraClimate_ppt_2018.nc")
print(aux)
#BUG FIX(review): ncdf4 objects are closed with nc_close(); close() has no method for them.
nc_close(aux)#The aux file is closed as it may use too much memory
#A variable is extracted to analyse its composition
# Precipitation is extracted from the example file
pre <- brick("E:/data_TerraClimate/ppt_bruto/TerraClimate_ppt_2018.nc", varname = "ppt")
class(pre)#Shows what kind of file it is
mode(pre)#Shows the mode of the file
dim(pre)#Displays the dimension of the file corresponding to the number of cells with data in the world
#A raster is extracted from month 1
precip_1 <-subset(pre,1) # 1 is the order of the number of months, comparable with the time step of the variable time
class(precip_1);mode(precip_1)#Revised to maintain raster characteristics
dim(precip_1)#It is confirmed that you now have the raster for only one time step (1 in this case)
#Geographic information check
plot(precip_1) #Plot the raster to verify the information
plot(cl,add=T)#Plot the raster and check if the shapefile of interest (cl) is located where it is supposed to be
#Precipitation information is extracted within the shapefile of interest (cl)
step1 <- crop(precip_1, cl) #The raster is cropped to the bounding box of the basin
step2 <- rasterize(cl, step1) #The shapefile is rasterized (NA outside the polygon)
#NOTE(review): mask() replaces the original step1*step2, which would scale the values
#by the feature ID whenever the shapefile contains more than one feature.
final <- mask(step1, step2) #Final product: precipitation inside cl, NA outside
plot(final)#Plot the raster of interest
plot(cl, add=TRUE)
#TerraClimate coordinates within the area of interest are extracted
aux2 <-extract(final, cl, cellnumbers=T)#From the final product we extract the order of the coordinates
#within the main raster and the associated precipitation value
coordinates<-data.frame(xyFromCell(final, data.frame(aux2)[,1]))#The coordinates of what is inside the
#shape cl are obtained
colnames(coordinates)<-c("long","lat")#Each column of x and y data is named as Longitude and Latitude
#and can be modified
dim(coordinates)#Look at the dimension of the vector that equals the number of cells within the area of interest
plot(coordinates[,1],coordinates[,2])#Plot the coordinates to check once again that they are correct
head(coordinates)#Look at the first few rows of the coordinates to check that the matrix is correct
#The coordinates are saved as a .txt file
write.table(coordinates,"E:/19_MANUAL_data_global/TerraClimate/coordendas_TC_Licungo.txt",col.names = T,row.names = F)
#Change the desired location
#Vectors are created with dates and their corresponding years and months.
#The monthly sequence (15th of each month, Jan 1958 - Dec 2019) is built once
#and reused, instead of regenerating it for every derived vector.
monthly_seq <- seq(as.Date("1958/1/15"), as.Date("2019/12/15"), "months")
dates<-data.frame(monthly_seq)#Dates defining the start, end and time step
year<-data.frame(format(monthly_seq,"%Y"))#Dates to years
month<-data.frame(format(monthly_seq,"%m"))#Dates to months
#A loop is launched for the extraction of the variables of interest:
#precipitation (ppt), potential evapotranspiration (pet) and actual evapotranspiration (aet).
#NOTE(review): the final products use mask() instead of step1*step2 (which would
#multiply the values by the feature ID when the shapefile has more than one feature),
#and the meaningless sep = "" argument passed to paste0() was dropped.
for (k in 1958:2019){#The loop depends on the period of years the data needs to be extracted
  #The files containing each variable are opened from the folder containing the files
  aa1 <- brick(paste0("E:/data_TerraClimate/ppt_bruto/TerraClimate_ppt_",k,".nc"), varname = "ppt")
  aa2 <- brick(paste0("E:/data_TerraClimate/pet_bruto/TerraClimate_pet_",k,".nc"), varname = "pet")
  aa3 <- brick(paste0("E:/data_TerraClimate/aet_bruto/TerraClimate_aet_",k,".nc"), varname = "aet")
  for(j in 1:12){#This loop extracts the raster of the 12 months within each annual file
    pal1 <-subset(aa1,j)#The raster of the month varying from 1 to 12 is extracted when "j" varies
    pal2 <-subset(aa2,j)
    pal3 <-subset(aa3,j)
    step1_1 <- crop(pal1, cl) #Intersect the raster with the shapefile of interest
    step1_2 <- crop(pal2, cl)
    step1_3 <- crop(pal3, cl)
    step2_1 <- rasterize(cl, step1_1) #The shapefile is converted to raster
    step2_2 <- rasterize(cl, step1_2)
    step2_3 <- rasterize(cl, step1_3)
    final1 <- mask(step1_1, step2_1) #The final product is created (values inside cl, NA outside)
    final2 <- mask(step1_2, step2_2)
    final3 <- mask(step1_3, step2_3)
    ext1 <- extract(final1, cl, cellnumbers=T)#From the final product, the order of the coordinates within
    #the main raster and the value of the variable associated with these coordinates are extracted
    ext2 <- extract(final2, cl, cellnumbers=T)
    ext3 <- extract(final3, cl, cellnumbers=T)
    if(j==1){
      cor_data1<-data.frame(t(data.frame(ext1)[,2]))
      cor_data2<-data.frame(t(data.frame(ext2)[,2]))
      cor_data3<-data.frame(t(data.frame(ext3)[,2]))
    }
    if(j!=1){
      cor_data1<-rbind(cor_data1,data.frame(t(data.frame(ext1)[,2])))
      cor_data2<-rbind(cor_data2,data.frame(t(data.frame(ext2)[,2])))
      cor_data3<-rbind(cor_data3,data.frame(t(data.frame(ext3)[,2])))
    }
    print(paste(j, "de", 12, "en" ,k))#Step counter
  }
  aa1<-0#The auxiliary variable is replaced to reduce memory usage
  aa2<-0#The auxiliary variable is replaced to reduce memory usage
  aa3<-0#The auxiliary variable is replaced to reduce memory usage
  if(k==1958){
    pre_data<-cor_data1
    etp_data<-cor_data2
    etr_data<-cor_data3
  }
  if(k!=1958){
    pre_data<-rbind(pre_data,cor_data1)#The matrices of each domain containing the
    #variable of all pixels within cl are accumulated
    etp_data<-rbind(etp_data,cor_data2)
    etr_data<-rbind(etr_data,cor_data3)
  }
  print(paste(k,"de 2019"))#Step counter
}
#A vector of dates, years and months is attached to the beginning of each matrix
#and the remaining columns are numbered 1..n (one column per extracted cell)
pre_data<-cbind(dates,year,month,pre_data);colnames(pre_data)<-c("dates","year","month",c(1:(ncol(pre_data)-3)))
etp_data<-cbind(dates,year,month,etp_data);colnames(etp_data)<-c("dates","year","month",c(1:(ncol(etp_data)-3)))
etr_data<-cbind(dates,year,month,etr_data);colnames(etr_data)<-c("dates","year","month",c(1:(ncol(etr_data)-3)))
#The dimension of the extracted matrices is checked
dim(pre_data)
dim(etp_data)
dim(etr_data)
#The composition of each of the matrices is reviewed
head(pre_data[,1:10])
head(etp_data[,1:10])
head(etr_data[,1:10])
#The matrices are stored in a known folder as .RDS files
saveRDS(pre_data,"E:/19_MANUAL_data_global/TerraClimate/pre_data_cl.rds")
saveRDS(etp_data,"E:/19_MANUAL_data_global/TerraClimate/etp_data_cl.rds")
saveRDS(etr_data,"E:/19_MANUAL_data_global/TerraClimate/etr_data_cl.rds")
#END
#END
#--------------------------------Section 13. Solution to unify spatial scales---------------------------------#
#The coordinates of the NOAA STAR cell centroids are read
coor_vhi<-read.table("E:/19_MANUAL_data_global/VCI_TCI_VHI/coordinates_extract.txt",header = T)#The coordinates are read
dim(coor_vhi)#Dimension checked
head(coor_vhi)#Check that the matrix has the desired composition
#The coordinates of the centroids of the CHIRPS cells are read.
#BUG FIX(review): the original read "coordendas_CHIRPS.txt", but Section 10 saves the
#CHIRPS coordinates as "coordinate_CHIRPS.txt"; the filename now matches the writer.
coor_chi<-read.table("E:/19_MANUAL_data_global/CHIRPS/coordinate_CHIRPS.txt",header = T)
dim(coor_chi)#Dimension checked
head(coor_chi)#Check that the matrix has the desired composition
#The matrix containing the monthly VHI data is read
vhi<-readRDS("E:/19_MANUAL_data_global/VCI_TCI_VHI/vhi_monthly_data.rds")#The VHI matrix is read
dim(vhi)#Dimension checked
head(vhi[,1:5])#Check that the matrix has the desired composition
#The matrix containing the CHIRPS precipitation data is read
chi_data<-readRDS("E:/19_MANUAL_data_global/CHIRPS/data_CHIRPS.rds")#The precipitation matrix is read
dim(chi_data)#Dimension checked
head(chi_data[,1:5])#Check that the matrix has the desired composition
#Averaging within the CHIRPS focus area (row mean over every cell column, NA ignored)
chi_mz<-data.frame(chi_data[,1:3],pre=rowMeans(chi_data[,4:ncol(chi_data)],na.rm=T))
#Matrix containing dates and average rainfall across the country
dim(chi_mz)
head(chi_mz)
#The date matrix and representative rainfall vector for Mozambique are stored
write.table(chi_mz,"E:/19_MANUAL_data_global/change_coordinates/CHIRPS_mz.txt",row.names = F)
#It shows how to find the position of the nearest coordinate with an example
objetive<-data.frame(coor_chi[,1:2])#Target coordinates are defined
initials<-data.frame(coor_vhi)#The coordinates to be searched for are defined
#Great-circle distances (lonlat = T) from the first target coordinate to every
#initial coordinate; d holds the row index of the minimum, i.e. the nearest point
d<-data.frame(which(data.frame(pointDistance(objetive[1,],initials,lonlat = T))
==min(data.frame(pointDistance(objetive[1,],initials,lonlat = T)))))
#d is the position of the nearest coordinate
#Nearby coordinates are displayed on the console.
#BUG FIX(review): d is a data.frame, so it is indexed with [row, column]; the original
#d[,1,1] only worked by accident (the third index was silently taken as `drop`).
objetive[1,]
initials[d[1,1],]
#The nearest coordinates of the whole CHIRPS coordinate matrix are searched
objetive<-data.frame(coor_chi[,1:2])#Target coordinates are defined
initials<-coor_vhi#The coordinates to be searched for are defined
for(i in 1:nrow(coor_chi)){
  #Great-circle distance from CHIRPS coordinate i to every NOAA STAR coordinate;
  #the distance vector is computed once per step instead of twice as in the original
  dist_i <- data.frame(pointDistance(objetive[i,],initials,lonlat = T))
  d <- data.frame(which(dist_i == min(dist_i)))
  if(i==1){
    di1<-data.frame(vhi[,1:2],data.frame(vhi[,-c(1:2)])[,d[1,1]])
  }
  if(i!=1){
    di1<-cbind(di1,data.frame(data.frame(vhi[,-c(1:2)])[,d[1,1]]))#A new VHI matrix is created where the left to
    #right order of the columns corresponds to the
    #top to bottom coordinate order of the CHIRPS
  }
  print(paste(i,"de", nrow(coor_chi)))#Step counter
}
colnames(di1)<-c("year","month",c(1:(ncol(di1)-2)))#The columns of the new matrix are renamed
dim(di1)#Dimension must be equal to CHIRPS data
head(di1[,1:15])
#The new VHI data matrix is saved, now corresponding to the CHIRPS coordinates.
#BUG FIX(review): the file is written with saveRDS(), so it is given the .rds
#extension (the original used .txt, which readRDS consumers would not expect).
saveRDS(di1,"E:/19_MANUAL_data_global/change_coordinates/vhi_coordinates_chirps.rds")