I am currently trying to generate IDF (intensity-duration-frequency) curves from rainfall data. So far, I have managed to create probability/frequency curves for precipitation using the lmom package.
Could anyone offer guidance on how to proceed from the fitted frequency curves to the IDF curves?
Below, I’ve included a reproducible example of the code for reference:
<code>#Dados
# Data: synthetic daily precipitation series, 2000-2019
set.seed(999)
dates <- seq(as.POSIXct("2000-01-01 00:00:00"),
             as.POSIXct("2019-12-31 23:00:00"),
             by = "day")
sample.precip <- rgamma(n = length(dates), shape = 0.05, rate = 0.4)
precip.df <- data.frame(date = dates, RR = sample.precip)

prec.ts <- ts(precip.df$RR, frequency = 365.25, start = c(2000, 1, 1))
plot(prec.ts, main = "Prec time series", ylab = "Precipitação (mm)")

chuva <- sort(precip.df$RR, decreasing = TRUE)

# Empirical exceedance probability (%) for every daily value
df <- data.frame(x = 100 / length(chuva) * seq_along(chuva), y = chuva)

# Fitting the model
library(lubridate)
library(dplyr)  # required for %>%, mutate, group_by, summarize (was missing)
library(lmom)
str(prec.ts)

# Annual maxima series (block maxima, one value per year)
data <- precip.df %>%
  mutate(year = lubridate::year(date))
max.by.year <- data %>%
  group_by(year) %>%
  summarize(max_amount = max(RR, na.rm = TRUE))
maximas <- max.by.year$max_amount
maximas <- maximas[!is.na(maximas)]
sorted.maximas <- sort(maximas, decreasing = TRUE)

# Weibull plotting position: empirical exceedance probability of each maximum
p <- seq_along(sorted.maximas) / (length(sorted.maximas) + 1)
# Return period (years)
tr <- 1 / p

# Sample L-moments of the annual maxima and Gumbel parameter estimates
fit <- lmom::samlmu(maximas)
plot(fit)  # plots the L-moment vector itself, not a diagnostic plot
para <- lmom::pelgum(fit)
para

# Plot empirical points vs. fitted Gumbel exceedance curve
plot(p, sorted.maximas,
     ylab = "Precipitação (mm)",
     xlab = "Probabilidade de observar um evento maior",
     main = "")
# Gumbel fit: exceedance probability 1 - F(x) against the sorted maxima
lines(1 - cdfgum(sorted.maximas, para), sorted.maximas, col = "blue", lty = 2)
grid()
</code>
<code>#Dados
# Data: synthetic daily precipitation series, 2000-2019
set.seed(999)
dates <- seq(as.POSIXct("2000-01-01 00:00:00"),
             as.POSIXct("2019-12-31 23:00:00"),
             by = "day")
sample.precip <- rgamma(n = length(dates), shape = 0.05, rate = 0.4)
precip.df <- data.frame(date = dates, RR = sample.precip)

prec.ts <- ts(precip.df$RR, frequency = 365.25, start = c(2000, 1, 1))
plot(prec.ts, main = "Prec time series", ylab = "Precipitação (mm)")

chuva <- sort(precip.df$RR, decreasing = TRUE)

# Empirical exceedance probability (%) for every daily value
df <- data.frame(x = 100 / length(chuva) * seq_along(chuva), y = chuva)

# Fitting the model
library(lubridate)
library(dplyr)  # required for %>%, mutate, group_by, summarize (was missing)
library(lmom)
str(prec.ts)

# Annual maxima series (block maxima, one value per year)
data <- precip.df %>%
  mutate(year = lubridate::year(date))
max.by.year <- data %>%
  group_by(year) %>%
  summarize(max_amount = max(RR, na.rm = TRUE))
maximas <- max.by.year$max_amount
maximas <- maximas[!is.na(maximas)]
sorted.maximas <- sort(maximas, decreasing = TRUE)

# Weibull plotting position: empirical exceedance probability of each maximum
p <- seq_along(sorted.maximas) / (length(sorted.maximas) + 1)
# Return period (years)
tr <- 1 / p

# Sample L-moments of the annual maxima and Gumbel parameter estimates
fit <- lmom::samlmu(maximas)
plot(fit)  # plots the L-moment vector itself, not a diagnostic plot
para <- lmom::pelgum(fit)
para

# Plot empirical points vs. fitted Gumbel exceedance curve
plot(p, sorted.maximas,
     ylab = "Precipitação (mm)",
     xlab = "Probabilidade de observar um evento maior",
     main = "")
# Gumbel fit: exceedance probability 1 - F(x) against the sorted maxima
lines(1 - cdfgum(sorted.maximas, para), sorted.maximas, col = "blue", lty = 2)
grid()
</code>
# Data: synthetic daily precipitation series, 2000-2019
set.seed(999)
dates <- seq(as.POSIXct("2000-01-01 00:00:00"),
             as.POSIXct("2019-12-31 23:00:00"),
             by = "day")
sample.precip <- rgamma(n = length(dates), shape = 0.05, rate = 0.4)
precip.df <- data.frame(date = dates, RR = sample.precip)

prec.ts <- ts(precip.df$RR, frequency = 365.25, start = c(2000, 1, 1))
plot(prec.ts, main = "Prec time series", ylab = "Precipitação (mm)")

chuva <- sort(precip.df$RR, decreasing = TRUE)

# Empirical exceedance probability (%) for every daily value
df <- data.frame(x = 100 / length(chuva) * seq_along(chuva), y = chuva)

# Fitting the model
library(lubridate)
library(dplyr)  # required for %>%, mutate, group_by, summarize (was missing)
library(lmom)
str(prec.ts)

# Annual maxima series (block maxima, one value per year)
data <- precip.df %>%
  mutate(year = lubridate::year(date))
max.by.year <- data %>%
  group_by(year) %>%
  summarize(max_amount = max(RR, na.rm = TRUE))
maximas <- max.by.year$max_amount
maximas <- maximas[!is.na(maximas)]
sorted.maximas <- sort(maximas, decreasing = TRUE)

# Weibull plotting position: empirical exceedance probability of each maximum
p <- seq_along(sorted.maximas) / (length(sorted.maximas) + 1)
# Return period (years)
tr <- 1 / p

# Sample L-moments of the annual maxima and Gumbel parameter estimates
fit <- lmom::samlmu(maximas)
plot(fit)  # plots the L-moment vector itself, not a diagnostic plot
para <- lmom::pelgum(fit)
para

# Plot empirical points vs. fitted Gumbel exceedance curve
plot(p, sorted.maximas,
     ylab = "Precipitação (mm)",
     xlab = "Probabilidade de observar um evento maior",
     main = "")
# Gumbel fit: exceedance probability 1 - F(x) against the sorted maxima
lines(1 - cdfgum(sorted.maximas, para), sorted.maximas, col = "blue", lty = 2)
grid()
New contributor
Matheus Paiva is a new contributor to this site. Take care in asking for clarification, commenting, and answering.
Check out our Code of Conduct.