import numpy as np
import pandas as pd
from scipy.stats import poisson, lognorm
# Function to calculate VaR for Net Loss using Monte Carlo
def calculate_net_loss_var(frequency_lambda, severity_shape, severity_scale, severity_loc, confidence_level=0.999, n_simulations=10000):
    """Estimate the Value-at-Risk of aggregate net loss via Monte Carlo.

    Simulates a compound Poisson / log-normal loss model: each simulated
    period draws an event count from Poisson(frequency_lambda), and the
    period's aggregate loss is the SUM of that many independent log-normal
    severity draws.  VaR is the `confidence_level` quantile of the simulated
    aggregate losses.

    Parameters
    ----------
    frequency_lambda : float
        Poisson mean number of loss events per period.
    severity_shape, severity_scale, severity_loc : float
        ``scipy.stats.lognorm`` parameters (``s``, ``scale``, ``loc``) of the
        single-event severity distribution.
    confidence_level : float, default 0.999
        Quantile level, e.g. 0.999 for the 99.9% VaR.
    n_simulations : int, default 10000
        Number of simulated periods.

    Returns
    -------
    float
        Estimated VaR of the aggregate net loss.
    """
    # Event count for each simulated period.
    event_counts = poisson.rvs(mu=frequency_lambda, size=n_simulations)
    total_events = int(event_counts.sum())
    if total_events == 0:
        # No events in any simulation -> aggregate loss is identically zero.
        return 0.0
    # BUG FIX: the original multiplied the event count by ONE severity draw,
    # which is not a compound distribution.  Draw one severity per event and
    # sum them within each simulation (bincount groups draws by simulation).
    severities = lognorm.rvs(s=severity_shape, loc=severity_loc, scale=severity_scale, size=total_events)
    sim_index = np.repeat(np.arange(n_simulations), event_counts)
    total_losses = np.bincount(sim_index, weights=severities, minlength=n_simulations)
    return np.percentile(total_losses, confidence_level * 100)
# Function to calculate VaR for Event Counts using Monte Carlo
def calculate_event_count_var(frequency_lambda, confidence_level=0.999, n_simulations=10000):
    """Monte Carlo estimate of the event-count VaR.

    Draws `n_simulations` event counts from Poisson(frequency_lambda) and
    returns the `confidence_level` quantile of the simulated counts.
    """
    draws = poisson.rvs(mu=frequency_lambda, size=n_simulations)
    # quantile(q) is equivalent to percentile(q * 100).
    return np.quantile(draws, confidence_level)
# --- Quarterly operational-risk VaR scenarios per business line / event type ---
results = []

# Simulation settings.
confidence_level = 0.999
n_simulations = 10000

# Business lines included in the analysis.
selected_business_lines = ['Retail Banking', 'Corporate Banking']  # Replace with desired business lines

# Stress factors applied to frequency and severity, per event type.
event_type_adjustments = {
    'Cyber Attack': 1.10,         # Increase by 10%
    'Fraud': 1.15,                # Increase by 15%
    'Operational Failure': 1.25,  # Increase by 25%
}

# Loop through each year, quarter, business line, and event type.
for year in range(2021, 2025):
    # .copy() prevents a pandas SettingWithCopyWarning when adding 'Quarter'.
    yearly_data = df[df['Year'] == year].copy()
    yearly_data['Quarter'] = yearly_data['Date'].dt.quarter  # assumes 'Date' is datetime — TODO confirm
    for quarter in (1, 2, 3, 4):
        quarterly_data = yearly_data[yearly_data['Quarter'] == quarter]
        for business_line in quarterly_data['Business Line'].unique():
            if business_line not in selected_business_lines:
                continue  # Skip non-selected business lines
            bl_data = quarterly_data[quarterly_data['Business Line'] == business_line]
            for event_type in bl_data['Event Type'].unique():
                et_data = bl_data[bl_data['Event Type'] == event_type]
                # Observed totals for this (year, quarter, line, type) cell.
                total_event_count = et_data.shape[0]
                total_net_loss = et_data['Net Loss Amount'].sum()
                # Poisson frequency parameter: events per quarter.
                frequency_lambda = total_event_count
                # Fit a log-normal severity distribution to positive losses only.
                positive_losses = et_data[et_data['Net Loss Amount'] > 0]['Net Loss Amount']
                if positive_losses.empty:
                    print(f"No positive losses for {business_line} - {event_type} in {year} Q{quarter}. Skipping.")
                    continue
                severity_shape, severity_loc, severity_scale = lognorm.fit(positive_losses)
                # Baseline (no stress) plus any event-type-specific stress scenario.
                adjustments = [('Baseline', 1.0)]
                if event_type in event_type_adjustments:
                    stress = event_type_adjustments[event_type]
                    adjustments.append((f"+{int((stress - 1) * 100)}%", stress))
                # Calculate VaR for each adjustment scenario.
                for adjustment, factor in adjustments:
                    # Stress both the frequency and the severity scale.
                    adjusted_frequency_lambda = frequency_lambda * factor
                    adjusted_severity_scale = severity_scale * factor
                    # VaR of aggregate net loss.
                    var_net_loss = calculate_net_loss_var(
                        frequency_lambda=adjusted_frequency_lambda,
                        severity_shape=severity_shape,
                        severity_scale=adjusted_severity_scale,
                        severity_loc=severity_loc,
                        confidence_level=confidence_level,
                        n_simulations=n_simulations,
                    )
                    # VaR of the event count.
                    var_event_count = calculate_event_count_var(
                        frequency_lambda=adjusted_frequency_lambda,
                        confidence_level=confidence_level,
                        n_simulations=n_simulations,
                    )
                    results.append({
                        'Year': year,
                        'Quarter': quarter,
                        'Business Line': business_line,
                        'Event Type': event_type,
                        'Adjustment': adjustment,
                        'Total Net Loss': total_net_loss * factor,
                        'Total Event Count': total_event_count * factor,
                        'VaR Net Loss (99.9%)': var_net_loss,
                        'VaR Event Count (99.9%)': var_event_count,
                    })

# Convert results to DataFrame and display.
results_df = pd.DataFrame(results)
print(results_df)
I am trying to increase a few business lines' loss frequency by 10% and 15%, and then want to see the impact with respect to loss and frequency.
from scipy.stats import poisson, lognorm
import pandas as pd
import numpy as np
# Function to calculate VaR for Net Loss using Monte Carlo
def calculate_net_loss_var(frequency_lambda, severity_shape, severity_scale, severity_loc, confidence_level=0.999, n_simulations=10000):
simulated_event_counts = poisson.rvs(mu=frequency_lambda, size=n_simulations)
simulated_loss_amounts = lognorm.rvs(s=severity_shape, loc=severity_loc, scale=severity_scale, size=n_simulations)
total_losses = simulated_event_counts * simulated_loss_amounts
var_value = np.percentile(total_losses, confidence_level * 100)
return var_value
# Monte Carlo VaR for event counts.
def calculate_event_count_var(frequency_lambda, confidence_level=0.999, n_simulations=10000):
    """Return the `confidence_level` quantile of simulated Poisson event counts."""
    samples = poisson.rvs(mu=frequency_lambda, size=n_simulations)
    return np.percentile(samples, 100.0 * confidence_level)
# --- Yearly operational-risk VaR scenarios per business line / event type ---
results = []

# Simulation settings.
confidence_level = 0.999
n_simulations = 10000

# Business lines included in the analysis.
selected_business_lines = ['Retail Banking', 'Corporate Banking']  # Replace with desired business lines

# Stress factors applied to frequency and severity, per event type.
event_type_adjustments = {
    'Cyber Attack': 1.10,         # Increase by 10%
    'Fraud': 1.15,                # Increase by 15%
    'Operational Failure': 1.25,  # Increase by 25%
}

# Loop through each year, business line, and event type.
for year in range(2021, 2025):
    yearly_data = df[df['Year'] == year]
    for business_line in yearly_data['Business Line'].unique():
        if business_line not in selected_business_lines:
            continue  # Skip non-selected business lines
        bl_data = yearly_data[yearly_data['Business Line'] == business_line]
        for event_type in bl_data['Event Type'].unique():
            et_data = bl_data[bl_data['Event Type'] == event_type]
            # Observed totals for this (year, line, type) cell.
            total_event_count = et_data.shape[0]
            total_net_loss = et_data['Net Loss Amount'].sum()
            # Poisson frequency parameter: events per year.
            frequency_lambda = total_event_count
            # Fit a log-normal severity distribution to positive losses only.
            positive_losses = et_data[et_data['Net Loss Amount'] > 0]['Net Loss Amount']
            if positive_losses.empty:
                print(f"No positive losses for {business_line} - {event_type} in {year}. Skipping.")
                continue
            severity_shape, severity_loc, severity_scale = lognorm.fit(positive_losses)
            # Baseline (no stress) plus any event-type-specific stress scenario.
            adjustments = [('Baseline', 1.0)]
            if event_type in event_type_adjustments:
                stress = event_type_adjustments[event_type]
                adjustments.append((f"+{int((stress - 1) * 100)}%", stress))
            # Calculate VaR for each adjustment scenario.
            for adjustment, factor in adjustments:
                # Stress both the frequency and the severity scale.
                adjusted_frequency_lambda = frequency_lambda * factor
                adjusted_severity_scale = severity_scale * factor
                # VaR of aggregate net loss.
                var_net_loss = calculate_net_loss_var(
                    frequency_lambda=adjusted_frequency_lambda,
                    severity_shape=severity_shape,
                    severity_scale=adjusted_severity_scale,
                    severity_loc=severity_loc,
                    confidence_level=confidence_level,
                    n_simulations=n_simulations,
                )
                # VaR of the event count.
                var_event_count = calculate_event_count_var(
                    frequency_lambda=adjusted_frequency_lambda,
                    confidence_level=confidence_level,
                    n_simulations=n_simulations,
                )
                results.append({
                    'Year': year,
                    'Business Line': business_line,
                    'Event Type': event_type,
                    'Adjustment': adjustment,
                    'Total Net Loss': total_net_loss * factor,
                    'Total Event Count': total_event_count * factor,
                    'VaR Net Loss (99.9%)': var_net_loss,
                    'VaR Event Count (99.9%)': var_event_count,
                })

# Convert results to DataFrame.
results_df = pd.DataFrame(results)
After increasing the frequency by 10% and 15% for only a few event types (not all of them), the baseline rows should automatically be removed from the records, because I want to see the comparison of loss amount and frequency, both yearly and quarterly.
Is there any other good method to calculate the VaR — other than Monte Carlo — for net loss and frequency, with respect to different confidence intervals, yearly and quarterly?
user28750107 is a new contributor to this site. Take care in asking for clarification, commenting, and answering.
Check out our Code of Conduct.
2