This is part of a script that searches for decision tree hyperparameters to increase prediction accuracy.
How can I get the parameters from the fitness function and the COA function and put them into the decision tree classifier?
import numpy as np
from sklearn.tree import DecisionTreeClassifier
from sklearn.model_selection import cross_val_score
from sklearn.metrics import accuracy_score, confusion_matrix

def fitness_dt(parameters):
    # Unpack the parameters, ensuring they are appropriate integers
    max_depth = int(parameters[0])
    min_samples_split = int(parameters[1])
    min_samples_leaf = int(parameters[2])

    # Initialize the Decision Tree model with the candidate parameters
    model = DecisionTreeClassifier(
        max_depth=max_depth if max_depth > 0 else None,  # None if max_depth is not positive
        min_samples_split=max(2, min_samples_split),     # at least 2
        min_samples_leaf=max(1, min_samples_leaf)        # at least 1
    )

    # Use cross-validation to evaluate the model
    scores = cross_val_score(model, X_train, y_train, cv=5, scoring='accuracy')

    # Return the negative mean accuracy so the optimizer can minimize it
    return -np.mean(scores)
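I believe the fitness function can be sanity-checked on its own before running the optimizer, something like this (the parameter vector [5, 2, 1] is just an arbitrary example I made up):

# Arbitrary parameter vector, just to check that fitness_dt runs
print(fitness_dt([5, 2, 1]))  # should print the negated mean CV accuracy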
def coa_dt(searchagents, max_iterations, bounds, dimension, fitness_function):
    lowerbounds = np.array([b[0] for b in bounds])
    upperbounds = np.array([b[1] for b in bounds])

    # INITIALIZATION: random integer positions within the bounds
    x = np.random.rand(searchagents, dimension) * (upperbounds - lowerbounds) + lowerbounds
    x = np.round(x)  # ensure parameters are integers
    fit = np.array([fitness_function(ind) for ind in x])

    # Track the best solution found so far (copy so xbest does not alias a row of x)
    fbest = np.min(fit)
    xbest = x[np.argmin(fit), :].copy()

    for t in range(max_iterations):
        for i in range(searchagents):
            # Exploration and exploitation
            if np.random.rand() < 0.5:  # explore towards the best solution
                iguana = xbest
                a = 1 + np.random.rand()
                x_p1 = x[i, :] + np.random.rand() * (iguana - a * x[i, :])
            else:  # exploit a local neighbourhood that shrinks over iterations
                lo_local = lowerbounds / (t + 1)
                hi_local = upperbounds / (t + 1)
                x_p1 = x[i, :] + (1 - 2 * np.random.rand()) * (lo_local + np.random.rand() * (hi_local - lo_local))

            # Clip the new position to the bounds and round to integers
            x_p1 = np.clip(x_p1, lowerbounds, upperbounds)
            x_p1 = np.round(x_p1)

            # Accept the new position only if it improves the fitness
            f_p1 = fitness_function(x_p1)
            if f_p1 < fit[i]:
                x[i, :] = x_p1
                fit[i] = f_p1

        # Update the best solution found so far
        current_fbest = np.min(fit)
        if current_fbest < fbest:
            fbest = current_fbest
            xbest = x[np.argmin(fit), :].copy()

    return fbest, xbest
# Define bounds for Decision Tree hyperparameters
dt_bounds = [(1, 20), (2, 20), (1, 20)] # Bounds for max_depth, min_samples_split, min_samples_leaf
opt_dt_model = DecisionTreeClassifier(max_depth=11, min_samples_split=9, min_samples_leaf=16, random_state=42)
opt_dt_model.fit(X_train, y_train)
y_pred_opt_dt = opt_dt_model.predict(X_test)
accuracy_opt_dt = accuracy_score(y_test, y_pred_opt_dt)
print("Accuracy:", accuracy_opt_dt)
print("Confusion Matrix:n", confusion_matrix(y_test, y_pred_opt_dt))
I don't know how I can find the parameters with these functions and put them into the classifier.
Please help me with that.
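Right now I just hardcode max_depth=11, min_samples_split=9, and min_samples_leaf=16 as shown above. My best guess at the missing wiring is the sketch below, where searchagents = 20 and max_iterations = 50 are arbitrary values I picked; I am not sure this is right:

# My guess: run the optimizer, then unpack its best position into the classifier
fbest, xbest = coa_dt(
    searchagents=20,       # arbitrary population size
    max_iterations=50,     # arbitrary iteration count
    bounds=dt_bounds,
    dimension=len(dt_bounds),  # 3 hyperparameters
    fitness_function=fitness_dt,
)
print("Best cross-validated accuracy:", -fbest)  # fbest is the negated accuracy

# Apply the same clamping as in fitness_dt when building the final model
best_depth, best_split, best_leaf = (int(v) for v in xbest)
tuned_dt_model = DecisionTreeClassifier(
    max_depth=best_depth if best_depth > 0 else None,
    min_samples_split=max(2, best_split),
    min_samples_leaf=max(1, best_leaf),
    random_state=42,
)
tuned_dt_model.fit(X_train, y_train)

Is this the right way to connect the two functions to the classifier?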