#abc-svm Demo1
import numpy as np
from sklearn.datasets import make_blobs
from sklearn.svm import OneClassSVM
from sklearn.metrics import roc_auc_score
from sklearn.model_selection import train_test_split
# 蜂群算法类
class ArtificialBeeColony:
    """Artificial Bee Colony (ABC) style random search over a hyper-parameter space.

    Each candidate solution is a dict of continuous parameters (drawn from
    ``param_ranges``) plus a categorical ``'kernel'`` entry (drawn from
    ``kernel_options``). Each generation perturbs every solution and keeps
    track of the best-scoring one seen so far.
    """

    def __init__(self, fitness_func, param_ranges, kernel_options,
                 population_size=30, max_generations=100):
        """
        Args:
            fitness_func: callable(solution_dict) -> float; higher is better.
            param_ranges: {name: (low, high, step)} for continuous parameters.
                (A ``low`` given as a list is treated as a categorical choice.)
            kernel_options: list of kernel names to sample ``'kernel'`` from.
            population_size: number of candidate solutions per generation.
            max_generations: number of generations to run in optimize().
        """
        self.fitness_func = fitness_func
        self.param_ranges = param_ranges
        self.kernel_options = kernel_options
        self.population_size = population_size
        self.max_generations = max_generations
        self.population = self.initialize_population()
        self.best_solution = None
        self.best_fitness = -np.inf

    def initialize_population(self):
        """Return a list of random solutions drawn uniformly from the ranges."""
        population = []
        for _ in range(self.population_size):
            solution = {}
            for param_name, (low, high, step) in self.param_ranges.items():
                if isinstance(low, list):  # categorical parameter
                    solution[param_name] = np.random.choice(low)
                else:
                    # Uniform over [low, high]. (The original multiplied by
                    # `step`, which confined initial values to a sliver
                    # [low, low + step*(high-low)] of the stated range.)
                    solution[param_name] = low + np.random.rand() * (high - low)
            # Kernel is always sampled from the provided options.
            solution['kernel'] = np.random.choice(self.kernel_options)
            population.append(solution)
        return population

    def update_population(self, new_solutions):
        """Adopt ``new_solutions`` and refresh the best solution/fitness seen."""
        self.population = new_solutions
        # Evaluate each solution exactly once (the original called
        # fitness_func inside max() and then again on the winner).
        scored = [(self.fitness_func(s), s) for s in new_solutions]
        current_fitness, current_best = max(scored, key=lambda pair: pair[0])
        if current_fitness > self.best_fitness:
            self.best_fitness = current_fitness
            self.best_solution = current_best

    def optimize(self):
        """Run the search; return (best_solution, best_fitness)."""
        for generation in range(self.max_generations):
            new_population = [self.search_for_new_solution(s)
                              for s in self.population]
            self.update_population(new_population)
            print(f"Generation {generation + 1}: Best Fitness = {self.best_fitness}")
        return self.best_solution, self.best_fitness

    def search_for_new_solution(self, solution):
        """Return a perturbed copy of ``solution`` (neighbourhood move)."""
        new_solution = {}
        for param_name, value in solution.items():
            if param_name == 'kernel':
                # Categorical: resample from the allowed kernels.
                new_solution[param_name] = np.random.choice(self.kernel_options)
            else:
                low, high, step = self.param_ranges[param_name]
                if isinstance(low, list):  # categorical parameter
                    new_value = np.random.choice(low)
                else:
                    # Symmetric random step scaled by `step` and the range width.
                    new_value = value + step * (np.random.rand() - 0.5) * (high - low)
                    new_value = max(low, min(new_value, high))  # clamp to bounds
                new_solution[param_name] = new_value
        return new_solution
# Build a synthetic dataset for the demo.
# NOTE(review): centers=1 makes every label in `y` equal to 0 — the data is
# single-class, so downstream scores computed against y are degenerate.
X, y = make_blobs(n_samples=500, centers=1, n_features=20, random_state=42)
# print(X)
# print("---------")
# print(y)
# print("---------")
from sklearn.metrics import r2_score
# 适应度函数
def fitness_function(params):
    """Score one hyper-parameter candidate for a One-Class SVM.

    Splits the module-level data (fixed seed, so the split is identical on
    every call), fits the model on the unlabelled training portion, and
    returns the R^2 score of its +1/-1 predictions against the remapped
    test labels.
    """
    train_X, test_X, _, test_labels = train_test_split(
        X, y, test_size=0.3, random_state=42)
    # Remap blob label 0 to -1 (outlier convention), anything else to +1.
    test_labels = np.where(test_labels == 0, -1, 1)
    model = OneClassSVM(nu=params['nu'],
                        kernel=params['kernel'],
                        gamma=params['gamma'])
    model.fit(train_X)
    predictions = model.predict(test_X)
    return r2_score(test_labels, predictions)
# Search space: (low, high, step) per continuous hyper-parameter.
param_ranges = {
    'nu': (0.1, 0.9, 0.1),     # nu search range, step 0.1
    'gamma': (0.01, 1, 0.1),   # gamma search range, step 0.1
}
kernel_options = ['rbf', 'linear']  # candidate kernel functions

# Run the artificial bee colony search.
# NOTE: the result variable was renamed from `abc`, which shadowed the
# standard-library `abc` module.
colony = ArtificialBeeColony(fitness_function, param_ranges, kernel_options)
best_solution, best_fitness = colony.optimize()

# Report the best hyper-parameters found and their fitness.
print(f"Best Solution: {best_solution}")
print(f"Best Fitness: {best_fitness}")
# ---------------------------------------------------------------------------
# The lines below are non-code residue scraped from the hosting web page
# (CSDN download-page boilerplate). They are commented out so the file is
# valid Python; no program logic was removed.
# ---------------------------------------------------------------------------
# 没有合适的资源?快使用搜索试试~ 我知道了~
# 资源推荐
# 资源详情
# 资源评论
# 收起资源包目录
# 蜂群算法优化一类支持向量机.zip (2个子文件)
# py
# abc-ocsvm.py 5KB
# ocsvm.py 1KB
# 共 2 条
# - 1
# 资源评论
# 傻傻虎虎
# - 粉丝: 3608
# - 资源: 34
# 上传资源 快速赚钱
# - 我的内容管理 展开
# - 我的资源 快来上传第一个资源
# - 我的收益 登录查看自己的收益
# - 我的积分 登录查看自己的积分
# - 我的C币 登录后查看C币余额
# - 我的收藏
# - 我的下载
# - 下载帮助
# 安全验证
# 文档复制为VIP权益,开通VIP直接复制
# 信息提交成功