Resource Description
Implementation of the AdaBoost algorithm based on Naive Bayes, shared by request.
Code Snippet and File Information
% adaboost.M1
function [trerr, tserr, w] = adaboostM1(type, fea_tr, lab_tr, fea_ts, lab_ts, M, cn, ln)
% perform adaboost algorithm
% type: the base classifier ('bayes' or 'knn')
% fea_tr, lab_tr: training features and labels
% fea_ts, lab_ts: testing features and labels
% M: iteration number
% cn: class number
% trerr: training error (combined)
% tserr: testing error
% w: weight distribution
if nargin < 6
disp('adaboostM1(type, fea_tr, lab_tr, fea_ts, lab_ts, M, [cn])');
return;
elseif nargin == 6
cn = 2;
end
if nargout < 3
w = [];
end
% first get parameters
vsize = size(fea_tr, 1);
trnum = size(fea_tr, 2);
tsnum = size(fea_ts, 2);
%
if (vsize ~= size(fea_ts, 1))
disp('training features and testing features do not have the same size');
return;
end
if (trnum ~= size(lab_tr, 2))
disp('training set has different feature and label size');
return;
end
if (tsnum ~= size(lab_ts, 2))
disp('testing set has different feature and label size');
return;
end
% deal with different base classifiers
% switch lower(type)
%
% case 'nearest' % nearest neighbour
% parameters
weight = ones(1, trnum) / trnum;
alpha_M = zeros(1, M);
hypo_tr_M = zeros(1, trnum, M);
I_tr_M = zeros(1, trnum, M);
hypo_ts_M = zeros(1, tsnum, M);
I_ts_M = zeros(1, tsnum, M);
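% hypo_tr_M / hypo_ts_M hold each round's hypothesis on the training / testing
% set, and I_tr_M / I_ts_M the corresponding misclassification indicators
% (1 = wrongly classified); together with alpha_M they are presumably used
% after the loop (not shown in this snippet) to form the combined prediction
% and the trerr / tserr curves.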
m = 1; err = -1;
trainError=[];
testError=[];
%while m<=M & err<0.5 & err~=0
while m<=M
% [fea_tr, fea_ts] = get_sample(ln);
% resample the training data according to the current weight distribution
% (a rough sketch of such a helper is given after this snippet)
[fea_w, lab_w, idx_w] = resample(fea_tr, lab_tr, weight);
% train the base classifier on the resampled data and run it on the ORIGINAL training data; I==1 means wrongly classified
% for KNN it requires only one step
if strcmp(type, 'bayes')
[I_tr, hypo_tr] = bayes(fea_w, lab_w, fea_tr, lab_tr); % evaluate on the training set
[I_ts, hypo_ts] = bayes(fea_w, lab_w, fea_ts, lab_ts); % evaluate on the testing set
end
if strcmp(type, 'knn')
[I_tr, hypo_tr] = knn(fea_w, lab_w, fea_tr, lab_tr); % evaluate on the training set
[I_ts, hypo_ts] = knn(fea_w, lab_w, fea_ts, lab_ts); % evaluate on the testing set
end
% calculate err
err = sum(weight .* I_tr, 2) / sum(weight, 2);
% calculate alpha
alpha_M(m) = 0.5*log((1-err)/err);
% update weight
weight = weight .* exp(2*alpha_M(m)*I_tr);
weight = weight ./ sum(weight, 2);
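% note: since alpha_M(m) = 0.5*log((1-err)/err), the factor exp(2*alpha_M(m)*I_tr)
% equals ((1-err)/err).^I_tr, so a wrongly classified sample (I_tr == 1) has its
% weight scaled by (1-err)/err while a correctly classified one (I_tr == 0) is
% left unchanged before renormalization, matching the standard AdaBoost.M1 update.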
% store parameters for round m
hypo_tr_M(:, :, m) = hypo_tr;
hypo_ts_M(:, :, m) = hypo_ts;
I_tr_M(:, :, m) = I_tr;
I_ts_M(:, :, m) = I_ts;
% update m
m = m + 1;
end
% show debug information
%
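The resample.m helper called at the top of the loop is included in the archive (see the file list below) but is not part of this snippet. Purely as an illustration, a weighted resampling routine with the same call signature might look like the sketch below; the name resample_sketch, the inverse-CDF implementation, and the assumption that features are stored one sample per column are mine, not taken from the archive.

% hypothetical sketch only; the archive's resample.m is not reproduced here
% assumes fea is d-by-n (one sample per column), lab and weight are 1-by-n
function [fea_w, lab_w, idx_w] = resample_sketch(fea, lab, weight)
n = size(fea, 2);
cw = cumsum(weight) / sum(weight);          % cumulative distribution over samples
idx_w = zeros(1, n);
for k = 1:n
idx_w(k) = find(rand() <= cw, 1, 'first');  % inverse-CDF draw with replacement
end
fea_w = fea(:, idx_w);                      % resampled features
lab_w = lab(idx_w);                         % resampled labels
end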
Attribute      Size      Date         Time   Name
---------   -------   ----------   -----   ----
File           4019   2015-05-11   20:20   adaboost+bayes\adaboostM1.m
File            250   2015-05-11   17:29   adaboost+bayes\bayes.m
File            857   2015-05-11   21:05   adaboost+bayes\compareKNNAndNavibayes.m
File         145268   2015-05-11   17:57   adaboost+bayes\data.mat
File           1602   2015-05-11   17:58   adaboost+bayes\generateTrainTestSamples.m
File            816   2015-05-11   21:05   adaboost+bayes\main.m
File            116   2015-05-11   17:52   adaboost+bayes\normalData.m
File           1253   2015-05-11   16:30   adaboost+bayes\resample.m
File           8050   2015-05-11   21:04   adaboost+bayes\对比.fig
File           7856   2015-05-11   20:24   adaboost+bayes\错误率.fig
Directory         0   2015-05-11   21:13   adaboost+bayes
---------   -------   ----------   -----   ----
             170087                        11
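main.m and compareKNNAndNavibayes.m appear in the file list above but are not reproduced in the snippet. As a hypothetical illustration only, a driver script consistent with the adaboostM1 interface shown above might look like the following; the variables assumed to come out of data.mat, the generateTrainTestSamples call, the choice of M, and the assumption that trerr/tserr are returned as per-round curves are guesses rather than the author's code.

% hypothetical usage sketch; the archive's main.m is not reproduced here
load('data.mat');                                % contents of data.mat are assumed
% [fea_tr, lab_tr, fea_ts, lab_ts] = generateTrainTestSamples(...);  % interface assumed
M = 50;                                          % number of boosting rounds
cn = 2;                                          % number of classes
[trerr_b, tserr_b] = adaboostM1('bayes', fea_tr, lab_tr, fea_ts, lab_ts, M, cn);
[trerr_k, tserr_k] = adaboostM1('knn',   fea_tr, lab_tr, fea_ts, lab_ts, M, cn);
% assuming trerr/tserr come back as 1-by-M per-round error curves:
plot(1:M, tserr_b, 'b-', 1:M, tserr_k, 'r--');
legend('AdaBoost + Naive Bayes', 'AdaBoost + kNN');
xlabel('boosting round'); ylabel('test error');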
Related Resources
- Embedded zerotree wavelet (EZW) image coding algorithm
- fm_gui_v2.zip
- PSOt particle swarm optimization toolbox
- AdaBoost tutorial
- lmd program
- Sparse representation of signals with a DCT dictionary
- LPCC programming code (.docx)
- Background modeling using Gaussian distributions
- Companion disc of "Modern Signal Processing Tutorial" by Hu Guangshu
- 2D Fast Marching Computations
- Sequential Monte Carlo reliability assessment (.rar)
- Digital watermarking algorithm based on the DCT transform
- Particle swarm optimization (PSO) verification on standard test functions
- BP algorithm in 3 different languages
- FIR filter design
- 3D bin packing problem program
- Benchmark functions for algorithm testing
- Memristive neural network experiments
- DC microgrid with voltage recovery compensation
- Autoregressive moving average (ARMA) model
- Point cloud data in PLY format
- Keystone-transform acquisition of direct-sequence spread spectrum signals
- Large collection of wavelet transform source programs
- Numerical computation: function approximation and curve fitting, composite trapez…
- Artificial potential field simulation in the V-REP environment
- Quintic B-spline curve (.rar)
- MTD radar signal processing
- libsvm toolbox with grid-search function for finding the optimal parameters