Resource Description
An implementation of the AdaBoost (AdaBoost.M1) algorithm with a Naive Bayes base classifier, shared by request.

Code Snippet and File Information
% AdaBoost.M1
function [trerr, tserr, w] = adaboostM1(type, fea_tr, lab_tr, fea_ts, lab_ts, M, cn, ln)
% perform the AdaBoost.M1 algorithm
% type: the base classifier ('bayes' or 'knn')
% fea_tr, lab_tr: training features and labels
% fea_ts, lab_ts: testing features and labels
% M: number of boosting iterations
% cn: number of classes
% trerr: training error (combined)
% tserr: testing error
% w: weight distribution
if nargin < 6
    disp('adaboostM1(type, fea_tr, lab_tr, fea_ts, lab_ts, M, [cn])');
    return;
elseif nargin == 6
    cn = 2;
end
if nargout < 3
    w = [];
end
% first get parameters
vsize = size(fea_tr, 1);
trnum = size(fea_tr, 2);
tsnum = size(fea_ts, 2);
%
if (vsize ~= size(fea_ts, 1))
    disp('training feature and testing feature do not have the same size');
    return;
end
if (trnum ~= size(lab_tr, 2))
    disp('training set has different feature and label sizes');
    return;
end
if (tsnum ~= size(lab_ts, 2))
    disp('testing set has different feature and label sizes');
    return;
end
% deal with different base classifiers
% switch lower(type)
%
% case 'nearest' % nearest neighbour
% parameters
weight = ones(1, trnum) / trnum;    % initial sample weights (uniform)
alpha_M = zeros(1, M);
hypo_tr_M = zeros(1, trnum, M);
I_tr_M = zeros(1, trnum, M);
hypo_ts_M = zeros(1, tsnum, M);
I_ts_M = zeros(1, tsnum, M);
m = 1; err = -1;
trainError = [];
testError = [];
% while m <= M && err < 0.5 && err ~= 0
while m <= M
    % [fea_tr, fea_ts] = get_sample(ln);
    % resample the training data according to the current weight distribution
    [fea_w, lab_w, idx_w] = resample(fea_tr, lab_tr, weight);
    % train the base classifier on the resampled data and run it on the
    % ORIGINAL training data; I == 1 means wrongly classified
    % (for KNN this requires only one step)
    if strcmp(type, 'bayes')
        [I_tr, hypo_tr] = bayes(fea_w, lab_w, fea_tr, lab_tr); % evaluate on training set
        [I_ts, hypo_ts] = bayes(fea_w, lab_w, fea_ts, lab_ts); % evaluate on testing set
    end
    if strcmp(type, 'knn')
        [I_tr, hypo_tr] = knn(fea_w, lab_w, fea_tr, lab_tr); % evaluate on training set
        [I_ts, hypo_ts] = knn(fea_w, lab_w, fea_ts, lab_ts); % evaluate on testing set
    end
    % calculate the weighted training error of this round
    err = sum(weight .* I_tr, 2) / sum(weight, 2);
    % calculate alpha (the weight of this hypothesis)
    alpha_M(m) = 0.5 * log((1 - err) / err);
    % update the sample weights: misclassified samples get larger weight
    weight = weight .* exp(2 * alpha_M(m) * I_tr);
    weight = weight ./ sum(weight, 2);
    % store parameters for round m
    hypo_tr_M(:, :, m) = hypo_tr;
    hypo_ts_M(:, :, m) = hypo_ts;
    I_tr_M(:, :, m) = I_tr;
    I_ts_M(:, :, m) = I_ts;
    % update m
    m = m + 1;
end
% show debug information
%
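The snippet ends before the stored per-round outputs are combined into trerr and tserr. The following is a minimal sketch, not the uploader's original code, of how the hypotheses hypo_tr_M / hypo_ts_M and their weights alpha_M are typically aggregated in AdaBoost.M1 by a weighted vote over the cn classes; the helper name combineHypotheses and the assumption that class labels are the integers 1..cn are mine.

% Hypothetical helper (not in the original upload): weighted-vote combination
% of the M base hypotheses, assuming class labels are the integers 1..cn.
function [pred, errRate] = combineHypotheses(hypo_M, alpha_M, lab, cn)
    [~, n, M] = size(hypo_M);          % hypo_M is 1 x n x M: one predicted label per sample per round
    votes = zeros(cn, n);              % accumulated vote mass per class and per sample
    for m = 1:M
        h = hypo_M(1, :, m);           % predictions of round m, 1 x n
        for c = 1:cn
            votes(c, :) = votes(c, :) + alpha_M(m) * (h == c);
        end
    end
    [~, pred] = max(votes, [], 1);     % final label = class with the largest weighted vote
    errRate = mean(pred ~= lab);       % fraction of misclassified samples
end

After the while loop, trerr and tserr could then be obtained by calling this helper on hypo_tr_M with lab_tr and on hypo_ts_M with lab_ts, respectively.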
Attribute        Size   Date       Time   Name
-----------  --------  ---------- -----  ----
File             4019  2015-05-11 20:20  adaboost+bayes\adaboostM1.m
File              250  2015-05-11 17:29  adaboost+bayes\bayes.m
File              857  2015-05-11 21:05  adaboost+bayes\compareKNNAndNavibayes.m
File           145268  2015-05-11 17:57  adaboost+bayes\data.mat
File             1602  2015-05-11 17:58  adaboost+bayes\generateTrainTestSamples.m
File              816  2015-05-11 21:05  adaboost+bayes\main.m
File              116  2015-05-11 17:52  adaboost+bayes\normalData.m
File             1253  2015-05-11 16:30  adaboost+bayes\resample.m
File             8050  2015-05-11 21:04  adaboost+bayes\对比.fig
File             7856  2015-05-11 20:24  adaboost+bayes\错误率.fig
Directory           0  2015-05-11 21:13  adaboost+bayes
-----------  --------  ---------- -----  ----
Total          170087                    11 items
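The listing shows a main.m and generateTrainTestSamples.m driving the experiment. Below is a minimal usage sketch under assumptions of mine: that data.mat provides a feature matrix fea (one sample per column) and a label row vector lab, and that a 70/30 train/test split is wanted; the actual scripts in the archive may differ.

% Hypothetical driver (placeholder variable names; not the uploader's main.m).
load('data.mat');                       % assumed to provide fea (d x n) and lab (1 x n)
n   = size(fea, 2);
idx = randperm(n);
ntr = round(0.7 * n);                   % 70/30 split is an assumption
fea_tr = fea(:, idx(1:ntr));      lab_tr = lab(idx(1:ntr));
fea_ts = fea(:, idx(ntr+1:end));  lab_ts = lab(idx(ntr+1:end));
M  = 50;                                % number of boosting rounds
cn = 2;                                 % number of classes
[trerr, tserr, w] = adaboostM1('bayes', fea_tr, lab_tr, fea_ts, lab_ts, M, cn);
% If trerr and tserr come back as per-round vectors, an error curve similar to
% 错误率.fig can be drawn with:
% plot(1:M, trerr, 'b-', 1:M, tserr, 'r-'); legend('training error', 'testing error');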