
libSVM kernel function implementations [MATLAB]


%%
% by yunmi
% Email: lvyunmi@...
% Reference: faruto

%% Please credit the source if reposting:
% Implementation of several libsvm kernel functions
% This is a study script, intended for fellow libsvm beginners.
% It implements the four basic libsvm kernels -- linear, polynomial, RBF
% (radial basis) and sigmoid -- plus several custom kernels mentioned by
% faruto, purely for learning, to make it easier to experiment with custom
% kernels.
% Enable only one kernel block at a time: "%{ ... %}" disables a block,
% while changing the opener to "%%{" activates it.

%%


% clear workspace and command window
clear
clc
% start timing
tic;
% load the heart_scale data
load heart_scale.mat
trainData = heart_scale_inst;
trainLabel = heart_scale_label;
testData = heart_scale_inst;
testLabel = heart_scale_label;
clear heart_scale_inst;
clear heart_scale_label;
% number of samples and number of features in the training and test data
[trainRow,Dim] = size(trainData);
testRow = size(testData,1);   % number of test samples (size, not length, in case Dim exceeds the sample count)

% parameter settings
% g: gamma
% coef0: constant term in the polynomial/sigmoid kernels
% d: degree of the polynomial kernel
%g = 1/length(unique(trainLabel));   % alternative: reciprocal of the number of classes
g = 1/Dim;      % libsvm default: reciprocal of the number of features
coef0 = 0;
d = 3;


% Linear kernel: k(x,x') = x*x'
%{
kTrain = trainData*trainData';
kTrain = [(1:trainRow)',kTrain];
kTest = testData*trainData';
kTest = [(1:testRow)',kTest];
%}
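% Sanity check (a sketch, not part of the original script): if the linear block
% above is enabled, training on the precomputed kernel with '-t 4' should give
% essentially the same accuracy as training on the raw features with the
% built-in linear kernel '-t 0'.
%{
modelLinear = svmtrain(trainLabel, trainData, '-t 0');
[~, accLinear, ~] = svmpredict(testLabel, testData, modelLinear);
%}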
% Polynomial kernel: k(ui,vj) = (gamma*ui*vj' + coef0)^degree
% parameters: g (gamma), coef0, d (degree)
%{
kTrain = g*(trainData*trainData') + coef0;
for i = 1:trainRow
    for j = 1:trainRow
        kTrain(i,j) = kTrain(i,j)^d;
    end
end
kTrain = [(1:trainRow)',kTrain];
kTest = g*(testData*trainData') + coef0;
for i = 1:testRow
    for j = 1:trainRow
        kTest(i,j) = kTest(i,j)^d;
    end
end
kTest = [(1:testRow)',kTest];
%}
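% Vectorized alternative (a sketch, not part of the original script): the
% element-wise power .^ computes the same polynomial kernel matrix as the two
% nested loops above, without the explicit for-loops.
%{
kTrain = [(1:trainRow)', full(g*(trainData*trainData') + coef0).^d];
kTest  = [(1:testRow)',  full(g*(testData*trainData')  + coef0).^d];
%}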
% RBF (radial basis) kernel: k(ui,vj) = exp(-gamma*||ui-vj||^2)
% parameter: g (gamma)
% (this block is currently active; note that the sigmoid block below is also
%  active and will overwrite the kTrain/kTest computed here)
%%{
kTrain = zeros(trainRow,trainRow);
for i = 1:trainRow
    for j = 1:trainRow
        kTrain(i,j) = exp(-g*norm(trainData(i,:)-trainData(j,:))^2);
    end
end
kTrain = [(1:trainRow)',kTrain];
kTest = zeros(testRow,trainRow);
for i = 1:testRow
    for j = 1:trainRow
        kTest(i,j) = exp(-g*norm(testData(i,:)-trainData(j,:))^2);
    end
end
kTest = [(1:testRow)',kTest];
%}
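% Vectorized alternative (a sketch, not part of the original script): expanding
% ||u-v||^2 = ||u||^2 + ||v||^2 - 2*u*v' gives the same RBF kernel matrix
% without the nested loops (bsxfun is used so this also runs on older MATLAB).
%{
trainSq = full(sum(trainData.^2, 2));                 % ||ui||^2, trainRow x 1
testSq  = full(sum(testData.^2, 2));                  % ||vj||^2, testRow x 1
D2train = bsxfun(@plus, trainSq, trainSq') - 2*full(trainData*trainData');
D2test  = bsxfun(@plus, testSq,  trainSq') - 2*full(testData*trainData');
kTrain  = [(1:trainRow)', exp(-g*D2train)];
kTest   = [(1:testRow)',  exp(-g*D2test)];
%}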
% Sigmoid kernel: k(ui,vj) = tanh(gamma*ui*vj' + coef0)
% parameters: g (gamma), coef0
%%{
kTrain = zeros(trainRow,trainRow);
for i = 1:trainRow
    for j = 1:trainRow
        kTrain(i,j) = tanh(g*trainData(i,:)*trainData(j,:)'+coef0);
    end
end
kTrain = [(1:trainRow)',kTrain];
kTest = zeros(testRow,trainRow);
for i = 1:testRow
    for j = 1:trainRow
        kTest(i,j) = tanh(g*testData(i,:)*trainData(j,:)'+coef0);
    end
end
kTest = [(1:testRow)',kTest];
%}
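% Vectorized alternative (a sketch, not part of the original script): tanh
% applied to the whole Gram matrix replaces the nested loops above.
%{
kTrain = [(1:trainRow)', tanh(g*full(trainData*trainData') + coef0)];
kTest  = [(1:testRow)',  tanh(g*full(testData*trainData')  + coef0)];
%}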
% Custom kernel 1: k(u,v)[i,j] = ||ui||*||vj||
%{
kTrain = ones(trainRow,trainRow);
for i = 1:trainRow
    for j = 1:trainRow
        kTrain(i,j) = sum(trainData(i,:).^2)^0.5 * sum(trainData(j,:).^2)^0.5;
    end
end
kTrain = [(1:trainRow)',kTrain];

kTest = ones(testRow,trainRow);
for i = 1:testRow
    for j = 1:trainRow
        kTest(i,j) = sum(testData(i,:).^2)^0.5 * sum(trainData(j,:).^2)^0.5;
    end
end
kTest = [(1:testRow)', kTest];
%}
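% Vectorized alternative (a sketch, not part of the original script): this
% kernel is simply the outer product of the row norms.
%{
trainNorm = full(sqrt(sum(trainData.^2, 2)));         % ||ui||, trainRow x 1
testNorm  = full(sqrt(sum(testData.^2, 2)));          % ||vj||, testRow x 1
kTrain = [(1:trainRow)', trainNorm*trainNorm'];
kTest  = [(1:testRow)',  testNorm*trainNorm'];
%}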
% Custom kernel 2 (cosine similarity): k(u,v)[i,j] = ui*vj'/(||ui||*||vj||)
%{
kTrain = ones(trainRow,trainRow);
for i = 1:trainRow
    for j = 1:trainRow
        kTrain(i,j) = trainData(i,:)*trainData(j,:)'/(sum(trainData(i,:).^2)^0.5 * sum(trainData(j,:).^2)^0.5);
    end
end
kTrain = [(1:trainRow)',kTrain];
kTest = ones(testRow,trainRow);
for i = 1:testRow
    for j = 1:trainRow
        kTest(i,j) = testData(i,:)*trainData(j,:)'/(sum(testData(i,:).^2)^0.5 * sum(trainData(j,:).^2)^0.5);
    end
end
kTest = [(1:testRow)', kTest];
%}
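% Vectorized alternative (a sketch, not part of the original script): dividing
% the Gram matrix element-wise by the outer product of the row norms gives the
% same cosine-similarity kernel as the loops above.
%{
trainNorm = full(sqrt(sum(trainData.^2, 2)));
testNorm  = full(sqrt(sum(testData.^2, 2)));
kTrain = [(1:trainRow)', full(trainData*trainData')./(trainNorm*trainNorm')];
kTest  = [(1:testRow)',  full(testData*trainData') ./(testNorm*trainNorm')];
%}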
% train the model on the precomputed kernel matrix (-t 4)
modelKernel = svmtrain(trainLabel,kTrain,['-t 4 -g ',num2str(g),' -d ',num2str(d),' -r ',num2str(coef0)]);
% predict on the test kernel matrix
[plabel,acc,dec] = svmpredict(testLabel,kTest,modelKernel);
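% Note (not in the original): with '-t 4' libsvm takes the kernel values
% directly from kTrain/kTest, so the '-g', '-d' and '-r' options passed above
% have no effect; only non-kernel options such as '-c' still matter. libsvm
% also requires the first column of a precomputed kernel matrix to be the
% sample indices 1..n, which the code above provides. A quick format check
% (sketch):
%{
assert(isequal(full(kTrain(:,1)), (1:trainRow)'), 'first column must be 1..trainRow');
assert(size(kTrain,2) == trainRow + 1, 'kTrain must be trainRow x (trainRow+1)');
%}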

% stop timing and report elapsed time
toc;
