-
Notifications
You must be signed in to change notification settings - Fork 0
/
Copy pathcompute_rank2.m
141 lines (137 loc) · 5.4 KB
/
compute_rank2.m
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
% Project the kernelized test set through the learned projection matrix.
% By Fei Xiong,
% ECE Dept,
% Northeastern University
% 2013-11-04
% Input:
%    Method: the distance-learning algorithm struct. In this function
%            two fields are used:
%            P is the projection matrix, d'-by-Ntr (assumes the kernel trick is used).
%            kernel is the name of the kernel function.
%    train:  The data used to learn the projection matrix. Each row is a
%            sample vector. Ntr-by-d
%    test:   The data to be projected. Each row is a sample vector. Nts-by-d
% Output:
%    X_new:  The projected test samples, d'-by-Nts.
% NOTE: earlier revisions of this function also took ix_partition (random
%       reference/probe partitions of the test set, Nit-by-Nts) and IDs
%       (identities of the test samples, Nts-by-1) to compute a CMC ranking;
%       that code is retained below only in commented-out form.
function [X_new] = compute_rank2(Method, train, test)
% Project the kernelized test samples into the learned subspace.
%   Method: struct with fields P (d'-by-Ntr projection matrix, kernel trick
%           assumed) and kernel (kernel-function name used by ComputeKernelTest).
%   train:  training samples, Ntr-by-d (one sample per row).
%   test:   test samples, Nts-by-d (one sample per row).
% Returns X_new, the projected test set, d'-by-Nts.
P = Method.P;                                   % learned projection matrix
K = ComputeKernelTest(train, test, Method);     % Ntr-by-Nts kernel matrix
X_new = P * K;                                  % project each test column
clear K;                                        % free the kernel matrix early
return;
% Calculate the kernel matrix between the train and test sets.
% TODO: Replace the ComputeKernel function in ComputeKernel.m
% Input:
%    Method: the distance-learning algorithm struct. In this function the
%            only field used is "kernel", the name of the kernel function.
%    train: The data used to learn the projection matrix. Each row is a
%           sample vector. Ntr-by-d
%    test:  The data used to test and calculate the CMC for the
%           algorithm. Each row is a sample vector. Nts-by-d
function [K_test] = ComputeKernelTest(train, test, Method)
% Compute the kernel matrix between the training and test sets.
%   Method: struct; uses .kernel ('linear' | 'chi2' | 'chi2-rbf' | 'gaus-rbf')
%           and .rbf_sigma (bandwidth, required for the *-rbf kernels).
%   train:  Ntr-by-d, one sample per row.
%   test:   Nts-by-d, one sample per row.
% Returns K_test, Ntr-by-Nts: K_test(j,i) is the kernel value between
% training sample j and test sample i.
Ntr = size(train, 1);
Nts = size(test, 1);
% The guard tests the FEATURE dimension (size(train,2)), not the sample
% count: very high-dimensional chi2 kernels are routed through the
% parallel pool.
if (size(train,2))>2e4 && (strcmp(Method.kernel, 'chi2') || strcmp(Method.kernel, 'chi2-rbf'))
    % NOTE(review): matlabpool was removed in newer MATLAB releases in
    % favor of parpool -- confirm the target MATLAB version before porting.
    matlabpool open
    switch Method.kernel
        case {'linear'}
            % Unreachable here (the outer guard admits only chi2 kernels);
            % kept for symmetry with the serial branch.
            K_test = train * test';
        case {'chi2'}
            % chi2 kernel: 2 * sum(x.*y ./ (x+y)); 1e-10 guards division by zero.
            K_test = zeros(Ntr, Nts);   % preallocate as a sliced parfor output
            parfor i = 1:Nts
                dotp = bsxfun(@times, test(i,:), train);
                sump = bsxfun(@plus, test(i,:), train);
                K_test(:,i) = 2 * sum(dotp ./ (sump + 1e-10), 2);
            end
        case {'chi2-rbf'}
            % exp(-chi2_dist/sigma), chi2_dist = sum((x-y).^2 ./ (x+y)).
            sigma = Method.rbf_sigma;
            K_test = zeros(Ntr, Nts);   % preallocate as a sliced parfor output
            parfor i = 1:Nts
                subp = bsxfun(@minus, test(i,:), train);
                subp = subp.^2;
                sump = bsxfun(@plus, test(i,:), train);
                K_test(:,i) = sum(subp ./ (sump + 1e-10), 2);
            end
            K_test = exp(-K_test ./ sigma);
    end
    matlabpool close
else
    switch Method.kernel
        case {'linear'}
            K_test = train * test';
        case {'chi2'}
            % chi2 kernel: 2 * sum(x.*y ./ (x+y)); 1e-10 guards division by zero.
            K_test = zeros(Ntr, Nts);   % preallocate: avoids growing column-by-column
            for i = 1:Nts
                dotp = bsxfun(@times, test(i,:), train);
                sump = bsxfun(@plus, test(i,:), train);
                K_test(:,i) = 2 * sum(dotp ./ (sump + 1e-10), 2);
            end
        case {'chi2-rbf'}
            % exp(-chi2_dist/sigma), chi2_dist = sum((x-y).^2 ./ (x+y)).
            sigma = Method.rbf_sigma;
            K_test = zeros(Ntr, Nts);   % preallocate: avoids growing column-by-column
            for i = 1:Nts
                subp = bsxfun(@minus, test(i,:), train);
                subp = subp.^2;
                sump = bsxfun(@plus, test(i,:), train);
                K_test(:,i) = sum(subp ./ (sump + 1e-10), 2);
            end
            K_test = exp(-K_test ./ sigma);
        case {'gaus-rbf'}
            %% Gaussian RBF kernel
            sigma = Method.rbf_sigma;
            K_test = zeros(Ntr, Nts);   % preallocate: avoids growing row-by-row
            for i = 1:Ntr
                subp = bsxfun(@minus, train(i,:), test)';
                % Column-wise Euclidean norm (vectorized replacement for the
                % former per-column arrayfun/norm call; identical values).
                % NOTE(review): this uses the NON-squared distance, so the
                % result is exp(-||x-y||/sigma) rather than the standard
                % exp(-||x-y||^2/sigma) -- kept as-is to preserve behavior.
                K_test(i,:) = sqrt(sum(subp.^2, 1));
            end
            K_test = exp(-K_test ./ sigma);
    end
end
return;