% digit_classify.m
% Digit classify main function made for the Pattern Recognition course.
% Loads hyperparameters and neural-network weights from NetworkStructure.mat.
% Loads minimum and maximum feature values from minForNormalization.mat and
% maxForNormalization.mat.
% IN ORDER TO RUN THIS, the digit_classify_parameters and usedFunctions
% folders have to be added to the MATLAB path!!!
% Classify one handwritten-digit pen trajectory with a pretrained
% feed-forward neural network.
%
% IN:  testdata - matrix of trajectory samples; columns 1:2 are the X/Y
%                 coordinates (a third Z/height column, if present, is
%                 dropped during preprocessing).
% OUT: C        - predicted digit label (winning output unit minus one).
%
% Requires NetworkStructure.mat, minForNormalization.mat and
% maxForNormalization.mat on the path, plus the helper functions
% expsmooth, center, minmaxnorm, time2matrix2 and forwardpropagate.
function C = digit_classify(testdata)

% Pretrained network weights and the stored feature-scaling ranges.
parameters = load("NetworkStructure.mat");
normMin    = load("minForNormalization.mat");
normMax    = load("maxForNormalization.mat");

% --- Trajectory preprocessing ----------------------------------------
% Exponential smoothing; only X/Y are kept (Z axis dropped here).
smoothed = expsmooth(testdata(:,1:2), 500, 10);

% Eigen-decomposition of the smoothed stroke's covariance; the entries
% are used later as shape features.
[eVec, eVal] = eig(cov(smoothed));

centered = center(smoothed);       % translate stroke to the origin
rescaled = minmaxnorm(centered);   % scale the digit into a unit box

% --- Feature extraction ----------------------------------------------
pixelWidth = 12;                   % raster resolution (hyperparameter)
nPixels    = pixelWidth * pixelWidth;

[pixels, startX, startY, afterstartX, afterstartY, ...
    beforeendX, beforeendY, endX, endY, startDirX, ...
    startDirY, afterstartdirX, afterstartdirY, ...
    beforeenddirX, beforeenddirY] = time2matrix2(rescaled, pixelWidth);

% Flattened raster followed by endpoint/direction/eigen features, in
% the exact order the network was trained on.
dataWrangled = [reshape(pixels, 1, []), ...
    startX, startY, afterstartX, afterstartY, ...
    beforeendX, beforeendY, endX, endY, ...
    startDirX, startDirY, afterstartdirX, afterstartdirY, ...
    beforeenddirX, beforeenddirY, ...
    eVec(1,1), eVec(2,1), eVec(1,2), eVec(2,2), ...
    eVal(1,1), eVal(2,2)];
features = size(dataWrangled, 2);

% Temporarily append the stored training min/max as extra rows so
% minmaxnorm rescales this sample with the training-time ranges.
dataWrangled(2,:) = normMin(1).minForScaling;
dataWrangled(3,:) = normMax(1).maxForScaling;
% NOTE(review): the slice starts at column nPixels, so the LAST pixel
% column is normalized together with the extra features - possibly an
% off-by-one (nPixels+1 expected). Kept as-is to match how the network
% was trained; confirm against the training pipeline before changing.
dataWrangled(:, nPixels:features) = minmaxnorm(dataWrangled(:, nPixels:features));
dataWrangled = dataWrangled(1,:);  % drop the temporary min/max rows

% --- Neural-network forward pass -------------------------------------
dataWrangled = dataWrangled';      % one column per sample
NNconfig = parameters(1).NNconfigStored;
wHidden  = parameters(1).wHiddenStored;
wOutput  = parameters(1).wOutputStored;
actFunc  = 'sigma';

% Append the bias term to the input vector.
extendedInput = [dataWrangled; ones(1, size(dataWrangled, 2))];
N = size(dataWrangled, 2);         % number of samples

% Propagate through every hidden layer in turn.
for layer = 1:length(NNconfig)
    if layer == 1
        yHidden{layer} = forwardpropagate(extendedInput, wHidden{layer}, actFunc, N);
    else
        yHidden{layer} = forwardpropagate(yHidden{layer-1}, wHidden{layer}, actFunc, N);
    end
end

% Linear output layer; the index of the largest output (1-based) maps
% to the digit label 0..9.
yOutput = wOutput' * yHidden{end};
[~, testClass] = max(yOutput, [], 1);
C = testClass - 1;
end