@@ -1,4 +1,4 @@
-using LinearAlgebra
+using LinearAlgebra, AverageShiftedHistograms, Distances
 """
     T = ind_class(N::Int)
 
@@ -167,6 +167,77 @@ function dmatrix_flatten(dmatrix::Array{Float64,3}, flatten::Any) |
     return dmatrix
 end # of function dmatrix_flatten!
 
+"""
+    ldb_discriminant_measure(p::Vector{Float64}, q::Vector{Float64}; dm::Symbol = :KLdivergence)
+
+Discriminant measure used in the Local Discriminant Basis (LDB) method.
+
+### Input Arguments
+* `p, q::Vector{Float64}`: probability mass functions.
+* `dm::Symbol`: discriminant measure. Options: `:KLdivergence` (default),
+  `:Jdivergence`, `:l1`, `:l2`, and `:Hellinger`.
+"""
+function ldb_discriminant_measure(p::Vector{Float64}, q::Vector{Float64}; dm::Symbol = :KLdivergence)
+    @assert all(p .>= 0) && all(q .>= 0)
+    @assert length(p) == length(q)
+    if dm == :KLdivergence
+        # restrict to the common support to avoid log(0) and division by zero
+        ind = findall((p .> 0) .& (q .> 0))
+        return Distances.kl_divergence(p[ind], q[ind])
+    elseif dm == :Jdivergence
+        # J-divergence is the symmetrized KL divergence
+        ind = findall((p .> 0) .& (q .> 0))
+        return Distances.kl_divergence(p[ind], q[ind]) + Distances.kl_divergence(q[ind], p[ind])
+    elseif dm == :l1
+        return norm(p - q, 1)
+    elseif dm == :l2
+        return norm(p - q, 2)
+    elseif dm == :Hellinger
+        return Distances.hellinger(p, q)
+    else
+        error("This discriminant measure $(dm) is not supported!")
+    end
+end
+
+"""
+    dmatrix_ldb_flatten(dmatrix::Array{Float64,3}...; dm::Symbol = :KLdivergence)
+
+Flatten dmatrices using the Local Discriminant Basis (LDB) method; the result
+is a 3D array of size (N, jmax, 1).
+
+### Input Arguments
+* `dmatrix::Array{Float64,3}...`: one matrix of expansion coefficients per
+  class of signals.
+* `dm::Symbol`: discriminant measure. Options: `:KLdivergence` (default),
+  `:Jdivergence`, `:l1`, `:l2`, and `:Hellinger`.
+
+### Example Usage:
+`dmatrix_ldb_flatten(dmatrix1, dmatrix2, dmatrix3)`, where each argument is the
+expansion coefficient matrix of one class of signals. It uses the default
+discriminant measure (KL divergence) to flatten these matrices, i.e., it
+flattens the expansion coefficient matrices by computing and summing pairwise
+"statistical distances" among them.
+"""
+function dmatrix_ldb_flatten(dmatrix::Array{Float64,3}...; dm::Symbol = :KLdivergence)
+    C = length(dmatrix)        # number of signal classes
+    if C < 2
+        error("Input should contain at least two classes of signals.")
+    end
+    N, jmax, _ = Base.size(dmatrix[1])
+    res = zeros(N, jmax)
+    # accumulate the discriminant measure over all pairs of classes
+    for u = 1:(C - 1), v = (u + 1):C
+        for j = 1:jmax
+            for i = 1:N
+                # estimate each coefficient's pmf via average shifted histograms
+                h1 = ash(dmatrix[u][i, j, :])
+                p = h1.density / norm(h1.density, 1)
+                h2 = ash(dmatrix[v][i, j, :])
+                q = h2.density / norm(h2.density, 1)
+                res[i, j] += ldb_discriminant_measure(p, q; dm = dm)
+            end
+        end
+    end
+    return reshape(res, N, jmax, 1)
+end
+
+
 """
     costfun = cost_functional(cfspec::Any)
 
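
A minimal sketch of the new `ldb_discriminant_measure` in action, assuming the definitions from this commit are in scope; the two pmfs `p` and `q` are made-up illustrative inputs:

```julia
using LinearAlgebra, Distances

# Two illustrative probability mass functions on 4 bins (each sums to 1).
p = [0.1, 0.4, 0.4, 0.1]
q = [0.25, 0.25, 0.25, 0.25]

ldb_discriminant_measure(p, q)                     # KL divergence (default), ≈ 0.193
ldb_discriminant_measure(p, q; dm = :Jdivergence)  # symmetrized KL, ≈ 0.416
ldb_discriminant_measure(p, q; dm = :l1)           # 0.6
ldb_discriminant_measure(p, q; dm = :l2)           # 0.3
```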
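Likewise, a sketch of `dmatrix_ldb_flatten` on synthetic data; the shapes, signal counts, and random inputs here are assumptions for illustration only:

```julia
using AverageShiftedHistograms, LinearAlgebra

# Synthetic expansion coefficients: N = 8 nodes, jmax = 3 levels,
# 50 signals per class; class 2 has a wider spread at every entry.
N, jmax = 8, 3
dmatrix1 = randn(N, jmax, 50)
dmatrix2 = 3.0 .* randn(N, jmax, 50)

F = dmatrix_ldb_flatten(dmatrix1, dmatrix2)   # default: KL divergence
size(F)                                       # (8, 3, 1)

Fh = dmatrix_ldb_flatten(dmatrix1, dmatrix2; dm = :Hellinger)
```

Each entry of `F` sums the pairwise discriminant measures over all class pairs, so larger values flag expansion coefficients with more class-discriminating power.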