% =========================================================================
% Comparison of different information measures (MSP-based sigma estimators)
% on Rayleigh-distributed MRI background noise.
% =========================================================================
clc;         % clear the command window
clear;       % clear all workspace variables
close all;   % close all open figure windows
warning off; % NOTE(review): suppresses ALL warnings globally - confirm intended
%%
%--------------------------------------------------------------------------------
%
% Different information measures comparison
%
%----------------------------- PATHS -----------------------------------
% Project-local helper folders (estimators, quality measures, baselines).
%addpath('plot2svg');  % (optional) SVG figure export
%addpath('DataIO');    % (optional) data input/output helpers
addpath('MSP');                  % Maximum Spacing Product estimators
addpath('QuantitativeMeasures'); % quantitative evaluation measures
addpath('ReferenceMethods');     % reference/baseline estimation methods
%--------------------- SIMULATION PARAMETERS ---------------------------
% simulation
iterations = 100;      % number of Monte-Carlo repetitions per noise level
sigma_vector = 5:5:50; % tested noise standard deviations (sigma)
% numerical optimization properties (gradient descent method)
sigma0 = 10;           % initial sigma estimate handed to every optimizer
step_size = 5;         % gradient-descent step size
max_iter = 100;        % maximum optimizer iterations
max_tolerance = 1e-3;  % convergence tolerance
delta = 0;             % MSP delta parameter (0 = plain spacings)
% Read real raw data
%ReadData_Transverse % (!!!) CONNECT TO YOUR OWN DATA
% Temporary handle artificial data - COMMENT HERE
% With all-zero real/imaginary parts, the noisy magnitude built in the loop
% below is pure Rayleigh-distributed noise inside the mask.
dataset_re = zeros(217, 181); % real part of the MRI image
dataset_im = zeros(217, 181); % imaginary part of the MRI image
mask_T1_1mm_bg = zeros(217, 181); % background mask of the MRI image - PUT YOUR MASK HERE
mask_T1_1mm_bg(50:150, 50:100) = 1; % artificial rectangular background mask
%% Rayleigh distribution (background data)
% Preallocate result matrices: rows = Monte-Carlo iterations,
% columns = noise levels from sigma_vector.
Rayleigh_MSP_results_GD_KL1 = zeros(iterations, length(sigma_vector)); % Kullback-Leibler divergence, k = 1
Rayleigh_MSP_results_GD_KL2 = zeros(iterations, length(sigma_vector)); % Kullback-Leibler divergence, k = 4
Rayleigh_MSP_results_GD_Jeffreys1 = zeros(iterations, length(sigma_vector)); % J-divergence, k = 1
Rayleigh_MSP_results_GD_Jeffreys2 = zeros(iterations, length(sigma_vector)); % J-divergence, k = 4
Rayleigh_MSP_results_GD_Renyi1 = zeros(iterations, length(sigma_vector)); % Renyi, k = 1, p = 1/2
%Rayleigh_MSP_results_GD_Renyi2 = zeros(iterations, length(sigma_vector)); % Renyi, k = 1, p = 2
Rayleigh_MSP_results_GD_Renyi3 = zeros(iterations, length(sigma_vector)); % Renyi, k = 4, p = 2
%Rayleigh_MSP_results_NM_Vajda1 = zeros(iterations, length(sigma_vector)); % Vajda, k = 1, p = 1
Rayleigh_MSP_results_NM_Vajda2 = zeros(iterations, length(sigma_vector)); % Vajda, k = 1, p = 2
%Rayleigh_MSP_results_NM_Vajda3 = zeros(iterations, length(sigma_vector)); % Vajda, k = 4, p = 2
% Objective-function handles passed to the optimizers. A direct @name handle
% is equivalent to (and simpler than) an anonymous function that merely
% forwards all five arguments (sigma, m, k, delta, weights) unchanged.
FUNCTION_HANDLE_MSP_RAYLEIGH_KL = @MSP_Rayleigh_distribution;
FUNCTION_HANDLE_MSP_RAYLEIGH_JEFFREYS = @MSP_Rayleigh_distribution_Jeffreys_Divergence;
FUNCTION_HANDLE_MSP_RAYLEIGH_RENYI = @MSP_Rayleigh_distribution_Renyi_Divergence;
%-----------------------------------------------------------------------------
for iter=1:iterations
for noise_level=1:length(sigma_vector)
sigma = sigma_vector(noise_level);
% Noise in magnitude
dataset_noise1 = random('norm', 0, sigma, size(dataset_re, 1), size(dataset_re, 2));
dataset_noise2 = random('norm', 0, sigma, size(dataset_im, 1), size(dataset_im, 2));
dataset_T1_1mm_noisy = sqrt( (dataset_re + dataset_noise1).^2 + (dataset_im + dataset_noise2).^2 );
data = dataset_T1_1mm_noisy .* mask_T1_1mm_bg;
data = data(:);
data = data(data > 0);
data = sort(data);
% ------------------------------- Individual information measures ------------------------------
% Kullback-Leibler (gradient descent optimization method) - k = 1 - KULLBACK-LEIBLER DIVERGENCE
[sigma_MSP_GD_KL1, function_value_MSP_GD_KL1] = Optimization_Rayleigh_GD(FUNCTION_HANDLE_MSP_RAYLEIGH_KL, sigma0, data, 1, delta, step_size, max_iter, max_tolerance, 0);
% Kullback-Leibler (gradient descent optimization method) - k = 4 - KULLBACK-LEIBLER DIVERGENCE
[sigma_MSP_GD_KL2, function_value_MSP_GD_KL2] = Optimization_Rayleigh_GD(FUNCTION_HANDLE_MSP_RAYLEIGH_KL, sigma0, data, 4, delta, step_size, max_iter, max_tolerance, 0);
%-----------------------------------------------------------------------------------
% J-divergence (gradient descent optimization method) - k = 1 - J-DIVERGENCE
[sigma_MSP_GD_Jeffreys1, function_value_MSP_GD_Jeffreys1] = Optimization_Rayleigh_GD(FUNCTION_HANDLE_MSP_RAYLEIGH_JEFFREYS, sigma0, data, 1, delta, step_size, max_iter, max_tolerance, 0);
% J-divergence (gradient descent optimization method) - k = 4 - J-DIVERGENCE
[sigma_MSP_GD_Jeffreys2, function_value_MSP_GD_Jeffreys2] = Optimization_Rayleigh_GD(FUNCTION_HANDLE_MSP_RAYLEIGH_JEFFREYS, sigma0, data, 4, delta, step_size, max_iter, max_tolerance, 0);
%-----------------------------------------------------------------------------------
%-----------------------------------------------------------------------------------
% Renyi divergence (Nelder-Mead optimization method) - k = 1, p = 1/2 - RENYI'S DIVERGENCE
%[sigma_MSP_GD_Renyi1, function_value_MSP_GD_Renyi1] = Optimization_Rayleigh_GD(FUNCTION_HANDLE_MSP_RAYLEIGH_RENYI, sigma0, data, 1, delta, step_size, max_iter, max_tolerance, 0.5);
n = length(data); k = 1; p = 1/2;
beta = (n+1)/k;
RENYI1 = @(sigma) ( -((sign(1-p) * beta^p)/(n-k+2)) .* ( (1 - exp(-(data(k).^2) ./ (2*sigma^2)))^p + sum((exp(-(data(1:n-k).^2) ./ (2*sigma^2)) - exp(-(data(k+1:n).^2) ./ (2*sigma^2))).^p) + exp(-(data(n-k+1).^2) ./ (2*sigma^2))^p));
sigma_MSP_GD_Renyi1 = fminsearch(RENYI1, sigma0);
%-----------------------------------------------------------------------------------
%-----------------------------------------------------------------------------------
% Renyi divergence (Nelder-Mead optimization method) - k = 4, p = 2 - RENYI'S DIVERGENCE
%[sigma_MSP_GD_Renyi3, function_value_MSP_GD_Renyi3] = Optimization_Rayleigh_GD(FUNCTION_HANDLE_MSP_RAYLEIGH_RENYI, sigma0, data, 4, delta, step_size, max_iter, max_tolerance, 2);
n = length(data); k = 4; p = 2;
beta = (n+1)/k;
RENYI3 = @(sigma) ( -((sign(1-p) * beta^p)/(n-k+2)) .* ( (1 - exp(-(data(k).^2) ./ (2*sigma^2)))^p + sum((exp(-(data(1:n-k).^2) ./ (2*sigma^2)) - exp(-(data(k+1:n).^2) ./ (2*sigma^2))).^p) + exp(-(data(n-k+1).^2) ./ (2*sigma^2))^p));
sigma_MSP_GD_Renyi3 = fminsearch(RENYI3, sigma0);
%-----------------------------------------------------------------------------------
%-----------------------------------------------------------------------------------
% Vajda's measure (Nelder-Mead optimization method) - k = 1, p = 2
n = length(data); k = 1; p = 2;
beta = (n+1)/k;
VAJDA2 = @(sigma) ( (1/(n-k+2)) .* ( abs( 1 - beta*(1 - exp(-(data(k).^2) ./ (2*sigma^2)) ) )^p + sum( ( abs(1 - beta*(exp(-(data(1:n-k).^2) ./ (2*sigma^2)) - exp(-(data(k+1:n).^2) ./ (2*sigma^2))) ) ).^p ) + (abs(1 - beta*exp(-(data(n-k+1).^2) ./ (2*sigma^2))))^p ) );
sigma_MSP_NM_Vajda2 = fminsearch(VAJDA2, sigma0);
%-----------------------------------------------------------------------------------
% store results in arrays
Rayleigh_MSP_results_GD_KL1(iter, noise_level) = sigma_MSP_GD_KL1;
Rayleigh_MSP_results_GD_KL2(iter, noise_level) = sigma_MSP_GD_KL2;
Rayleigh_MSP_results_GD_Jeffreys1(iter, noise_level) = sigma_MSP_GD_Jeffreys1;
Rayleigh_MSP_results_GD_Jeffreys2(iter,
% -----------------------------------------------------------------------------
% NOTE(review): the script appears truncated above (the assignment on the
% preceding line is incomplete and the two enclosing for-loops are never
% closed). The text below is non-code residue from the download page this
% file was obtained from; it is commented out so the file remains valid
% MATLAB, and preserved verbatim for provenance.
% -----------------------------------------------------------------------------
% MSP-estimate算法仿真-源码
% 版权申诉
% 129 浏览量
% 2021-10-01
% 23:43:16
% 上传
% 评论
% 收藏 26KB ZIP 举报
% mYlEaVeiSmVp
% - 粉丝: 1957
% - 资源: 19万+
% 最新资源
% - Delphi 控件之unidac-10.2.1-d24pro.exe
% - PHP XML Expat 解析器.md
% - PHP DOM扩展库:SimpleXML 解析XML文档.md
% - DBA技能图谱思维导图
% - chenhao耗子叔博客集锦
% - Delphi 12 控件之unidac-10.2.1-d27pro.exe
% - 支持主流Modbus、OPC DA/UA等主流协议,支持西门子、三菱、欧姆龙、松下、GE、AB等主流PLC
% - LIMS3-AMBIDEX- Cartesian Space Impedance Control
% - Impedance Control for Soft Robots
% - YOLO 数据集:8 种寄生虫检测【包含划分好的数据集、类别class文件、数据可视化脚本】
% 资源上传下载、课程学习等过程中有任何疑问或建议,欢迎提出宝贵意见哦~我们会及时处理!
% 点击此处反馈