Write the full MATLAB code for estimating several parameters via MCMC (Metropolis-Hastings), assuming the input parameters have uniform priors, and include example data values so the script runs on its own.

main.m
% Define your likelihood function. Let's say it is a simple linear regression model
function y = likelihood(x, data)
    % LIKELIHOOD  Negative log-likelihood of a linear Gaussian regression model.
    %
    %   Model: y_i ~ Normal(slope*x_i + intercept, sigma^2)
    %
    %   Inputs:
    %     x    - 1x3 parameter vector: x(1) = slope, x(2) = intercept,
    %            x(3) = sigma (noise standard deviation, must be > 0)
    %     data - Nx2 matrix: column 1 = predictors, column 2 = responses
    %
    %   Output:
    %     y    - NEGATIVE log-likelihood of the data. Returns +Inf when
    %            sigma <= 0 so a Metropolis-Hastings caller rejects such
    %            proposals cleanly instead of receiving NaN from normpdf.
    slope = x(1);
    intercept = x(2);
    sigma = x(3);

    % Guard: normpdf(..., sigma) yields NaN for sigma <= 0, and NaN silently
    % poisons any acceptance-ratio comparison downstream.
    if sigma <= 0
        y = Inf;
        return;
    end

    % Predicted responses under the linear model.
    y_pred = slope * data(:,1) + intercept;

    % Accumulate the negative log-likelihood analytically rather than via
    % log(normpdf(...)): the direct formula cannot underflow to log(0) = -Inf
    % for points far from the regression line, and it removes the dependency
    % on the Statistics and Machine Learning Toolbox.
    resid = data(:,2) - y_pred;
    n = size(data, 1);
    y = 0.5 * sum((resid / sigma).^2) + n * log(sigma) + 0.5 * n * log(2*pi);
end

% Define the function for log prior (assuming uniform priors)
function y = logprior(x)
    % LOGPRIOR  Log-density of independent uniform priors on the parameters.
    %
    %   Input:
    %     x - 1x3 parameter vector: [slope, intercept, sigma]
    %
    %   Output:
    %     y - 0 when every parameter lies inside its box (uniform density is
    %         constant, so its log is 0 up to normalization), -Inf otherwise
    %         (zero density outside the support).

    % One row per parameter: [lower, upper].
    bounds = [-10,  10;    % slope
              -20,  20;    % intercept
               0.1, 100];  % sigma; positive lower bound keeps the noise std dev valid

    % Vectorized bounds check. The original wrapped a scalar && chain in
    % all(), which was redundant; comparing against the bounds matrix does
    % the same work in one expression. x(:) normalizes row/column input.
    if all(x(:) >= bounds(:,1) & x(:) <= bounds(:,2))
        y = 0;
    else
        y = -Inf;
    end
end

% --- Metropolis-Hastings sampler ---
% NOTE(review): in a MATLAB script file, local functions such as likelihood
% and logprior must appear AFTER all script code; reorder accordingly when
% assembling main.m.

% Generate example data so the script is self-contained (the original
% referenced an undefined variable `data` and could not run):
% true model y = 2*x + 1 with Gaussian noise, sigma = 1.5.
rng(0);  % fixed seed for reproducibility
x_obs = linspace(0, 10, 50)';
y_obs = 2 * x_obs + 1 + 1.5 * randn(size(x_obs));
data = [x_obs, y_obs];

% Define the number of MCMC iterations
n_iterations = 10000;

% Initialize the Markov chain: [slope, intercept, sigma]
x = [0, 0, 1];
chain = zeros(n_iterations, 3);  % preallocate sample storage

% likelihood() returns the NEGATIVE log-likelihood, so negate it to obtain
% the log-likelihood used in the acceptance ratio. (The original omitted
% this sign flip, which made the sampler prefer LOW-likelihood proposals.)
log_likelihood = -likelihood(x, data);
log_prior = logprior(x);

% Metropolis-Hastings loop with a symmetric Gaussian random-walk proposal
for i = 1:n_iterations
    % Propose a small random step in all three parameters
    x_proposed = x + 0.1 * randn(1, 3);

    % Posterior terms for the proposal (negated: see note above)
    log_likelihood_new = -likelihood(x_proposed, data);
    log_prior_new = logprior(x_proposed);

    % Log acceptance ratio exponentiated; the symmetric proposal cancels,
    % so only the posterior difference remains.
    alpha = exp((log_likelihood_new + log_prior_new) - (log_likelihood + log_prior));

    % Accept with probability min(1, alpha); rand() < alpha implements this
    % because alpha > 1 always accepts.
    if rand() < alpha
        x = x_proposed;
        log_likelihood = log_likelihood_new;
        log_prior = log_prior_new;
    end

    chain(i, :) = x;  % store current state (repeated on rejection)
end

% --- Summarize the posterior samples ---
% Discard the first 10% of the chain as burn-in. round() keeps the index
% integral even when n_iterations is not a multiple of 10 (the original
% 0.1 * n_iterations would then be a fractional index and error).
burn_in = round(0.1 * n_iterations);
samples = chain(burn_in+1:end, :);

% Posterior mean and variance per parameter (columnwise over the samples)
mean_estimate = mean(samples);
var_estimate = var(samples);

disp('Parameter Estimates:');
disp(['Slope: ', num2str(mean_estimate(1))]);
disp(['Intercept: ', num2str(mean_estimate(2))]);
disp(['Sigma: ', num2str(mean_estimate(3))]);
disp('Parameter Variance:');
disp(['Slope: ', num2str(var_estimate(1))]);
disp(['Intercept: ', num2str(var_estimate(2))]);
disp(['Sigma: ', num2str(var_estimate(3))]);
2539 chars
78 lines

This code snippet demonstrates how to perform parameter estimation using Markov Chain Monte Carlo (MCMC) with a Metropolis-Hastings algorithm in MATLAB. The code includes a likelihood function, log prior function assuming uniform priors, and the implementation of the Metropolis-Hastings algorithm to sample from the posterior distribution of the parameters.

related categories

gistlibby LogSnag