-
Notifications
You must be signed in to change notification settings - Fork 3
pca, garch, and tests for binomial tree #46
New issue
Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.
By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.
Already on GitHub? Sign in to your account
base: main
Are you sure you want to change the base?
Changes from all commits
File filter
Filter by extension
Conversations
Jump to
Diff view
Diff view
There are no files selected for viewing
| Original file line number | Diff line number | Diff line change |
|---|---|---|
| @@ -0,0 +1,55 @@ | ||
#ifndef GARCH_H
#define GARCH_H

#include <vector>
#include <Eigen/Dense>

namespace finmath {
namespace StatisticalModels {

// GARCH(1,1) conditional-volatility model:
//   sigma^2_t = omega + alpha * r_{t-1}^2 + beta * sigma^2_{t-1}
// Coefficients are refined from a return series via fit().
class GARCH {
public:
    // Constructor with default parameters (GARCH(1,1)).
    // omega: constant term; alpha: weight on the lagged squared return
    // (ARCH term); beta: weight on the lagged conditional variance
    // (GARCH term).
    GARCH(double omega = 0.0001, double alpha = 0.1, double beta = 0.8);

    // Fit the model to data (a series of returns); populates the
    // fitted-value, residual, and conditional-variance buffers.
    void fit(const Eigen::VectorXd& returns);

    // Predict volatility `steps_ahead` periods into the future.
    double predict_volatility(int steps_ahead = 1) const;

    // Get model parameters.
    double get_omega() const;
    double get_alpha() const;
    double get_beta() const;

    // Get fitted values from the most recent fit().
    Eigen::VectorXd get_fitted_values() const;

    // Get residuals from the most recent fit().
    Eigen::VectorXd get_residuals() const;

    // Get AIC (Akaike Information Criterion) of the fitted model.
    double get_aic() const;

    // Get BIC (Bayesian Information Criterion) of the fitted model.
    double get_bic() const;

private:
    double omega_;   // Constant term
    double alpha_;   // ARCH parameter (lagged squared-return weight)
    double beta_;    // GARCH parameter (lagged variance weight)

    Eigen::VectorXd fitted_values_;          // fitted series from fit()
    Eigen::VectorXd residuals_;              // residual series from fit()
    Eigen::VectorXd conditional_variances_;  // sigma^2_t recursion values

    // Helper functions used internally by fit().
    void update_parameters(const Eigen::VectorXd& returns);
    double calculate_likelihood() const;
};

} // namespace StatisticalModels
} // namespace finmath

#endif // GARCH_H
| Original file line number | Diff line number | Diff line change |
|---|---|---|
| @@ -0,0 +1,48 @@ | ||
#ifndef PCA_H
#define PCA_H

#include <vector>
#include <Eigen/Dense>

namespace finmath {
namespace StatisticalModels {

// Principal Component Analysis on standardized data: each column of the
// input is centered by its mean and scaled by its standard deviation
// before the eigendecomposition (see pca.cpp).
class PCA {
public:
    // Fit PCA to `data` (rows are observations, columns are features).
    // num_components == 0 means "keep all components".
    PCA(const Eigen::MatrixXd& data, int num_components = 0);

    // Get principal components (one component direction per column).
    Eigen::MatrixXd get_components() const;

    // Get explained variance ratio for each retained component.
    Eigen::VectorXd get_explained_variance_ratio() const;

    // Transform data to principal component space (applies the same
    // centering/scaling learned at construction).
    Eigen::MatrixXd transform(const Eigen::MatrixXd& data) const;

    // Inverse transform from principal component space back to the
    // original feature space.
    Eigen::MatrixXd inverse_transform(const Eigen::MatrixXd& transformed_data) const;

    // Get number of retained components.
    int get_n_components() const;

    // Get column means of the original (training) data.
    Eigen::VectorXd get_mean() const;

    // Get column standard deviations of the original (training) data.
    Eigen::VectorXd get_std() const;

private:
    Eigen::MatrixXd components_;                // eigenvectors, one per column
    Eigen::VectorXd explained_variance_ratio_;  // eigenvalue share per component
    Eigen::VectorXd mean_;                      // column means of training data
    Eigen::VectorXd std_;                       // column std devs of training data
    int n_components_;                          // number of retained components

    // Standardize data, eigendecompose, and store the leading components.
    void fit(const Eigen::MatrixXd& data);
};

} // namespace StatisticalModels
} // namespace finmath

#endif // PCA_H
| Original file line number | Diff line number | Diff line change | ||||||||||||||||||||
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
| @@ -0,0 +1,127 @@ | ||||||||||||||||||||||
| #include "finmath/StatisticalModels/garch.h" | ||||||||||||||||||||||
| #include <cmath> | ||||||||||||||||||||||
|
|
||||||||||||||||||||||
| namespace finmath { | ||||||||||||||||||||||
| namespace StatisticalModels { | ||||||||||||||||||||||
|
|
||||||||||||||||||||||
// Store the user-supplied (or default) GARCH(1,1) coefficients.
// No estimation happens here; fit() performs the actual calibration.
GARCH::GARCH(double omega, double alpha, double beta)
    : omega_(omega), alpha_(alpha), beta_(beta) {}
|
|
||||||||||||||||||||||
| void GARCH::fit(const Eigen::VectorXd& returns) { | ||||||||||||||||||||||
| int n = returns.size(); | ||||||||||||||||||||||
|
|
||||||||||||||||||||||
| // Initialize vectors | ||||||||||||||||||||||
| fitted_values_ = Eigen::VectorXd::Zero(n); | ||||||||||||||||||||||
| residuals_ = Eigen::VectorXd::Zero(n); | ||||||||||||||||||||||
| conditional_variances_ = Eigen::VectorXd::Zero(n); | ||||||||||||||||||||||
|
|
||||||||||||||||||||||
| // Initialize first conditional variance | ||||||||||||||||||||||
| conditional_variances_(0) = returns.array().square().mean(); | ||||||||||||||||||||||
|
|
||||||||||||||||||||||
| // Update parameters using maximum likelihood | ||||||||||||||||||||||
| update_parameters(returns); | ||||||||||||||||||||||
| } | ||||||||||||||||||||||
|
|
||||||||||||||||||||||
| void GARCH::update_parameters(const Eigen::VectorXd& returns) { | ||||||||||||||||||||||
| int n = returns.size(); | ||||||||||||||||||||||
|
|
||||||||||||||||||||||
| // Initialize parameters for optimization | ||||||||||||||||||||||
| Eigen::Vector3d params(omega_, alpha_, beta_); | ||||||||||||||||||||||
|
|
||||||||||||||||||||||
| // Simple gradient descent optimization | ||||||||||||||||||||||
| double learning_rate = 0.0001; | ||||||||||||||||||||||
| int max_iterations = 1000; | ||||||||||||||||||||||
|
|
||||||||||||||||||||||
| for (int iter = 0; iter < max_iterations; ++iter) { | ||||||||||||||||||||||
| // Compute conditional variances | ||||||||||||||||||||||
| for (int t = 1; t < n; ++t) { | ||||||||||||||||||||||
| conditional_variances_(t) = omega_ + | ||||||||||||||||||||||
| alpha_ * returns(t-1) * returns(t-1) + | ||||||||||||||||||||||
| beta_ * conditional_variances_(t-1); | ||||||||||||||||||||||
|
Comment on lines
+37
to
+40
|
||||||||||||||||||||||
| for (int t = 1; t < n; ++t) { | |
| conditional_variances_(t) = omega_ + | |
| alpha_ * returns(t-1) * returns(t-1) + | |
| beta_ * conditional_variances_(t-1); | |
| constexpr double epsilon = 1e-8; | |
| for (int t = 1; t < n; ++t) { | |
| conditional_variances_(t) = std::max( | |
| epsilon, | |
| omega_ + alpha_ * returns(t-1) * returns(t-1) + beta_ * conditional_variances_(t-1) | |
| ); |
Copilot
AI
Dec 10, 2025
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
Potential division by zero when alpha_ + beta_ equals 1.0. The GARCH stationarity condition requires alpha_ + beta_ < 1, but the code allows values up to 0.99. If the sum equals or exceeds 1.0, the long-run variance calculation will result in division by zero or negative variance. Add a check to ensure alpha_ + beta_ stays well below 1.0 or handle this edge case.
Copilot
AI
Dec 10, 2025
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
The AIC formula uses a constant 6 which should be 2 * 3 = 6 for 3 parameters. However, this assumes the model always has exactly 3 parameters. Consider using a more explicit calculation like 2 * 3 to make the formula clearer and easier to maintain if the number of parameters changes in future GARCH variants.
| return -2 * calculate_likelihood() + 6; // 3 parameters | |
| return -2 * calculate_likelihood() + 2 * 3; // 3 parameters |
| Original file line number | Diff line number | Diff line change |
|---|---|---|
| @@ -0,0 +1,81 @@ | ||
| #include "finmath/StatisticalModels/pca.h" | ||
| #include <cmath> | ||
|
|
||
| namespace finmath { | ||
| namespace StatisticalModels { | ||
|
|
||
// Construct and immediately fit the model to `data`.
// num_components == 0 requests all components (resolved inside fit()).
PCA::PCA(const Eigen::MatrixXd& data, int num_components) : n_components_(num_components) {
    fit(data);
}
|
|
||
| void PCA::fit(const Eigen::MatrixXd& data) { | ||
| // Center the data | ||
| mean_ = data.colwise().mean(); | ||
| Eigen::MatrixXd centered = data.rowwise() - mean_.transpose(); | ||
|
|
||
| // Scale the data | ||
| std_ = ((centered.adjoint() * centered) / double(data.rows() - 1)).diagonal().array().sqrt(); | ||
| Eigen::MatrixXd scaled = centered.array().rowwise() / std_.transpose().array(); | ||
|
||
|
|
||
| // Compute covariance matrix | ||
| Eigen::MatrixXd cov = (scaled.adjoint() * scaled) / double(data.rows() - 1); | ||
|
|
||
| // Compute eigenvalues and eigenvectors | ||
| Eigen::SelfAdjointEigenSolver<Eigen::MatrixXd> eigen_solver(cov); | ||
|
|
||
| // Sort eigenvalues and eigenvectors in descending order | ||
| Eigen::VectorXd eigenvalues = eigen_solver.eigenvalues().reverse(); | ||
| Eigen::MatrixXd eigenvectors = eigen_solver.eigenvectors().rowwise().reverse(); | ||
|
||
|
|
||
| // Set number of components if not specified | ||
| if (n_components_ == 0) { | ||
| n_components_ = data.cols(); | ||
| } | ||
|
|
||
| // Store components and explained variance ratio | ||
| components_ = eigenvectors.leftCols(n_components_); | ||
| explained_variance_ratio_ = eigenvalues.head(n_components_) / eigenvalues.sum(); | ||
| } | ||
|
|
||
// Principal component directions (one retained component per column).
Eigen::MatrixXd PCA::get_components() const {
    return components_;
}
|
|
||
// Fraction of total variance explained by each retained component.
Eigen::VectorXd PCA::get_explained_variance_ratio() const {
    return explained_variance_ratio_;
}
|
|
||
| Eigen::MatrixXd PCA::transform(const Eigen::MatrixXd& data) const { | ||
| // Center and scale the data | ||
| Eigen::MatrixXd centered = data.rowwise() - mean_.transpose(); | ||
| Eigen::MatrixXd scaled = centered.array().rowwise() / std_.transpose().array(); | ||
|
|
||
| // Project onto principal components | ||
| return scaled * components_; | ||
| } | ||
|
|
||
| Eigen::MatrixXd PCA::inverse_transform(const Eigen::MatrixXd& transformed_data) const { | ||
| // Reconstruct original data | ||
| Eigen::MatrixXd reconstructed = transformed_data * components_.transpose(); | ||
|
|
||
| // Unscale and uncenter | ||
| reconstructed = reconstructed.array().rowwise() * std_.transpose().array(); | ||
| reconstructed = reconstructed.rowwise() + mean_.transpose(); | ||
|
|
||
| return reconstructed; | ||
| } | ||
|
|
||
// Number of retained components (resolved during fit()).
int PCA::get_n_components() const {
    return n_components_;
}
|
|
||
// Column means of the training data, computed in fit().
Eigen::VectorXd PCA::get_mean() const {
    return mean_;
}
|
|
||
// Column standard deviations of the training data, computed in fit().
Eigen::VectorXd PCA::get_std() const {
    return std_;
}
|
|
||
| } // namespace StatisticalModels | ||
| } // namespace finmath | ||
Uh oh!
There was an error while loading. Please reload this page.