diff --git a/assignment 6/README.md b/assignment 6/README.md index f7524a3..39ddc3c 100644 --- a/assignment 6/README.md +++ b/assignment 6/README.md @@ -3,23 +3,18 @@ ### Implementing Gaussian Processes in Python In this assignment, your task is to implement the Gaussian Processes algorithm as a class in Python. Gaussian Processes are a powerful tool for probabilistic regression and can be used for various machine learning tasks. The goal of this assignment is to gain a deeper understanding of Gaussian Processes and their implementation. -Tasks #### GaussianProcess class Implement the class that can perform the following tasks: -* Fit the model: The class should be able to fit the model to a given training data set (X, y), where X is an n x d matrix representing the input features, and y is an n-dimensional vector representing the target values. The Gaussian Process fitting involves estimating the hyperparameters of the kernel function that captures the underlying covariance structure of the data. +* Fit the model: The class should be able to fit the model to a given training data set (X, y), where X is an n x d matrix representing the input features, and y is an n-dimensional vector representing the target values * Predict the output: The class should be able to predict the output for a given test data set X*, where X* is an m x d matrix representing the input features of the test data. The predicted output is obtained by using the training data and the estimated covariance structure to infer the most likely target values for the test data points. -* Compute log marginal likelihood: The class should be able to compute the log marginal likelihood of the training data set. The log marginal likelihood measures how well the model with its estimated hyperparameters explains the observed training data. It is used as an optimization objective for finding the best hyperparameters. 
- -* Compute gradients of log marginal likelihood: The class should be able to compute the gradients of the log marginal likelihood with respect to the hyperparameters. These gradients provide information on how the log marginal likelihood changes as the hyperparameters are varied and are crucial for optimizing the hyperparameters. +* Compute log marginal likelihood: The class should be able to compute the log marginal likelihood of the training data set. The log marginal likelihood measures how well the model with its estimated hyperparameters explains the observed training data. It is used as an optimization objective for finding the best hyperparameters (no need to do that). * Compute predictive mean and variance: The class should be able to compute the predictive mean and variance for a given test data set X*. The predictive mean represents the expected value of the target variable for each test data point, while the predictive variance indicates the uncertainty associated with the predictions. -* Compute gradients of predictive mean and variance: The class should be able to compute the gradients of the predictive mean and variance with respect to the hyperparameters. These gradients provide information on how the predictive mean and variance change as the hyperparameters are varied, which is useful for optimizing the hyperparameters. 
- #### Kernel Functions You need to implement the following kernel functions: diff --git a/assignment 6/iml_assgnment6_unsolved.ipynb b/assignment 6/iml_assgnment6_unsolved.ipynb index d90409b..91ba5af 100644 --- a/assignment 6/iml_assgnment6_unsolved.ipynb +++ b/assignment 6/iml_assgnment6_unsolved.ipynb @@ -142,24 +142,6 @@ " lml=None\n", " return lml\n", "\n", - " def log_marginal_likelihood_gradient(self):\n", - " # Compute the gradient of the log marginal likelihood with respect to the kernel hyperparameters.\n", - " lml_grad=None\n", - " return lml_grad\n", - "\n", - " def predict_gradient(self, X_star):\n", - " # K_star is the gradient of the kernel function\n", - " # y_star is the gradient of the mean function\n", - " # v is the gradient of the covariance function\n", - " # var is the gradient of the variance function\n", - " # K_star_grad is the gradient of the kernel function\n", - " # y_star_grad is the gradient of the mean function\n", - " # v_grad is the gradient of the covariance function\n", - " # var_grad is the gradient of the variance function\n", - " y_star, var, K_star_grad, v_grad = None, None, None, None\n", - " y_star_grad, var_grad = None, None\n", - " return y_star, var, y_star_grad, var_grad, K_star_grad, v_grad\n", - "\n", " def plot(self, X_star, y_star, var):\n", " plt.figure(figsize=(10, 10))\n", " plt.plot(self.X, self.y, \"r.\", markersize=10, label=\"Observations\")\n", @@ -197,9 +179,6 @@ " def __call__(self, X1, X2):\n", " pass\n", "\n", - " def gradient(self, X1, X2):\n", - " pass\n", - "\n", " def set_params(self, params):\n", " pass\n", "\n" @@ -222,9 +201,6 @@ " def __call__(self, X1, X2):\n", " pass\n", "\n", - " def gradient(self, X1, X2):\n", - " pass\n", - "\n", " def set_params(self, params):\n", " pass" ], @@ -245,9 +221,6 @@ " def __call__(self, X1, X2):\n", " pass\n", "\n", - " def gradient(self, X1, X2):\n", - " pass\n", - "\n", " def set_params(self, params):\n", " pass\n" ], @@ -268,8 +241,6 @@ " def __call__(self, 
X1, X2):\n", " pass\n", "\n", - " def gradient(self, X1, X2):\n", - " pass\n", "\n", " def set_params(self, params):\n", " pass\n"