# NOTE(review): The remainder of this section appears corrupted by text
# extraction: the original newlines were collapsed (so '#' and '//' comments
# now swallow the code that followed them on the same physical line), and
# every angle-bracketed span has been stripped from the embedded C++ --
# template arguments such as 'Map<MatrixXd>', '#include <...>' headers, and
# whole loop headers/bodies ('for (int i=0; i<N; ...) { ... }') are missing.
# The lines below are preserved byte-for-byte; they do NOT parse as-is and
# must be restored from a clean copy of the original source before use.
# From what remains visible, each cxxfunction computes a Gaussian-process
# log-likelihood (fit term A1, complexity term A2 from the LLT diagonal,
# plus the (N/2)log(2*pi) constant) and a gradient dF, with the variants
# differing in which hyperparameters (s_f, l, s_n) are free vs. held fixed
# -- TODO confirm against the original file.
library(ape) library(inline) library(RcppEigen) # All routines written by Pantelis Z. Hadjipantelis and David Springate (30-Jul-2012) while still experimental this code is due to be appear under some GPL-related license # Most routines are based on Eigen ( Ga\"{e}l Guennebaud and Beno\^{i}t Jacob and others 2012) and make use of RcppEigen (D. Bates 2012). fLogLik_OU2_givenL_and_SNR <- cxxfunction(signature(ThetaI = "vector",YY = "vector", QQ= "matrix", LambdaI = "vector", SNR_I= "vector" ), '#include // to use sqrt and exp using Eigen::Map; // to map input variable to an existing array of data using Eigen::MatrixXd; // to use MatrixXd using Eigen::VectorXd; // to use VectorXd using Eigen::ArrayXd; // to use ArrayXd using Eigen::LLT; // to do the LLT decomposition using Eigen::Lower; // to get the lower triangular view const Map X(Rcpp::as > (QQ)); //Map matrix QQ to matrixXd X const Map Y(Rcpp::as > (YY)); //Map vector YY to matrixXd Y const Map Theta(Rcpp::as > (ThetaI)); //Map vector ThetaI to matrixXd Theta const Map Lambda(Rcpp::as > (LambdaI)); //Map vector ThetaI to matrixXd Theta const Map SNR(Rcpp::as > (SNR_I)); //Map vector ThetaI to matrixXd Theta using namespace std; int N= Y.size(); //number of points double s_f, l, s_n; //hyperparameters / function amplitude, characteristic length, noise amplitude, s_c ArrayXd dF = ArrayXd::Constant( Theta.size(),0); s_f = exp( 2.0* Theta(0)); //exponentiate to make sure they are positive l = exp( Lambda(0) ); s_n = exp( 2.0* Theta(0) )/SNR(0); double rr = .0; MatrixXd K = MatrixXd::Zero(N,N) ; //Covariance K MatrixXd K_x_x = MatrixXd::Zero(N,N) ; //Covariance K helper for (int i=0; i llt_K(K); //compute the Cholesky decomposition of the matrix double logLikelihood = 0.; if (llt_K.info()==Eigen::Success) { // if the Cholesky decomposition exists. 
VectorXd alpha = llt_K.solve(Y); //Get alpha = inv(K)Y double A1 = 0.5*(Y.transpose()*alpha).value(); //Fit term double A2 = llt_K.matrixLLT().diagonal().array().log().sum(); //Complexity term logLikelihood = ( A1 + A2 + (N*.5)*log(2*M_PI)); //Final log-likelihood MatrixXd K1 = MatrixXd::Zero(N,N) ; //Covariance K for s_f MatrixXd K3 = MatrixXd::Identity(N,N)*(2*s_n); //Covariance K for s_n for (int i=0; i // to use sqrt and exp using Eigen::Map; // to map input variable to an existing array of data using Eigen::MatrixXd; // to use MatrixXd using Eigen::VectorXd; // to use VectorXd using Eigen::ArrayXd; // to use ArrayXd using Eigen::LLT; // to do the LLT decomposition using Eigen::Lower; // to get the lower triangular view const Map X(Rcpp::as > (QQ)); //Map matrix QQ to matrixXd X const Map Y(Rcpp::as > (YY)); //Map vector YY to matrixXd Y const Map Theta(Rcpp::as > (ThetaI)); //Map vector ThetaI to matrixXd Theta const Map Lambda(Rcpp::as > (LambdaI)); //Map vector ThetaI to matrixXd Theta const Map S_Noise(Rcpp::as > (S_NoiseI)); //Map vector ThetaI to matrixXd Theta int N= Y.size(); //number of points double s_f, l, s_n; //hyperparameters / function amplitude, characteristic length, noise amplitude, s_c ArrayXd dF = ArrayXd::Constant( Theta.size(),0); s_f = exp( 2.0* Theta(0) ); //exponentiate to make sure they are positive l = exp( Lambda(0) ); s_n = exp( 2.0* S_Noise(0) ); double rr = .0; MatrixXd K = MatrixXd::Zero(N,N) ; //Covariance K MatrixXd K_x_x = MatrixXd::Zero(N,N) ; //Covariance K helper for (int i=0; i llt_K(K); //compute the Cholesky decomposition of the matrix double logLikelihood = 0.; if (llt_K.info()==Eigen::Success) { // if the Cholesky decomposition exists. 
VectorXd alpha = llt_K.solve(Y); //Get alpha = inv(K)Y double A1 = 0.5*(Y.transpose()*alpha).value(); //Fit term double A2 = llt_K.matrixLLT().diagonal().array().log().sum(); //Complexity term logLikelihood = ( A1 + A2 + (N*.5)*log(2*M_PI)); //Final log-likelihood MatrixXd K1 = MatrixXd::Zero(N,N) ; //Covariance K for s_f for (int i=0; i // to use sqrt and exp using Eigen::Map; // to map input variable to an existing array of data using Eigen::MatrixXd; // to use MatrixXd using Eigen::VectorXd; // to use VectorXd using Eigen::ArrayXd; // to use ArrayXd using Eigen::LLT; // to do the LLT decomposition using Eigen::Lower; // to get the lower triangular view const Map X(Rcpp::as > (QQ)); //Map matrix QQ to matrixXd X const Map Y(Rcpp::as > (YY)); //Map vector YY to matrixXd Y const Map Theta(Rcpp::as > (ThetaI)); //Map vector ThetaI to matrixXd Theta const Map S_Func(Rcpp::as > (S_funcI)); //Map vector ThetaI to matrixXd Theta const Map S_Noise(Rcpp::as > (S_NoiseI)); //Map vector ThetaI to matrixXd Theta int N= Y.size(); //number of points double s_f, l, s_n; //hyperparameters / function amplitude, characteristic length, noise amplitude, s_c ArrayXd dF = ArrayXd::Constant( Theta.size(),0); s_f = exp( 2.0* S_Func(0) ); //exponentiate to make sure they are positive l = exp( Theta(0) ); s_n = exp( 2.0* S_Noise(0) ); double rr = .0; MatrixXd K = MatrixXd::Zero(N,N) ; //Covariance K MatrixXd K_x_x = MatrixXd::Zero(N,N) ; //Covariance K helper for (int i=0; i llt_K(K); //compute the Cholesky decomposition of the matrix double logLikelihood = 0.; if (llt_K.info()==Eigen::Success) { // if the Cholesky decomposition exists. 
VectorXd alpha = llt_K.solve(Y); //Get alpha = inv(K)Y double A1 = 0.5*(Y.transpose()*alpha).value(); //Fit term double A2 = llt_K.matrixLLT().diagonal().array().log().sum(); //Complexity term logLikelihood = ( A1 + A2 + (N*.5)*log(2*M_PI)); //Final log-likelihood MatrixXd K2 = MatrixXd::Zero(N,N) ; //Covariance K for l for (int i=0; i // to use sqrt and exp using Eigen::Map; // to map input variable to an existing array of data using Eigen::MatrixXd; // to use MatrixXd using Eigen::VectorXd; // to use VectorXd using Eigen::ArrayXd; // to use ArrayXd using Eigen::LLT; // to do the LLT decomposition using Eigen::Lower; // to get the lower triangular view const Map X(Rcpp::as > (QQ)); //Map matrix QQ to matrixXd X const Map Y(Rcpp::as > (YY)); //Map vector YY to matrixXd Y const Map Theta(Rcpp::as > (ThetaI)); //Map vector ThetaI to matrixXd Theta const Map S_Func(Rcpp::as > (S_funcI)); //Map vector ThetaI to matrixXd Theta const Map Lambda(Rcpp::as > (LambdaI)); //Map vector ThetaI to matrixXd Theta int N= Y.size(); //number of points double s_f, l, s_n; //hyperparameters / function amplitude, characteristic length, noise amplitude, s_c ArrayXd dF = ArrayXd::Constant( Theta.size(),0); s_f = exp( 2.0* S_Func(0) ); //exponentiate to make sure they are positive l = exp( Lambda(0) ); s_n = exp( 2.0* Theta(0) ); double rr = .0; MatrixXd K = MatrixXd::Zero(N,N) ; //Covariance K MatrixXd K_x_x = MatrixXd::Zero(N,N) ; //Covariance K helper for (int i=0; i llt_K(K); //compute the Cholesky decomposition of the matrix double logLikelihood = 0.; if (llt_K.info()==Eigen::Success) { // if the Cholesky decomposition exists. 
VectorXd alpha = llt_K.solve(Y); //Get alpha = inv(K)Y
double A1 = 0.5*(Y.transpose()*alpha).value(); //Fit term
double A2 = llt_K.matrixLLT().diagonal().array().log().sum(); //Complexity term
logLikelihood = ( A1 + A2 + (N*.5)*log(2*M_PI)); //Final log-likelihood
MatrixXd K3 = MatrixXd::Identity(N,N)*(2*s_n); //Covariance derivative w.r.t. the (log) noise hyperparameter
MatrixXd AAinvK = ((alpha * alpha.transpose()) - (K).inverse()) ;
dF(0) = 0.5 * (AAinvK * K3).trace();
} else {
// Cholesky failed (K not positive definite): return a large penalty value
// and gradient so an optimiser backs away from this hyperparameter setting.
return (List::create(Named("F") =1234567890. ,Named("dF") = ArrayXd::Constant( Theta.size(),1000) ));
}
return (List::create(Named("F") = logLikelihood ,Named("dF") = -dF));',
plugin = "RcppEigen")

Log_Likelihood_OU1_SN <- function( Theta, Y, X,SF,L){
  # Thin wrapper around the compiled routine fLogLik_OU1_SN.
  #
  # Theta : hyperparameter(s) the likelihood is evaluated at (noise term;
  #         exponentiated inside the C++ code)
  # Y     : trait values at the tips
  # X     : distance matrix between tips
  # SF    : fixed function-scale hyperparameter -- TODO confirm log scale
  # L     : fixed characteristic-length hyperparameter (Lambda)
  #
  # Returns the full list from fLogLik_OU1_SN: components F (objective value)
  # and dF (its gradient).
  #
  # Coerce everything to double storage so the Eigen::Map constructors in the
  # compiled code see REALSXP data.
  storage.mode(X) <- "double";
  storage.mode(Y) <- "double";
  storage.mode(Theta) <- "double";
  storage.mode(SF) <- "double";
  storage.mode(L) <- "double";
  return( fLogLik_OU1_SN(Theta,Y,X,SF,L) );
}

fLogLik_OU1_SN_Only_F <- function( Theta, Y, X,SF,L){
  # As Log_Likelihood_OU1_SN, but returns only the objective value F.
  # Convenience form for optimisers that take separate fn/gr arguments.
  #
  # Theta : hyperparameter(s) the likelihood is evaluated at
  # Y     : trait values at the tips
  # X     : distance matrix between tips
  # SF    : fixed function-scale hyperparameter
  # L     : fixed characteristic-length hyperparameter (Lambda)
  storage.mode(X) <- "double";
  storage.mode(Y) <- "double";
  storage.mode(Theta) <- "double";
  storage.mode(SF) <- "double";
  storage.mode(L) <- "double";
  return( fLogLik_OU1_SN(Theta,Y,X,SF,L)$F );
}

fLogLik_OU1_SN_Only_dF <- function( Theta, Y, X,SF,L){
  # As Log_Likelihood_OU1_SN, but returns only the gradient dF.
  # Convenience form for optimisers that take separate fn/gr arguments.
  #
  # Theta : hyperparameter(s) the likelihood is evaluated at
  # Y     : trait values at the tips
  # X     : distance matrix between tips
  # SF    : fixed function-scale hyperparameter
  # L     : fixed characteristic-length hyperparameter (Lambda)
  storage.mode(X) <- "double";
  storage.mode(Y) <- "double";
  storage.mode(Theta) <- "double";
  storage.mode(SF) <- "double";
  storage.mode(L) <- "double";
  return( fLogLik_OU1_SN(Theta,Y,X,SF,L)$dF );
}