Commit 8b94124c authored by Corson N. Areshenkoff's avatar Corson N. Areshenkoff

Fix spd.pca kernel

parent 5509af36
......@@ -12,12 +12,12 @@ spddot <- function(sigma = 1, method = 'logeuclidean'){
# Compute kernel
if (is(x,"vector") && is(y,"vector")){
if (length(x) != length(y)){
stop("number of dimension must be the same on both data points")
stop("number of dimensions must be the same for both data points")
}
d <- spd.dist(spd.vectorize(x),
spd.vectorize(y),
method = method)
return(exp(-sigma*d))
return(exp(-sigma * d^2))
}
}
return(new("spdkernel", .Data = rval, kpar = list(sigma = sigma, method = method)))
......
#' Kernel pca for SPD matrices
#'
#' Function performs kernel principal component analysis on a set of symmetric,
#' positive-definite matrices using an rbf kernel: \code{exp(sigma * d(i,j))},
#' positive-definite matrices using an rbf kernel: \code{exp(-sigma * d(i,j)^2)},
#' where \code{d(i,j)} is a distance function implemented by \code{spd.dist}.
#' This function is more or less a wrapper around the kernlab function \code{kpca}.
#'
......@@ -12,12 +12,12 @@
#' @param ... Further arguments for kernlab::kpca.
#' @details Function performs kpca using a rbf kernel, where the distance between
#' two inputs is given by \code{method}. Note that only "euclidean" and "logeuclidean"
#' have been proven to give rise to positive-definite kernels, although any distance
#' have been proven to give rise to positive-definite kernels for all values of sigma, although any distance
#' implemented in \code{spd.dist} may be used. Anecdotally, \code{method = "riemannian"}
#' often achieves superior performance.
#' @return An S4 object of class kpca.
spd.pca <- function(x, method = 'logeuclidean', sigma = 1, ...){
spd.pca <- function(x, method = 'euclidean', sigma = 1, ...){
# Check input
if (!'spd.list' %in% input.type(x)){
......
......@@ -4,17 +4,22 @@
\alias{spd.heatmap}
\title{Heatmap of an SPD matrix}
\usage{
spd.heatmap(V, labs = NULL, col.scale = NULL, ...)
spd.heatmap(V, labs = NULL, ...)
}
\arguments{
\item{V}{A numeric matrix of autoregressive coefficients with targets as rows and sources as columns.}
\item{V}{A numeric matrix of autoregressive coefficients with targets as
rows and sources as columns.}
\item{labs}{An optional character vector of labels. If NULL, the row and column names of V are used.}
\item{labs}{An optional character vector of labels. If NULL, the row and
column names of V are used.}
\item{...}{Additional arguments to levelplot}
}
\value{
NA
}
\description{
Function plots a heatmap of vector autoregressive coefficients. Currently,
plotting options are limited to the default settings.
Function plots a heatmap of a matrix. Currently, the function is not particularly
featureful, and any customization should be done by passing additional arguments
to levelplot through \code{...}.
}
......@@ -21,14 +21,14 @@ An S4 object of class kpca.
}
\description{
Function performs kernel principal component analysis on a set of symmetric,
positive-definite matrices using an rbf kernel: \code{exp(sigma * d(i,j))},
positive-definite matrices using an rbf kernel: \code{exp(-sigma * d(i,j)^2)},
where \code{d(i,j)} is a distance function implemented by \code{spd.dist}.
This function is more or less a wrapper around the kernlab function \code{kpca}.
}
\details{
Function performs kpca using a rbf kernel, where the distance between
two inputs is given by \code{method}. Note that only "euclidean" and "logeuclidean"
have been proven to give rise to positive-definite kernels, although any distance
have been proven to give rise to positive-definite kernels for all values of sigma, although any distance
implemented in \code{spd.dist} may be used. Anecdotally, \code{method = "riemannian"}
often achieves superior performance.
}
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment