### Implemented a variety of kernels and HRBF evaluation

Implemented polynomial kernels x^2, x^3, x^4, and x^5,
as well as the Gaussian kernel and finite-support CSRBF kernels.
parents
Cargo.toml 0 → 100644
 [package] name = "hrbf" version = "0.1.0" authors = ["Egor Larionov <[email protected]>"] [dependencies] num-traits = "0.1" nalgebra = "*" lazy_static = "*"
src/hrbf.rs 0 → 100644
 use nalgebra::{Point3, Vector4, Vector3, dot, norm}; use kernel::Kernel; use Real; /// HRBF specific kernel type. In general, we can assign a unique kernel to each hrbf site, or we /// can use the same kernel for all points. This corresponds to Local and Global kernel types /// respectively. #[derive(Clone, Debug)] pub enum KernelType { Local(Vec), // each site has its own kernel Global(K), // same kernel for all sites } impl ::std::ops::Index for KernelType { type Output = K; fn index(&self, index: usize) -> &K { match *self { KernelType::Local(ref ks) => &ks[index], KernelType::Global(ref k) => k, } } } #[derive(Clone, Debug)] pub struct HRBF where T: Real, K: Kernel { sites: Vec>, normals: Vec>, betas: Vec>, kernel: KernelType, } impl Default for HRBF where T: Real, K: Kernel { fn default() -> Self { HRBF::new() } } impl HRBF where T: Real, K: Kernel { pub fn new() -> Self { HRBF { sites: Vec::new(), normals: Vec::new(), betas: Vec::new(), kernel: KernelType::Global(K::default()), } } /// Given a vector `x` and its norm `l`, return the gradient of the kernel evaluated /// at `l` wrt `x`. `j` denotes the site at which the kernel is evaluated. fn grad_phi(&self, x: Vector3, l: T, j: usize) -> Vector3 { x*self.kernel[j].df_l(l) } /// Evaluate the HRBF at point `p`. pub fn eval(&self, p: Point3) -> T { self.betas.iter() .enumerate() .fold(T::zero(), |sum, (j, b)| sum + dot(&self.eval_block(p, j),b)) } /// Helper function for `eval`. fn eval_block(&self, p: Point3, j: usize) -> Vector4 { let x = p - self.sites[j]; let l = norm(&x); let w = self.kernel[j].f(l); let g = self.grad_phi(x, l, j); Vector4::new(w, g, g, g) } ///// Gradient of the hrbf function at point `p`. //pub fn grad(&self p: Point3) -> T { // sites.iter() // .zip(betas.iter()) // .fold(T::zero(), |sum, (c, b)| sum + grad_block(p - c)*b) //} ///// Helper function for `grad`. Returns a 3x4 matrix that gives the gradient of the hrbf when ///// multiplied by the corresponding coefficients. 
//fn grad_block(&self, x: Vector3) -> Matrix3x4 { //} ///// Hessian block product. To avoid dealing with high order tensors, we expose the product of ///// the hessian block with an arbitrary Vector3. //pub fn hess_block_prod(&self, p: Point3, lambda: Vector3) -> T { // sites.iter() // .zip(betas.iter()) // .fold(T::zero(), |sum, (c, b)| -> sum + dot(grad_block(p - c),b)) //} //pub fn hess_block(&self, x: Vector3) -> T { //} }
src/kernel.rs 0 → 100644
This diff is collapsed.
src/lib.rs 0 → 100644
 extern crate num_traits; extern crate nalgebra; pub mod hrbf; pub mod kernel; pub trait Real: nalgebra::Real + num_traits::Float + ::std::fmt::Debug {} #[cfg(test)] mod tests { #[test] fn it_works() { assert_eq!(2 + 2, 4); } }
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!