Source code for torchpme.potentials.coulomb

from typing import Optional

import torch

from .potential import Potential


class CoulombPotential(Potential):
    """
    Smoothed electrostatic Coulomb potential :math:`1/r`.

    Here :math:`r` is the inter-particle distance. It can be used to compute:

    1. the full :math:`1/r` potential
    2. its short-range (SR) and long-range (LR) parts, the split being determined
       by a length-scale parameter (called "smearing" in the code)
    3. the Fourier transform of the LR part

    :param smearing: float or torch.Tensor containing the parameter often called
        "sigma" in publications, which determines the length-scale at which the
        short-range and long-range parts of the naive :math:`1/r` potential are
        separated. The smearing parameter corresponds to the "width" of a Gaussian
        smearing of the particle density.
    :param exclusion_radius: A length scale that defines a *local environment*
        within which the potential should be smoothly zeroed out, as it will be
        described by a separate model.
    """

    def __init__(
        self,
        smearing: Optional[float] = None,
        exclusion_radius: Optional[float] = None,
    ):
        super().__init__(smearing, exclusion_radius)
    def from_dist(self, dist: torch.Tensor) -> torch.Tensor:
        """
        Full :math:`1/r` potential as a function of :math:`r`.

        :param dist: torch.tensor containing the distances at which the potential
            is to be evaluated.
        """
        return 1.0 / dist
    def lr_from_dist(self, dist: torch.Tensor) -> torch.Tensor:
        """
        Long range of the range-separated :math:`1/r` potential.

        Used to subtract out the interior contributions after computing the LR part
        in reciprocal (Fourier) space.

        :param dist: torch.tensor containing the distances at which the potential
            is to be evaluated.
        """
        if self.smearing is None:
            raise ValueError(
                "Cannot compute long-range contribution without specifying `smearing`."
            )

        return torch.erf(dist / self.smearing / 2.0**0.5) / dist
    def lr_from_k_sq(self, k_sq: torch.Tensor) -> torch.Tensor:
        r"""
        Fourier transform of the LR part of the potential in terms of :math:`\mathbf{k^2}`.

        :param k_sq: torch.tensor containing the squared lengths (2-norms) of the
            wave vectors k at which the Fourier-transformed potential is to be
            evaluated
        """
        if self.smearing is None:
            raise ValueError(
                "Cannot compute long-range kernel without specifying `smearing`."
            )

        # avoid NaNs in backward, see
        # https://github.com/jax-ml/jax/issues/1052
        # https://github.com/tensorflow/probability/blob/main/discussion/where-nan.pdf
        masked = torch.where(k_sq == 0, 1.0, k_sq)
        return torch.where(
            k_sq == 0,
            0.0,
            4 * torch.pi * torch.exp(-0.5 * self.smearing**2 * masked) / masked,
        )
    def self_contribution(self) -> torch.Tensor:
        # self-correction for 1/r potential
        if self.smearing is None:
            raise ValueError(
                "Cannot compute self contribution without specifying `smearing`."
            )
        return (2 / torch.pi) ** 0.5 / self.smearing
    def background_correction(self) -> torch.Tensor:
        # "charge neutrality" correction for 1/r potential
        if self.smearing is None:
            raise ValueError(
                "Cannot compute background correction without specifying `smearing`."
            )
        return torch.pi * self.smearing**2
    self_contribution.__doc__ = Potential.self_contribution.__doc__
    background_correction.__doc__ = Potential.background_correction.__doc__
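
# ---------------------------------------------------------------------------
# Usage sketch (illustrative addition, not part of the torchpme source above):
# assuming CoulombPotential is importable from this module, the snippet below
# evaluates the full potential, its long-range part and the reciprocal-space
# kernel, and checks the range separation 1/r = SR + LR, where
# SR = full - LR = erfc(r / (smearing * sqrt(2))) / r.
# ---------------------------------------------------------------------------
import torch
from torchpme.potentials.coulomb import CoulombPotential

potential = CoulombPotential(smearing=1.0)

dist = torch.tensor([0.5, 1.0, 2.0])
full = potential.from_dist(dist)   # 1/r
lr = potential.lr_from_dist(dist)  # erf(r / (smearing * sqrt(2))) / r
sr = full - lr                     # short-range remainder
torch.testing.assert_close(sr + lr, full)

k_sq = torch.tensor([0.0, 1.0, 4.0])
# 4 * pi * exp(-0.5 * smearing**2 * k**2) / k**2, set to zero at k = 0
lr_k = potential.lr_from_k_sq(k_sq)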