# Copyright (c) [2024] []
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
"""Docstring in core.
"""
import numpy as np
from numpy.linalg import inv
from grogupy.magnetism import blow_up_orbindx, parse_magnetic_entity
from grogupy.utilities import commutator
def onsite_projection(matrix, idx1, idx2):
"""It produces the slices of a matrix for the on site projection.
The slicing is along the last two axes as these contains the orbital indexing.
Args:
matrix : (..., :, :) np.array_like
Some matrix
idx : np.array_like
The indexes of the orbitals
Returns:
np.array_like
Reduced matrix based on the projection
"""
return matrix[..., idx1, :][..., idx2]
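# A minimal usage sketch (hypothetical variable names): projecting a Green's
# function onto the spin box indices of two magnetic entities. `G` is assumed
# to be an (eset, NO, NO) array and the index arrays come from
# setup_pairs_and_magnetic_entities.
#
#     sbi = magnetic_entities[0]["spin_box_indices"]
#     sbj = magnetic_entities[1]["spin_box_indices"]
#     Gij = onsite_projection(G, sbi, sbj)  # shape (eset, len(sbi), len(sbj))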
def calc_Vu(H, Tu):
"""Calculates the local perturbation in case of a spin rotation.
Args:
H : (NO, NO) np.array_like
Hamiltonian
Tu : (NO, NO) array_like
Rotation around u
Returns:
Vu1 : (NO, NO) np.array_like
First order perturbed matrix
Vu2 : (NO, NO) np.array_like
Second order perturbed matrix
"""
Vu1 = 1j / 2 * commutator(H, Tu) # equation 100
Vu2 = 1 / 8 * commutator(commutator(Tu, H), Tu) # equation 100
return Vu1, Vu2
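# A minimal sketch of how calc_Vu is typically driven (hypothetical names,
# assuming H is a (2*NO, 2*NO) spin box Hamiltonian with the spin index as the
# fast index, consistent with the U matrix built in build_hh_ss): the rotation
# generator Tu is the unit vector u contracted with the Pauli matrices and
# blown up to the full orbital space.
#
#     sigma = np.array([[[0, 1], [1, 0]], [[0, -1j], [1j, 0]], [[1, 0], [0, -1]]])
#     u = np.array([0, 0, 1])  # quantization axis
#     Tu = np.kron(np.eye(NO, dtype=int), np.tensordot(u, sigma, axes=1))
#     Vu1, Vu2 = calc_Vu(H, Tu)  # first and second order perturbations, eq. 100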
def build_hh_ss(dh):
"""It builds the Hamiltonian and Overlap matrix from the sisl.dh class.
It restructures the data in the SPIN BOX representation, where NS is
the number of supercells and NO is the number of orbitals.
Args:
dh : sisl.physics.Hamiltonian
Hamiltonian read in by sisl
Returns:
hh : (NS, NO, NO) np.array_like
Hamiltonian in SPIN BOX representation
ss : (NS, NO, NO) np.array_like
Overlap matrix in SPIN BOX representation
"""
NO = dh.no # shorthand for number of orbitals in the unit cell
# preprocessing Hamiltonian and overlap matrix elements
h11 = dh.tocsr(dh.M11r)
h11 += dh.tocsr(dh.M11i) * 1.0j
h11 = h11.toarray().reshape(NO, dh.n_s, NO).transpose(0, 2, 1).astype("complex128")
h22 = dh.tocsr(dh.M22r)
h22 += dh.tocsr(dh.M22i) * 1.0j
h22 = h22.toarray().reshape(NO, dh.n_s, NO).transpose(0, 2, 1).astype("complex128")
h12 = dh.tocsr(dh.M12r)
h12 += dh.tocsr(dh.M12i) * 1.0j
h12 = h12.toarray().reshape(NO, dh.n_s, NO).transpose(0, 2, 1).astype("complex128")
h21 = dh.tocsr(dh.M21r)
h21 += dh.tocsr(dh.M21i) * 1.0j
h21 = h21.toarray().reshape(NO, dh.n_s, NO).transpose(0, 2, 1).astype("complex128")
sov = (
dh.tocsr(dh.S_idx)
.toarray()
.reshape(NO, dh.n_s, NO)
.transpose(0, 2, 1)
.astype("complex128")
)
# Reorganization of Hamiltonian and overlap matrix elements to SPIN BOX representation
U = np.vstack(
[np.kron(np.eye(NO, dtype=int), [1, 0]), np.kron(np.eye(NO, dtype=int), [0, 1])]
)
# This is the permutation that transforms ud1ud2 to u12d12,
# that is, U transforms FROM SPIN BOX to ORBITAL BOX.
# The inverse transformation U.T takes u12d12 to ud1ud2,
# that is, FROM ORBITAL BOX to SPIN BOX.
# From now on everything is in SPIN BOX!!
hh, ss = np.array(
[
U.T
@ np.block([[h11[:, :, i], h12[:, :, i]], [h21[:, :, i], h22[:, :, i]]])
@ U
for i in range(dh.lattice.nsc.prod())
]
), np.array(
[
U.T
@ np.block(
[[sov[:, :, i], sov[:, :, i] * 0], [sov[:, :, i] * 0, sov[:, :, i]]]
)
@ U
for i in range(dh.lattice.nsc.prod())
]
)
return hh, ss
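# A minimal usage sketch (hypothetical file name, assuming a Siesta calculation
# with spin-orbit coupling so the M11/M22/M12/M21 spin components exist):
#
#     import sisl
#     dh = sisl.get_sile("input.fdf").read_hamiltonian()
#     hh, ss = build_hh_ss(dh)  # one spin box matrix per supercell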
def setup_pairs_and_magnetic_entities(
magnetic_entities, pairs, dh, simulation_parameters
):
"""It creates the complete structure of the dictionaries and fills some basic data.
It creates orbital indexes, spin box indexes, coordinates and tags for magnetic entities.
Furthermore it creates the structures for the energies, the perturbed potentials and
the Greens function calculation. It dose the same for the pairs.
Args:
pairs : dict
Contains the initial pair information
magnetic_entities : dict
Contains the initial magnetic entity information
dh : sisl.physics.Hamiltonian
Hamiltonian read in by sisl
simulation_parameters : dict
A set of parameters from the simulation
Returns:
pairs : dict
Contains the initial information and the complete structure
magnetic_entities : dict
Contains the initial information and the complete structure
"""
# for every site we have to store 3 Green's functions (and the associated _tmp-s), one for each of the 3 reference directions
for mag_ent in magnetic_entities:
parsed = parse_magnetic_entity(dh, **mag_ent) # parse orbital indexes
mag_ent["orbital_indices"] = parsed
mag_ent["spin_box_indices"] = blow_up_orbindx(
parsed
) # calculate spin box indexes
# if orbital is not set use all
if "l" not in mag_ent.keys():
mag_ent["l"] = "all"
# tag creation for one atom
if isinstance(mag_ent["atom"], int):
mag_ent["tags"] = [
f"[{mag_ent['atom']}]{dh.atoms[mag_ent['atom']].tag}({mag_ent['l']})"
]
mag_ent["xyz"] = [dh.xyz[mag_ent["atom"]]]
# tag creation for multiple atoms
if isinstance(mag_ent["atom"], list):
mag_ent["tags"] = []
mag_ent["xyz"] = []
# iterate over atoms
for atom_idx in mag_ent["atom"]:
mag_ent["tags"].append(
f"[{atom_idx}]{dh.atoms[atom_idx].tag}({mag_ent['l']})"
)
mag_ent["xyz"].append(dh.xyz[atom_idx])
# calculate size for Greens function generation
spin_box_shape = len(mag_ent["spin_box_indices"])
# we will store the second order energy derivatives here
mag_ent["energies"] = []
# These will be the perturbed potentials from eq. 100
mag_ent["Vu1"] = [] # so they are independent in memory
mag_ent["Vu2"] = []
mag_ent["Gii"] = [] # Greens function
mag_ent["Gii_tmp"] = [] # Greens function for parallelization
for _ in simulation_parameters["ref_xcf_orientations"]:
# Rotations for every quantization axis
mag_ent["Vu1"].append([])
mag_ent["Vu2"].append([])
# Greens functions for every quantization axis
mag_ent["Gii"].append(
np.zeros(
(simulation_parameters["eset"], spin_box_shape, spin_box_shape),
dtype="complex128",
)
)
mag_ent["Gii_tmp"].append(
np.zeros(
(simulation_parameters["eset"], spin_box_shape, spin_box_shape),
dtype="complex128",
)
)
# for every pair we have to store 2x3 Green's functions (and the associated _tmp-s),
# two for each of the 3 reference directions, because G_ij and G_ji are both needed
for pair in pairs:
# calculate distance
xyz_ai = magnetic_entities[pair["ai"]]["xyz"]
xyz_aj = magnetic_entities[pair["aj"]]["xyz"]
xyz_aj = xyz_aj + pair["Ruc"] @ simulation_parameters["cell"]
pair["dist"] = np.linalg.norm(xyz_ai - xyz_aj)
# calculate size for Greens function generation
spin_box_shape_i = len(magnetic_entities[pair["ai"]]["spin_box_indices"])
spin_box_shape_j = len(magnetic_entities[pair["aj"]]["spin_box_indices"])
# tag generation
pair["tags"] = []
for mag_ent in [magnetic_entities[pair["ai"]], magnetic_entities[pair["aj"]]]:
tag = ""
# get atoms of magnetic entity
atoms_idx = mag_ent["atom"]
orbitals = mag_ent["l"]
# if the magnetic entity contains a single atom
if isinstance(atoms_idx, int):
tag += f"[{atoms_idx}]{dh.atoms[atoms_idx].tag}({orbitals})"
# if the magnetic entity contains more than one atom
if isinstance(atoms_idx, list):
# iterate over atoms
atom_group = "{"
for atom_idx in atoms_idx:
atom_group += f"[{atom_idx}]{dh.atoms[atom_idx].tag}({orbitals})--"
# end {} of the atoms in the magnetic entity
tag += atom_group[:-2] + "}"
pair["tags"].append(tag)
pair["energies"] = [] # we will store the second order energy derivations here
pair["Gij"] = [] # Greens function
pair["Gji"] = []
pair["Gij_tmp"] = [] # Greens function for parallelization
pair["Gji_tmp"] = []
for _ in simulation_parameters["ref_xcf_orientations"]:
# Greens functions for every quantization axis
pair["Gij"].append(
np.zeros(
(simulation_parameters["eset"], spin_box_shape_i, spin_box_shape_j),
dtype="complex128",
)
)
pair["Gij_tmp"].append(
np.zeros(
(simulation_parameters["eset"], spin_box_shape_i, spin_box_shape_j),
dtype="complex128",
)
)
pair["Gji"].append(
np.zeros(
(simulation_parameters["eset"], spin_box_shape_j, spin_box_shape_i),
dtype="complex128",
)
)
pair["Gji_tmp"].append(
np.zeros(
(simulation_parameters["eset"], spin_box_shape_j, spin_box_shape_i),
dtype="complex128",
)
)
return pairs, magnetic_entities
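# A minimal sketch of the expected inputs (hypothetical values): magnetic
# entities select atoms (optionally restricted to given shells through "l"),
# while pairs reference the entities by index together with a supercell
# offset Ruc.
#
#     magnetic_entities = [dict(atom=3, l=2), dict(atom=[4, 5])]
#     pairs = [dict(ai=0, aj=1, Ruc=np.array([0, 0, 0]))]
#     pairs, magnetic_entities = setup_pairs_and_magnetic_entities(
#         magnetic_entities, pairs, dh, simulation_parameters
#     )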
def parallel_Gk(HK, SK, eran, eset):
"""Calculates the Greens function by inversion.
It calculates the Greens function on all the energy levels at the same time.
Args:
HK : (NO, NO), np.array_like
Hamiltonian at a given k point
SK : (NO, NO), np.array_like
Overlap Matrix at a given k point
eran : (eset) np.array_like
Energy sample along the contour
eset : int
Number of energy samples along the contour
Returns:
Gk : (eset, NO, NO), np.array_like
Green's function at a given k point
"""
# Calculates the Greens function on all the energy levels
return inv(SK * eran.reshape(eset, 1, 1) - HK)
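# A minimal usage sketch (hypothetical names): HK and SK would be the Fourier
# transformed spin box Hamiltonian and overlap matrix at a single k point and
# eran the complex energies of the integration contour.
#
#     eset = simulation_parameters["eset"]
#     Gk = parallel_Gk(HK, SK, eran, eset)  # shape (eset, NO, NO)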
def sequential_GK(HK, SK, eran, eset):
"""Calculates the Greens function by inversion.
It calculates the Green's function sequentially, one energy level at a time.
Args:
HK : (NO, NO), np.array_like
Hamiltonian at a given k point
SK : (NO, NO), np.array_like
Overlap Matrix at a given k point
eran : (eset) np.array_like
Energy sample along the contour
eset : int
Number of energy samples along the contour
Returns:
Gk : (eset, NO, NO), np.array_like
Green's function at a given k point
"""
# creates an empty holder
Gk = np.zeros(shape=(eset, HK.shape[0], HK.shape[1]), dtype="complex128")
# fills the holder sequentially with the Green's function at each energy
for j in range(eset):
Gk[j] = inv(SK * eran[j] - HK)
return Gk
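# The two routines are numerically equivalent; sequential_GK trades speed for a
# smaller peak memory footprint, because the (eset, NO, NO) stack of matrices to
# invert is never materialized at once. A quick consistency check (hypothetical
# inputs):
#
#     assert np.allclose(parallel_Gk(HK, SK, eran, eset), sequential_GK(HK, SK, eran, eset))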
def remove_clutter_for_save(pairs, magnetic_entities):
"""Removes unimportant data from the dictionaries.
It is used before saving to throw away data that
is not needed for post processing.
Args:
pairs : dict
Contains all the pair information
magnetic_entities : dict
Contains all the magnetic entity information
Returns:
pairs : dict
Contains all the reduced pair information
magnetic_entities : dict
Contains all the reduced magnetic entity information
"""
# remove clutter from magnetic entities and pair information
for pair in pairs:
del pair["Gij"]
del pair["Gij_tmp"]
del pair["Gji"]
del pair["Gji_tmp"]
for mag_ent in magnetic_entities:
del mag_ent["Gii"]
del mag_ent["Gii_tmp"]
del mag_ent["Vu1"]
del mag_ent["Vu2"]
return pairs, magnetic_entities
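# A minimal sketch of the intended use (hypothetical file name): strip the bulky
# Green's function and perturbation arrays before serializing the results,
# e.g. with pickle.
#
#     import pickle
#     pairs, magnetic_entities = remove_clutter_for_save(pairs, magnetic_entities)
#     with open("grogupy_results.pkl", "wb") as f:
#         pickle.dump(dict(pairs=pairs, magnetic_entities=magnetic_entities), f)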