Compare commits

...

2 Commits

@ -1,47 +1,44 @@
InputFile /Users/danielpozsar/Downloads/nojij/Fe3GeTe2/monolayer/soc/lat3_791/Fe3GeTe2.fdf
OutputFile ./Fe3GeTe2_notebook
InputFile /Users/danielpozsar/Downloads/nojij/Fe3GeTe2/monolayer/soc/lat3_791/Fe3GeTe2.fdf # comment test
OutputFile ./Fe3GeTe2_notebook # comment test
ScfOrientation [ 0 0 1 ]
ScfXcfOrientation [ 0 0 1 ] # comment test
# comment test
%block XCF_Rotation # comment test
1 0 0 0 1 0 0 0 1 # comment test
0 1 0 1 0 0 0 0 1 # comment test
0 0 1 1 0 0 0 1 0 # comment test
%endblock XCFRotation # comment test
%block XCF_Rotation
1 0 0 0 1 0 0 0 1
0 1 0 1 0 0 0 0 1
0 0 1 1 0 0 0 1 0
%endblock XCFRotation
%block MagneticEntites # atom index and orbital index
3 2
4 2
5 2
%endblock MagneticEntites
%block MagneticEntites
Claster 4 5 # this is a list of atoms
%block MagneticEntities # comment test
Cluster 4 5 # this is a list of atoms
AtomShell 3 2 # this is one atom and shell index
AtomShell 4 2 # this is one atom and shell index
AtomShell 5 2 # this is one atom and shell index
AtomOrbital 3 # this is one atom and orbital index
ExplicitOrbital # this is a slice of orbital index
%endblock MagneticEntites
Orbitals # this is a slice of orbital index
%endblock MagneticEntities
%Pairsblock # MagneticEntites index ai and aj, supercell offset
%block Pairs # MagneticEntities index ai and aj, supercell offset
0 1 0 0 0
0 2 0 0 0
1 2 0 0 0
0 2 -1 -1 0
0 2 -1 -1 0 # comment test
1 2 -1 -1 0
0 2 -1 0 0
1 2 -1 0 0
1 2 -2 0 0
1 2 -3 0 0
%endPairsblock
1 2 -3 0 0 # comment test
%endblock Pairs
INTEGRAL.Kset 3
INTEGRAL.Kdirs xy
INTEGRAL.Ebot -13
INTEGRAL.Eset 300
INTEGRAL.Esetp 1000
INTEGRAL.Ebot -13 # comment test
INTEGRAL.Eset 300 # comment test
INTEGRAL.Esetp 1000 # comment test
GREEN.ParallelSolver False # comment test
PadawanMode True # comment test

@ -18,16 +18,18 @@
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
import os
import warnings
from sys import getsizeof
from timeit import default_timer as timer
# use numpy number of threads one
from threadpoolctl import threadpool_info, threadpool_limits
try:
from threadpoolctl import threadpool_info, threadpool_limits
user_api = threadpool_info()[0]["user_api"]
threadpool_limits(limits=1, user_api=user_api)
user_api = threadpool_info()[0]["user_api"]
threadpool_limits(limits=1, user_api=user_api)
except Exception:
print("Warning: threadpoolctl could not make numpy use single thread!")
import numpy as np
import sisl
@ -45,14 +47,6 @@ from grogupy import *
def main():
# constrain numpy in parallel run
os.environ["OMP_NUM_THREADS"] = "1" # export OMP_NUM_THREADS=1
os.environ["OPENBLAS_NUM_THREADS"] = "1" # export OPENBLAS_NUM_THREADS=1
os.environ["MKL_NUM_THREADS"] = "1" # export MKL_NUM_THREADS=1
os.environ["VECLIB_MAXIMUM_THREADS"] = "1" # export VECLIB_MAXIMUM_THREADS=1
os.environ["NUMEXPR_NUM_THREADS"] = "1" # export NUMEXPR_NUM_THREADS=1
# runtime information
times = dict()
times["start_time"] = timer()

@ -7,26 +7,38 @@
"outputs": [],
"source": [
"# use numpy number of threads one\n",
"# from threadpoolctl import threadpool_info, threadpool_limits\n",
"# print(threadpool_info())\n",
"# from threadpoolctl import threadpool_info, threadpool_limits\n",
"# user_api = threadpool_info()[0][\"user_api\"]\n",
"# threadpool_limits(limits=1, user_api=user_api)"
"# threadpool_limits(limits=1, user_api=user_api)\n",
"# print(threadpool_info())"
]
},
{
"cell_type": "code",
"execution_count": null,
"execution_count": 2,
"metadata": {},
"outputs": [
{
"ename": "ImportError",
"evalue": "attempted relative import with no known parent package",
"output_type": "error",
"traceback": [
"\u001b[0;31m---------------------------------------------------------------------------\u001b[0m",
"\u001b[0;31mImportError\u001b[0m Traceback (most recent call last)",
"Cell \u001b[0;32mIn[6], line 5\u001b[0m\n\u001b[1;32m 2\u001b[0m \u001b[38;5;28;01mfrom\u001b[39;00m \u001b[38;5;21;01mtimeit\u001b[39;00m \u001b[38;5;28;01mimport\u001b[39;00m default_timer \u001b[38;5;28;01mas\u001b[39;00m timer\n\u001b[1;32m 4\u001b[0m \u001b[38;5;28;01mimport\u001b[39;00m \u001b[38;5;21;01msisl\u001b[39;00m\n\u001b[0;32m----> 5\u001b[0m \u001b[38;5;28;01mfrom\u001b[39;00m \u001b[38;5;21;01m.\u001b[39;00m\u001b[38;5;21;01msrc\u001b[39;00m\u001b[38;5;21;01m.\u001b[39;00m\u001b[38;5;21;01mgrogupy\u001b[39;00m \u001b[38;5;28;01mimport\u001b[39;00m \u001b[38;5;241m*\u001b[39m\n\u001b[1;32m 6\u001b[0m \u001b[38;5;28;01mfrom\u001b[39;00m \u001b[38;5;21;01mmpi4py\u001b[39;00m \u001b[38;5;28;01mimport\u001b[39;00m MPI\n\u001b[1;32m 7\u001b[0m \u001b[38;5;28;01mimport\u001b[39;00m \u001b[38;5;21;01mwarnings\u001b[39;00m\n",
"\u001b[0;31mImportError\u001b[0m: attempted relative import with no known parent package"
"name": "stderr",
"output_type": "stream",
"text": [
"info:0: SislInfo: Please install tqdm (pip install tqdm) for better looking progress bars\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"0.14.3\n",
"1.24.4\n"
]
},
{
"name": "stderr",
"output_type": "stream",
"text": [
"[Daniels-Air:00184] shmem: mmap: an error occurred while determining whether or not /var/folders/yh/dx7xl94n3g52ts3td8qcxjcc0000gn/T//ompi.Daniels-Air.501/jf.0/273678336/sm_segment.Daniels-Air.501.10500000.0 could be created.\n"
]
}
],
@ -60,42 +72,96 @@
},
{
"cell_type": "code",
"execution_count": 3,
"execution_count": null,
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"[{'o': array([1., 0., 0.]),\n",
" 'vw': array([[0., 1., 0.],\n",
" [0., 0., 1.]])},\n",
" {'o': array([0., 1., 0.]),\n",
" 'vw': array([[1., 0., 0.],\n",
" [0., 0., 1.]])},\n",
" {'o': array([0., 0., 1.]),\n",
" 'vw': array([[1., 0., 0.],\n",
" [0., 1., 0.]])}]"
"ename": "TypeError",
"evalue": "'NoneType' object is not iterable",
"output_type": "error",
"traceback": [
"\u001b[0;31m---------------------------------------------------------------------------\u001b[0m",
"\u001b[0;31mTypeError\u001b[0m Traceback (most recent call last)",
"Cell \u001b[0;32mIn[58], line 28\u001b[0m\n\u001b[1;32m 26\u001b[0m magnetic_entities \u001b[38;5;241m=\u001b[39m fdf\u001b[38;5;241m.\u001b[39mget(\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mMagneticEntities\u001b[39m\u001b[38;5;124m\"\u001b[39m)\n\u001b[1;32m 27\u001b[0m fdf_magnetic_entities \u001b[38;5;241m=\u001b[39m []\n\u001b[0;32m---> 28\u001b[0m \u001b[38;5;28;01mfor\u001b[39;00m mag_ent \u001b[38;5;129;01min\u001b[39;00m magnetic_entities:\n\u001b[1;32m 29\u001b[0m row \u001b[38;5;241m=\u001b[39m mag_ent\u001b[38;5;241m.\u001b[39msplit()\n\u001b[1;32m 30\u001b[0m dat \u001b[38;5;241m=\u001b[39m []\n",
"\u001b[0;31mTypeError\u001b[0m: 'NoneType' object is not iterable"
]
},
"execution_count": 3,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"# open fdf input\n",
"fdf = sisl.io.fdfSileSiesta(\"input.fdf\")\n",
"\n",
"fdf_parameters = dict()\n",
"fdf_parameters[\"infile\"] = fdf.get(\"InputFile\")\n",
"fdf_parameters[\"outfile\"] = fdf.get(\"OutputFile\")\n",
"fdf_parameters[\"scf_xcf_orientation\"] = np.array(fdf.get(\"ScfXcfOrientation\"))\n",
"\n",
"rotations = fdf.get(\"XCF_Rotation\")\n",
"my_rot = []\n",
"for rot in rotations:\n",
" dat = np.array(rot.split(), dtype=float)\n",
"if rotations is None:\n",
" pass\n",
"else:\n",
" fdf_rot = []\n",
" for rot in rotations:\n",
" dat = np.array(rot.split()[:9], dtype=float)\n",
" o = dat[:3]\n",
" vw = dat[3:]\n",
" vw = vw.reshape(2, 3)\n",
" my_rot.append(dict(o=o, vw=vw))\n",
"\n",
"my_rot"
" fdf_rot.append(dict(o=o, vw=vw))\n",
" fdf_parameters[\"ref_xcf_orientations\"] = fdf_rot\n",
"\n",
"pairs = fdf.get(\"Pairs\")\n",
"if pairs in None:\n",
" pass\n",
"else:\n",
" fdf_pairs = []\n",
" for fdf_pair in pairs:\n",
" dat = np.array(fdf_pair.split()[:5], dtype=int)\n",
" my_pair = dict(ai=dat[0], aj=dat[1], Ruc=np.array(dat[2:]))\n",
" fdf_pairs.append(my_pair)\n",
"\n",
"magnetic_entities = fdf.get(\"MagneticEntities\")\n",
"if magnetic_entities is None:\n",
" pass\n",
"else:\n",
" fdf_magnetic_entities = []\n",
" for mag_ent in magnetic_entities:\n",
" row = mag_ent.split()\n",
" dat = []\n",
" for string in row:\n",
" if string.find(\"#\") != -1:\n",
" break\n",
" dat.append(string)\n",
" if dat[0] == \"Cluster\" or \"cluster\":\n",
" fdf_magnetic_entities.append(dict(atom=dat[1:]))\n",
" continue\n",
" elif dat[0] == \"AtomShell\" or \"Atomshell\" or \"atomShell\" or \"atomshell\":\n",
" fdf_magnetic_entities.append(dict(atom=dat[1], l=dat[2:]))\n",
" continue\n",
" elif dat[0] == \"AtomOrbital\" or \"Atomorbital\" or \"tomOrbital\" or \"atomorbital\":\n",
" continue\n",
" elif dat[0] == \"Orbitals\" or \"orbitals\":\n",
" continue\n",
" else:\n",
" print(\"Unrecognizable magnetic entity in fdf\")\n",
" break\n",
"\n",
"\n",
"fdf_parameters[\"kset\"] = fdf.get(\"INTEGRAL.Kset\")\n",
"fdf_parameters[\"kdirs\"] = fdf.get(\"INTEGRAL.Kdirs\")\n",
"fdf_parameters[\"ebot\"] = fdf.get(\"INTEGRAL.Ebot\")\n",
"fdf_parameters[\"eset\"] = fdf.get(\"INTEGRAL.Eset\")\n",
"fdf_parameters[\"esetp\"] = fdf.get(\"INTEGRAL.Esetp\")\n",
"fdf_parameters[\"parallel_solver_for_Gk\"] = fdf.get(\"GREEN.ParallelSolver\")\n",
"fdf_parameters[\"padawan_mode\"] = fdf.get(\"PadawanMode\")"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": []
},
{
"cell_type": "code",
"execution_count": 4,

Loading…
Cancel
Save