Commit 0c070ac9 authored by Adam Fekete

cleanup

parent c8d6acd2
%% Cell type:code id:realistic-humanity tags:
``` python
from abiflows.database.mongoengine.utils import DatabaseData
from pymatgen.core import Structure
from abiflows.database.mongoengine.abinit_results import RelaxResult, DfptResult
from jupyter_jsmol.pymatgen import quick_view
```
%% Cell type:code id:martial-football tags:
``` python
%matplotlib notebook
```
%% Cell type:code id:altered-outdoors tags:
``` python
db = DatabaseData(host='mongo', port=27017, collection='relax',
                  database='fireworks', username=None, password=None)
db.connect_mongoengine()
```
%% Cell type:code id:proprietary-humidity tags:
``` python
with db.switch_collection(RelaxResult) as RelaxResult:
    relaxed_results = RelaxResult.objects(mp_id="mp-149")
    relaxed = relaxed_results[0]
```
%% Cell type:code id:useful-indicator tags:
``` python
# load the relaxed Structure
structure = Structure.from_dict(relaxed.abinit_output.structure)
```
%% Cell type:code id:going-agent tags:
``` python
quick_view(structure)
```
%% Cell type:code id:suitable-czech tags:
``` python
gsr = relaxed.abinit_output.gsr.abiopen()
print(gsr)
```
%% Cell type:code id:swedish-poison tags:
``` python
fig = gsr.plot_bz()
```
%% Cell type:code id:proved-endorsement tags:
``` python
db.collection = "phonon"
with db.switch_collection(DfptResult) as DfptResult:
    ph_results = DfptResult.objects()
    ph_res = ph_results[0]
```
%% Cell type:code id:narrative-religion tags:
``` python
ddb = ph_res.abinit_output.ddb.abiopen()
```
%% Cell type:code id:antique-ceremony tags:
``` python
phb = ddb.anaget_phmodes_at_qpoint([0,0,0])
```
%% Cell type:code id:cutting-hopkins tags:
``` python
phb.phfreqs
```
%% Cell type:code id:ambient-glossary tags:
``` python
phbst, phdos = ddb.anaget_phbst_and_phdos_files(line_density=10)
```
%% Cell type:code id:electric-narrative tags:
``` python
fig = phbst.plot_phbands()
```
%% Cell type:code id:comparative-pacific tags:
``` python
phbst.phbands.create_phononwebsite_json("/home/jovyan/Si_phononwebsite.json")
```
%% Cell type:markdown id:electric-second tags:
Download the Si_phononwebsite.json file to your local machine and upload it to the following website to display the atomic displacements associated with the phonon modes: http://henriquemiranda.github.io/phononwebsite/phonon.html
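%% Cell type:markdown id:json-download tags:
If you are working in Jupyter, one convenient way to grab the file is to expose it as a download link directly from the notebook. This is only a convenience sketch using IPython's `FileLink`; it is not part of the original workflow.
%% Cell type:code id:json-download-link tags:
``` python
# Convenience sketch (assumption): create a clickable download link for the generated JSON file.
from IPython.display import FileLink
FileLink("/home/jovyan/Si_phononwebsite.json")
```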
%% Cell type:markdown id:exempt-cloud tags:
# Abinit - quickstart
https://abinit.github.io/abipy/flow_gallery/run_sic_relax.html#sphx-glr-flow-gallery-run-sic-relax-py
%% Cell type:code id:necessary-budapest tags:
``` python
!abicheck.py
```
%% Cell type:markdown id:chronic-invention tags:
## Relaxation Flow
This example shows how to build a very simple Flow for the structural relaxation of SiC. One could use a similar logic to perform multiple relaxations with different input parameters…
%% Cell type:code id:handmade-dividend tags:
``` python
import abipy.abilab as abilab
import abipy.data as data
import abipy.flowtk as flowtk
def build_flow(workdir):
    pseudos = data.pseudos("14si.pspnc", "6c.pspnc")
    structure = data.structure_from_ucell("SiC")

    # Initialize the input
    relax_inp = abilab.AbinitInput(structure, pseudos=pseudos)

    # Set variables
    relax_inp.set_vars(
        ecut=20,
        paral_kgb=1,
        iomode=3,
        # Relaxation part
        ionmov=2,
        optcell=1,
        strfact=100,
        ecutsm=0.5,    # Important!
        dilatmx=1.15,  # Important!
        toldff=1e-6,
        tolmxf=1e-5,
        ntime=100,
    )

    # K-points sampling
    shiftk = [
        [0.5, 0.5, 0.5],
        [0.5, 0.0, 0.0],
        [0.0, 0.5, 0.0],
        [0.0, 0.0, 0.5],
    ]
    relax_inp.set_kmesh(ngkpt=[4, 4, 4], shiftk=shiftk)

    # Initialize the flow
    flow = flowtk.Flow(workdir)

    # Register the task.
    flow.register_relax_task(relax_inp)

    return flow
```
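%% Cell type:markdown id:multiple-relaxations tags:
As noted above, the same logic can be reused to register several relaxations with different input parameters. The sketch below is an assumption built on top of the cell above (it relies on abipy's `AbinitInput.new_with_vars`); it is not part of the original example.
%% Cell type:code id:multiple-relaxations-sketch tags:
``` python
# Sketch (assumption): one flow containing several relax tasks with different cutoff energies.
def build_multi_flow(workdir, ecut_list=(15, 20, 25)):
    pseudos = data.pseudos("14si.pspnc", "6c.pspnc")
    structure = data.structure_from_ucell("SiC")
    base_inp = abilab.AbinitInput(structure, pseudos=pseudos)
    base_inp.set_vars(ionmov=2, optcell=1, strfact=100, ecutsm=0.5, dilatmx=1.15,
                      toldff=1e-6, tolmxf=1e-5, ntime=100)
    base_inp.set_kmesh(ngkpt=[4, 4, 4], shiftk=[[0.5, 0.5, 0.5]])
    flow = flowtk.Flow(workdir)
    for ecut in ecut_list:
        # new_with_vars returns a copy of the input with the given variables replaced
        flow.register_relax_task(base_inp.new_with_vars(ecut=ecut))
    return flow
```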
%% Cell type:markdown id:related-music tags:
Build and run the flow:
%% Cell type:code id:steady-fellowship tags:
``` python
flow = build_flow('flow_sic_relax')
flow.rmtree()
scheduler = flow.make_scheduler()
scheduler.start()
```
%% Cell type:markdown id:confirmed-space tags:
To visualize the evolution of the lattice parameters during the structural relaxation use:
%% Cell type:code id:raising-valuable tags:
``` python
abifile = abilab.abiopen('flow_sic_relax/w0/t0/outdata/out_HIST.nc')
abifile.plot();
```
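%% Cell type:markdown id:hist-structures tags:
The HIST file also stores the structures along the relaxation path. As an optional check (assuming abipy's `HistFile` interface with `initial_structure`/`final_structure`), the starting and relaxed lattices can be compared:
%% Cell type:code id:hist-structures-check tags:
``` python
# Optional check (sketch): compare the initial and final structures stored in the HIST file.
print(abifile.initial_structure)
print(abifile.final_structure)
```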
%% Cell type:code id:changing-bennett tags:
``` python
```
%% Cell type:code id:passing-mineral tags:
``` python
```
%% Cell type:code id:prescribed-mexico tags:
``` python
```
%% Cell type:markdown id:metallic-supervisor tags:
# Fireworks (Five-minute quickstart)
https://materialsproject.github.io/fireworks/quickstart.html
%% Cell type:markdown id:australian-firewall tags:
Testing the MongoDB connection:
%% Cell type:code id:referenced-routine tags:
``` python
import pymongo
client = pymongo.MongoClient(
    host='mongo',
    username='',
    password='',
    authSource='admin',
)
client.list_database_names()
```
%% Cell type:markdown id:gorgeous-curve tags:
## Start FireWorks
If not already running, start MongoDB. If your MongoDB is hosted and maintained externally, configure the connection with `lpad init` (see the commented-out cell below):
%% Cell type:code id:electronic-essex tags:
``` python
# !lpad init
```
%% Cell type:code id:increasing-context tags:
``` python
!cat ~/.fireworks/my_launchpad.yaml
```
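%% Cell type:markdown id:launchpad-config tags:
If `lpad init` has not been run, the configuration file can also be written by hand. The cell below is only a sketch of a minimal `my_launchpad.yaml` matching the `mongo` host used in this notebook; the fields and values are assumptions to adapt to your setup, and the write is left commented out.
%% Cell type:code id:launchpad-config-sketch tags:
``` python
# Sketch (assumption): a minimal my_launchpad.yaml pointing at the 'mongo' container.
from pathlib import Path

cfg = """\
host: mongo
port: 27017
name: fireworks
username: null
password: null
"""
path = Path.home() / ".fireworks" / "my_launchpad.yaml"
print(cfg)
# path.parent.mkdir(parents=True, exist_ok=True)
# path.write_text(cfg)  # uncomment to actually write the file
```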
%% Cell type:markdown id:white-owner tags:
Reset/Initialize the FireWorks database (the LaunchPad):
%% Cell type:code id:entertaining-scotland tags:
``` python
! echo 'y' | lpad reset
```
%% Cell type:markdown id:numerous-regard tags:
## Add a Workflow
There are many ways to add Workflows to the database, including a Python API. Let’s start with an extremely simple example that can be added via the command line:
%% Cell type:code id:square-primary tags:
``` python
!lpad add_scripts 'echo "hello"' 'echo "goodbye"' -n hello goodbye -w test_workflow
```
%% Cell type:markdown id:manufactured-bernard tags:
This added a two-job linear workflow. The first job prints hello to the command line, and the second job prints goodbye. We gave (optional) names to each step, “hello” and “goodbye”, and gave the workflow as a whole the (optional) name “test_workflow”.
%% Cell type:markdown id:mounted-audio tags:
Let’s look at our test workflow:
%% Cell type:code id:identical-international tags:
``` python
!lpad get_wflows -n test_workflow -d more
```
%% Cell type:markdown id:fuzzy-intellectual tags:
We get back basic information on our workflows. The second step “goodbye” is waiting for the first one to complete; it is not ready to run because it depends on the first job.
%% Cell type:markdown id:useful-reviewer tags:
## Run all Workflows
You can run jobs one at a time (“singleshot”) or all at once (“rapidfire”). Let’s run all jobs:
%% Cell type:code id:excess-chosen tags:
``` python
!rlaunch --silencer rapidfire
```
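%% Cell type:markdown id:singleshot-alternative tags:
For comparison with the rapidfire run above, jobs can also be launched one at a time in “singleshot” mode (after rapidfire there may be nothing left to run, so this is shown only as an alternative):
%% Cell type:code id:singleshot-alternative-run tags:
``` python
# Alternative (sketch): launch only the next ready Firework instead of all of them.
!rlaunch --silencer singleshot
```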
%% Cell type:markdown id:dedicated-commerce tags:
Clearly, both steps of our workflow ran in the correct order.
%% Cell type:markdown id:joined-antique tags:
Let’s again look at our workflows:
%% Cell type:code id:conditional-henry tags:
``` python
!lpad get_wflows -n test_workflow -d more
```
%% Cell type:markdown id:lucky-failing tags:
FireWorks automatically created launcher_ directories for each step in the Workflow and ran them. We see that both steps are complete. Note that there exist options to choose where to run jobs, as well as to tear down empty directories after running jobs.
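%% Cell type:markdown id:launcher-dirs tags:
As a quick check (not part of the original quickstart), the launcher directories created by rapidfire can be listed from the notebook:
%% Cell type:code id:launcher-dirs-list tags:
``` python
# Quick check: list the launcher_* directories created in the current working directory.
!ls -d launcher_*
```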
%% Cell type:markdown id:forward-client tags:
## Launch the web GUI
open: http://localhost:8888/fireworks
%% Cell type:markdown id:explicit-denial tags:
## Python code
The following Python code achieves the same behavior:
%% Cell type:code id:cutting-modification tags:
``` python
from fireworks import Firework, Workflow, LaunchPad, ScriptTask
from fireworks.core.rocket_launcher import rapidfire
# set up the LaunchPad and reset it
launchpad = LaunchPad(host='mongo')
launchpad.reset('', require_password=False)
# create the individual FireWorks and Workflow
fw1 = Firework(ScriptTask.from_str('echo "hello"'), name="hello")
fw2 = Firework(ScriptTask.from_str('echo "goodbye"'), name="goodbye")
wf = Workflow([fw1, fw2], {fw1:fw2}, name="test workflow")
# store workflow and launch it locally
launchpad.add_wf(wf)
rapidfire(launchpad)
```
%% Cell type:markdown id:allied-lesson tags:
In the code above, the `{fw1: fw2}` argument to Workflow makes fw2 depend on fw1, i.e. fw2 only runs after fw1 completes. You could instead define this dependency when defining your FireWorks:
%% Cell type:code id:interstate-intervention tags:
``` python
fw1 = Firework(ScriptTask.from_str('echo "hello"'), name="hello")
fw2 = Firework(ScriptTask.from_str('echo "goodbye"'), name="goodbye", parents=[fw1])
wf = Workflow([fw1, fw2], name="test workflow")
```
%% Cell type:code id:speaking-institution tags:
``` python
```
%% Cell type:code id:devoted-treat tags:
``` python
```
#!/bin/bash
#SBATCH --partition=debug
sleep 10
echo "done"
from abipy.abilab import Structure
from abiflows.fireworks.workflows.abinit_workflows import DfptFWWorkflow
from abiflows.database.mongoengine.utils import DatabaseData
from abiflows.database.mongoengine.abinit_results import RelaxResult
# data for the database where the relaxed structures were stored
source_db = DatabaseData(host='mongo', port=27017, collection='relax',
                         database='fireworks', username=None, password=None)
# data for the database where the phonon results will be stored.
# note that these can be in different databases or in the same.
# The collections should be different
db = DatabaseData(host='mongo', port=27017, collection='phonon',
                  database='fireworks', username=None, password=None)
# Open the connection to the database
source_db.connect_mongoengine()
# in case you are using multiple workers for the same fireworks db (i.e. different clusters or queues)
# it may be a good idea to set the worker explicitly. One can just get the name from the configuration:
# fworker = FWorker.from_file(os.path.join(os.getenv("HOME"), ".fireworks", "my_fworker.yaml"))
# or you can also just write the name of the fworker explicitly
#fworker_name = 'name_of_the_fworker'
mp_id = 'mp-149'
# This context manager is required to use the collection name selected in source_db
# By default mongoengine uses the name of the class (in this case RelaxResult) as
# name of the collection to query.
with source_db.switch_collection(RelaxResult) as RelaxResult:
    # download from the database the relaxed structure
    # This relies on mongoengine (http://mongoengine.org/) to interact with the database.
    # See the module abiflows.database.mongoengine.abinit_results for the objects used to store the results
    relaxed_results = RelaxResult.objects(mp_id=mp_id)
    # Assume that there is one and only one result matching the query. In real cases you might want to check this.
    # At this point it is an instance of a RelaxResult object
    relaxed = relaxed_results[0]

# load the relaxed Structure
structure = Structure.from_dict(relaxed.abinit_output.structure)
# use the same k-point sampling as the one of the relax
kppa = relaxed.abinit_input.kppa
ngkpt = relaxed.abinit_input.ngkpt
# The AbinitInput object used for the relax is stored in the database.
# We get it to use the same approximations used during the relaxation.
relax_input = relaxed.abinit_input.last_input.to_mgobj()
# We use the same k and q point grid
qppa = kppa
extra_abivars = dict(chkprim=1, nstep=100, chksymbreak=1, paral_kgb=1)
# As for the relax workflow, this is information that will be stored in the database for the calculation,
# in particular information about the source structure.
initialization_info = dict(kppa=kppa, mp_id=mp_id,
                           relax_db=source_db.as_dict_no_credentials(), relax_id=relaxed.id,
                           relax_tol_val=1e-6, qppa=qppa)
# In this case the base is the input file of the relax workflow.
# Use the DfptFWWorkflow, which allows calculating the different kinds of DFPT perturbations
# with abinit in a single workflow. In this case only the phonons.
gen = DfptFWWorkflow.from_gs_input(structure=structure, gs_input=relax_input, extra_abivars=extra_abivars, autoparal=True,
                                   initialization_info=initialization_info, do_ddk=True, do_dde=True, ph_ngqpt=[1, 1, 1],
                                   do_strain=False)
# add to the workflow a step that automatically adds the results to the database in the collection specified above.
gen.add_mongoengine_db_insertion(db)
# add a step to the workflow that cleans up files with these extensions once the other calculations are completed.
# The list of extensions is customizable; these are usually files that won't be needed again.
# Here we do not delete the DDB files.
gen.add_final_cleanup(["WFK", "1WF", "WFQ", "1POT", "1DEN"])
# This will specify that all the steps will be forced to be executed on the same worker
# and will set the worker to the one chosen before for the existing fireworks. This step is not mandatory.
#gen.fix_fworker(fworker_name)
# adds the workflow to the fireworks database. It will use the fireworks LaunchPad that has been set by default.
# If a different one should be used it can be passed as an argument.
gen.add_to_db()
from fireworks import FWorker
import os
import pseudo_dojo
from abiflows.fireworks.workflows.abinit_workflows import RelaxFWWorkflow
from abiflows.database.mongoengine.utils import DatabaseData
from pymatgen.ext.matproj import MPRester
from pymatgen.core.structure import Structure
from pymatgen.symmetry.analyzer import SpacegroupAnalyzer
import abipy.data as abidata
# use the pseudo dojo table of pseudopotentials. These are good pseudopotentials. If you want to use
# some other kind of pseudos you will need to provide the cutoff for the calculation explicitly
pseudo_table = pseudo_dojo.OfficialDojoTable.from_djson_file(
    os.path.join(pseudo_dojo.dojotable_absdir("ONCVPSP-PBE-PDv0.4"), 'standard.djson'))
pseudo_path = pseudo_dojo.dojotable_absdir("ONCVPSP-PBE-PDv0.4")
# connection data of the output MongoDB database
# it can be the same database used for fireworks with other collections or a different one
db = DatabaseData(host='mongo', port=27017, collection='relax',
database='fireworks', username=None, password=None)
# in case you are using multiple workers for the same fireworks db (i.e. different clusters or queues)
# it may be a good idea to set the worker explicitly. Here I just get the name:
#fworker = FWorker.from_file(os.path.join(os.getenv("HOME"), ".fireworks", "my_fworker.yaml"))
# Get the structure from the Materials Project. mp-149 is silicon.
mp_id = 'mp-149'
#structure = MPRester().get_structure_by_material_id(mp_id)
structure = abidata.structure_from_cif("si.cif")
spga = SpacegroupAnalyzer(structure)
structure = spga.get_primitive_standard_structure()
# check if the pseudo is available and just selects those that are needed for the specific structure
try:
    pseudos = pseudo_table.get_pseudos_for_structure(structure)
except BaseException as e:
    print("no pseudo")
    exit(1)
# density of k-points per reciprocal atom. set to 1500 for phonons.
kppa = 500
# this will be read at the end of the workflow to store this information in the database. It is not mandatory
initialization_info = dict(kppa=kppa, mp_id=mp_id)
# use a more strict tolmxf in case this might be needed, for example for phonon calculations.
tolmxf = 1e-5
#override some default parameters from the factory function
extra_abivars = dict(tolmxf=tolmxf, ionmov=2, chksymbreak=1, ntime=30, nstep=100)
# uncomment this if you want to try paral_kgb=1
#extra_abivars['paral_kgb'] = 1
# this will create a fireworks workflow object (still not added to fireworks database)
# check the function for the different options available.
# The OneSymmetric option will set a single shift that respects the symmetry of the crystal.
# The target_dilatmx means that the dilatmx parameter will be automatically progressively
# reduced and relaxation restarted until the desired value has been used.
gen = RelaxFWWorkflow.from_factory(structure, pseudo_table, kppa=kppa, spin_mode="unpolarized", extra_abivars=extra_abivars,
                                   autoparal=True, initialization_info=initialization_info, target_dilatmx=1.01,
                                   smearing=None, shift_mode='OneSymmetric', ecut=5)
# add to the workflow a step that automatically adds the results to the database in the collection specified above.
gen.add_mongoengine_db_insertion(db)
# add a step to the workflow that cleans up files with these extensions once the other calculations are completed.
# The list of extensions is customizable; these are usually files that won't be needed again
gen.add_final_cleanup(["WFK", "1WF", "DEN", "WFQ", "DDB"])
# This will specify that all the steps will be forced to be executed on the same worker
# and will set the worker to the one chosen before for the existing fireworks. This step is not mandatory.
#gen.fix_fworker(fworker.name)
# adds the workflow to the fireworks database. It will use the fireworks LaunchPad that has been set by default.
# If a different one should be used it can be passed as an argument.
fw_id_maps = gen.add_to_db()
print("{} submitted".format(mp_id))