Commit e910d86b authored by Markus Scheidgen

Merge branch 'v1.0.7' into 'master'

Merge for release

See merge request !621
parents e086fc91 b8d22908
Pipeline #131401 canceled with stages in 8 minutes and 23 seconds
@@ -85,9 +85,10 @@ python tests:
       alias: rabbitmq
     - name: docker.elastic.co/elasticsearch/elasticsearch:7.17.1
       alias: elastic
-      # fix issue with running elastic in gitlab ci runner:
-      # https://gitlab.com/gitlab-org/gitlab-ce/issues/42214
-      command: [ "bin/elasticsearch", "-Ediscovery.type=single-node" ]
+      command:
+        - bash
+        - "-c"
+        - ES_JAVA_OPTS="-Xms512m -Xmx512m" docker-entrypoint.sh elasticsearch -Ediscovery.type=single-node -Expack.security.enabled=false
     - name: mongo:5.0.6
       alias: mongo
   variables:
@@ -101,6 +102,8 @@ python tests:
     NOMAD_KEYCLOAK_PASSWORD: ${CI_KEYCLOAK_ADMIN_PASSWORD}
     NOMAD_NORMALIZE_SPRINGER_DB_PATH: /nomad/fairdi/db/data/springer.msg
   script:
+    - sleep 5
+    - curl http://elastic:9200/_cat/health
     - cd /app
     - ls /builds
     - python -m pytest --cov=nomad -sv tests
...
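The two added script lines wait five seconds and then hit the Elasticsearch health endpoint before the tests run, since the service started through the custom bash -c command needs a moment to come up. A minimal sketch of a polling readiness check that could replace the fixed sleep (the elastic hostname comes from the service alias above; the helper itself is illustrative, not part of this merge):

import time
import urllib.error
import urllib.request

def wait_for_elastic(url='http://elastic:9200/_cat/health', timeout=60):
    # Poll the health endpoint until Elasticsearch answers or the timeout expires.
    deadline = time.time() + timeout
    while time.time() < deadline:
        try:
            with urllib.request.urlopen(url) as response:
                return response.read().decode()
        except (urllib.error.URLError, ConnectionResetError):
            time.sleep(1)
    raise RuntimeError('Elasticsearch did not become reachable in time')

print(wait_for_elastic())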
-Subproject commit 958d4f2ef91563791ebfe6b329fae5615d992024
+Subproject commit 7a57120d6b4845f471577af75ad3e205ee503ea5
-Subproject commit 122d8347e79cd4e77adfcc6db4962b10b6075801
+Subproject commit eee9bd051c3c52e3560cdb8092f79488dee05009
-Subproject commit d9d499b873b1bd1847a13376acc48a928661036a
+Subproject commit 4050431cce67d80793e548e7f79e44de7e95ecb8
@@ -103,6 +103,7 @@ services:
     image: docker.elastic.co/elasticsearch/elasticsearch:7.17.1
     container_name: nomad_oasis_elastic
     environment:
+      - ES_JAVA_OPTS=-Xms512m -Xmx512m
       - discovery.type=single-node
     volumes:
       - nomad_oasis_elastic:/usr/share/elasticsearch/data
...
 {
   "name": "nomad-fair-gui",
-  "version": "1.0.6",
+  "version": "1.0.7",
   "commit": "e98694e",
   "private": true,
   "workspaces": [
...
@@ -11,7 +11,7 @@ window.nomadEnv = {
   'encyclopediaBase': 'https://nomad-lab.eu/prod/rae/encyclopedia/#',
   'debug': false,
   'version': {
-    'label': '1.0.6',
+    'label': '1.0.7',
     'isBeta': false,
     'isTest': true,
     'usesBetaData': true,
...
@@ -11,7 +11,7 @@ global.nomadEnv = {
   'appBase': 'http://nomad-lab.eu/prod/rae/beta',
   'debug': false,
   'version': {
-    'label': '1.0.6',
+    'label': '1.0.7',
     'isBeta': false,
     'isTest': true,
     'usesBetaData': true,
...
@@ -68,6 +68,9 @@ app.mount(app_base, WSGIMiddleware(flask_app))
 @app.on_event('startup')
 async def startup_event():
+    from nomad.parsing.parsers import import_all_parsers
+    import_all_parsers()
+
     from nomad import infrastructure
     # each subprocess is supposed disconnect connect again: https://jira.mongodb.org/browse/PYTHON-2090
     try:
...
@@ -77,8 +77,8 @@ def _all_metainfo_packages():
     # TODO similar to before, due to lazyloading, we need to explicily access parsers
     # to actually import all parsers and indirectly all metainfo packages
-    from nomad.parsing import parsers
-    parsers.parsers
+    from nomad.parsing.parsers import import_all_parsers
+    import_all_parsers()

     # Create the ES mapping to populate ES annoations with search keys.
     from nomad.search import entry_type
...
@@ -311,7 +311,7 @@ datacite = NomadConfig(
 )

 meta = NomadConfig(
-    version='1.0.6',
+    version='1.0.7',
     commit=gitinfo.commit,
     deployment='devel',
     label=None,
...
@@ -551,7 +551,7 @@ class EntryMetadata(metainfo.MSection):
         a_elasticsearch=Elasticsearch())

     external_db = metainfo.Quantity(
-        type=metainfo.MEnum('EELS Data Base', 'Materials Project', 'AFLOW', 'OQMD'),
+        type=metainfo.MEnum('EELS Data Base', 'Materials Project', 'AFLOW', 'OQMD', 'Kyoto Phonopy Database'),
         categories=[MongoUploadMetadata, EditableUserMetadata],
         description='The repository or external database where the original data resides',
         a_elasticsearch=Elasticsearch(material_entry_type))
...
@@ -310,7 +310,7 @@ class Energy(MSection):
         energy calculated with the functional stored in XC_functional.
         ''')

-    # TODO Could potential be generalized for other energy types
+    # TODO Remove this should use xc.potential
     xc_potential = SubSection(
         sub_section=EnergyEntry.m_def,
         description='''
@@ -335,6 +335,7 @@
         energy.
         ''')

+    # TODO remove this or electrostatic
     coulomb = SubSection(
         sub_section=EnergyEntry.m_def,
         description='''
@@ -528,6 +529,38 @@ class Forces(MSection):
         corresponding to the minus gradient of energy_T0.
         ''')

+    enthalpy = Quantity(
+        type=np.dtype(np.float64),
+        shape=[],
+        unit='joule',
+        description='''
+        Value of the calculated enthalpy per cell i.e. energy_total + pressure * volume.
+        ''')
+
+    entropy = Quantity(
+        type=np.dtype(np.float64),
+        shape=[],
+        unit='joule / kelvin',
+        description='''
+        Value of the entropy.
+        ''')
+
+    chemical_potential = Quantity(
+        type=np.dtype(np.float64),
+        shape=[],
+        unit='joule',
+        description='''
+        Value of the chemical potential.
+        ''')
+
+    internal = Quantity(
+        type=np.dtype(np.float64),
+        shape=[],
+        unit='joule',
+        description='''
+        Value of the internal energy.
+        ''')
+
     contributions = SubSection(
         sub_section=ForcesEntry.m_def,
         description='''
@@ -1129,6 +1162,7 @@ class Multipoles(MSection):
     higher_order = SubSection(sub_section=MultipolesEntry.m_def, repeats=True)


+# TODO remove this section
 class Thermodynamics(MSection):
     '''
     Section containing results related to a thermodynamics calculation.
@@ -1564,6 +1598,37 @@ class BaseCalculation(MSection):
         contamination in spin-unrestricted calculations.
         ''')

+    pressure = Quantity(
+        type=np.dtype(np.float64),
+        shape=[],
+        unit='pascal',
+        description='''
+        Value of the pressure of the system.
+        ''')
+
+    temperature = Quantity(
+        type=np.dtype(np.float64),
+        shape=[],
+        unit='kelvin',
+        description='''
+        Value of the temperature of the system at which the properties are calculated.
+        ''')
+
+    time_physical = Quantity(
+        type=np.dtype(np.int32),
+        shape=[],
+        unit='second',
+        description='''
+        The elapsed time with respect to the start of the simulation.
+        ''')
+
+    time_step = Quantity(
+        type=np.dtype(np.int32),
+        shape=[],
+        description='''
+        The number of time steps with respect to the start of the simulation.
+        ''')
+
     energy = SubSection(sub_section=Energy.m_def, categories=[FastAccess])

     forces = SubSection(sub_section=Forces.m_def)
...
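The hunks above add per-step state quantities (pressure, temperature, time_physical, time_step) to BaseCalculation, along with enthalpy, entropy, chemical_potential and internal energy quantities and TODO markers for a later clean-up. A minimal sketch of how a parser could fill the new BaseCalculation quantities, assuming the NOMAD v1 module layout (nomad.datamodel.metainfo.simulation.*) and pint units from nomad.units; illustrative only, not part of this merge:

from nomad.units import ureg
from nomad.datamodel import EntryArchive
from nomad.datamodel.metainfo.simulation.run import Run
from nomad.datamodel.metainfo.simulation.calculation import Calculation

archive = EntryArchive()
run = archive.m_create(Run)
calc = run.m_create(Calculation)

# The new BaseCalculation quantities; pint quantities are converted to the declared units.
calc.pressure = 1.0e5 * ureg.pascal
calc.temperature = 300.0 * ureg.kelvin
calc.time_physical = 500 * ureg.second
calc.time_step = 250  # plain step counter, no unit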
@@ -232,7 +232,7 @@ class MoleculeParameters(MSection):
         Number of atoms in the molecule.
         ''')

-    atoms = SubSection(sub_section=AtomParameters.m_def, repeats=True)
+    atom_parameters = SubSection(sub_section=AtomParameters.m_def, repeats=True)


 class GaussianBasisGroup(MSection):
@@ -432,8 +432,7 @@ class Interaction(MSection):
         type=str,
         shape=[],
         description='''
-        Denotes the classification of the potential. Could be one of pair, LJ, Morse, EAM,
-        MEAM.
+        Denotes the classification of the potential.
         ''')

     name = Quantity(
@@ -444,16 +443,24 @@ class Interaction(MSection):
         cut-offs, potential versions.
         ''')

+    n_atoms = Quantity(
+        type=np.dtype(np.int32),
+        shape=[],
+        description='''
+        Number of atoms included in the interaction
+        '''
+    )
+
     atom_labels = Quantity(
         type=str,
-        shape=[2],
+        shape=['n_atoms'],
         description='''
         Labels of the atoms described by the interaction.
         ''')

     atom_indices = Quantity(
         type=np.dtype(np.int32),
-        shape=[2],
+        shape=['n_atoms'],
         description='''
         Indices of the atoms in the system described by the interaction.
         ''')
...
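With n_atoms added and the fixed shape [2] replaced by ['n_atoms'], an Interaction is no longer limited to pairwise terms. A short sketch under the assumption that the class lives at nomad.datamodel.metainfo.simulation.method (as in NOMAD v1); illustrative only:

from nomad.datamodel.metainfo.simulation.method import Interaction

# A three-body (angle) term, which the old fixed shape [2] could not express.
angle = Interaction(type='angle', name='harmonic')
angle.n_atoms = 3
angle.atom_labels = ['H', 'O', 'H']
angle.atom_indices = [0, 1, 2]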
@@ -399,7 +399,7 @@ class Constraint(MSection):
         Number of atoms involved in this constraint.
         ''')

-    indices = Quantity(
+    atom_indices = Quantity(
         type=np.dtype(np.int32),
         shape=['n_constraints', 'n_atoms'],
         description='''
...
@@ -308,6 +308,7 @@ class Keycloak():
         kwargs = {key: value[0] for key, value in keycloak_user.get('attributes', {}).items()}
         oasis_admin = kwargs.pop('is_oasis_admin', None) is not None
         return datamodel.User(
+            m_ignore_additional_keys=True,
             user_id=keycloak_user['id'],
             email=keycloak_user.get('email'),
             username=keycloak_user.get('username'),
...
@@ -1292,30 +1292,28 @@ class MSection(metaclass=MObjectMeta): # TODO find a way to make this a subclas
         return cast(MSectionBound, sub_section)

-    def m_update(self, safe: bool = True, **kwargs):
+    def m_update(self, m_ignore_additional_keys: bool = False, **kwargs):
         ''' Updates all quantities and sub-sections with the given arguments. '''
         self.m_mod_count += 1

-        if safe:
-            for name, value in kwargs.items():
-                prop = self.m_def.all_aliases.get(name, None)
-                if prop is None:
-                    raise KeyError('%s is not an attribute of this section %s' % (name, self))
-
-                if isinstance(prop, SubSection):
-                    if prop.repeats:
-                        if isinstance(value, List):
-                            for item in value:
-                                self.m_add_sub_section(prop, item)
-                        else:
-                            raise TypeError('Sub section %s repeats, but no list was given' % prop.name)
-                    else:
-                        self.m_add_sub_section(prop, value)
+        for name, value in kwargs.items():
+            prop = self.m_def.all_aliases.get(name, None)
+            if prop is None:
+                if m_ignore_additional_keys:
+                    continue
+                raise KeyError('%s is not an attribute of this section %s' % (name, self))
+
+            if isinstance(prop, SubSection):
+                if prop.repeats:
+                    if isinstance(value, List):
+                        for item in value:
+                            self.m_add_sub_section(prop, item)
+                    else:
+                        raise TypeError('Sub section %s repeats, but no list was given' % prop.name)
                 else:
-                    self.m_set(prop, value)
-        else:
-            self.__dict__.update(**kwargs)
+                    self.m_add_sub_section(prop, value)
+            else:
+                self.m_set(prop, value)

     def m_as(self, section_cls: Type[MSectionBound]) -> MSectionBound:
         ''' 'Casts' this section to the given extending sections. '''
...
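The m_update change drops the old safe=False escape hatch (which wrote straight into __dict__) in favour of an m_ignore_additional_keys flag that skips unknown keys; the Keycloak hunk above relies on this when Keycloak returns user attributes that are not quantities of the User section. A minimal sketch of the new behaviour with a toy section (only m_update and m_ignore_additional_keys come from this merge; ToyUser is hypothetical):

from nomad.metainfo import MSection, Quantity

class ToyUser(MSection):  # hypothetical section for illustration
    user_id = Quantity(type=str)
    email = Quantity(type=str)

user = ToyUser()
user.m_update(m_ignore_additional_keys=True, user_id='42', unknown_attribute='x')
assert user.user_id == '42'  # known keys are set as before, unknown ones are skipped

try:
    user.m_update(user_id='43', unknown_attribute='x')
except KeyError:
    pass  # without the flag, unknown keys still raise KeyError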
@@ -237,18 +237,27 @@ class MatchingParserInterface(MatchingParser):
     def mainfile_parser(self):
         if self._mainfile_parser is None:
             try:
-                module_path, parser_class = self._parser_class_name.rsplit('.', 1)
-                module = importlib.import_module(module_path)
-                self._mainfile_parser = getattr(module, parser_class)()
+                Parser = self.import_parser_class()
+                self._mainfile_parser = Parser()
             except Exception as e:
                 logger = utils.get_logger(__name__)
-                logger.error('Error importing parser.', exc_info=e)
+                logger.error('cannot instantiate parser.', exc_info=e)
                 raise e

         return self._mainfile_parser

     def parse(self, mainfile: str, archive: EntryArchive, logger=None):
         self.mainfile_parser.parse(mainfile, archive, logger)

+    def import_parser_class(self):
+        try:
+            module_path, parser_class = self._parser_class_name.rsplit('.', 1)
+            module = importlib.import_module(module_path)
+            return getattr(module, parser_class)
+        except Exception as e:
+            logger = utils.get_logger(__name__)
+            logger.error('cannot import parser', exc_info=e)
+            raise e
+

 class ArchiveParser(MatchingParser):
     def __init__(self):
...
@@ -545,3 +545,13 @@ for parser in parsers:
 code_names = sorted(set(code_names), key=lambda code_name: code_name.lower())
 results.Simulation.program_name.a_elasticsearch[0].values = code_names + [
     config.services.unavailable_value, config.services.not_processed_value]
+
+
+def import_all_parsers():
+    '''
+    Imports all the parsers. This will instantiate all parser metainfo as a side
+    effect.
+    '''
+    for parser in parsers:
+        if isinstance(parser, MatchingParserInterface):
+            parser.import_parser_class()
@@ -48,11 +48,13 @@ services:
     image: docker.elastic.co/elasticsearch/elasticsearch:7.17.1
     container_name: nomad_elastic
     environment:
+      - ES_JAVA_OPTS=-Xms512m -Xmx512m
       - cluster.routing.allocation.disk.threshold_enabled=true
       - cluster.routing.allocation.disk.watermark.flood_stage=1gb
       - cluster.routing.allocation.disk.watermark.low=4gb
       - cluster.routing.allocation.disk.watermark.high=2gb
       - discovery.type=single-node
+      - xpack.security.enabled=false
     ports:
       - 9200:9200
     volumes:
...
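Since the development compose file now disables X-Pack security and caps the JVM heap, the cluster answers unauthenticated requests on the published port. A quick, illustrative check from Python (localhost:9200 follows from the ports mapping above):

import json
import urllib.request

# No credentials are needed because xpack.security.enabled=false.
with urllib.request.urlopen('http://localhost:9200') as response:
    info = json.load(response)

print(info['cluster_name'])
print(info['version']['number'])  # expected to report 7.17.1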