Skip to content
Snippets Groups Projects
Commit c90ad638 authored by Lauri Himanen's avatar Lauri Himanen
Browse files

Added the inclusion of onOpen triggers from a CommonMatcher, updated the...

Added the inclusion of onOpen triggers from a CommonMatcher, updated the Archive object to support local deletion of values and sections.
parent 167b3605
Branches
Tags
No related merge requests found
......@@ -59,8 +59,16 @@ class ArchiveSection(metaclass=ABCMeta):
"""
parts, _, _ = self.get_path_parts(path)
current_data = self
current_path = self._path
n_parts = len(parts)
for i_part, part in enumerate(parts):
# Check that the section has not been deleted
current_path = "{}/{}".format(current_path, part)
deleted = self.check_deletions(current_path)
if deleted:
raise KeyError("Value for '{}' could not be found".format(current_path))
current_data = current_data.get_child(part)
if i_part == n_parts - 1:
return current_data
......@@ -77,7 +85,13 @@ class ArchiveSection(metaclass=ABCMeta):
The value for the key if found, otherwise returns the given default
value.
"""
# First check cache
# Check that the value has not been deleted
full_path = "{}/{}".format(self._path, key)
deleted = self.check_deletions(full_path)
if deleted:
return default
# Check cache
try:
value = self.get_from_cache(key)
except KeyError:
......@@ -118,15 +132,18 @@ class ArchiveSection(metaclass=ABCMeta):
dictionary that is separate from the original data.
"""
if self._archive.use_write_cache:
path = "{}/{}".format(self._path, path)
# If this key had been deleted in the local cache, remove that mark.
full_path = "{}/{}".format(self._path, path)
self._archive._deletions.pop(full_path, None)
# If the value is not already a numpy array, make it so
if not isinstance(value, np.ndarray):
if isinstance(value, (list, tuple)):
value = np.array(value)
else:
value = np.array((value))
self._archive._overrides[path] = value
value = np.array((value,))
self._archive._overrides[full_path] = value
else:
raise ValueError(
"Writing to the source file is currently disabled. If you want "
......@@ -134,6 +151,24 @@ class ArchiveSection(metaclass=ABCMeta):
" of this Archive to True."
)
def __delitem__(self, key):
    """Mark a direct child of this section as deleted in the local cache.

    The deletion is recorded only in the in-memory write cache of the
    owning Archive; the original source file is never modified.

    Args:
        key (string): Name of the direct child to delete.

    Raises:
        ValueError: If the Archive's write cache is disabled.
        KeyError: If no value has been set for the given key.
    """
    archive = self._archive
    # Deletions are only allowed when the local write cache is active.
    if not archive.use_write_cache:
        raise ValueError(
            "Deleting from the source file is currently disabled. If you "
            "want to enable a local write cache, set the 'use_write_cache' "
            "attribute of this Archive to True."
        )
    # Only existing values can be deleted.
    if key not in self:
        raise KeyError(
            "A value for the given key '{}' has not been set, so it "
            "could not be deleted."
            .format(key)
        )
    # Record the deletion under the full path of the child.
    full_path = "{}/{}".format(self._path, key)
    archive._deletions[full_path] = True
def __getitem__(self, key):
"""Used to get a direct child of this section by name.
......@@ -143,6 +178,11 @@ class ArchiveSection(metaclass=ABCMeta):
Returns:
ArchiveSection or a concrete value.
"""
# Check that the value has not been deleted
full_path = "{}/{}".format(self._path, key)
deleted = self.check_deletions(full_path)
if deleted:
raise KeyError("Value for '{}' has not been set.".format(full_path))
try:
value = self.get_from_cache(key)
except KeyError:
......@@ -219,6 +259,21 @@ class ArchiveSection(metaclass=ABCMeta):
value = self._archive._overrides[key]
return value
def check_deletions(self, full_path):
    """Check whether a path has been marked as deleted in the local write
    cache of the Archive object to which this section belongs.

    Args:
        full_path (string): The full path to check, as stored in the
            Archive's '_deletions' dictionary.

    Returns:
        bool: True if the path has been locally deleted, False otherwise.
    """
    # Only an explicit True marker counts as a deletion.
    return self._archive._deletions.get(full_path) is True
def get_path_parts(self, path):
"""Used to separate the given path into sections, and the section to
name and an index if specified.
......@@ -283,11 +338,14 @@ class Archive(object):
filepath (string): Path to the archive file
repositories (dict): Contains all the repositories within this Archive.
calculations (dict): Contains all the calculations within this Archive.
use_write_cache (boolean): Whether to enable writing to a cache that
will not persist to the original source file.
_overrides (dictionary): Contains the values that are set during the
lifetime of this object. These values will not persists on the
original file.
use_write_cache (boolean): Whether to enable writing to a cache that
will not persist to the original source file.
_deletions (dictionary): Contains the keys that are deleted during the
lifetime of this object. These deletions will not be made to the
original file.
"""
def __init__(self, filepath, use_write_cache=False):
"""
......@@ -301,6 +359,7 @@ class Archive(object):
self.calculations = {}
self.use_write_cache = use_write_cache
self._overrides = {}
self._deletions = {}
@staticmethod
def factory(archive_path, use_write_cache=False):
......@@ -355,21 +414,17 @@ class ArchiveHDF5(Archive):
calculations.
"""
for repo_name, repo in root.items():
self.repositories[repo_name] = ArchiveSectionHDF5(
repo,
"{}".format(repo_name),
self,
[[0]],
0
)
self.repositories[repo_name] = {}
for calc_name, calc in repo.items():
self.calculations[calc_name] = ArchiveSectionHDF5(
calc_section = self.calculations[calc_name] = ArchiveSectionHDF5(
calc,
"{}/{}".format(repo_name, calc_name),
self,
[[0]],
0
)
self.calculations[calc_name] = calc_section
self.repositories[repo_name][calc_name] = calc_section
class ArchiveSectionHDF5(ArchiveSection):
......@@ -382,7 +437,7 @@ class ArchiveSectionHDF5(ArchiveSection):
super().__init__(data, path, archive)
_, names, indices = self.get_path_parts(path)
# Here we drop out the indices of the repository and calculation
# section, as they are "Nonei"
# section, as they are "None"
self._indices = indices[2:]
self._names = names
self._index_datas = index_datas
......@@ -556,9 +611,14 @@ class ArchiveSectionHDF5(ArchiveSection):
group_list = []
section = self._data[name]
for candidate in candidates:
# Only add the sections that have not been deleted
full_path = "{}/{}:{}".format(self._path, name, candidate)
deleted = self.check_deletions(full_path)
if not deleted:
group = ArchiveSectionHDF5(
section,
"{}/{}:{}".format(self._path, name, candidate),
full_path,
self._archive,
index_datas,
candidate
......@@ -665,17 +725,15 @@ class ArchiveJSON(Archive):
def setup(self, root):
    """Populate the repository and calculation lookups of a JSON archive.

    NOTE(review): the scraped diff interleaves the removed and the added
    lines of this method; this body is the reconstructed post-image.

    Args:
        root: Mapping of repository name -> repository contents, where
            each repository maps calculation names to raw calculation
            data.
    """
    for repo_name, repo in root.items():
        # Repositories are plain dictionaries mapping calculation names
        # to their sections.
        self.repositories[repo_name] = {}
        for calc_name, calc in repo.items():
            calc_section = ArchiveSectionJSON(
                calc,
                "{}/{}".format(repo_name, calc_name),
                self
            )
            # Each calculation is reachable both directly and through its
            # enclosing repository.
            self.calculations[calc_name] = calc_section
            self.repositories[repo_name][calc_name] = calc_section
class ArchiveSectionJSON(ArchiveSection):
......@@ -784,7 +842,15 @@ class ArchiveSectionJSON(ArchiveSection):
# index in the section name directly (only specific to JSON)
except KeyError:
sections = get_root_sections(name)
data = [ArchiveSectionJSON(sec, child_path, self._archive) for index, sec in enumerate(sections)]
data = []
# Only add the sections that have not been deleted
for i_section, sec in enumerate(sections):
full_path = "{}:{}".format(child_path, i_section)
deleted = self.check_deletions(full_path)
if not deleted:
data.append(ArchiveSectionJSON(sec, full_path, self._archive))
# If index specified, try to get the specific section.
elif is_section:
......
......@@ -6,7 +6,6 @@ from builtins import str
from builtins import object
import os
import sys
import copy
import numpy as np
import logging
......@@ -414,6 +413,7 @@ class MainHierarchicalParser(AbstractBaseParser):
cachingLevelForMetaName=self.caching_levels,
superContext=self,
onClose=self.on_close,
onOpen=self.on_open,
default_units=self.parser_context.default_units,
metainfo_units=self.parser_context.metainfo_units,
superBackend=self.parser_context.super_backend,
......@@ -446,6 +446,7 @@ class MainHierarchicalParser(AbstractBaseParser):
"""
self.cm = common_matcher
self.on_close.update(common_matcher.getOnCloseTriggers())
self.on_open.update(common_matcher.getOnOpenTriggers())
self.caching_levels.update(common_matcher.caching_levels)
......@@ -477,6 +478,17 @@ class CommonParser(object):
onClose[attr] = [callback]
return onClose
def getOnOpenTriggers(self):
    """Collect the onOpen trigger callbacks defined on this object.

    Returns:
        A dictionary containing a section name as a key, and a list of
        trigger functions associated with opening that section.
    """
    # Each trigger is wrapped in a single-element list, matching the
    # format expected by the parser for onOpen callbacks.
    return {
        name: [trigger]
        for name, trigger in extractOnOpenTriggers(self).items()
    }
#===============================================================================
class ParserContext(object):
......
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Please register or to comment