Compare commits
No commits in common. "develop" and "main" have entirely different histories.
@@ -13,12 +13,11 @@ from diskcache import Cache
 
 import kheops.plugin as KheopsPlugins
 from kheops.controllers import QueryProcessor
-from kheops.utils import schema_validate, dict_hash
+from kheops.utils import schema_validate
 
 
 log = logging.getLogger(__name__)
 
-CACHE_CONFIG_EXPIRE = 15
 CONF_SCHEMA = {
     "$schema": "http://json-schema.org/draft-07/schema#",
     "type": "object",
@@ -45,6 +44,7 @@ CONF_SCHEMA = {
             "default": [],
             # "arrayItem": { "$ref": "#/$defs/rules_items" },
         },
+
         "config": {
             "type": "object",
             "default": {},
@@ -108,7 +108,6 @@ class KheopsNamespace(GenericInstance, QueryProcessor):
     Kheops Namespace Class
 
     """
-
     def __init__(self, app, name, config=None):
         """
         Kheops Namespace Instance
@@ -123,22 +122,14 @@ class KheopsNamespace(GenericInstance, QueryProcessor):
         :type config: Any
         """
 
-        # Init object
+        config = schema_validate(config, CONF_SCHEMA)
+        super().__init__(config)
+
         self.name = name
         self.app = app
         self.run = dict(app.run)
-        self.cache = app.cache
-
-        # Init config (from cache)
-        config_hash = "conf_ns_" + dict_hash(config)
-        try:
-            config = self.cache[config_hash]
-            log.debug("Loading namespace '%s' configuration from cache", self.name)
-        except KeyError:
-            config = schema_validate(config, CONF_SCHEMA)
-            self.cache.set(config_hash, config, expire=CACHE_CONFIG_EXPIRE)
-        super().__init__(config)
 
+        # Validate configuration
         self.run["path_ns"] = str(Path(app.run["config_src"]).parent.resolve())
 
 
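Note: the caching variant (the "-" side of this hunk) memoizes the validated namespace configuration in the shared diskcache store, keyed by a hash of the raw config dict. A minimal sketch of that read-through pattern, using only the calls visible in the diff; the cached_validate helper, the validate callback, and the example constant usage are illustrative, not part of the code base:

    from diskcache import Cache

    from kheops.utils import dict_hash  # only present on the caching side

    CACHE_CONFIG_EXPIRE = 15  # seconds, as defined in the hunk above

    def cached_validate(cache: Cache, raw_config: dict, validate):
        """Validate raw_config once, then reuse the result until the entry expires."""
        key = "conf_ns_" + dict_hash(raw_config)      # stable key for this exact dict
        try:
            return cache[key]                         # hit: skip re-validation
        except KeyError:
            config = validate(raw_config)             # miss: validate once ...
            cache.set(key, config, expire=CACHE_CONFIG_EXPIRE)  # ... and keep it briefly
            return config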
@@ -148,7 +139,7 @@ class Kheops(GenericInstance):
 
     """
 
-    def __init__(self, config="kheops.yml", namespace="default", cache=None):
+    def __init__(self, config="kheops.yml", namespace="default"):
         """
         Kheops Application Instance
 
@@ -175,17 +166,8 @@ class Kheops(GenericInstance):
 
         self.ns_name = namespace
         self.namespaces = {}
-
-        self.cache = cache or Cache("/tmp/kheops_cache/")
         self.raw_config = self.parse_conf(config)
-
-        # needle = 'conf_app_' + dict_hash(config)
-        # try:
-        #     self.raw_config = self.cache[needle]
-        # except KeyError:
-        #     self.raw_config = self.parse_conf(config)
-        #     self.cache.set(needle, config, expire=CACHE_CONFIG_EXPIRE)
 
     def parse_conf(self, config="kheops.yml"):
         """
         Parse Kheops configuration
@@ -208,13 +190,12 @@ class Kheops(GenericInstance):
         elif isinstance(config, dict):
             dict_conf = config
             source = "dict"
-
-        self.run["conf_source"] = source
         return dict_conf
 
     def lookup(
         self,
         keys=None,
+        policy=None,
         scope=None,
         trace=False,
         explain=False,
@@ -235,14 +216,11 @@ class Kheops(GenericInstance):
         :type scope: dict
         """
 
-        # Loop over keys
         ret = {}
+        # Loop over keys
         for key_def in keys:
 
             key_def = key_def or ""
-            assert isinstance(
-                key_def, str
-            ), f"Expected string as key, got {type(key_def)}: {key_def}"
 
             # Identify namespace and key
             parts = key_def.split("/")
@@ -269,7 +247,7 @@ class Kheops(GenericInstance):
 
             # Prepare output
             _key = key_name
-            if namespace_prefix is True:
+            if namespace_prefix == True:
                 _key = key_def
             ret[_key] = result
 
@@ -281,12 +259,15 @@ class Kheops(GenericInstance):
             # log.debug("Return '%s' result", key_name)
             # return result
 
-        if explain:
-            # This is never a really good idea to show direct data ...
-            log.debug("Returned result: %s", ret)
         return ret
 
 
+
+
+
+
+
    # To clean/implement

    # def DEPRECATED_dump_schema(self):
@@ -13,18 +13,17 @@ from pathlib import Path
 from prettytable import PrettyTable
 
 import kheops.plugin as KheopsPlugins
-from kheops.utils import render_template_python, str_ellipsis, dict_hash
+from kheops.utils import render_template_python, str_ellipsis
 
 
 log = logging.getLogger(__name__)
 tracer = logging.getLogger(f"{__name__}.explain")
-CACHE_QUERY_EXPIRE = 10
 
 
 # Helper classes
 # ------------------------
 
 
 class LoadPlugin:
     """Kheops plugins loader
 
@@ -61,8 +60,7 @@ class LoadPlugin:
         # Return plugin Classe
         return plugin_cls.Plugin
 
-
-class BackendCandidate:
+class BackendCandidate():
     """Backend Candidate
 
     This object represents a backend candidate. It holds the value of the
@@ -99,7 +97,6 @@ class Query:
 # Query Processor class
 # ------------------------
 
-
 class QueryProcessor:
     """QueryProcessor
 
@@ -137,20 +134,15 @@ class QueryProcessor:
     # ------------------------
 
     def query(self, key=None, scope=None, explain=False):
-        """Query key with scope"""
+        """Query key with scope
 
-        # Look into cache
-        query_hash = dict_hash([self.name, key, scope])
-        if query_hash in self.cache:
-            log.debug("Result fetched from cache")
-            self.cache.touch(query_hash, expire=CACHE_QUERY_EXPIRE)
-            return self.cache[query_hash]
+        """
 
         if explain:
             tracer.setLevel(logging.DEBUG)
 
         query = Query(key, scope)
-        log.info("Creating new query for namespace '%s': %s", self.name, query.__dict__)
+        log.info("Creating new query: %s", query.__dict__)
 
         # Match the KeyRule in keys (RULE CACHE)
         # Get the matching keys
@@ -168,7 +160,7 @@ class QueryProcessor:
 
         # Generate explain report
         if explain:
-            self._explain_lookups(parsed_lookups, query)
+            self._explain_lookups(parsed_lookups)
 
         # Fetch the module
         # Retrieve the module instance
@@ -193,9 +185,9 @@ class QueryProcessor:
         # TODO: Apply output plugins
         # result = self._exec_output_plugins(result)
 
-        self.cache.set(query_hash, result, expire=CACHE_QUERY_EXPIRE)
        return result
 
 
    # Query parts methods
    # ------------------------
+
@@ -218,6 +210,7 @@ class QueryProcessor:
 
         return rule
 
+
    def _exec_assemble_lookups(self, lookups, query):
 
        assert isinstance(lookups, list)
@@ -257,9 +250,7 @@ class QueryProcessor:
            plugin_name = plugin_def.get("module", None)
 
            if plugin_name:
-                plugin = self.plugin_loader.load("scope", plugin_name)(
-                    namespace=self
-                )
+                plugin = self.plugin_loader.load("scope", plugin_name)(namespace=self)
                ret = plugin.process_items(ret, plugin_def)
 
            new_lookups2.extend(ret)
@@ -275,12 +266,11 @@ class QueryProcessor:
                    lookup["path"] = new_path
                    new_lookups3.append(lookup)
                else:
-                    log.warning(
-                        "Ignore lookup item because of missing scope vars: '%s'", path
-                    )
+                    log.warning("Ignore lookup item because of missing scope vars: '%s'", path)
 
        return new_lookups3
 
+
    def _exec_backend_plugins(self, lookups, selector="matched"):
        selector = "matched"
        assert selector in ["last", "first", "all", "matched"]
@@ -314,7 +304,7 @@ class QueryProcessor:
    # Explain methods
    # ------------------------
 
-    def _explain_lookups(self, parsed_lookups, query):
+    def _explain_lookups(self, parsed_lookups):
        """Explain list of lookups"""
 
        table = PrettyTable()
@@ -335,7 +325,7 @@ class QueryProcessor:
            )
        table.field_names = ["Config", "Runtime"]
        table.align = "l"
-        tracer.info("Explain lookups: %s/%s\n%s", self.name, query.key, str(table))
+        tracer.info("Explain lookups:\n%s", str(table))
 
    def _explain_candidates(self, candidates, query):
        """Explain list of candidates"""
@@ -371,4 +361,4 @@ class QueryProcessor:
 
        table.field_names = ["Status", "Runtime", "Key Value"]
        table.align = "l"
-        tracer.info("Explain candidates: %s/%s\n%s", self.name, query.key, str(table))
+        tracer.info("Explain candidates:\n%s", str(table))
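Note: on the caching side, QueryProcessor.query() applies the same memoization to whole query results, refreshing an entry's TTL on every hit via touch(). A rough sketch of that flow; the cached_query helper and the run_query callback are illustrative stand-ins, only the diskcache and dict_hash calls come from the diff itself:

    from diskcache import Cache

    from kheops.utils import dict_hash

    CACHE_QUERY_EXPIRE = 10  # seconds, as in the removed constant

    def cached_query(cache: Cache, name, key, scope, run_query):
        """Return a memoized query result, keeping hot entries alive."""
        query_hash = dict_hash([name, key, scope])   # one entry per (namespace, key, scope)
        if query_hash in cache:
            cache.touch(query_hash, expire=CACHE_QUERY_EXPIRE)  # refresh TTL on hit
            return cache[query_hash]
        result = run_query(key, scope)               # stand-in for the real lookup work
        cache.set(query_hash, result, expire=CACHE_QUERY_EXPIRE)
        return result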
@@ -2,7 +2,6 @@
 
 import os
 import logging
 
 # from pprint import pprint
-
 import anyconfig
@@ -10,8 +9,6 @@ from anyconfig.common.errors import BaseError as AnyConfigBaseError
 from kheops.plugin.common import BackendPlugin, BackendCandidate
 
 log = logging.getLogger(__name__)
-CACHE_FILE_EXPIRE = 5
-
 
 class Plugin(BackendPlugin):
     """File Backend Plugin
@@ -43,12 +40,13 @@ class Plugin(BackendPlugin):
             to find all of the specified format. It is better to keep this list as small
             as possible.
             """,
+
             "type": "object",
             "default": extensions,
             "additionalProperties": {
                 "title": "Name of the extension with parser",
-                "type": "string",
-            },
+                "type": "string"
+            }
         },
         "path_prefix": {
             "title": "Prefix string to append to final path",
@@ -56,7 +54,7 @@ class Plugin(BackendPlugin):
             String to be added at the end of the resolved path. This is useful to change
             the place of the root hierarchy.
             """,
-            "type": "string",
+            "type": "string"
         },
         "path_suffix": {
             "title": "Suffix string to prepend to final path",
@@ -66,11 +64,12 @@ class Plugin(BackendPlugin):
             "type": "string",
             "examples": [
                 { "path_suffix": "/ansible" },
-            ],
-        },
+            ]
         },
     }
 }
+}
 
+
    _schema_props_new = {
        "path": {
@@ -119,8 +118,6 @@ class Plugin(BackendPlugin):
 
    def fetch_data(self, config) -> list:
 
-        cache = self.ns.cache
-
        path = config.get("path")
        if self.path_suffix:
            path = f"{path}{self.path_suffix}"
@@ -130,21 +127,11 @@ class Plugin(BackendPlugin):
         extensions = self.config.get("extensions", self.extensions)
         for ext, parser in extensions.items():
             new_path = os.path.join(self.top_path, path + ext)
-            cache_key = "file_content_" + new_path
-
-            # Check first if content exists in cache
-            try:
-                raw_data = cache[cache_key]
-                status = "found"
-                # log.info("Found cached: %s with %s", new_path, raw_data)
-                break
-            except KeyError:
-                if os.path.isfile(new_path):
-                    status = "found"
-                    try:
-                        log.info("Found file: %s", new_path)
-                        raw_data = anyconfig.load(new_path, ac_parser=parser)
-                        cache.set(cache_key, raw_data, expire=CACHE_FILE_EXPIRE)
-                    except AnyConfigBaseError as err:
-                        status = "broken"
-                        raw_data = None
+            if os.path.isfile(new_path):
+                status = "found"
+                try:
+                    log.info("Found file: %s", new_path)
+                    raw_data = anyconfig.load(new_path, ac_parser=parser)
+                except AnyConfigBaseError as err:
+                    status = "broken"
+                    raw_data = None
@@ -6,7 +6,7 @@ from kheops.utils import schema_validate
 from pprint import pprint
 
 log = logging.getLogger(__name__)
-NoneType = type(None)
+
 
 # Vocabulary:
 # Key Rules
@@ -22,16 +22,13 @@ NoneType = type(None)
 # Generic Plugin classes
 # -------------------------
 
-
 class KheopsPlugin:
     plugin_name = None
     plugin_type = None
     plugin_kind = None
 
     def __init__(self):
-        assert isinstance(
-            self.plugin_name, str
-        ), f"Missing name attribute in plugin: {self.__class__}"
+        assert isinstance(self.plugin_name, str), f"Missing name attribute in plugin: {self.__class__}"
         assert isinstance(self.plugin_kind, str)
 
         config_key = f"{self.plugin_kind}_{self.plugin_name}"
@@ -64,7 +61,6 @@ class KheopsItemPlugin(KheopsPlugin):
 # Plugin classes
 # -------------------------
 
-
 class BackendPlugin(KheopsItemPlugin):
     plugin_kind = "backend"
 
@@ -144,14 +140,11 @@ class ScopePlugin(KheopsListPlugin):
         self.ns = namespace
         super().__init__()
 
-
 # Helper classes
 # -------------------------
-
-
-class BackendCandidate:
+class BackendCandidate():
     """Represent a backend candidate"""
 
     def __init__(self, path=None, data=None, run=None, status=None):
         assert isinstance(run, dict)
         self.path = path
@@ -163,6 +156,7 @@ class BackendCandidate:
         return f"Status: {self.status}, Path: {self.path} => {self.data}"
 
 
+
 class ScopeExtLoop:
     """This Scope Extension allow to loop over a lookup"""
 
@@ -185,15 +179,10 @@ class ScopeExtLoop:
     }
 
     def loop_over(
-        self,
-        lookups,
-        conf,
-        module_name,
-        var_name="item",
-        callback_context=None,
-        callback=None,
+        self, lookups, conf, module_name, var_name="item", callback_context=None, callback=None
     ):
 
+
        var_name = conf.get("var", var_name)
        var_data_ref = conf.get("data", None)
 
@@ -218,8 +207,7 @@ class ScopeExtLoop:
 
            # Validate generated
            if not isinstance(var_data, list):
-                if not isinstance(var_data, NoneType):
-                    log.warning("Loop data must be a list, got: '%s'", var_data)
+                log.warning("Loop data must be a list, got: %s", var_data)
                continue
 
            # Create new object
@@ -235,8 +223,6 @@ class ScopeExtLoop:
                "variable": var_name,
            }
 
-            # Note: This implie a performance penalty to do so, but
-            # we really need a full copy of the dict. copy.copy or dict() are not enough
            new_item = copy.deepcopy(lookup)
            new_item["_run"]["scope"][var_name] = var_value
            new_item["_run"][module_name].append(ctx)
@@ -246,6 +232,8 @@ class ScopeExtLoop:
        return ret
 
 
+
+
 # To clean/implement
 
 
@@ -4,10 +4,6 @@ import collections
 import logging
 from pathlib import Path
 
-import hashlib
-import json
-from typing import Dict, Any
-
 from jinja2 import Template
 from jsonschema import Draft7Validator, validators
 from pprint import pprint
@@ -78,17 +74,6 @@ class Default(dict):
         return ""
 
 
-# Source: https://www.doc.ic.ac.uk/~nuric/coding/how-to-hash-a-dictionary-in-python.html
-def dict_hash(dictionary: Dict[str, Any]) -> str:
-    """MD5 hash of a dictionary."""
-    dhash = hashlib.md5()
-    # We need to sort arguments so {'a': 1, 'b': 2} is
-    # the same as {'b': 2, 'a': 1}
-    encoded = json.dumps(dictionary, sort_keys=True).encode()
-    dhash.update(encoded)
-    return dhash.hexdigest()
-
-
 def render_template_python(text, params, ignore_missing=True):
     """Render template for a given string"""
     assert isinstance(params, dict), f"Got: {params}"
@@ -105,7 +90,6 @@ def render_template_python(text, params, ignore_missing=True):
 # Schema Methods
 # =====================
 
-
 def _extend_with_default(validator_class):
     validate_properties = validator_class.VALIDATORS["properties"]
 
@@ -115,16 +99,12 @@ def _extend_with_default(validator_class):
             instance.setdefault(property, subschema["default"])
 
         for error in validate_properties(
-            validator,
-            properties,
-            instance,
-            schema,
+            validator, properties, instance, schema,
         ):
             yield error
 
     return validators.extend(
-        validator_class,
-        {"properties": set_defaults},
+        validator_class, {"properties" : set_defaults},
     )
 
 
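Note: dict_hash, kept only on the caching side, hashes the sorted JSON dump of its argument, so logically equal dicts map to the same cache key regardless of insertion order. A quick illustration (example values are hypothetical):

    >>> from kheops.utils import dict_hash
    >>> dict_hash({"a": 1, "b": 2}) == dict_hash({"b": 2, "a": 1})
    True
    >>> dict_hash({"a": 1, "b": 2}) == dict_hash({"a": 1, "b": 3})
    False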