Compare commits

5 Commits: 1dce6fbfcd ... ffdbfc442b

| Author | SHA1 | Date |
|---|---|---|
|  | ffdbfc442b |  |
|  | 2707e3ae76 |  |
|  | 2a4e25a1c1 |  |
|  | fccef4879e |  |
|  | c0621eb01c |  |

kheops/app.py (133)
@@ -13,30 +13,31 @@ from diskcache import Cache
import kheops.plugin as KheopsPlugins
from kheops.controllers import QueryProcessor
from kheops.utils import schema_validate
from kheops.utils import schema_validate, dict_hash

log = logging.getLogger(__name__)

CACHE_CONFIG_EXPIRE = 15

CONF_SCHEMA = {
"$schema": "http://json-schema.org/draft-07/schema#",
"type": "object",
"additionalProperties": False,
"default": {},
"required": ["config"],
#"$def": {
# "$def": {
# "backends_items": {},
# "backends_config": {},
# "rules_items": {},
# "rules_config": {},
#},
# },
"properties": {
"lookups": {
"type": "array",
"default": [],
"items": {
"type": "object",
#"properties": {"$ref": "#/$defs/backends_items"},
# "properties": {"$ref": "#/$defs/backends_items"},
},
},
"rules": {

@@ -44,12 +45,11 @@ CONF_SCHEMA = {
"default": [],
# "arrayItem": { "$ref": "#/$defs/rules_items" },
},

"config": {
"type": "object",
"default": {},
"additionalProperties": True,
#"required": ["app"],
# "required": ["app"],
"properties": {
"app": {
"type": "object",

@@ -108,6 +108,7 @@ class KheopsNamespace(GenericInstance, QueryProcessor):
Kheops Namespace Class

"""

def __init__(self, app, name, config=None):
"""
Kheops Namespace Instance

@@ -122,14 +123,22 @@ class KheopsNamespace(GenericInstance, QueryProcessor):
:type config: Any
"""

config = schema_validate(config, CONF_SCHEMA)
super().__init__(config)

# Init object
self.name = name
self.app = app
self.run = dict(app.run)
self.cache = app.cache

# Init config (from cache)
config_hash = "conf_ns_" + dict_hash(config)
try:
config = self.cache[config_hash]
log.debug("Loading namespace '%s' configuration from cache", self.name)
except KeyError:
config = schema_validate(config, CONF_SCHEMA)
self.cache.set(config_hash, config, expire=CACHE_CONFIG_EXPIRE)
super().__init__(config)

# Validate configuration
self.run["path_ns"] = str(Path(app.run["config_src"]).parent.resolve())
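For context, here is a minimal standalone sketch of the namespace config-caching pattern introduced in the hunk above, assuming the python-diskcache package; the `load_validated_config` helper and the demo paths are illustrative and not part of kheops.

```python
import hashlib
import json

from diskcache import Cache

CACHE_CONFIG_EXPIRE = 15  # seconds, mirrors the constant added in app.py


def load_validated_config(cache: Cache, config: dict, validate):
    """Return a validated config, reusing a cached copy when one exists."""
    # kheops factors this hashing out as kheops.utils.dict_hash()
    key = "conf_ns_" + hashlib.md5(
        json.dumps(config, sort_keys=True).encode()
    ).hexdigest()
    try:
        return cache[key]  # cache hit: skip schema validation entirely
    except KeyError:
        validated = validate(config)  # cache miss: validate once ...
        cache.set(key, validated, expire=CACHE_CONFIG_EXPIRE)  # ... then store
        return validated


if __name__ == "__main__":
    cache = Cache("/tmp/kheops_cache_demo/")
    cfg = {"config": {}, "lookups": [], "rules": []}
    print(load_validated_config(cache, cfg, lambda c: c))
```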
@@ -139,7 +148,7 @@ class Kheops(GenericInstance):

"""

def __init__(self, config="kheops.yml", namespace="default"):
def __init__(self, config="kheops.yml", namespace="default", cache=None):
"""
Kheops Application Instance

@@ -166,8 +175,17 @@ class Kheops(GenericInstance):
self.ns_name = namespace
self.namespaces = {}

self.cache = cache or Cache("/tmp/kheops_cache/")
self.raw_config = self.parse_conf(config)

# needle = 'conf_app_' + dict_hash(config)
# try:
# self.raw_config = self.cache[needle]
# except KeyError:
# self.raw_config = self.parse_conf(config)
# self.cache.set(needle, config, expire=CACHE_CONFIG_EXPIRE)

def parse_conf(self, config="kheops.yml"):
"""
Parse Kheops configuration

@@ -185,17 +203,18 @@ class Kheops(GenericInstance):
try:
dict_conf = anyconfig.load(config)
except Exception as err:
raise Exception ("Can't load kheops configuration, got: %s", err)
raise Exception("Can't load kheops configuration, got: %s", err)
source = f"file:{config}"
elif isinstance(config, dict):
dict_conf = config
source = "dict"

self.run["conf_source"] = source
return dict_conf

def lookup(
self,
keys=None,
policy=None,
scope=None,
trace=False,
explain=False,

@@ -216,11 +235,14 @@ class Kheops(GenericInstance):
:type scope: dict
"""

ret = {}
# Loop over keys
ret = {}
for key_def in keys:

key_def = key_def or ""
assert isinstance(
key_def, str
), f"Expected string as key, got {type(key_def)}: {key_def}"

# Identify namespace and key
parts = key_def.split("/")

@@ -247,71 +269,68 @@ class Kheops(GenericInstance):

# Prepare output
_key = key_name
if namespace_prefix == True:
if namespace_prefix is True:
_key = key_def
ret[_key] = result

# TODO: This may lead to inconsistant output format :/
# Return result
#if len(keys) > 1:
# if len(keys) > 1:
# log.debug("Append '%s' to results", key_name)
#else:
# else:
# log.debug("Return '%s' result", key_name)
# return result

if explain:
# This is never a really good idea to show direct data ...
log.debug("Returned result: %s", ret)
return ret


# To clean/implement

# def DEPRECATED_dump_schema(self):
# """Dump configuration schema"""
# def DEPRECATED_dump_schema(self):
# """Dump configuration schema"""

# ret1 = BackendsManager.get_schema(KheopsPlugins, mode="parts")
# ret2 = RulesManager.get_schema(KheopsPlugins)
# print(json.dumps(ret1, indent=2))
# return
# ret1 = BackendsManager.get_schema(KheopsPlugins, mode="parts")
# ret2 = RulesManager.get_schema(KheopsPlugins)
# print(json.dumps(ret1, indent=2))
# return

# # ret = self.schema
# # ret["patternProperties"][".*"]["properties"]["tree"]["items"]["properties"] = ret1
# # ret["patternProperties"][".*"]["properties"]["tree"]["items"] = ret2
# # ret = self.schema
# # ret["patternProperties"][".*"]["properties"]["tree"]["items"]["properties"] = ret1
# # ret["patternProperties"][".*"]["properties"]["tree"]["items"] = ret2

# # print(json.dumps(ret, indent=2))
# # print(json.dumps(ret, indent=2))

# def DEPRECATED_gen_docs(self):
# """Generate documentation"""
# def DEPRECATED_gen_docs(self):
# """Generate documentation"""

# print("WIP")
# return None
# print("WIP")
# return None

# # src = {
# # "app": {
# # "config_schema": None,
# # "plugin_managers": {
# # 'tree': None,
# # 'rules': None,
# # }
# # }
# #
# # r1 = BackendsManager.get_schema(KheopsPlugins, mode='parts')
# # src = {
# # "app": {
# # "config_schema": None,
# # "plugin_managers": {
# # 'tree': None,
# # 'rules': None,
# # }
# # }
# #
# # r1 = BackendsManager.get_schema(KheopsPlugins, mode='parts')

# # print (json.dumps(r1, indent=2))
# # print (json.dumps(r1, indent=2))

# # ret = {
# #
# # }
# # ret = {
# #
# # }

# # part_config = r1.get('config_schema', None)
# # part_item = r1['items']['core_schema']
# # part_item_plugins = r1['items']['plugin']
# # part_config = r1.get('config_schema', None)
# # part_item = r1['items']['core_schema']
# # part_item_plugins = r1['items']['plugin']

# # for kind, plugins in part_item_plugins.items():
# # for kind, plugins in part_item_plugins.items():

# # for plugin_name, schema in plugins.items():
# # part_item_
# # for plugin_name, schema in plugins.items():
# # part_item_
@@ -13,17 +13,18 @@ from pathlib import Path
from prettytable import PrettyTable

import kheops.plugin as KheopsPlugins
from kheops.utils import render_template_python, str_ellipsis
from kheops.utils import render_template_python, str_ellipsis, dict_hash

log = logging.getLogger(__name__)
tracer = logging.getLogger(f"{__name__}.explain")

CACHE_QUERY_EXPIRE = 10

# Helper classes
# ------------------------

class LoadPlugin:
"""Kheops plugins loader

@@ -60,7 +61,8 @@ class LoadPlugin:
# Return plugin Classe
return plugin_cls.Plugin

class BackendCandidate():

class BackendCandidate:
"""Backend Candidate

This object represents a backend candidate. It holds the value of the

@@ -97,6 +99,7 @@ class Query:
# Query Processor class
# ------------------------

class QueryProcessor:
"""QueryProcessor

@@ -134,15 +137,20 @@ class QueryProcessor:
# ------------------------

def query(self, key=None, scope=None, explain=False):
"""Query key with scope
"""Query key with scope"""

"""
# Look into cache
query_hash = dict_hash([self.name, key, scope])
if query_hash in self.cache:
log.debug("Result fetched from cache")
self.cache.touch(query_hash, expire=CACHE_QUERY_EXPIRE)
return self.cache[query_hash]

if explain:
tracer.setLevel(logging.DEBUG)

query = Query(key, scope)
log.info("Creating new query: %s", query.__dict__)
log.info("Creating new query for namespace '%s': %s", self.name, query.__dict__)

# Match the KeyRule in keys (RULE CACHE)
# Get the matching keys

@@ -160,7 +168,7 @@ class QueryProcessor:

# Generate explain report
if explain:
self._explain_lookups(parsed_lookups)
self._explain_lookups(parsed_lookups, query)

# Fetch the module
# Retrieve the module instance

@@ -185,9 +193,9 @@ class QueryProcessor:
# TODO: Apply output plugins
# result = self._exec_output_plugins(result)

self.cache.set(query_hash, result, expire=CACHE_QUERY_EXPIRE)
return result

# Query parts methods
# ------------------------

@@ -210,7 +218,6 @@ class QueryProcessor:

return rule

def _exec_assemble_lookups(self, lookups, query):

assert isinstance(lookups, list)

@@ -219,10 +226,10 @@ class QueryProcessor:
# Init the scope list
new_lookups1 = []
for index, lookup_def in enumerate(lookups):
#shortform = False
# shortform = False

if isinstance(lookup_def, str):
#shortform = True
# shortform = True
lookup_def = {
"path": lookup_def,
}

@@ -250,7 +257,9 @@ class QueryProcessor:
plugin_name = plugin_def.get("module", None)

if plugin_name:
plugin = self.plugin_loader.load("scope", plugin_name)(namespace=self)
plugin = self.plugin_loader.load("scope", plugin_name)(
namespace=self
)
ret = plugin.process_items(ret, plugin_def)

new_lookups2.extend(ret)

@@ -266,11 +275,12 @@ class QueryProcessor:
lookup["path"] = new_path
new_lookups3.append(lookup)
else:
log.warning("Ignore lookup item because of missing scope vars: '%s'", path)
log.warning(
"Ignore lookup item because of missing scope vars: '%s'", path
)

return new_lookups3

def _exec_backend_plugins(self, lookups, selector="matched"):
selector = "matched"
assert selector in ["last", "first", "all", "matched"]

@@ -304,7 +314,7 @@ class QueryProcessor:
# Explain methods
# ------------------------

def _explain_lookups(self, parsed_lookups):
def _explain_lookups(self, parsed_lookups, query):
"""Explain list of lookups"""

table = PrettyTable()

@@ -325,7 +335,7 @@ class QueryProcessor:
)
table.field_names = ["Config", "Runtime"]
table.align = "l"
tracer.info("Explain lookups:\n%s", str(table))
tracer.info("Explain lookups: %s/%s\n%s", self.name, query.key, str(table))

def _explain_candidates(self, candidates, query):
"""Explain list of candidates"""

@@ -361,4 +371,4 @@ class QueryProcessor:

table.field_names = ["Status", "Runtime", "Key Value"]
table.align = "l"
tracer.info("Explain candidates:\n%s", str(table))
tracer.info("Explain candidates: %s/%s\n%s", self.name, query.key, str(table))
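The `query()` change above memoizes full query results and refreshes their TTL on every hit. A rough self-contained sketch of that pattern, assuming python-diskcache; the `cached_query` wrapper and the example key/scope are made up for illustration.

```python
import hashlib
import json

from diskcache import Cache

CACHE_QUERY_EXPIRE = 10  # seconds, mirrors CACHE_QUERY_EXPIRE above


def cached_query(cache: Cache, namespace: str, key: str, scope: dict, compute):
    """Return a cached result when present, refreshing its TTL on every hit."""
    query_hash = hashlib.md5(
        json.dumps([namespace, key, scope], sort_keys=True).encode()
    ).hexdigest()
    if query_hash in cache:
        cache.touch(query_hash, expire=CACHE_QUERY_EXPIRE)  # sliding expiry
        return cache[query_hash]
    result = compute(key, scope)  # cache miss: run the real lookup
    cache.set(query_hash, result, expire=CACHE_QUERY_EXPIRE)
    return result


if __name__ == "__main__":
    cache = Cache("/tmp/kheops_cache_demo/")
    result = cached_query(cache, "default", "prometheus_port", {"env": "dev"},
                          lambda k, s: {"value": 9090})
    print(result)
```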
@@ -2,6 +2,7 @@

import os
import logging

# from pprint import pprint

import anyconfig

@@ -9,6 +10,8 @@ from anyconfig.common.errors import BaseError as AnyConfigBaseError
from kheops.plugin.common import BackendPlugin, BackendCandidate

log = logging.getLogger(__name__)
CACHE_FILE_EXPIRE = 5

class Plugin(BackendPlugin):
"""File Backend Plugin

@@ -19,12 +22,12 @@ class Plugin(BackendPlugin):

plugin_name = "file"
extensions = {
".yml": "yaml",
".yaml": "yaml",
#".toml": "toml",
#".ini": "ini",
#".json": "json",
}
".yml": "yaml",
".yaml": "yaml",
# ".toml": "toml",
# ".ini": "ini",
# ".json": "json",
}

_schema_config = {
"backend_file": {

@@ -33,43 +36,41 @@ class Plugin(BackendPlugin):
"type": "object",
"properties": {
"extensions": {
"title": "File formats",
"description": """
"title": "File formats",
"description": """
This object describe which parser is assigned to which extension.
Adding more format will have a performance impact because it will try
to find all of the specified format. It is better to keep this list as small
as possible.
""",

"type": "object",
"default": extensions,
"additionalProperties": {
"title": "Name of the extension with parser",
"type": "string"
}
"type": "object",
"default": extensions,
"additionalProperties": {
"title": "Name of the extension with parser",
"type": "string",
},
},
"path_prefix": {
"title": "Prefix string to append to final path",
"description": """
"title": "Prefix string to append to final path",
"description": """
String to be added at the end of the resolved path. This is useful to change
the place of the root hierarchy.
""",
"type": "string"
},
"type": "string",
},
"path_suffix": {
"title": "Suffix string to prepend to final path",
"description": """
"title": "Suffix string to prepend to final path",
"description": """
String to be added at the end of the resolved path. This is useful to
provide Hiera or Jerakia support.""",
"type": "string",
"examples": [
{ "path_suffix": "/ansible" },
]
},
}
}
"type": "string",
"examples": [
{"path_suffix": "/ansible"},
],
},
},
}

}

_schema_props_new = {
"path": {

@@ -118,6 +119,8 @@ class Plugin(BackendPlugin):

def fetch_data(self, config) -> list:

cache = self.ns.cache

path = config.get("path")
if self.path_suffix:
path = f"{path}{self.path_suffix}"

@@ -127,18 +130,28 @@ class Plugin(BackendPlugin):
extensions = self.config.get("extensions", self.extensions)
for ext, parser in extensions.items():
new_path = os.path.join(self.top_path, path + ext)
if os.path.isfile(new_path):
status = "found"
try:
log.info("Found file: %s", new_path)
raw_data = anyconfig.load(new_path, ac_parser=parser)
except AnyConfigBaseError as err:
status = "broken"
raw_data = None
log.warning("Could not parse file %s: %s", new_path, err)
cache_key = "file_content_" + new_path

# Stop the loop extension if we found a result.
# Check first if content exists in cache
try:
raw_data = cache[cache_key]
status = "found"
# log.info("Found cached: %s with %s", new_path, raw_data)
break
except KeyError:
if os.path.isfile(new_path):
status = "found"
try:
log.info("Found file: %s", new_path)
raw_data = anyconfig.load(new_path, ac_parser=parser)
cache.set(cache_key, raw_data, expire=CACHE_FILE_EXPIRE)
except AnyConfigBaseError as err:
status = "broken"
raw_data = None
log.warning("Could not parse file %s: %s", new_path, err)

# Stop the loop extension if we found a result.
break

log.debug("Skip absent file: %s", new_path)
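The file backend now probes each configured extension and keeps parsed file content in a short-lived cache. A hedged sketch of that probing loop, assuming anyconfig and python-diskcache; the `fetch_first_match` name and the demo paths are illustrative only.

```python
import os

import anyconfig
from diskcache import Cache

CACHE_FILE_EXPIRE = 5  # seconds, mirrors CACHE_FILE_EXPIRE in the diff
EXTENSIONS = {".yml": "yaml", ".yaml": "yaml"}


def fetch_first_match(cache: Cache, base_path: str):
    """Try each known extension, returning cached parsed content when available."""
    for ext, parser in EXTENSIONS.items():
        candidate = base_path + ext
        cache_key = "file_content_" + candidate
        try:
            return cache[cache_key]          # parsed content still cached
        except KeyError:
            if not os.path.isfile(candidate):
                continue                     # try the next extension
            data = anyconfig.load(candidate, ac_parser=parser)
            cache.set(cache_key, data, expire=CACHE_FILE_EXPIRE)
            return data
    return None


if __name__ == "__main__":
    print(fetch_first_match(Cache("/tmp/kheops_cache_demo/"), "nodes/default"))
```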
@@ -6,7 +6,7 @@ from kheops.utils import schema_validate
from pprint import pprint

log = logging.getLogger(__name__)

NoneType = type(None)

# Vocabulary:
# Key Rules

@@ -22,20 +22,23 @@ log = logging.getLogger(__name__)
# Generic Plugin classes
# -------------------------

class KheopsPlugin:
plugin_name = None
plugin_type = None
plugin_kind = None

def __init__(self):
assert isinstance(self.plugin_name, str), f"Missing name attribute in plugin: {self.__class__}"
assert isinstance(
self.plugin_name, str
), f"Missing name attribute in plugin: {self.__class__}"
assert isinstance(self.plugin_kind, str)

config_key = f"{self.plugin_kind}_{self.plugin_name}"
self.config = self.ns.config["config"].get(config_key, {})
self.config_key = config_key

#if self.config:
# if self.config:
# log.debug("Load plugin configuration in config with key '%s', got: %s", config_key, self.config)
self._init()

@@ -61,6 +64,7 @@ class KheopsItemPlugin(KheopsPlugin):
# Plugin classes
# -------------------------

class BackendPlugin(KheopsItemPlugin):
plugin_kind = "backend"

@@ -140,11 +144,14 @@ class ScopePlugin(KheopsListPlugin):
self.ns = namespace
super().__init__()

# Helper classes
# -------------------------

class BackendCandidate():

class BackendCandidate:
"""Represent a backend candidate"""

def __init__(self, path=None, data=None, run=None, status=None):
assert isinstance(run, dict)
self.path = path

@@ -156,7 +163,6 @@ class BackendCandidate():
return f"Status: {self.status}, Path: {self.path} => {self.data}"

class ScopeExtLoop:
"""This Scope Extension allow to loop over a lookup"""

@@ -179,10 +185,15 @@ class ScopeExtLoop:
}

def loop_over(
self, lookups, conf, module_name, var_name="item", callback_context=None, callback=None
self,
lookups,
conf,
module_name,
var_name="item",
callback_context=None,
callback=None,
):

var_name = conf.get("var", var_name)
var_data_ref = conf.get("data", None)

@@ -207,7 +218,8 @@ class ScopeExtLoop:

# Validate generated
if not isinstance(var_data, list):
log.warning("Loop data must be a list, got: %s", var_data)
if not isinstance(var_data, NoneType):
log.warning("Loop data must be a list, got: '%s'", var_data)
continue

# Create new object

@@ -223,6 +235,8 @@ class ScopeExtLoop:
"variable": var_name,
}

# Note: This implie a performance penalty to do so, but
# we really need a full copy of the dict. copy.copy or dict() are not enough
new_item = copy.deepcopy(lookup)
new_item["_run"]["scope"][var_name] = var_value
new_item["_run"][module_name].append(ctx)

@@ -232,8 +246,6 @@ class ScopeExtLoop:
return ret

# To clean/implement
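The comment above notes that `copy.copy` or `dict()` are not enough here. A small illustration of why the loop uses `copy.deepcopy` for the lookup dict (the sample lookup structure is made up):

```python
import copy

# Shallow copies share nested containers, so mutating the copy's nested
# scope also mutates the original lookup.
lookup = {"path": "nodes/{item}", "_run": {"scope": {"env": "dev"}}}
shallow = dict(lookup)
shallow["_run"]["scope"]["item"] = "web1"
print(lookup["_run"]["scope"])  # {'env': 'dev', 'item': 'web1'}  <- leaked

# deepcopy duplicates the nested dicts as well, leaving the original intact.
lookup = {"path": "nodes/{item}", "_run": {"scope": {"env": "dev"}}}
deep = copy.deepcopy(lookup)
deep["_run"]["scope"]["item"] = "web1"
print(lookup["_run"]["scope"])  # {'env': 'dev'}  <- original untouched
```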
@@ -4,6 +4,10 @@ import collections
import logging
from pathlib import Path

import hashlib
import json
from typing import Dict, Any

from jinja2 import Template
from jsonschema import Draft7Validator, validators
from pprint import pprint

@@ -74,6 +78,17 @@ class Default(dict):
return ""

# Source: https://www.doc.ic.ac.uk/~nuric/coding/how-to-hash-a-dictionary-in-python.html
def dict_hash(dictionary: Dict[str, Any]) -> str:
"""MD5 hash of a dictionary."""
dhash = hashlib.md5()
# We need to sort arguments so {'a': 1, 'b': 2} is
# the same as {'b': 2, 'a': 1}
encoded = json.dumps(dictionary, sort_keys=True).encode()
dhash.update(encoded)
return dhash.hexdigest()

def render_template_python(text, params, ignore_missing=True):
"""Render template for a given string"""
assert isinstance(params, dict), f"Got: {params}"

@@ -90,6 +105,7 @@ def render_template_python(text, params, ignore_missing=True):
# Schema Methods
# =====================

def _extend_with_default(validator_class):
validate_properties = validator_class.VALIDATORS["properties"]

@@ -99,12 +115,16 @@ def _extend_with_default(validator_class):
instance.setdefault(property, subschema["default"])

for error in validate_properties(
validator, properties, instance, schema,
validator,
properties,
instance,
schema,
):
yield error

return validators.extend(
validator_class, {"properties" : set_defaults},
validator_class,
{"properties": set_defaults},
)
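A quick self-contained check of the `dict_hash` helper added above: because keys are sorted before hashing, key order does not change the digest, which is what makes it usable as a cache key for config and scope dicts. Values must stay JSON-serializable; sets or custom objects raise TypeError.

```python
import hashlib
import json


def dict_hash(dictionary) -> str:
    """MD5 of a JSON-serializable dictionary, as added to kheops.utils above."""
    dhash = hashlib.md5()
    dhash.update(json.dumps(dictionary, sort_keys=True).encode())
    return dhash.hexdigest()


assert dict_hash({"a": 1, "b": 2}) == dict_hash({"b": 2, "a": 1})
print(dict_hash({"a": 1, "b": 2}))  # same digest regardless of key order
```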