Fix: schema, path management, and config file issues

mrjk 2022-01-17 19:09:48 -05:00
parent d99f01c3ec
commit 5052785784
8 changed files with 238 additions and 167 deletions

View File

@ -47,12 +47,21 @@ class App:
"properties": {
"app": {
"type": "object",
"default": {},
#"default": {},
"additionalProperties": False,
"properties": {
"root": {
"type": "string",
"default": None,
"oneOf": [
{
"type": "null",
"description": "Application current working directory is the `albero.yml` directory",
},
{
"type": "string",
"description": "Application working directory. If a relative path is used, it will be depending on `albero.yml` directory",
},
]
},
},
},
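As an aside, a minimal sketch of what this oneOf accepts, validated here with the stock jsonschema library (an assumption; Albero itself goes through anyconfig's schema_validate):

from jsonschema import validate  # assumption: jsonschema is available

root_schema = {"oneOf": [{"type": "null"}, {"type": "string"}]}

validate(None, root_schema)     # valid: root falls back to the albero.yml directory
validate("conf", root_schema)   # valid: resolved relative to the albero.yml directory
# validate(42, root_schema)     # would raise ValidationError: neither null nor string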
@ -60,6 +69,21 @@ class App:
# "additionalProperties": False,
"type": "object",
"default": {},
"properties": {
"prefix": {
"default": None,
"oneOf": [
{
"type": "null",
"description": "Disable prefix, all files are lookup up from the app root dir.",
},
{
"type": "string",
"description": "Add a path prefix before all paths. This is quite useful to store your YAML data in a dedicated tree.",
},
]
},
}
},
"rules": {
"type": "object",
@ -87,6 +111,7 @@ class App:
def __init__(self, config="albero.yml", namespace="default"):
conf2 = anyconfig.load(config)
self.run = {}
# Validate configuration
schema_validate(conf2, self.schema)
@ -96,23 +121,58 @@ class App:
log.error(f"Can't find namespace '{namespace}' in config '{config}'")
sys.exit(1)
# Init
if not conf2["config"]["app"]["root"]:
conf2["config"]["app"]["root"] = Path(config).parent
# Get application paths
# =====================
# Fetch app root
if conf2["config"]["app"]["root"]:
path_root = Path(conf2["config"]["app"]["root"])
log.debug ("Root path is hard coded.")
else:
conf2["config"]["app"]["root"] = Path(conf2["config"]["app"]["root"])
path_root = Path(config).parent
log.debug ("Root path guessed from conf file.")
#path_prefix = conf2["config"]["app"]["prefix"]
#if not path_prefix:
# path_prefix = ''
#p = Path(path_prefix)
#if not p.is_absolute():
# p = path_root / p
# try:
# p = p.resolve().relative_to(Path.cwd().resolve())
# except ValueError:
# pass
# Save paths
path_cwd = str(Path.cwd().resolve())
path_root = str(path_root.resolve())
self.run['path_cwd'] = path_cwd
self.run['path_root'] = path_root
#self.run['path_prefix'] = str(p.resolve())
log.debug (f"Working directory is {path_root} while cwd is: {path_cwd}")
# path_root = path_root.resolve().relative_to(Path.cwd())
#conf2["config"]["app"]["root"] = str(path_root)
# Finish
self.conf2 = dict(conf2)
log.debug("Loading config: %s", config)
log.debug("Root directory is: %s", path_root)
def lookup(self, key=None, policy=None, scope=None, trace=False, explain=False):
log.debug(f"Lookup key {key} with scope: {scope}")
q = Query(app=self)
r = q.exec(key=key, scope=scope, policy=policy, trace=trace, explain=explain)
print("=== Query Result ===")
print(anyconfig.dumps(r, ac_parser="yaml"))
print("=== Query Result ===")
return r
#print("=== Query Result ===")
print(anyconfig.dumps(r, ac_parser=fmt))
#print("=== Query Result ===")
def dump_schema(self):
@ -120,11 +180,53 @@ class App:
import albero.plugin as AlberoPlugins
from albero.managers import BackendsManager, RulesManager
r1 = BackendsManager.get_schema(AlberoPlugins)
r1 = BackendsManager.get_schema(AlberoPlugins, mode='parts')
r2 = RulesManager.get_schema(AlberoPlugins)
#pprint (r1)
print(json.dumps(r1, indent=2))
return
d = self.schema
d["patternProperties"][".*"]["properties"]["tree"]["items"]["properties"] = r1
d["patternProperties"][".*"]["properties"]["tree"]["items"] = r2
print(json.dumps(d, indent=2))
def gen_docs(self):
import json
import albero.plugin as AlberoPlugins
from albero.managers import BackendsManager, RulesManager
print ("WIP")
#src = {
# "app": {
# "config_schema": None,
# "plugin_managers": {
# 'tree': None,
# 'rules': None,
# }
# }
#
#r1 = BackendsManager.get_schema(AlberoPlugins, mode='parts')
#print (json.dumps(r1, indent=2))
#ret = {
#
# }
#part_config = r1.get('config_schema', None)
#part_item = r1['items']['core_schema']
#part_item_plugins = r1['items']['plugin']
#for kind, plugins in part_item_plugins.items():
# for plugin_name, schema in plugins.items():
# part_item_
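Taken together, a hedged usage sketch of the reworked App init and lookup (import path and key/scope values are assumptions):

import albero as Albero  # assumption: the package exposes App as used by the CLI

# With app.root left null, the root is derived from the albero.yml location
app = Albero.App(config="deploy/albero.yml", namespace="default")
print(app.run["path_root"])  # resolved .../deploy directory
print(app.run["path_cwd"])   # current working directory at load time

# Look up a key against a scope, as the CLI does
app.lookup(key="some_key", scope={"environment": "prod"})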

View File

@ -106,12 +106,17 @@ class CmdApp:
parser.add_argument(
"-v", "--verbose", action="count", default=0, help="Increase verbosity"
)
parser.add_argument(
"-c", "--config", help="Albero configuration file",
default="albero.yml",
)
parser.add_argument("help", action="count", default=0, help="Show usage")
subparsers = parser.add_subparsers(
title="subcommands", description="valid subcommands", dest="command"
)
# Manage command: schema
add_p = subparsers.add_parser("schema")
add_p = subparsers.add_parser("gen_doc")
# Manage command: demo
add_p = subparsers.add_parser("lookup")
@ -127,6 +132,7 @@ class CmdApp:
add_p.add_argument("-p", "--policy")
add_p.add_argument("-t", "--trace", action="store_true")
add_p.add_argument("-x", "--explain", action="store_true")
add_p.add_argument("-o", "--format", help="Output format", choices=['yaml', 'json', 'xml', 'ini', 'toml'], default='yaml')
add_p.add_argument("key", default=None, nargs="*")
# Manage command: demo
@ -178,14 +184,15 @@ class CmdApp:
self.log.info(f"CLI: {keys} with env: {new_params}")
app = Albero.App(config=config, namespace=self.args.namespace)
app = Albero.App(config=self.args.config, namespace=self.args.namespace)
for key in keys:
app.lookup(
r = app.lookup(
key=key,
scope=new_params,
trace=self.args.trace,
explain=self.args.explain,
)
print(anyconfig.dumps(r, ac_parser=self.args.format))
def cli_schema(self):
"""Display configuration schema"""
@ -196,5 +203,14 @@ class CmdApp:
app.dump_schema()
def cli_gen_doc(self):
"""Generate documentation"""
config = "/home/jez/prj/bell/training/tiger-ansible/tree.yml"
app = Albero.App(config=config) # , namespace=self.args.namespace)
app.gen_docs()
if __name__ == "__main__":
app = CmdApp()
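The new -o/--format option is passed straight to anyconfig.dumps; a small sketch of that serialization step (the result dict is illustrative):

import anyconfig

r = {"key": "value", "items": [1, 2]}
print(anyconfig.dumps(r, ac_parser="yaml"))  # default output format
print(anyconfig.dumps(r, ac_parser="json"))  # what -o json produces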

View File

@ -41,19 +41,24 @@ class LoadPlugin:
class Manager:
"""Generic manager class"""
_app_kind = 'core'
plugins_kind = []
_schema_props_default = None
_schema_props_new = None
_props_position = None
@classmethod
def get_schema(cls, plugins_db):
def get_schema(cls, plugins_db, mode='full'):
"""Retrieve configuration schema"""
# Properties
ret = {
"core_schema": {},
"plugin": {},
}
ret3 = {}
for kind in cls.plugins_kind:
# ret[kind] = {}
ret['plugin'][kind] = {}
plugin_kind = getattr(plugins_db, kind)
for plugin_name in [i for i in dir(plugin_kind) if not i.startswith("_")]:
@ -64,7 +69,7 @@ class Manager:
plugin_cls, "_schema_props_new", "MISSING ITEM"
)
if schema_props:
# ret[kind][plugin_name] = schema_props
ret['plugin'][kind][plugin_name + '_schema'] = schema_props
ret3.update(schema_props)
ret3.update(cls._schema_props_new)
@ -72,13 +77,23 @@ class Manager:
ret1 = cls._schema_props_default
position = cls._props_position
dpath.util.set(ret1, position, ret3)
ret['core_schema'] = cls._schema_props_new
return ret1
if mode == 'full':
return ret1
ret4 = {
"config_schema": {},
"items": ret,
}
return ret4
class BackendsManager(Manager):
"""Backend Manager"""
_app_kind = 'manager'
plugins_kind = ["engine", "backend"]
_schema_props_new = {
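For reference, a sketch of the two shapes get_schema can now return (top-level keys come from the code above, nested values are illustrative):

import albero.plugin as AlberoPlugins
from albero.managers import BackendsManager

# mode='full' (default): the merged property schema, unchanged behaviour
full_schema = BackendsManager.get_schema(AlberoPlugins)

# mode='parts': the schema split into reusable pieces
parts = BackendsManager.get_schema(AlberoPlugins, mode='parts')
# parts == {
#     "config_schema": {},
#     "items": {
#         "core_schema": {...},                    # manager-level properties
#         "plugin": {
#             "engine": {"<name>_schema": {...}},  # one entry per engine plugin
#             "backend": {"<name>_schema": {...}}, # one entry per backend plugin
#         },
#     },
# }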
@ -212,6 +227,7 @@ class BackendsManager(Manager):
class RulesManager(Manager):
_app_kind = 'rules'
plugins_kind = ["strategy"]
_schema_props_new = {

View File

@ -57,40 +57,36 @@ class Plugin(PluginBackendClass):
def process(self, backends: list, ctx: dict) -> (list, dict):
new_backends = []
for cand in backends:
# Init
# Fetch backend data
plugin_config = cand.get("hier", {})
hier_data = plugin_config.get("data", None)
if not hier_data:
new_backends.append(cand)
continue
# Retrieve config data
hier_var = plugin_config.get("var", "item")
hier_sep = plugin_config.get("separator", "/")
# Retrieve data to loop over
if isinstance(hier_data, str):
# If it's a string, fetch value from scope
hier_data = cand["_run"]["scope"].get(hier_data, None)
# Build a new list
# Do the hierarchical replacement
hier_data = path_assemble_hier(hier_data, hier_sep)
if isinstance(hier_data, str):
r = hier_data.split(hier_sep)
assert isinstance(r, list), f"Got: {r}"
ret1 = []
for index, part in enumerate(r):
try:
prefix = ret1[index - 1]
except IndexError:
prefix = f"{hier_sep}"
prefix = ""
item = f"{prefix}{part}{hier_sep}"
ret1.append(item)
if not isinstance(hier_data, list):
log.warn("Hier module can't loop over non list data, got: {hier_data}")
continue
# Build result list
ret1 = hier_data
log.debug (f"Hier plugin will loop over: {ret1}")
ret2 = []
for item in ret1:
for index, item in enumerate(ret1):
_cand = copy.deepcopy(cand)
run = {
"index": index,

View File

@ -8,6 +8,7 @@ import logging
import anyconfig
import textwrap
log = logging.getLogger(__name__)
class Plugin(PluginBackendClass):

View File

@ -1,5 +1,5 @@
from pathlib import Path
from albero.utils import render_template
from albero.utils import render_template, glob_files
from albero.plugin.common import PluginEngineClass, PluginFileGlob #, Candidate
from pprint import pprint
@ -28,7 +28,6 @@ class Plugin(PluginEngineClass, PluginFileGlob):
_plugin_name = "jerakia"
### OLD
_plugin_engine = "jerakia"
# _schema_props_files = {
_schema_props_new = {
@ -44,6 +43,20 @@ class Plugin(PluginEngineClass, PluginFileGlob):
},
},
]
},
"glob": {
"default": "ansible.yml",
"anyOf": [
{
"type": "string",
},
# {
# "type": "array",
# "items": {
# "type": "string",
# },
# },
]
}
}
@ -62,7 +75,7 @@ class Plugin(PluginEngineClass, PluginFileGlob):
self.paths = paths
self.value = paths
def _preprocess(self, scope):
def _paths_template(self, scope):
# Manage loops
paths = self.paths
@ -77,15 +90,15 @@ class Plugin(PluginEngineClass, PluginFileGlob):
return ret
def _show_paths(self, scope):
def _show_paths(self, path_top, scope):
parsed = self._preprocess(scope)
parsed = self._paths_template(scope)
log.debug("Expanded paths to: %s", parsed)
# Look for files (should not be done here!)
ret3 = []
for p in parsed:
globbed = self._glob(p)
globbed = glob_files(path_top / p, 'ansible.yaml')
ret3.extend(globbed)
log.debug(f"Matched globs: %s", ret3)
@ -93,17 +106,32 @@ class Plugin(PluginEngineClass, PluginFileGlob):
def process(self):
# scope = self.scope
# pprint (self.config)
scope = dict(self.config["_run"]["scope"])
# Detect path root and path prefix
path_root = self.app.run['path_root']
path_prefix = self.app.conf2['config']['tree']['prefix']
if path_prefix:
path_prefix = Path(path_prefix)
if path_prefix.is_absolute():
path_top = path_prefix
else:
path_top = Path(path_root) / path_prefix
else:
path_top = path_root
log.debug(f"Path top: {path_top}")
scope = self.config["_run"]["scope"]
key = self.config["_run"]["key"]
assert isinstance(scope, dict), f"Got: {scope}"
assert isinstance(key, (str, type(None))), f"Got: {key}"
t = self._show_paths(scope)
# t = self._show_paths(path_top, scope)
ret = []
for index, path in enumerate(self._show_paths(scope)):
for index, path in enumerate(self._show_paths(path_top, scope)):
log.debug(f"Reading file: {path}")
# Fetch data
found = False
@ -133,49 +161,3 @@ class Plugin(PluginEngineClass, PluginFileGlob):
return ret
######## OLD
# # Read raw file content
# data = anyconfig.load(path, ac_parser="yaml")
#
# ret_obj2 ={
# "_run": _run,
# }
# #### OLD
# ret_obj = FileCandidate(self.config)
# ret_obj.engine = self
# ret_obj.data = None
# found = False
# if key is None:
# ret_obj.data = data
# found = True
# else:
# try:
# ret_obj.data = data[key]
# found = True
# except Exception:
# pass
# # ret_obj.run['path'] = path
# # ret_obj.run['found'] = found
# # ret_obj.run['scope'] = scope
# # ret_obj.run['key'] = key
# be = {
# "index": index,
# "path": path,
# "rel_path": str(Path(path).relative_to(Path.cwd())),
# }
# #qu = {
# # "scope": scope,
# # "key": key,
# # }
# ret_obj.run['backend'] = be
# #ret_obj.run['query'] = qu
# #log.debug(f"Found value: {ret_obj}")
# ret_obj.found = found
# ret.append(ret_obj)

View File

@ -91,6 +91,7 @@ class Plugin(PluginStrategyClass):
trace = rule["trace"]
explain = rule["explain"]
schema = rule.get("schema", None) or self.default_merge_schema
merger = Merger(schema)
t = PrettyTable()

View File

@ -12,56 +12,42 @@ import logging
log = logging.getLogger(__name__)
# # File parsers
# # =====================
#
# class FileParserClass():
#
# def __init__(self, path):
# self.path = path
#
# def from_file(self, file):
# raise Exception ("Not implemented")
#
# def from_string(self, data):
# raise Exception ("Not implemented")
#
# def from_dict(self, data):
# raise Exception ("Not implemented")
#
# class FilesYAMLParser(FileParserClass):
# def get_data(self):
# with open(self.path, "r") as stream:
# try:
# return yaml.safe_load(stream)
# except yaml.YAMLError as exc:
# raise Exception(exc)
# print(exc)
#
#
# class FilesJSONParser(FileParserClass):
# pass
# class FilesRawParser(FileParserClass):
# pass
# class FilesTOMLParser(FileParserClass):
# pass
# class FilesCSVParser(FileParserClass):
# pass
# class FilesINIParser(FileParserClass):
# pass
#
# format_db = {
# ".raw": FilesRawParser,
# ".yml": FilesYAMLParser,
# ".yaml": FilesYAMLParser,
# ".json": FilesJSONParser,
# }
# Utils Methods
# =====================
def glob_files(path, pattern):
log.debug(f"Search glob '{pattern}' in '{path}'")
p = Path(path)
ret = p.glob(pattern)
return [str(i) for i in ret]
def path_assemble_hier(path, sep='/'):
"""Append the previous
"""
if isinstance(path, str):
list_data = path.split(sep)
elif isinstance(path, list):
list_data = path  # assumption: a list is treated as already-split path parts
else:
raise Exception (f"This function only accepts string or lists, got: {path}")
assert isinstance(list_data, list), f'Got: {list_data}'
ret = []
for index, part in enumerate(list_data):
try:
prefix = ret[index - 1]
except IndexError:
prefix = f"{sep}"
prefix = ""
item = f"{prefix}{part}{sep}"
ret.append(item)
return ret
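# Worked example (illustrative, not part of the module):
#   path_assemble_hier("us/east/web") -> ["us/", "us/east/", "us/east/web/"]
# Each item extends the previous prefix by one path part, which is what the
# hier plugin loops over to produce one lookup candidate per level.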
def render_template(path, params):
"""Render template for a given string"""
@ -69,35 +55,6 @@ def render_template(path, params):
t = Template(path)
return t.render(**params)
# def read_file(file):
# with open(file, 'r') as f:
# data = f.read().replace('\n', '')
# return data
#
#
# def parse_file(file, fmt='auto'):
# print ("DEPRECATED")
# raise Exception ("parse_file is deprecated")
#
# data = read_file(file)
#
# # Autodetect format from file name
# if fmt == 'auto':
# p = Path(file)
# fmt = p.suffix
# else:
# fmt = f".{fmt}"
#
# # Retrieve parser
# if fmt is None:
# raise Exception ("No available driver to read file: %s" % p )
# fmt_cls = format_db.get(fmt, None)
#
# # Parse content
# o = fmt_cls(str(p))
# return o.get_data()
# Schema Methods
# =====================