Add: Initial code base as POC

mrjk 2022-01-14 20:00:43 -05:00
commit a3e6bbafd7
19 changed files with 1951 additions and 0 deletions

1
.gituuid Normal file

@@ -0,0 +1 @@
0dceea90-7148-4e7d-a8d4-c63c6835e21f

0
ansible_tree/__init__.py Normal file

222
ansible_tree/app.py Executable file

@@ -0,0 +1,222 @@
#!/usr/bin/env python3
# import sys
# sys.path.append("/home/jez/prj/bell/training/tiger-ansible/ext/ansible-tree")
import sys
import yaml
import anyconfig
from pprint import pprint
from ansible_tree.files import BackendsManager, RulesManager
from ansible_tree.utils import schema_validate
# from box import Box
from pathlib import Path
import logging
log = logging.getLogger(__name__)
class Query():
matcher_merge_schema = {
"$schema": 'http://json-schema.org/draft-04/schema#',
"oneOf": [
{
"type": "array",
"mergeStrategy": "append",
# "mergeStrategy": "arrayMergeById",
},
{
"type": "object",
"mergeStrategy": "objectMerge",
},
{
"type": "string",
"mergeStrategy": "overwrite",
},
{
"type": "number",
"mergeStrategy": "overwrite",
},
{
"type": "null",
"mergeStrategy": "overwrite",
},
],
}
def __init__(self, app):
self.app = app
self.key = None
self.scope = None
self.paths = None
self.data = None
self.result = None
self.matcher_schema = {
"$schema": 'http://json-schema.org/draft-04/schema#',
"type": "object",
"additionalProperties": False,
"properties": {
"rule": {
"type": "string",
"default": ".*",
"optional": True,
},
"strategy": {
"type": "string",
"default": "merge",
"optional": True,
"enum": ["first", "last", "merge"],
},
"schema": {
"type": "object",
"default": self.matcher_merge_schema,
#"default": {},
"optional": True,
},
}
}
def exec(self, key=None, scope=None, policy=None, trace=False, explain=False):
bm = BackendsManager(app=self.app)
mm = RulesManager(app=self.app)
log.debug(f"New query created")
candidates = bm.query(key, scope, trace=trace)
result = mm.get_result(candidates, key=key, trace=trace, explain=explain)
return result
def dump(self):
ret = {}
for i in dir(self):
if not i.startswith('_'):
ret[i] = getattr(self, i)
pprint (ret)
class App():
schema = {
"$schema": 'http://json-schema.org/draft-04/schema#',
"type": "object",
"additionalProperties": False,
"default": {},
"patternProperties": {
".*": {
"type": "object",
"optional": True,
"additionalProperties": False,
"properties": {
"config": {
"type": "object",
"default": {},
"additionalProperties": False,
"properties": {
"app": {
"type": "object",
"default": {},
"additionalProperties": False,
"properties": {
"root": {
"type": "string",
"default": None,
},
},
},
"tree": {
#"additionalProperties": False,
"type": "object",
"default": {},
},
"rules": {
"type": "object",
"default": {},
},
},
},
"tree": {
"type": "array",
"default": [],
},
"rules": {
"type": "array",
"default": [],
},
},
},
}
}
def __init__(self, config="albero.yml", namespace='default'):
conf2 = anyconfig.load(config)
# Validate configuration
schema_validate(conf2, self.schema)
try:
conf2 = conf2[namespace]
except KeyError:
log.error (f"Can't find namespace '{namespace}' in config '{config}'")
sys.exit(1)
# Init
if not conf2['config']['app']['root']:
conf2['config']['app']['root'] = Path(config).parent
else:
conf2['config']['app']['root'] = Path(conf2['config']['app']['root'])
# Finish
self.conf2 = dict(conf2)
def lookup(self, key=None, policy=None, scope=None, trace=False, explain=False):
log.debug(f"Lookup key {key} with scope: {scope}")
q = Query(app = self)
r = q.exec(key=key, scope=scope , policy=policy, trace=trace, explain=explain)
print ("=== Query Result ===")
print(anyconfig.dumps(r, ac_parser='yaml'))
print ("=== Query Result ===")
if __name__ == "__main__":
CONFIG_FILE='/home/jez/prj/bell/training/tiger-ansible/tree.yml'
app = App(CONFIG_FILE)
policy = None
#app.lookup(
# "my_key",
# policy=None,
# hostname="myhost-lab.it.ms.bell.ca",
# hostgroups=["Tiger", "Tiger/Test", "Tiger/Test/LastLvl"],
# hostgroup="Tiger/Test/LastLvl"
# )
# app.lookup(
# None,
# hostname="myhost-lab.it.ms.bell.ca",
# hostgroups=["Tiger", "Tiger/Test", "Tiger/Test/LastLvl"],
# hostgroup="Tiger/Test/LastLvl"
# )
#
print ("OKKKKK")
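
A minimal usage sketch of this entry point (the config file name, key and scope values below are hypothetical):

from ansible_tree.app import App

app = App(config="albero.yml", namespace="default")
app.lookup(
    key="profiles",
    scope={"hostname": "web01", "hostgroup": "Tiger/Test"},
    trace=False,
    explain=False,
)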

180
ansible_tree/cli.py Normal file

@@ -0,0 +1,180 @@
#!/usr/bin/env python3
# Run like this:
# python3 python_cli.py -vvvv demo
# Author: mrjk
import os
import anyconfig
import sys
import logging
import argparse
from pprint import pprint
# Devel tmp
sys.path.append("/home/jez/prj/bell/training/tiger-ansible/ext/ansible-tree")
import ansible_tree.app as Albero
class CmdApp:
"""Main CmdApp"""
def __init__(self):
"""Start new App"""
self.get_args()
self.get_logger(verbose=self.args.verbose, logger_name="ansible_tree")
self.cli()
def get_logger(self, logger_name=None, create_file=False, verbose=0):
"""Create CmdApp logger"""
# Take default app name
if not logger_name:
logger_name = __name__
# Manage logging level
try:
loglevel = {
0: logging.ERROR,
1: logging.WARN,
2: logging.INFO,
3: logging.DEBUG,
}[verbose]
except KeyError:
loglevel = logging.DEBUG
# Create logger for prd_ci
log = logging.getLogger(logger_name)
log.setLevel(level=loglevel)
# Formatters
format1 = "%(levelname)8s: %(message)s"
format2 = "%(asctime)s.%(msecs)03d|%(name)-16s%(levelname)8s: %(message)s"
format3 = (
"%(asctime)s.%(msecs)03d"
+ " (%(process)d/%(thread)d) "
+ "%(pathname)s:%(lineno)d:%(funcName)s"
+ ": "
+ "%(levelname)s: %(message)s"
)
tformat1 = "%H:%M:%S"
tformat2 = "%Y-%m-%d %H:%M:%S"
formatter = logging.Formatter(format1, tformat1)
# Create console handler for logger.
ch = logging.StreamHandler()
ch.setLevel(level=logging.DEBUG)
ch.setFormatter(formatter)
log.addHandler(ch)
# Create file handler for logger.
if isinstance(create_file, str):
fh = logging.FileHandler(create_file)
fh.setLevel(level=logging.DEBUG)
fh.setFormatter(formatter)
log.addHandler(fh)
# Return objects
self.log = log
self.loglevel = loglevel
def cli(self):
"""Main cli command"""
# Dispatch sub commands
if self.args.command:
method = "cli_" + str(self.args.command)
if hasattr(self, method):
getattr(self, method)()
else:
self.log.error(f"Subcommand {self.args.command} does not exist.")
else:
self.log.error("Missing sub command")
self.parser.print_help()
def get_args(self):
"""Prepare command line"""
# Manage main parser
parser = argparse.ArgumentParser(description="Albero, to lookup hierarchical data")
parser.add_argument(
"-v", "--verbose", action="count", default=0, help="Increase verbosity"
)
parser.add_argument("help", action="count", default=0, help="Show usage")
subparsers = parser.add_subparsers(
title="subcommands", description="valid subcommands", dest="command"
)
# Manage command: lookup
add_p = subparsers.add_parser("lookup")
add_p.add_argument("-n", "--namespace", help="Namespace name", default='default')
add_p.add_argument("-f", "--file", help="File with params as dict. Can be stdin - .")
add_p.add_argument("-e", "--scope", dest="scope_param", action="append", default=[])
add_p.add_argument("-p", "--policy")
add_p.add_argument("-t", "--trace", action="store_true")
add_p.add_argument("-x", "--explain", action="store_true")
add_p.add_argument("key", default=None, nargs="*")
# Manage command: demo
add_p = subparsers.add_parser("demo")
add_p.add_argument("--env", default=os.environ.get("APP_SETTING", "Unset"))
add_p.add_argument("--choice", choices=["choice1", "choice2"], type=str)
add_p.add_argument("-s", "--store", action="store_true")
add_p.add_argument("-a", "--append", dest="appended", action="append")
# add_p.add_argument("--short", default=True, required=True)
# add_p.add_argument("argument1")
# add_p.add_argument("double_args", nargs=2)
add_p.add_argument("nargs", nargs="*")
# Manage command: subcommand2
upg_p = subparsers.add_parser("subcommand2")
upg_p.add_argument("name")
# Register objects
self.parser = parser
self.args = parser.parse_args()
def cli_demo(self):
"""Display how to use logging"""
self.log.error("Test Critical message")
self.log.warning("Test Warning message")
self.log.info("Test Info message")
self.log.debug(f"Command line vars: {vars(self.args)}")
def cli_lookup(self):
"""Lookup keys in the data tree"""
config = '/home/jez/prj/bell/training/tiger-ansible/tree.yml'
# self.log.debug(f"Command line vars: {vars(self.args)}")
keys = self.args.key or [None]
# Parse payload from env file:
new_params = {}
if self.args.file:
new_params = anyconfig.load(self.args.file, ac_parser="yaml")
# Parse cli params
for i in self.args.scope_param:
r = i.split('=')
if len(r) != 2:
raise Exception("Malformed params")
new_params[r[0]] = r[1]
self.log.info(f"CLI: {keys} with env: {new_params}")
app = Albero.App(config=config, namespace=self.args.namespace)
for key in keys:
app.lookup(key=key,
scope=new_params,
trace=self.args.trace,
explain=self.args.explain
)
if __name__ == "__main__":
app = CmdApp()

437
ansible_tree/files.py Normal file

@@ -0,0 +1,437 @@
import copy
import json
import textwrap
from prettytable import PrettyTable
from pathlib import Path
# from box import Box
from jsonmerge import Merger
import re
import logging
from pprint import pprint
import collections
from ansible_tree.utils import schema_validate, str_ellipsis
import ansible_tree.plugin as TreePlugins
log = logging.getLogger(__name__)
# DEPRECATED class BackendEngineLoader():
# DEPRECATED
# DEPRECATED def get_class(self, item):
# DEPRECATED engine_name = item.get('engine')
# DEPRECATED assert (isinstance(engine_name, str)), f"Got: {engine_name} for {item}"
# DEPRECATED
# DEPRECATED # Check engine presence
# DEPRECATED if not hasattr(TreePlugins.engine, engine_name):
# DEPRECATED raise Exception(f"No plugin {engine_name} found for entry {item}!")
# DEPRECATED
# DEPRECATED cls = getattr(TreePlugins.engine, engine_name).Plugin
# DEPRECATED return cls
# def BackendPluginInit(backends, ctx):
#
# for be in backends:
# be.run = {}
# be.scope = ctx['scope']
# be.key = ctx['key']
#
# return backends
# def BackendPluginHier(backends, ctx):
#
# new_backends = []
# for cand in backends:
#
# # Init
# plugin_config = cand.config.get("hierarchy", None)
# hier_data = plugin_config.get("data", None)
# if not hier_data:
# new_backends.append(cand)
# continue
#
# # Retrieve config data
# hier_var = plugin_config.get("var", "item")
# hier_sep = plugin_config.get("separator", "/")
# if isinstance(hier_data, str):
# hier_data = cand.scope.get(hier_data, None)
#
# # Build a new list
#
# pprint (plugin_config)
# pprint (hier_data)
#
# if isinstance(hier_data, str):
# r = hier_data.split(hier_sep)
# assert (isinstance(r, list)), f"Got: {r}"
#
# ret1 = []
# for index, part in enumerate(r):
#
# try:
# prefix = ret1[index - 1]
# except IndexError:
# prefix = f'{hier_sep}'
# prefix = ""
# item = f"{prefix}{part}{hier_sep}"
# ret1.append(item)
#
# ret2 = []
# for item in ret1:
# _cand = copy.deepcopy(cand)
# run = {
# "index": index,
# "hier_value": item,
# "hier_var": hier_var,
# }
# _cand.run['hier'] = run
# _cand.scope[hier_var] = item
# ret2.append(_cand)
# print ("RESULT")
# pprint (ret2)
#
# new_backends.extend(ret2)
# return new_backends
#
#
#def BackendPluginLoop(backends, ctx):
#
# new_backends = []
# for cand in backends:
#
# # Init
# loop_config = cand.config.get("loop", None)
# loop_data = loop_config.get("data", None)
# if not loop_data:
# new_backends.append(cand)
# continue
#
# # Retrieve config data
# loop_var = loop_config.get("var", "item")
# if isinstance(loop_data, str):
# loop_data = cand.scope.get(loop_data, None)
# assert (isinstance(loop_data, list)), f"Got: {loop_data}"
#
# # Build a new list
# ret = []
# for idx, item in enumerate(loop_data):
# _cand = copy.deepcopy(cand)
# run = {
# "loop_index": idx,
# "loop_value": item,
# "loop_var": loop_var,
# }
# _cand.run['loop'] = run
# _cand.scope[loop_var] = item
# ret.append(_cand)
#
# new_backends.extend(ret)
#
# return new_backends
class LoadPlugin():
def __init__(self, plugins):
self.plugins = plugins
def load(self, kind, name):
assert (isinstance(name, str)), f"Got: {name}"
# Get plugin kind
try:
plugins = getattr(self.plugins, kind)
except Exception as e:
raise Exception(f"Unknown module kind '{kind}': {e}")
# Get plugin class
try:
plugin_cls = getattr(plugins, name)
except Exception as e:
raise Exception(f"Unknown module '{kind}.{name}': {e}")
assert (hasattr(plugin_cls, 'Plugin')), f'Plugin {kind}/{name} is not a valid plugin'
# Return plugin class
return plugin_cls.Plugin
class BackendsManager():
_schema_props_default = {
"$schema": 'http://json-schema.org/draft-04/schema#',
"default": "",
"oneOf": [
{
"type": "string",
"default": "BLAAAAHHH"
},
{
"type": "object",
"additionalProperties": True,
"default": {},
"properties": {
"engine": {
"type": "string",
"default": "jerakia",
"optional": False,
},
"value": {
"default": 'UNSET',
"optional": False,
},
},
},
]
}
def _validate_item(self, item):
if isinstance(item, str):
item = {
"engine": self.config_main.default_engine,
"value": item,
}
item = schema_validate(item, self._schema_props_default)
assert (isinstance(item, dict))
return item
def __init__(self, app):
self.app = app
self.config_app = app.conf2['config']['app']
self.config_main = app.conf2['config']['tree']
self.config_items = list(app.conf2['tree'])
# THIS MAKE A BUG !!!! self.plugin_loader = LoadPlugin(TreePlugins)
self.plugins = [
'init',
'loop',
'hier',
]
# Auto init
self.backends = self.config_items
def query(self, key=None, scope=None, trace=False):
backends = self.get_backends(key=key, scope=scope, trace=trace)
ret = self.get_results(backends, trace=trace)
return ret
def get_backends(self, key=None, scope=None, trace=False):
log.debug(f"Look for candidates for key '{key}' in backend: {self.backends}")
# Prepare plugins
plugin_loader = LoadPlugin(TreePlugins)
_run = {
"key": key,
"scope": scope,
}
# Preprocess backends plugins
backends = self.config_items
log.debug(f"Backend preprocessing of {len(backends)} elements")
for plugin in self.plugins:
#backend_cls = plugin_loader.load('backend', plugin)
plugin = plugin_loader.load(
'backend', plugin
)()
log.debug(f"Run {plugin}")
new_backend, _run = plugin.process(backends, _run)
assert(isinstance(new_backend, list)), f"Got: {new_backend}"
assert(isinstance(_run, dict)), f"Got: {_run}"
backends = new_backend
# pprint (backends)
for i in backends:
assert (i.get('engine')), f"Got: {i}"
log.debug(f"Backend preprocessing made {len(backends)} elements")
return backends
def get_results(self, backends, trace=False):
# Prepare plugins
plugin_loader = LoadPlugin(TreePlugins)
new_results = []
for backend in backends:
#result_cls = result_loader.load('result', result)
# print ("BACKKENNDNNDNDNDND")
# pprint(backend)
engine = plugin_loader.load(
'engine', backend['engine']
)(
backend,
parent=self, app=self.app)
log.debug(f"Run engine: {engine}")
new_result = engine.process()
assert(isinstance(new_result, list)), f"Got: {new_result}"
new_results.extend(new_result)
# Filter out? Not here !new_results = [i for i in new_results if i['found'] ]
# pprint (new_results)
# print ("OKKKKKKKKKKKKKKKKKKKKKKKKK SO FAR")
return new_results
class RulesManager():
default_merge_schema = {
"$schema": 'http://json-schema.org/draft-04/schema#',
"oneOf": [
{
"type": "array",
"mergeStrategy": "append",
# "mergeStrategy": "arrayMergeById",
},
{
"type": "object",
"mergeStrategy": "objectMerge",
},
{
"type": "boolean",
"mergeStrategy": "overwrite",
},
{
"type": "string",
"mergeStrategy": "overwrite",
},
{
"type": "integer",
"mergeStrategy": "overwrite",
},
{
"type": "number",
"mergeStrategy": "overwrite",
},
{
"type": "null",
"mergeStrategy": "overwrite",
},
],
}
rule_schema = {
"$schema": 'http://json-schema.org/draft-04/schema#',
"type": "object",
"additionalProperties": False,
"properties": {
"rule": {
"default": ".*",
"optional": True,
"oneOf": [
{
"type": "string",
},
{
"type": "null",
},
],
},
"trace": {
"type": "boolean",
"default": False,
},
"explain": {
"type": "boolean",
"default": False,
},
"strategy": {
"type": "string",
"default": "schema",
# "default": "last",
"optional": True,
# "enum": ["first", "last", "merge"],
},
"schema": {
"type": "object",
"default": None,
"optional": True,
"oneOf": [
{
"type": "string",
},
{
"type": "null",
},
{
"type": "object",
},
],
},
}
}
def __init__(self, app):
self.app = app
self.config_app = app.conf2['config']['app']
self.config_main = app.conf2['config']['rules']
self.config_items = list(app.conf2['rules'])
def get_result(self, candidates, key=None, scope=None, trace=False, explain=False):
#trace=False
rules = self.config_items
key = key or ''
# Filter out invalid candidates
matched_candidates = [i for i in candidates if i['found'] == True]
if len(matched_candidates) == 0:
log.debug("No matched candidates")
return None
# Look for matching key in rules definitions
regex_support = False
matched_rule = {}
if regex_support:
raise Exception("Not Implemented")
else:
rule = [ i for i in rules if i.get('rule') == key ]
if len(rule) == 0:
log.debug(f"No matched rule for {key}, applying defaults")
else:
matched_rule = rule[0]
log.debug(f"Matched rule for {key}: {matched_rule}")
matched_rule['trace'] = trace
matched_rule['explain'] = explain
schema_validate(matched_rule, self.rule_schema)
# Prepare plugins
assert(isinstance(matched_candidates, list)), f"Got: {matched_candidates}"
assert(isinstance(matched_rule, dict)), f"Got: {matched_rule}"
strategy = matched_rule.get('strategy', 'first')
log.debug(f"Key '{key}' matched rule '{rule}' with '{strategy}' strategy")
# Load plugin
log.debug(f"Run strategy: {strategy}")
plugin_loader = LoadPlugin(TreePlugins)
strategy = plugin_loader.load('strategy',
strategy,
)(parent=self, app=self.app)
new_result = strategy.process(matched_candidates, matched_rule)
return new_result

3
ansible_tree/plugin/__init__.py Normal file

@@ -0,0 +1,3 @@
from . import engine
from . import backend
from . import strategy

3
ansible_tree/plugin/backend/__init__.py Normal file

@@ -0,0 +1,3 @@
from . import init
from . import loop
from . import hier

115
ansible_tree/plugin/backend/hier.py Normal file

@@ -0,0 +1,115 @@
import copy
# from pathlib import Path
# from ansible_tree.utils import render_template
# from ansible_tree.plugin.common import PluginBackendClass
# from pprint import pprint
#
# import logging
# import anyconfig
# import textwrap
from ansible_tree.plugin.common import PluginBackendClass
from pprint import pprint
import logging
log = logging.getLogger(__name__)
class Plugin(PluginBackendClass):
_plugin_name = "hier"
_schema_props_files = {
"path": {
"anyOf": [
{
"type": "string",
},
{
"type": "array",
"items": {
"type": "string",
}
},
]
}
}
sssss_schema_props_default = {
"$schema": 'http://json-schema.org/draft-04/schema#',
"default": "",
"oneOf": [
{
"type": "string",
"default": "BLAAAAHHH"
},
{
"type": "object",
"additionalProperties": True,
"default": {},
"properties": {
"engine": {
"type": "string",
"default": "jerakia",
"optional": False,
},
"value": {
"default": 'UNSET',
"optional": False,
},
},
},
]
}
def process(self, backends: list, ctx: dict) -> (list, dict):
new_backends = []
for cand in backends:
# Init
plugin_config = cand.get("hier", {})
hier_data = plugin_config.get("data", None)
if not hier_data:
new_backends.append(cand)
continue
# Retrieve config data
hier_var = plugin_config.get("var", "item")
hier_sep = plugin_config.get("separator", "/")
if isinstance(hier_data, str):
hier_data = cand['_run']['scope'].get(hier_data, None)
# Build a new list
if isinstance(hier_data, str):
r = hier_data.split(hier_sep)
assert (isinstance(r, list)), f"Got: {r}"
ret1 = []
for index, part in enumerate(r):
try:
prefix = ret1[index - 1]
except IndexError:
prefix = f'{hier_sep}'
prefix = ""
item = f"{prefix}{part}{hier_sep}"
ret1.append(item)
ret2 = []
for item in ret1:
_cand = copy.deepcopy(cand)
run = {
"index": index,
"hier_value": item,
"hier_var": hier_var,
}
_cand['_run']['hier'] = run
_cand['_run']['scope'][hier_var] = item
ret2.append(_cand)
new_backends.extend(ret2)
return new_backends, ctx

79
ansible_tree/plugin/backend/init.py Normal file

@@ -0,0 +1,79 @@
from ansible_tree.plugin.common import PluginBackendClass
from pprint import pprint
import logging
log = logging.getLogger(__name__)
import copy
class Plugin(PluginBackendClass):
_plugin_name = "init"
_schema_props_files = {
"path": {
"anyOf": [
{
"type": "string",
},
{
"type": "array",
"items": {
"type": "string",
}
},
]
}
}
sssss_schema_props_default = {
"$schema": 'http://json-schema.org/draft-04/schema#',
"default": "",
"oneOf": [
{
"type": "string",
"default": "BLAAAAHHH"
},
{
"type": "object",
"additionalProperties": True,
"default": {},
"properties": {
"engine": {
"type": "string",
"default": "jerakia",
"optional": False,
},
"value": {
"default": 'UNSET',
"optional": False,
},
},
},
]
}
default_engine = 'jerakia'
def process(self, backends: list, ctx: dict) -> (list, dict):
new_backends = []
for index, item in enumerate(backends):
default = {
"value": item,
}
if not isinstance(item, dict):
item = default
item['engine'] = item.get('engine', self.default_engine )
item['_run'] = copy.deepcopy(ctx)
item['_run']['backend'] = {
"index": index,
}
new_backends.append(item)
return new_backends, ctx
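
A minimal sketch of the normalisation this plugin applies to a mixed tree list (the input values are hypothetical):

import copy

backends = ["common", {"engine": "jerakia", "value": "hosts/{{ hostname }}"}]
ctx = {"key": "profiles", "scope": {"hostname": "web01"}}

normalized = []
for index, item in enumerate(backends):
    if not isinstance(item, dict):
        item = {"value": item}                      # bare strings become dicts
    item["engine"] = item.get("engine", "jerakia")  # fall back to the default engine
    item["_run"] = copy.deepcopy(ctx)               # each entry carries its own query context
    item["_run"]["backend"] = {"index": index}
    normalized.append(item)

# normalized[0] now reads {'value': 'common', 'engine': 'jerakia', '_run': {...}}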

100
ansible_tree/plugin/backend/loop.py Normal file

@@ -0,0 +1,100 @@
import copy
from pathlib import Path
from ansible_tree.utils import render_template
from ansible_tree.plugin.common import PluginBackendClass
from pprint import pprint
import logging
import anyconfig
import textwrap
class Plugin(PluginBackendClass):
_plugin_name = "loop"
_schema_props_files = {
"path": {
"anyOf": [
{
"type": "string",
},
{
"type": "array",
"items": {
"type": "string",
}
},
]
}
}
sssss_schema_props_default = {
"$schema": 'http://json-schema.org/draft-04/schema#',
"default": "",
"oneOf": [
{
"type": "string",
"default": "BLAAAAHHH"
},
{
"type": "object",
"additionalProperties": True,
"default": {},
"properties": {
"engine": {
"type": "string",
"default": "jerakia",
"optional": False,
},
"value": {
"default": 'UNSET',
"optional": False,
},
},
},
]
}
def process(self, backends: list, ctx: dict) -> (list, dict):
new_backends = []
for cand in backends:
cand = dict(cand)
# Init
loop_config = cand.get("loop", {})
loop_data = loop_config.get("data", None)
if not loop_data:
new_backends.append(cand)
continue
# Retrieve config data
loop_var = loop_config.get("var", "item")
if isinstance(loop_data, str):
loop_data = cand['_run']['scope'].get(loop_data, None)
assert (isinstance(loop_data, list)), f"Got: {loop_data}"
# Build a new list
ret = []
for idx, item in enumerate(loop_data):
_cand = copy.deepcopy(cand)
run = {
"loop_index": idx,
"loop_value": item,
"loop_var": loop_var,
}
_cand['_run']['loop'] = run
_cand['_run']['scope'][loop_var] = item
#_cand.scope[loop_var] = item
ret.append(_cand)
new_backends.extend(ret)
return new_backends, ctx
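
A minimal sketch of the fan-out this plugin performs when a backend entry declares a loop over a scope list (the entry and scope values are hypothetical):

import copy

backend = {
    "engine": "jerakia",
    "value": "groups/{{ item }}",
    "loop": {"data": "hostgroups", "var": "item"},
    "_run": {"key": "profiles", "scope": {"hostgroups": ["Tiger", "Tiger/Test"]}},
}

expanded = []
for idx, item in enumerate(backend["_run"]["scope"]["hostgroups"]):
    cand = copy.deepcopy(backend)
    cand["_run"]["loop"] = {"loop_index": idx, "loop_value": item, "loop_var": "item"}
    cand["_run"]["scope"]["item"] = item            # the loop variable enters the scope
    expanded.append(cand)

# Two candidates remain: one rendering groups/Tiger, one groups/Tiger/Test.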

294
ansible_tree/plugin/common.py Normal file

@@ -0,0 +1,294 @@
# from box import Box
import textwrap
from pprint import pprint
import glob
from pathlib import Path
from jinja2 import Template
import yaml
import json
import logging
log = logging.getLogger(__name__)
from ansible_tree.utils import schema_validate
import copy
# Candidate Classes
# =============================
class Candidate():
engine = None
found = False
data = None
run = None
scope = None
key = None
def __init__(self, run):
self.run = copy.deepcopy(run)
def __repr__(self):
return f"{self.__dict__}"
def _report_data(self, data=None):
default_data = {
#"rule": self.config,
"value": self.engine._plugin_value,
"data": self.data,
}
data = data or default_data
d = json.dumps(data, indent=2) #, sort_keys=True, )
return d
# Generic Classes
# =============================
class PluginClass():
_plugin_type = "none"
_plugin_value = None
_schema_props_plugin = {
"engine": {
"type": "string",
# TODO: Fix this bug
"default": "jerakia"
},
"value": {},
}
def __repr__(self):
kind = self._plugin_type
name = self._plugin_name
value = getattr(self, 'value', 'NO VALUE')
return f"{kind}.{name}:{value}"
def __init__(self, config=None, parent=None, app=None):
# assert (isinstance(config, dict)), f"Got: {config}"
self.parent = parent
self.app = app
self.config = config or {}
self._init()
self._validate()
def _init(self):
pass
def _validate(self):
pass
class PluginBackendClass(PluginClass):
_plugin_type = "backend"
def _init(self):
pass
class PluginStrategyClass(PluginClass):
_plugin_type = "strategy"
def _init(self):
pass
class PluginEngineClass(PluginClass):
_plugin_type = "engine"
_schema_props_default = {
"value": {
"default": "UNSET",
},
#### SHOULD NOT BE HERE
"hier": {
"additionalProperties": True,
"optional": True,
"properties": {
"var": {
"type": "string",
"default": "item",
"optional": True,
},
"data": {
"default": None,
"anyOf": [
{ "type": "null" },
{ "type": "string" },
{ "type": "array" },
]
},
"separator": {
"type": "string",
"default": "/",
"optional": True,
},
"reversed": {
"type": "boolean",
"default": False,
"optional": True,
},
}
}
}
# Default plugin API Methods
# =====================
def _init(self):
assert isinstance(self.config, dict), f"Got: {self.config}"
def _validate(self):
# Build schema
schema_keys = [a for a in dir(self) if a.startswith('_schema_props_')]
props = {}
for key in schema_keys:
schema = getattr(self, key)
props.update(schema)
self.schema = {
"$schema": 'http://json-schema.org/draft-04/schema#',
"type": "object",
"additionalProperties": True,
"properties": props,
}
# log.debug (f"Validate {self.config} against {self.schema}")
self.config = schema_validate(self.config, self.schema)
return True
# Public Methods
# =====================
def dump(self):
ret = {
"config": self.config,
}
return ret
def lookup_candidates(self, key=None, scope=None):
raise Exception (f"Module does not implement this method :(")
# It must always return a list of `Candidate` instances
return []
def _example(self):
print (f"Module does not implement this method :(")
return None
# File plugins Extensions
# =============================
class PluginFileGlob():
_schema_props_glob = {
"glob": {
"additionalProperties": False,
"default": {
"file": "ansible.yaml",
},
"properties": {
"file": {
"type": "string",
"default": "ansible",
"optional": True,
},
"ext": {
"type": "array",
"default": [ "yml", "yaml" ],
"optional": True,
},
}
}
}
def _glob(self, item):
# DIRECT CALL TO APP, TO FIX
app_config = self.app.conf2
root = app_config.get("default", {}).get("config", {}).get("root", f"{Path.cwd()}/tree")
#root = self.app.conf2.config.app.root
# TOFIX print ("ITEM! %s" % type(root))
# TOFIX print ("ITEM2 %s" % self.app.conf2.config.app.root)
glob_config = self.config.get("glob", {})
glob_file = glob_config['file']
#glob_ext = glob_config['ext']
item = Path(root) / Path(item) / Path(glob_file)
item = f"{item}"
#file = f"{glob_file}.{glob_ext}"
#print ("ITEM %s" % item)
files = glob.glob(item)
log.debug (f"Matched file for glob '{item}': {files}")
return files
## DEPRECATED !!!!
#class PluginFileLoop():
#
# _schema_props_loop = {
# "loop": {
# "additionalProperties": False,
# "default": {
# "var": "item",
# "data": None,
# },
# "properties": {
# "var": {
# "type": "string",
# "default": "item",
# "optional": True,
# },
# "data": {
# "default": None,
# "anyOf": [
# { "type": "null" },
# { "type": "string" },
# { "type": "array" },
# ]
# },
# },
# }
# }
#
# def _loop(self, item, params):
# print ("_loop IS DEPRECATED")
# loop = self.config.get("loop", None)
#
#
# # Check if loop is enabled
# if loop['data'] is None:
# return [ item ]
#
# log.debug(f"Loop enabled for: {item}")
#
# _var = loop['var']
# ref = loop['data']
#
# # replace value:
# data = params.get(ref, [])
# if data is None or len(data) < 1:
# return [item]
# elif not isinstance(data, list):
# raise Exception(f"Expected a list, got: {data}")
#
# # Loop over lists
# ret = []
# for line in data:
# t = Template(item)
# param_dict = { _var: line}
# r = t.render(**param_dict)
# ret.append(r)
# #print (f"{item} ==> {params} => {r}")
#
# return ret
#
# # Loop over data

1
ansible_tree/plugin/engine/__init__.py Normal file

@@ -0,0 +1 @@
from . import jerakia

192
ansible_tree/plugin/engine/jerakia.py Normal file

@@ -0,0 +1,192 @@
from pathlib import Path
from ansible_tree.utils import render_template
from ansible_tree.plugin.common import PluginEngineClass, PluginFileGlob, Candidate
from pprint import pprint
import logging
import anyconfig
import textwrap
log = logging.getLogger(__name__)
class FileCandidate(Candidate):
path = None
def _report_data(self):
data = {
#"rule": self.config,
"value": self.engine._plugin_value,
"data": self.data,
"path": str(self.path.relative_to(Path.cwd())),
}
data = dict(self.config)
return super()._report_data(data)
class Plugin(PluginEngineClass, PluginFileGlob):
_plugin_name = 'jerakia'
### OLD
_plugin_engine = "jerakia"
_schema_props_files = {
"path": {
"anyOf": [
{
"type": "string",
},
{
"type": "array",
"items": {
"type": "string",
}
},
]
}
}
# def __repr__(self):
# engine = self.config.get('engine')
# value = self.
# return f"Plugin instance {engine}: {value}"
def _init(self):
paths = self.config.get('path', self.config.get('value'))
if isinstance(paths, str):
paths = [paths]
elif isinstance(paths, list):
pass
else:
raise Exception (f"Unsupported path value, expected str or list, got: {paths} in {self.config}")
self.paths = paths
self.value = paths
def _preprocess(self, scope):
# Manage loops
paths = self.paths
# Manage var substr
ret = []
for p in paths:
p = render_template(p, scope)
ret.append(p)
log.debug(f"Render pattern: {ret}")
return ret
def _show_paths(self, scope):
parsed = self._preprocess(scope)
log.debug(f"Expanded paths to: {parsed}")
# Look for files (NOT BE HERE !!!)
ret3 = []
for p in parsed:
globbed = self._glob(p)
ret3.extend(globbed)
log.debug(f"Matched globs: {ret3}")
return ret3
def process(self):
#scope = self.scope
# pprint (self.config)
scope = dict(self.config['_run']['scope'])
key = self.config['_run']['key']
assert isinstance(scope, dict), f"Got: {scope}"
assert isinstance(key, (str, type(None))), f"Got: {key}"
t = self._show_paths(scope)
ret = []
for index, path in enumerate(self._show_paths(scope)):
log.debug(f"Reading file: {path}")
# Fetch data
found = False
raw_data = anyconfig.load(path, ac_parser="yaml")
data = None
if key is None:
data = raw_data
found = True
else:
try:
data = raw_data[key]
found = True
except Exception:
pass
# Build result object
result = {}
result['run'] = {
'path': path,
'rel_path': str(Path(path).relative_to(Path.cwd())),
}
result['parent'] = self.config
result['data'] = data
result['found'] = found
ret.append(result)
return ret
######## OLD
# # Read raw file content
# data = anyconfig.load(path, ac_parser="yaml")
#
# ret_obj2 ={
# "_run": _run,
# }
# #### OLD
# ret_obj = FileCandidate(self.config)
# ret_obj.engine = self
# ret_obj.data = None
# found = False
# if key is None:
# ret_obj.data = data
# found = True
# else:
# try:
# ret_obj.data = data[key]
# found = True
# except Exception:
# pass
# # ret_obj.run['path'] = path
# # ret_obj.run['found'] = found
# # ret_obj.run['scope'] = scope
# # ret_obj.run['key'] = key
# be = {
# "index": index,
# "path": path,
# "rel_path": str(Path(path).relative_to(Path.cwd())),
# }
# #qu = {
# # "scope": scope,
# # "key": key,
# # }
# ret_obj.run['backend'] = be
# #ret_obj.run['query'] = qu
# #log.debug(f"Found value: {ret_obj}")
# ret_obj.found = found
# ret.append(ret_obj)
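
A reduced sketch of the result dict that process() builds for one matched file (the YAML content and path are hypothetical):

import anyconfig

raw_data = anyconfig.loads("profiles:\n  - base\n  - web\n", ac_parser="yaml")
key = "profiles"
result = {
    "run": {"path": "tree/hosts/web01/ansible.yaml"},  # where the value was found
    "parent": {},                                      # the backend config that produced it
    "data": raw_data.get(key),
    "found": key in raw_data,
}
# result["data"] == ["base", "web"], result["found"] is True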

2
ansible_tree/plugin/strategy/__init__.py Normal file

@@ -0,0 +1,2 @@
from . import last
from . import schema

14
ansible_tree/plugin/strategy/last.py Normal file

@@ -0,0 +1,14 @@
import logging
from ansible_tree.plugin.common import PluginStrategyClass
log = logging.getLogger(__name__)
class Plugin(PluginStrategyClass):
_plugin_name = "last"
def process(self, candidates: list, rule=None) -> (list, dict):
return candidates[-1]

125
ansible_tree/plugin/strategy/schema.py Normal file

@@ -0,0 +1,125 @@
import logging
from ansible_tree.plugin.common import PluginStrategyClass
from ansible_tree.utils import schema_validate, str_ellipsis
log = logging.getLogger(__name__)
import json
from pprint import pprint
from jsonmerge import Merger
from prettytable import PrettyTable
class Plugin(PluginStrategyClass):
_plugin_name = "schema"
default_merge_schema = {
"$schema": 'http://json-schema.org/draft-04/schema#',
"oneOf": [
{
"type": "array",
"mergeStrategy": "append",
# "mergeStrategy": "arrayMergeById",
},
{
"type": "object",
"mergeStrategy": "objectMerge",
},
{
"type": "boolean",
"mergeStrategy": "overwrite",
},
{
"type": "string",
"mergeStrategy": "overwrite",
},
{
"type": "integer",
"mergeStrategy": "overwrite",
},
{
"type": "number",
"mergeStrategy": "overwrite",
},
{
"type": "null",
"mergeStrategy": "overwrite",
},
],
}
def process(self, candidates: list, rule=None) -> (list, dict):
trace = rule['trace']
explain = rule['explain']
schema = rule.get('schema', None) or self.default_merge_schema
merger = Merger(schema)
t = PrettyTable()
t1 = PrettyTable()
new_candidate = None
for index, item in enumerate(candidates):
new_value = item['data']
result = merger.merge(new_candidate, new_value)
backend_info = dict(item['parent'])
backend_run = backend_info.pop("_run")
if explain:
t1.add_row([
index,
'\nBackendRun: ' + str_ellipsis(json.dumps(
backend_run,
default=lambda o: '<not serializable>', indent=2), 70),
'\nRuleRun: ' + str_ellipsis(json.dumps(
item['run'],
default=lambda o: '<not serializable>', indent=2), 70),
'---\nResult: ' + str_ellipsis(json.dumps(
result,
default=lambda o: '<not serializable>', indent=2), 70),
])
if trace:
t.add_row([
index,
'---\nBackendConfig: ' + str_ellipsis(json.dumps(
backend_info,
default=lambda o: '<not serializable>', indent=2), 70) +
'\nBackendRun: ' + str_ellipsis(json.dumps(
backend_run,
default=lambda o: '<not serializable>', indent=2), 70),
'---\nRuleConfig: ' + str_ellipsis(json.dumps(
rule,
default=lambda o: '<not serializable>', indent=2), 70) +
'\nRuleRun: ' + str_ellipsis(json.dumps(
item['run'],
default=lambda o: '<not serializable>', indent=2), 70) +
#'\nSource: ' + str_ellipsis(json.dumps(
# new_candidate,
# default=lambda o: '<not serializable>', indent=2), 70) +
'\nNew data: ' + str_ellipsis(json.dumps(
new_value,
default=lambda o: '<not serializable>', indent=2), 70),
'---\nResult: ' + str_ellipsis(json.dumps(
result,
default=lambda o: '<not serializable>', indent=2), 70),
]
)
new_candidate = result
if trace:
t.field_names = ["Index", "Backend", "Rule", "Data"]
t.align = 'l'
print (t)
if explain:
t1.field_names = ["Index", "Backend", "Rule", "Data"]
t1.align = 'l'
print('Explain:\n' + repr(t1))
return new_candidate
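
A reduced sketch of the merge behaviour this default schema asks jsonmerge for, assuming jsonmerge resolves the oneOf branch by the type of the incoming value, which is what the plugin relies on (the data below is hypothetical):

from jsonmerge import Merger

schema = {
    "oneOf": [
        {"type": "array", "mergeStrategy": "append"},
        {"type": "object", "mergeStrategy": "objectMerge"},
        {"type": "string", "mergeStrategy": "overwrite"},
    ]
}
merger = Merger(schema)
print(merger.merge(["a"], ["b"]))        # expected: ['a', 'b']
print(merger.merge({"x": 1}, {"y": 2}))  # expected: {'x': 1, 'y': 2}
print(merger.merge("old", "new"))        # expected: 'new'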

16
ansible_tree/test.sh Normal file

@@ -0,0 +1,16 @@
python ./ansible_tree/cli.py
$APP lookup profiles -e
lookup profiles -e "hostgroup=[ 'Tiger/ICN/Tiger/Infra/Prod' ]" -e "hostgroups=[ 'Tiger', 'Tiger/ICN', 'Tiger/ICN/Tiger', 'Tiger/ICN/Tiger/Infra', 'Tiger/ICN/Tiger/Infra/Prod' ]" -e "ansible_fqdn=tiger-ops.it.ms.bell.ca" -e "ansible_dist_name=Rhel" -e "ansible_dist_version=8" -e "tiger_org=ICN"
-e "hostgroup=[ 'Tiger/ICN/Tiger/Infra/Prod' ]"
-e "hostgroup=[ 'Tiger/ICN/Tiger/Infra/Prod' ]"
-e "hostgroup=[ 'Tiger/ICN/Tiger/Infra/Prod' ]"
-e "hostgroups=[ 'Tiger', 'ICN', 'Tiger', 'Infra', 'Prod' ]"

147
ansible_tree/utils.py Normal file

@@ -0,0 +1,147 @@
from pathlib import Path
from jinja2 import Template
import yaml
import json
import glob
from jsonschema import validate, Draft7Validator, validators, exceptions
import collections
import logging
log = logging.getLogger(__name__)
# # File parsers
# # =====================
#
# class FileParserClass():
#
# def __init__(self, path):
# self.path = path
#
# def from_file(self, file):
# raise Exception ("Not implemented")
#
# def from_string(self, data):
# raise Exception ("Not implemented")
#
# def from_dict(self, data):
# raise Exception ("Not implemented")
#
# class FilesYAMLParser(FileParserClass):
# def get_data(self):
# with open(self.path, "r") as stream:
# try:
# return yaml.safe_load(stream)
# except yaml.YAMLError as exc:
# raise Exception(exc)
# print(exc)
#
#
# class FilesJSONParser(FileParserClass):
# pass
# class FilesRawParser(FileParserClass):
# pass
# class FilesTOMLParser(FileParserClass):
# pass
# class FilesCSVParser(FileParserClass):
# pass
# class FilesINIParser(FileParserClass):
# pass
#
# format_db = {
# ".raw": FilesRawParser,
# ".yml": FilesYAMLParser,
# ".yaml": FilesYAMLParser,
# ".json": FilesJSONParser,
# }
# Utils Methods
# =====================
def render_template(path, params):
"""Render template for a given string"""
assert (isinstance(params, dict)), f"Got: {params}"
t = Template(path)
return t.render(**params)
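# Hypothetical example of the expansion this helper performs:
#   render_template("hosts/{{ hostname }}/{{ item }}", {"hostname": "web01", "item": "prod"})
#   returns "hosts/web01/prod"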
#def read_file(file):
# with open(file, 'r') as f:
# data = f.read().replace('\n', '')
# return data
#
#
#def parse_file(file, fmt='auto'):
# print ("DEPRECATED")
# raise Exception ("parse_file is deprecated")
#
# data = read_file(file)
#
# # Autodetect format from file name
# if fmt == 'auto':
# p = Path(file)
# fmt = p.suffix
# else:
# fmt = f".{fmt}"
#
# # Retrieve parser
# if fmt is None:
# raise Exception ("No available driver to read file: %s" % p )
# fmt_cls = format_db.get(fmt, None)
#
# # Parse content
# o = fmt_cls(str(p))
# return o.get_data()
# Schema Methods
# =====================
def _extend_with_default(validator_class):
validate_properties = validator_class.VALIDATORS["properties"]
def set_defaults(validator, properties, instance, schema):
for property, subschema in properties.items():
if "default" in subschema:
instance.setdefault(property, subschema["default"])
try:
for error in validate_properties(
validator, properties, instance, schema,
):
continue
except Exception as e:
print ("CATCHED2222 ", e)
return validators.extend(
validator_class, {"properties" : set_defaults},
)
def schema_validate(config, schema):
# Validate the schema
DefaultValidatingDraft7Validator = _extend_with_default(Draft7Validator)
try:
DefaultValidatingDraft7Validator(schema).validate(config)
except Exception as e:
print (e)
p = list(collections.deque(e.schema_path))
p = '/'.join([ str(i) for i in p ])
p = f"schema/{p}"
raise Exception(
f"Failed validating {p} for resource with content: {config} with !!!!!! schema: {schema}"
)
return config
def str_ellipsis(txt, length=120):
txt = str(txt)
ret = []
for string in txt.splitlines():
string = (string[:length - 4 ] + ' ...') if len(string) > length else string
ret.append(string)
ret = '\n'.join(ret)
return ret
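
A minimal sketch of how schema_validate fills in schema defaults, assuming the package is importable as ansible_tree (the schema and config are hypothetical):

from ansible_tree.utils import schema_validate

schema = {
    "type": "object",
    "properties": {
        "engine": {"type": "string", "default": "jerakia"},
        "value": {"type": "string"},
    },
}
config = {"value": "hosts/{{ hostname }}"}
print(schema_validate(config, schema))
# expected: {'value': 'hosts/{{ hostname }}', 'engine': 'jerakia'}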

20
pyproject.toml Normal file

@@ -0,0 +1,20 @@
[tool.poetry]
name = "ansible-tree"
version = "0.1.0"
description = "Data trees for Ansible"
authors = ["Robin Cordier"]
license = "GNU"
[tool.poetry.dependencies]
python = "^3.8"
jsonschema = "^4.3.3"
jsonmerge = "^1.8.0"
anyconfig = "^0.12.0"
python-box = "^5.4.1"
prettytable = "^3.0.0"
[tool.poetry.dev-dependencies]
[build-system]
requires = ["poetry-core>=1.0.0"]
build-backend = "poetry.core.masonry.api"