Compare commits

26 Commits

| SHA1 |
|---|
| 1dce6fbfcd |
| c8595f63cc |
| 0f31a14920 |
| f6460b8db3 |
| 4a0bc2e201 |
| 90522b5b69 |
| 1f38ce7dba |
| 07ecaa8085 |
| 969d78a0ac |
| f12d9fd7be |
| 5f80881c3c |
| 25c6bf36a2 |
| ff2f170685 |
| 0ce3417360 |
| 4616b310d0 |
| c8d8f922d3 |
| ee3df03829 |
| a9e97d98c1 |
| eeab4f8583 |
| 3eb67e584a |
| 8060e285c0 |
| 98368ee53a |
| 00f87bb4f7 |
| 34587710b3 |
| 4f28bdc66b |
| 62c0d7352f |
107  .github/workflows/sphinx.yml (vendored, new file)
@@ -0,0 +1,107 @@
# From: https://github.com/rkdarst/sphinx-actions-test/blob/master/.github/workflows/sphinx-build.yml

name: sphinx
on: [push, pull_request]

env:
  DEFAULT_BRANCH: "main"
  #SPHINXOPTS: "-W --keep-going -T"
  # ^-- If these SPHINXOPTS are enabled, then be strict about the builds and fail on any warnings

jobs:
  build-and-deploy:
    name: Build and gh-pages
    runs-on: ubuntu-latest
    steps:
      # https://github.com/marketplace/actions/checkout
      - uses: actions/checkout@v2
        with:
          fetch-depth: 0
          lfs: true

      # https://github.com/marketplace/actions/setup-python
      # ^-- This gives info on matrix testing.
      - name: Install Python
        uses: actions/setup-python@v2
        with:
          python-version: 3.8

      # https://docs.github.com/en/actions/guides/building-and-testing-python#caching-dependencies
      # ^-- How to set up caching for pip on Ubuntu
      - name: Cache pip
        uses: actions/cache@v2
        with:
          path: ~/.cache/pip
          key: ${{ runner.os }}-pip-${{ hashFiles('requirements.txt') }}
          restore-keys: |
            ${{ runner.os }}-pip-
            ${{ runner.os }}-

      - name: Cache poetry
        uses: actions/cache@v2
        with:
          #path: ~/.cache/pip
          path: ~/.cache/pypoetry/virtualenvs
          key: ${{ runner.os }}-pip-${{ hashFiles('pyproject.toml') }}
          #key: ${{ runner.os }}-pip-${{ hashFiles('requirements.txt') }}
          restore-keys: |
            ${{ runner.os }}-poetry-
            ${{ runner.os }}-

      # https://docs.github.com/en/actions/guides/building-and-testing-python#installing-dependencies
      # ^-- This gives info on installing dependencies with pip
      - name: Install dependencies
        run: |
          python -m pip install --upgrade pip
          python -m pip install poetry
          poetry config virtualenvs.create false
          poetry install

      - name: Debugging information
        run: |
          echo "github.ref:" ${{github.ref}}
          echo "github.event_name:" ${{github.event_name}}
          echo "github.head_ref:" ${{github.head_ref}}
          echo "github.base_ref:" ${{github.base_ref}}
          set -x
          git rev-parse --abbrev-ref HEAD
          git branch
          git branch -a
          git remote -v
          python -V
          pip list --not-required
          pip list
          poetry debug

      # Build
      #- uses: ammaraskar/sphinx-problem-matcher@master
      - name: Build Sphinx docs
        working-directory: ./docs
        run: |
          ./build_doc.sh
          # make dirhtml
          # This fixes broken copy button icons, as explained in
          # https://github.com/coderefinery/sphinx-lesson/issues/50
          # https://github.com/executablebooks/sphinx-copybutton/issues/110
          # This can be removed once these PRs are accepted (but the
          # fixes also need to propagate to other themes):
          # https://github.com/sphinx-doc/sphinx/pull/8524
          # https://github.com/readthedocs/sphinx_rtd_theme/pull/1025
          #sed -i 's/url_root="#"/url_root=""/' _build/html/index.html || true

      # Add the .nojekyll file
      - name: nojekyll
        working-directory: ./docs
        #if: ${{ github.event_name == 'push' }}
        run: |
          touch _build/html/.nojekyll

      # Deploy
      # https://github.com/peaceiris/actions-gh-pages
      - name: Deploy
        uses: peaceiris/actions-gh-pages@v3
        #if: ${{ github.event_name == 'push' }}
        #if: ${{ success() && github.event_name == 'push' && github.ref == 'refs/heads/$defaultBranch' }}
        with:
          github_token: ${{ secrets.GITHUB_TOKEN }}
          publish_dir: ./docs/_build/html
          publish_branch: gh-pages
          force_orphan: true
6  .gitignore (vendored, new file)
@@ -0,0 +1,6 @@
kheops.egg-info/*
dist/*
setup.py
poetry.lock
**/__pycache__/**
**/.ipynb_checkpoints/**
@@ -9,26 +9,10 @@ kheops.plugin.backend package
Submodules
----------

kheops.plugin.backend.hier module
kheops.plugin.backend.file module
---------------------------------

.. automodule:: kheops.plugin.backend.hier
   :members:
   :undoc-members:
   :show-inheritance:

kheops.plugin.backend.init module
---------------------------------

.. automodule:: kheops.plugin.backend.init
   :members:
   :undoc-members:
   :show-inheritance:

kheops.plugin.backend.loop module
---------------------------------

.. automodule:: kheops.plugin.backend.loop
.. automodule:: kheops.plugin.backend.file
   :members:
   :undoc-members:
   :show-inheritance:
@@ -13,7 +13,7 @@ Subpackages
   :maxdepth: 4

   kheops.plugin.backend
   kheops.plugin.engine
   kheops.plugin.scope
   kheops.plugin.strategy

Submodules
26  docs/api/kheops.plugin.scope.rst (new file)
@@ -0,0 +1,26 @@
kheops.plugin.scope package
===========================

.. automodule:: kheops.plugin.scope
   :members:
   :undoc-members:
   :show-inheritance:

Submodules
----------

kheops.plugin.scope.hier module
-------------------------------

.. automodule:: kheops.plugin.scope.hier
   :members:
   :undoc-members:
   :show-inheritance:

kheops.plugin.scope.loop module
-------------------------------

.. automodule:: kheops.plugin.scope.loop
   :members:
   :undoc-members:
   :show-inheritance:
@@ -17,10 +17,18 @@ kheops.plugin.strategy.last module
   :undoc-members:
   :show-inheritance:

kheops.plugin.strategy.schema module
------------------------------------
kheops.plugin.strategy.merge\_deep module
-----------------------------------------

.. automodule:: kheops.plugin.strategy.schema
.. automodule:: kheops.plugin.strategy.merge_deep
   :members:
   :undoc-members:
   :show-inheritance:

kheops.plugin.strategy.merge\_schema module
-------------------------------------------

.. automodule:: kheops.plugin.strategy.merge_schema
   :members:
   :undoc-members:
   :show-inheritance:
@@ -33,18 +33,10 @@ kheops.cli module
   :undoc-members:
   :show-inheritance:

kheops.managers module
----------------------
kheops.controllers module
-------------------------

.. automodule:: kheops.managers
   :members:
   :undoc-members:
   :show-inheritance:

kheops.query module
-------------------

.. automodule:: kheops.query
.. automodule:: kheops.controllers
   :members:
   :undoc-members:
   :show-inheritance:
@@ -2,6 +2,6 @@ kheops
======

.. toctree::
   :maxdepth: 8
   :maxdepth: 4

   kheops
@@ -2,8 +2,8 @@

make clean

#sphinx-apidoc -f -M -o ./api ../kheops/
sphinx-apidoc -M -o ./api ../kheops/
sphinx-apidoc -f -M -o ./api ../kheops/
#sphinx-apidoc -M -o ./api ../kheops/

mkdir -p learn
jupyter nbconvert --to markdown --output=../learn/learn101.md jupyter/learn101.ipynb
@@ -53,7 +53,7 @@ exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store']
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
html_theme = 'alabaster'
html_theme = 'sphinx_rtd_theme'

# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
@ -10,7 +10,7 @@
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 1,
|
||||
"execution_count": 3,
|
||||
"id": "98d4907b",
|
||||
"metadata": {},
|
||||
"outputs": [
|
||||
@ -18,12 +18,12 @@
|
||||
"name": "stdout",
|
||||
"output_type": "stream",
|
||||
"text": [
|
||||
"/home/jez/volumes/data/prj/bell/training/tiger-ansible/ext/kheops\n"
|
||||
"/home/jez/prj/bell/dev/kheops\n"
|
||||
]
|
||||
}
|
||||
],
|
||||
"source": [
|
||||
"cd /home/jez/volumes/data/prj/bell/training/tiger-ansible/ext/kheops\n",
|
||||
"cd ${KHEOPS_EXAMPLES_DIR:-/dev/null}\n",
|
||||
"echo $PWD\n",
|
||||
"export KHEOPS_NAMESPACE=ex1_enc \n",
|
||||
"export KHEOPS_CONFIG=examples/kheops.yml\n",
|
||||
@ -48,7 +48,7 @@
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 2,
|
||||
"execution_count": 4,
|
||||
"id": "6ede46a3",
|
||||
"metadata": {},
|
||||
"outputs": [
|
||||
@ -56,22 +56,24 @@
|
||||
"name": "stdout",
|
||||
"output_type": "stream",
|
||||
"text": [
|
||||
"usage: kheops [-h] [-v] [-c CONFIG]\n",
|
||||
" {schema,gen_doc,lookup,demo,subcommand2} ...\n",
|
||||
"Usage: kheops [OPTIONS] COMMAND [ARGS]...\n",
|
||||
"\n",
|
||||
"Kheops, hierarchical data lookup tool\n",
|
||||
" Khéops, hierarchical key/value store\n",
|
||||
"\n",
|
||||
"options:\n",
|
||||
" -h, --help show this help message and exit\n",
|
||||
" -v, --verbose Increase verbosity (KHEOPS_VERBOSE) (default: 0)\n",
|
||||
" -c CONFIG, --config CONFIG\n",
|
||||
" Kheops configuration file (KHEOPS_CONFIG) (default:\n",
|
||||
" examples/kheops.yml)\n",
|
||||
"Options:\n",
|
||||
" -v, --verbose [default: 0]\n",
|
||||
" -c PATH Last name of person to greet. [env var:\n",
|
||||
" KHEOPS_CONFIG; default: kheops.yml]\n",
|
||||
" --install-completion [bash|zsh|fish|powershell|pwsh]\n",
|
||||
" Install completion for the specified shell.\n",
|
||||
" --show-completion [bash|zsh|fish|powershell|pwsh]\n",
|
||||
" Show completion for the specified shell, to\n",
|
||||
" copy it or customize the installation.\n",
|
||||
" --help Show this message and exit.\n",
|
||||
"\n",
|
||||
"subcommands:\n",
|
||||
" valid subcommands\n",
|
||||
"\n",
|
||||
" {schema,gen_doc,lookup,demo,subcommand2}\n"
|
||||
"Commands:\n",
|
||||
" config\n",
|
||||
" lookup Lookup database\n"
|
||||
]
|
||||
}
|
||||
],
|
||||
@ -149,14 +151,6 @@
|
||||
"A scope is completely optional while keys are required."
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"id": "9e6aae44",
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": []
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"id": "d509fa19",
|
||||
@ -165,9 +159,35 @@
|
||||
"## Basic hierarchy"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"id": "8dd5b6b9",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"Let's create a firest hierachy, we will define a first basic hierarchy. In `kheops.yml`, we can find:\n",
|
||||
"\n",
|
||||
"```\n",
|
||||
"ex1_enc:\n",
|
||||
" \n",
|
||||
" config:\n",
|
||||
" file_path_prefix: \"ex1_enc/\"\n",
|
||||
" file_path_suffix: \"/ansible\"\n",
|
||||
"\n",
|
||||
" lookups:\n",
|
||||
"\n",
|
||||
" - path: default # Simplest form, just a path\n",
|
||||
" - path: \"roles/{role}\" # If list, it's auto expanded like in bash\n",
|
||||
" - path: \"env_{env}\" # If list, it's auto expanded like in bash\n",
|
||||
" - path: \"nodes/{node}\"\n",
|
||||
"\n",
|
||||
"```\n",
|
||||
"\n",
|
||||
"Now we have our hierachy, let's create our files:"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 3,
|
||||
"execution_count": 9,
|
||||
"id": "e510a46d",
|
||||
"metadata": {},
|
||||
"outputs": [
|
||||
@ -175,7 +195,7 @@
|
||||
"name": "stdout",
|
||||
"output_type": "stream",
|
||||
"text": [
|
||||
"\u001b[01;34mexamples/ex1_enc\u001b[00m\n",
|
||||
"\u001b[01;34mexamples/ex1_enc\u001b[0m\n",
|
||||
"└── default.yml\n",
|
||||
"\n",
|
||||
"0 directories, 1 file\n",
|
||||
@ -217,7 +237,7 @@
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 4,
|
||||
"execution_count": 8,
|
||||
"id": "06d85b1c",
|
||||
"metadata": {},
|
||||
"outputs": [
|
||||
@ -225,6 +245,7 @@
|
||||
"name": "stdout",
|
||||
"output_type": "stream",
|
||||
"text": [
|
||||
"profile:\n",
|
||||
" env: NO_ENV\n",
|
||||
" product: NO_PRODUCT\n",
|
||||
"\n"
|
||||
@ -245,7 +266,7 @@
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 5,
|
||||
"execution_count": 10,
|
||||
"id": "c5fad63b",
|
||||
"metadata": {},
|
||||
"outputs": [
|
||||
@ -253,6 +274,7 @@
|
||||
"name": "stdout",
|
||||
"output_type": "stream",
|
||||
"text": [
|
||||
"profile:\n",
|
||||
" env: NO_ENV\n",
|
||||
" product: NO_PRODUCT\n",
|
||||
"\n"
|
||||
@ -263,6 +285,14 @@
|
||||
"kheops lookup -e node=web.infra.net profile"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"id": "a0649351",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"Same result, let's check how we can change this behavior."
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"id": "53c43757",
|
||||
@ -281,7 +311,7 @@
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 6,
|
||||
"execution_count": 11,
|
||||
"id": "9f5aabaa",
|
||||
"metadata": {},
|
||||
"outputs": [
|
||||
@ -289,9 +319,9 @@
|
||||
"name": "stdout",
|
||||
"output_type": "stream",
|
||||
"text": [
|
||||
"\u001b[01;34mexamples/ex1_enc\u001b[00m\n",
|
||||
"\u001b[01;34mexamples/ex1_enc\u001b[0m\n",
|
||||
"├── default.yml\n",
|
||||
"└── \u001b[01;34mroles\u001b[00m\n",
|
||||
"└── \u001b[01;34mroles\u001b[0m\n",
|
||||
" ├── mysql.yml\n",
|
||||
" └── web.yml\n",
|
||||
"\n",
|
||||
@ -336,7 +366,7 @@
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 7,
|
||||
"execution_count": 12,
|
||||
"id": "4762fa0c",
|
||||
"metadata": {},
|
||||
"outputs": [
|
||||
@ -383,7 +413,7 @@
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 8,
|
||||
"execution_count": 13,
|
||||
"id": "0294ec50",
|
||||
"metadata": {},
|
||||
"outputs": [
|
||||
@ -391,6 +421,7 @@
|
||||
"name": "stdout",
|
||||
"output_type": "stream",
|
||||
"text": [
|
||||
"profile:\n",
|
||||
" env: NO_ENV\n",
|
||||
" product: httpd_server\n",
|
||||
" web_top_domain: ''\n",
|
||||
@ -399,6 +430,7 @@
|
||||
" web_user_list:\n",
|
||||
" - sysadmins\n",
|
||||
"\n",
|
||||
"profile:\n",
|
||||
" env: NO_ENV\n",
|
||||
" product: mysql_server\n",
|
||||
" mysql_database: NO_DATABASE\n",
|
||||
@ -436,7 +468,7 @@
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 9,
|
||||
"execution_count": 14,
|
||||
"id": "c1acb199",
|
||||
"metadata": {},
|
||||
"outputs": [
|
||||
@ -444,12 +476,12 @@
|
||||
"name": "stdout",
|
||||
"output_type": "stream",
|
||||
"text": [
|
||||
"\u001b[01;34mexamples/ex1_enc\u001b[00m\n",
|
||||
"\u001b[01;34mexamples/ex1_enc\u001b[0m\n",
|
||||
"├── default.yml\n",
|
||||
"├── \u001b[01;34mnodes\u001b[00m\n",
|
||||
"├── \u001b[01;34mnodes\u001b[0m\n",
|
||||
"│ ├── mysql.infra.net.yml\n",
|
||||
"│ └── web.infra.net.yml\n",
|
||||
"└── \u001b[01;34mroles\u001b[00m\n",
|
||||
"└── \u001b[01;34mroles\u001b[0m\n",
|
||||
" ├── mysql.yml\n",
|
||||
" └── web.yml\n",
|
||||
"\n",
|
||||
@ -495,7 +527,7 @@
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 10,
|
||||
"execution_count": 15,
|
||||
"id": "b0d34416",
|
||||
"metadata": {},
|
||||
"outputs": [
|
||||
@ -503,21 +535,21 @@
|
||||
"name": "stdout",
|
||||
"output_type": "stream",
|
||||
"text": [
|
||||
"profile:\n",
|
||||
" env: NO_ENV\n",
|
||||
" product: httpd_server\n",
|
||||
" web_top_domain: ''\n",
|
||||
" web_app: myapp\n",
|
||||
" web_port: 80\n",
|
||||
" web_user_list:\n",
|
||||
"- sysadmins\n",
|
||||
" - domain_org\n",
|
||||
" - domain_org_external\n",
|
||||
"\n",
|
||||
"profile:\n",
|
||||
" env: NO_ENV\n",
|
||||
" product: mysql_server\n",
|
||||
" mysql_database: app_domain_org\n",
|
||||
" mysql_users:\n",
|
||||
"- sysadmin@10.0.42%\n",
|
||||
" - app_domain_org@10.0.51%\n",
|
||||
" mysql_port: 3306\n",
|
||||
" mysql_cluster: false\n",
|
||||
@ -548,7 +580,7 @@
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 11,
|
||||
"execution_count": 16,
|
||||
"id": "6c92b0cc",
|
||||
"metadata": {},
|
||||
"outputs": [
|
||||
@ -556,14 +588,14 @@
|
||||
"name": "stdout",
|
||||
"output_type": "stream",
|
||||
"text": [
|
||||
"\u001b[01;34mexamples/ex1_enc\u001b[00m\n",
|
||||
"\u001b[01;34mexamples/ex1_enc\u001b[0m\n",
|
||||
"├── default.yml\n",
|
||||
"├── env_dev.yml\n",
|
||||
"├── env_prod.yml\n",
|
||||
"├── \u001b[01;34mnodes\u001b[00m\n",
|
||||
"├── \u001b[01;34mnodes\u001b[0m\n",
|
||||
"│ ├── mysql.infra.net.yml\n",
|
||||
"│ └── web.infra.net.yml\n",
|
||||
"└── \u001b[01;34mroles\u001b[00m\n",
|
||||
"└── \u001b[01;34mroles\u001b[0m\n",
|
||||
" ├── mysql.yml\n",
|
||||
" └── web.yml\n",
|
||||
"\n",
|
||||
@ -619,7 +651,7 @@
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 13,
|
||||
"execution_count": 17,
|
||||
"id": "9c3200b4",
|
||||
"metadata": {},
|
||||
"outputs": [
|
||||
@ -627,25 +659,24 @@
|
||||
"name": "stdout",
|
||||
"output_type": "stream",
|
||||
"text": [
|
||||
"profile:\n",
|
||||
" env: prod\n",
|
||||
" product: httpd_server\n",
|
||||
" web_top_domain: infra.com\n",
|
||||
" web_app: myapp\n",
|
||||
" web_port: 80\n",
|
||||
" web_user_list:\n",
|
||||
"- sysadmins\n",
|
||||
" - domain_org\n",
|
||||
" - domain_org_external\n",
|
||||
" web_cache: 12h\n",
|
||||
"\n",
|
||||
"profile:\n",
|
||||
" env: dev\n",
|
||||
" product: httpd_server\n",
|
||||
" web_top_domain: dev.infra.net\n",
|
||||
" web_app: myapp\n",
|
||||
" web_port: 80\n",
|
||||
" web_user_list:\n",
|
||||
"- sysadmins\n",
|
||||
"- debug_user\n",
|
||||
" - domain_org\n",
|
||||
" - domain_org_external\n",
|
||||
" web_cache: 1m\n",
|
||||
@ -671,7 +702,7 @@
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 14,
|
||||
"execution_count": 18,
|
||||
"id": "7c5e86f3",
|
||||
"metadata": {},
|
||||
"outputs": [
|
||||
@ -679,23 +710,22 @@
|
||||
"name": "stdout",
|
||||
"output_type": "stream",
|
||||
"text": [
|
||||
"profile:\n",
|
||||
" env: prod\n",
|
||||
" product: mysql_server\n",
|
||||
" mysql_database: app_domain_org\n",
|
||||
" mysql_users:\n",
|
||||
"- sysadmin@10.0.42%\n",
|
||||
" - app_domain_org@10.0.51%\n",
|
||||
" mysql_port: 3306\n",
|
||||
" mysql_cluster: false\n",
|
||||
" web_top_domain: infra.com\n",
|
||||
" web_cache: 12h\n",
|
||||
"\n",
|
||||
"profile:\n",
|
||||
" env: dev\n",
|
||||
" product: mysql_server\n",
|
||||
" mysql_database: app_domain_org\n",
|
||||
" mysql_users:\n",
|
||||
"- sysadmin@10.0.42%\n",
|
||||
"- debug@10.0.%\n",
|
||||
" - app_domain_org@10.0.51%\n",
|
||||
" mysql_port: 3306\n",
|
||||
" mysql_cluster: false\n",
|
||||
@ -713,22 +743,6 @@
|
||||
"kheops lookup -e node=mysql.infra.net -e role=mysql -e env=dev profile"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"id": "db50e110",
|
||||
"metadata": {},
|
||||
"source": []
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"id": "c2c4658d",
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"\n"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"id": "e4bf6d8d",
|
||||
@ -739,7 +753,7 @@
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 15,
|
||||
"execution_count": 19,
|
||||
"id": "103cb37d",
|
||||
"metadata": {},
|
||||
"outputs": [
|
||||
@ -747,15 +761,18 @@
|
||||
"name": "stdout",
|
||||
"output_type": "stream",
|
||||
"text": [
|
||||
"profile:\n",
|
||||
" env: NO_ENV\n",
|
||||
" product: NO_PRODUCT\n",
|
||||
"\n",
|
||||
"==> Per environment view\n",
|
||||
"profile:\n",
|
||||
" env: prod\n",
|
||||
" product: NO_PRODUCT\n",
|
||||
" web_top_domain: infra.com\n",
|
||||
" web_cache: 12h\n",
|
||||
"\n",
|
||||
"profile:\n",
|
||||
" env: dev\n",
|
||||
" product: NO_PRODUCT\n",
|
||||
" web_top_domain: dev.infra.net\n",
|
||||
@ -767,6 +784,7 @@
|
||||
" debug: true\n",
|
||||
"\n",
|
||||
"==> Per role and environment view\n",
|
||||
"profile:\n",
|
||||
" env: prod\n",
|
||||
" product: mysql_server\n",
|
||||
" mysql_database: NO_DATABASE\n",
|
||||
@ -777,6 +795,7 @@
|
||||
" web_top_domain: infra.com\n",
|
||||
" web_cache: 12h\n",
|
||||
"\n",
|
||||
"profile:\n",
|
||||
" env: prod\n",
|
||||
" product: httpd_server\n",
|
||||
" web_top_domain: infra.com\n",
|
||||
@ -787,14 +806,13 @@
|
||||
" web_cache: 12h\n",
|
||||
"\n",
|
||||
"==> Per node view\n",
|
||||
"profile:\n",
|
||||
" env: dev\n",
|
||||
" product: httpd_server\n",
|
||||
" web_top_domain: dev.infra.net\n",
|
||||
" web_app: myapp\n",
|
||||
" web_port: 80\n",
|
||||
" web_user_list:\n",
|
||||
"- sysadmins\n",
|
||||
"- debug_user\n",
|
||||
" - domain_org\n",
|
||||
" - domain_org_external\n",
|
||||
" web_cache: 1m\n",
|
||||
@ -902,7 +920,7 @@
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 19,
|
||||
"execution_count": 21,
|
||||
"id": "52f5033b",
|
||||
"metadata": {},
|
||||
"outputs": [
|
||||
@ -911,6 +929,7 @@
|
||||
"output_type": "stream",
|
||||
"text": [
|
||||
"+ : Find where a key has been defined\n",
|
||||
"+ : ==========================\n",
|
||||
"+ grep --colour=auto -r '^profile:' examples/ex1_enc\n",
|
||||
"\u001b[35m\u001b[Kexamples/ex1_enc/env_prod.yml\u001b[m\u001b[K\u001b[36m\u001b[K:\u001b[m\u001b[K\u001b[01;31m\u001b[Kprofile:\u001b[m\u001b[K\n",
|
||||
"\u001b[35m\u001b[Kexamples/ex1_enc/roles/mysql.yml\u001b[m\u001b[K\u001b[36m\u001b[K:\u001b[m\u001b[K\u001b[01;31m\u001b[Kprofile:\u001b[m\u001b[K\n",
|
||||
@ -920,6 +939,7 @@
|
||||
"\u001b[35m\u001b[Kexamples/ex1_enc/default.yml\u001b[m\u001b[K\u001b[36m\u001b[K:\u001b[m\u001b[K\u001b[01;31m\u001b[Kprofile:\u001b[m\u001b[K\n",
|
||||
"\u001b[35m\u001b[Kexamples/ex1_enc/env_dev.yml\u001b[m\u001b[K\u001b[36m\u001b[K:\u001b[m\u001b[K\u001b[01;31m\u001b[Kprofile:\u001b[m\u001b[K\n",
|
||||
"+ : Find where a key has been defined and 5 first lines\n",
|
||||
"+ : ==========================\n",
|
||||
"+ grep --colour=auto -r -A 5 web_user_list: examples/ex1_enc\n",
|
||||
"\u001b[35m\u001b[Kexamples/ex1_enc/roles/web.yml\u001b[m\u001b[K\u001b[36m\u001b[K:\u001b[m\u001b[K \u001b[01;31m\u001b[Kweb_user_list:\u001b[m\u001b[K\n",
|
||||
"\u001b[35m\u001b[Kexamples/ex1_enc/roles/web.yml\u001b[m\u001b[K\u001b[36m\u001b[K-\u001b[m\u001b[K - sysadmins\n",
|
||||
@ -937,6 +957,7 @@
|
||||
"\u001b[35m\u001b[Kexamples/ex1_enc/env_dev.yml\u001b[m\u001b[K\u001b[36m\u001b[K-\u001b[m\u001b[K\n",
|
||||
"\u001b[35m\u001b[Kexamples/ex1_enc/env_dev.yml\u001b[m\u001b[K\u001b[36m\u001b[K-\u001b[m\u001b[K debug: true\n",
|
||||
"+ : Search from anything related to database\n",
|
||||
"+ : ==========================\n",
|
||||
"+ grep --colour=auto -R -C 3 database examples/ex1_enc\n",
|
||||
"\u001b[35m\u001b[Kexamples/ex1_enc/roles/mysql.yml\u001b[m\u001b[K\u001b[36m\u001b[K-\u001b[m\u001b[Kprofile:\n",
|
||||
"\u001b[35m\u001b[Kexamples/ex1_enc/roles/mysql.yml\u001b[m\u001b[K\u001b[36m\u001b[K-\u001b[m\u001b[K product: \"mysql_server\"\n",
|
||||
@ -960,12 +981,17 @@
|
||||
"set -x\n",
|
||||
"\n",
|
||||
": Find where a key has been defined\n",
|
||||
": ==========================\n",
|
||||
"grep -r '^profile:' examples/$KHEOPS_NAMESPACE\n",
|
||||
"\n",
|
||||
"\n",
|
||||
": Find where a key has been defined and 5 first lines\n",
|
||||
": ==========================\n",
|
||||
"grep -r -A 5 'web_user_list:' examples/$KHEOPS_NAMESPACE\n",
|
||||
"\n",
|
||||
"\n",
|
||||
": Search from anything related to database\n",
|
||||
": ==========================\n",
|
||||
"grep -R -C 3 'database' examples/$KHEOPS_NAMESPACE\n",
|
||||
"\n",
|
||||
"set +x"
|
||||
@ -989,7 +1015,7 @@
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 18,
|
||||
"execution_count": 22,
|
||||
"id": "49bc3fc3",
|
||||
"metadata": {},
|
||||
"outputs": [
|
||||
@ -1050,7 +1076,7 @@
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 17,
|
||||
"execution_count": 23,
|
||||
"id": "e4a1e8d1",
|
||||
"metadata": {},
|
||||
"outputs": [
|
||||
@ -1058,9 +1084,10 @@
|
||||
"name": "stdout",
|
||||
"output_type": "stream",
|
||||
"text": [
|
||||
"--- /dev/fd/63\t2022-02-01 20:10:53.094525316 -0500\n",
|
||||
"+++ /dev/fd/62\t2022-02-01 20:10:53.094525316 -0500\n",
|
||||
"@@ -1,11 +1,15 @@\n",
|
||||
"--- /dev/fd/63\t2022-02-14 13:45:59.223619144 -0500\n",
|
||||
"+++ /dev/fd/62\t2022-02-14 13:45:59.223619144 -0500\n",
|
||||
"@@ -1,11 +1,14 @@\n",
|
||||
" profile:\n",
|
||||
"- env: prod\n",
|
||||
"+ env: dev\n",
|
||||
" product: httpd_server\n",
|
||||
@ -1069,8 +1096,6 @@
|
||||
" web_app: myapp\n",
|
||||
" web_port: 80\n",
|
||||
" web_user_list:\n",
|
||||
" - sysadmins\n",
|
||||
"+- debug_user\n",
|
||||
" - domain_org\n",
|
||||
" - domain_org_external\n",
|
||||
"- web_cache: 12h\n",
|
||||
@ -1094,14 +1119,6 @@
|
||||
"<(kheops lookup -e node=web.infra.net -e role=web -e env=dev profile)"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"id": "a6feb212",
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": []
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"id": "12b1730b",
|
||||
@ -1112,7 +1129,7 @@
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 21,
|
||||
"execution_count": 25,
|
||||
"id": "3ac0cc53",
|
||||
"metadata": {},
|
||||
"outputs": [
|
||||
@ -1149,11 +1166,11 @@
|
||||
"+------------------------+------------------------------+\n",
|
||||
" INFO: Explain candidates:\n",
|
||||
"+----------------------------------------------------------------------------------+-------------------------------+------------------------------+\n",
|
||||
"| Status | Runtime | Data |\n",
|
||||
"| Status | Runtime | Key Value |\n",
|
||||
"+----------------------------------------------------------------------------------+-------------------------------+------------------------------+\n",
|
||||
"| | | |\n",
|
||||
"| Status:{ | Runtime:{ | Data:{ |\n",
|
||||
"| \"path\": \"/home/jez/volumes/data/prj/bell/training/tiger-ansible/ext/kheops ... | \"scope\": { | \"env\": \"NO_ENV\", |\n",
|
||||
"| Status:{ | Runtime:{ | Key:{ |\n",
|
||||
"| \"path\": \"/home/jez/volumes/data/prj/bell/dev/kheops/examples/ex1_enc/defau ... | \"scope\": { | \"env\": \"NO_ENV\", |\n",
|
||||
"| \"status\": \"found\", | \"role\": \"web\" | \"product\": \"NO_PRODUCT\" |\n",
|
||||
"| \"rel_path\": \"examples/ex1_enc/default.yml\" | }, | } |\n",
|
||||
"| } | \"key\": \"profile\", | |\n",
|
||||
@ -1164,8 +1181,8 @@
|
||||
"| | \"backend_index\": 0 | |\n",
|
||||
"| | } | |\n",
|
||||
"| | | |\n",
|
||||
"| Status:{ | Runtime:{ | Data:{ |\n",
|
||||
"| \"path\": \"/home/jez/volumes/data/prj/bell/training/tiger-ansible/ext/kheops ... | \"scope\": { | \"product\": \"httpd_server\", |\n",
|
||||
"| Status:{ | Runtime:{ | Key:{ |\n",
|
||||
"| \"path\": \"/home/jez/volumes/data/prj/bell/dev/kheops/examples/ex1_enc/roles ... | \"scope\": { | \"product\": \"httpd_server\", |\n",
|
||||
"| \"status\": \"found\", | \"role\": \"web\" | \"web_top_domain\": \"\", |\n",
|
||||
"| \"rel_path\": \"examples/ex1_enc/roles/web.yml\" | }, | \"web_app\": \"NO_APP\", |\n",
|
||||
"| } | \"key\": \"profile\", | \"web_port\": 80, |\n",
|
||||
@ -1176,6 +1193,7 @@
|
||||
"| | \"backend_index\": 1 | |\n",
|
||||
"| | } | |\n",
|
||||
"+----------------------------------------------------------------------------------+-------------------------------+------------------------------+\n",
|
||||
"profile:\n",
|
||||
" env: NO_ENV\n",
|
||||
" product: httpd_server\n",
|
||||
" web_top_domain: ''\n",
|
||||
@ -1188,13 +1206,13 @@
|
||||
}
|
||||
],
|
||||
"source": [
|
||||
"kheops lookup -e role=web profile -x"
|
||||
"kheops lookup -e role=web profile -X"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"id": "cb111da7",
|
||||
"id": "07eeed03",
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": []
|
||||
|
||||
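The notebook above builds a lookup hierarchy from path templates such as `roles/{role}`, `env_{env}` and `nodes/{node}`. As a rough illustration only (the real expansion is done by Kheops' scope plugins, not by `str.format`), expanding those templates against a scope looks like this:

```python
# Illustrative sketch: expand lookup path templates with scope variables.
# This is not the Kheops implementation, just a picture of the idea.
scope = {"role": "web", "env": "dev", "node": "web.infra.net"}
paths = ["default", "roles/{role}", "env_{env}", "nodes/{node}"]
print([p.format(**scope) for p in paths])
# ['default', 'roles/web', 'env_dev', 'nodes/web.infra.net']
```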
5  examples/ex1_enc/default.yml (new file)
@@ -0,0 +1,5 @@
---
profile:
  env: "NO_ENV"
  product: "NO_PRODUCT"
16  examples/ex1_enc/env_dev.yml (new file)
@@ -0,0 +1,16 @@
---
profile:
  env: dev

  # We change the top domain for the dev environment, and reduce the cache
  web_top_domain: dev.infra.net
  web_cache: 1m

  # We want a debug user
  web_user_list:
    - debug_user
  mysql_users:
    - debug@10.0.%

  debug: true
8  examples/ex1_enc/env_prod.yml (new file)
@@ -0,0 +1,8 @@
---
profile:
  env: prod

  # In the production environment, we always want the public-facing domain and a 12 hour cache.
  web_top_domain: infra.com
  web_cache: 12h
6  examples/ex1_enc/nodes/mysql.infra.net.yml (new file)
@@ -0,0 +1,6 @@
---
profile:
  mysql_database: "app_domain_org"
  mysql_users:
    - "app_domain_org@10.0.51%"
7  examples/ex1_enc/nodes/web.infra.net.yml (new file)
@@ -0,0 +1,7 @@
---
profile:
  web_app: 'myapp'
  web_user_list:
    - domain_org
    - domain_org_external
10  examples/ex1_enc/roles/mysql.yml (new file)
@@ -0,0 +1,10 @@
---
profile:
  product: "mysql_server"

  mysql_database: "NO_DATABASE"
  mysql_users:
    - "sysadmin@10.0.42%"
  mysql_port: 3306
  mysql_cluster: False
10  examples/ex1_enc/roles/web.yml (new file)
@@ -0,0 +1,10 @@
---
profile:
  product: "httpd_server"

  web_top_domain: ""
  web_app: "NO_APP"
  web_port: 80
  web_user_list:
    - sysadmins
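The role and environment files above overlap on keys such as `web_user_list`; the `merge_deep_algo: additive` setting in `examples/kheops.yml` (next file) combines them. The following is a minimal illustrative sketch of that additive behaviour as it appears in the notebook outputs; it is not taken from the Kheops code base:

```python
# Illustrative sketch (not the actual Kheops merge code): dicts are merged key
# by key, lists are concatenated, and scalars from the more specific layer win.
def merge_additive(base, override):
    if isinstance(base, dict) and isinstance(override, dict):
        out = dict(base)
        for key, value in override.items():
            out[key] = merge_additive(base[key], value) if key in base else value
        return out
    if isinstance(base, list) and isinstance(override, list):
        return base + override
    return override

role_web = {"product": "httpd_server", "web_user_list": ["sysadmins"]}
env_dev = {"env": "dev", "web_user_list": ["debug_user"]}
print(merge_additive(role_web, env_dev))
# {'product': 'httpd_server', 'web_user_list': ['sysadmins', 'debug_user'], 'env': 'dev'}
```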
281  examples/kheops.yml (new file)
@@ -0,0 +1,281 @@


# Kheops can act as ENC
# This is a basic tree that demonstrates how to use Kheops as ENC.
# Examples:
# kheops -vvv -c examples/kheops.yml lookup2 -n ex1_enc -f examples/q_011.yml profile
#
ex1_enc:

  #Query keys:
  # profile,role,class:
  # profiles,roles,classes:
  config:
    file_path_prefix: "ex1_enc/"
    #file_path_suffix: "/ansible"
    file_path_suffix: null
    merge_deep_algo: additive

  lookups:

    - path: default # Simplest form, just a path
    - path: "roles/{role}" # If list, it's auto expanded like in bash
    - path: "env_{env}" # If list, it's auto expanded like in bash
    - path: "nodes/{node}"

    #loop:
    #  totot: loop2
    #loop_1:
    #  tutu: blaaaa


### ADD NEW EXAMPLES:
# Add a way to remove entries, so we need to talk about mergers... Not yet

exN_scope_plugins:

  #Query keys:
  # profile,role,class:
  # profiles,roles,classes:
  config:
    file_path_prefix: "01_hello/"
    #file_path_suffix: "/ansible"
    file_path_suffix: null
    merge_deep_algo: additive

  lookups:

    - path: "tree/{item_hier1}" # If list, it's auto expanded like in bash
      scope:
        - module: loop
          data:
            - toto1
            - toto2
            - toto3

    - path: "tree/{item_loop}/{item_hier}" # If list, it's auto expanded like in bash
      scope:
        - module: loop
          data:
            - prod
            - dev
        - module: hier
          data: node_fqdn
          split: '.'
          reversed: True
          start: 2

    - path: "tree/{item_hier1}" # If list, it's auto expanded like in bash
      scope:
        - module: hier
          data: test_hg
          split: '/'
          reversed: False


# Kheops can store your configuration/data
# This is a basic tree that demonstrates how to use Kheops as ENC.
ex2_config:

  #Query keys:
  # profile,role,class:
  # profiles,roles,classes:

  lookups:

    # Default config
    - common/default

    # Per distribution config
    - "common/dist_{ansible_dist_name}"
    - "common/dist_{ansible_dist_name}{ansible_dist_version}"

    # Nested dir support
    - path: site/{node_env}/{node_role}
      engine: jerakia

    # Per node and group config
    - groups/{node_group}
    - nodes/{node_fqdn}

    # Override config
    - common/override
    #


# Kheops supports many backends: file, http, consul, even script !!!
ex3_backends:
  lookups:
    - backend: file,glob,consul,http,script,vault,none

    - path: node/{node_fqdn}
      backend: file
      file:
        sufix: /ansible.yml

    - path: node/{node_fqdn}
      backend: glob
      glob:
        sufix: /ansible_*.yml

    - backend: http
      path: https://foreman.corp/host/{node_fqdn}/facts?fmt=json
      http:
        format: json
        http_auth: SECRET_FILE.pass
        headers:

# Kheops supports operations on scope variables
ex3_loops_profiles:

  lookups:

    # Group management, expanded list
    - path: groups/{item}
      loop:
        data: groups

    # A more advanced example, hierarchical expansion
    - path: site/{foreman_hostgroups}/{ansible_fqdn} # If list, it's auto expanded like in bash
      engine: jerakia
      hier:
        data: foreman_hostgroup
        var: foreman_hostgroups
        reversed: False

    # TODO: an example of multiple loops

    - nodes/{ansible_fqdn}
    - common/override


# Kheops supports nested hierarchies like in Foreman/Satellite
# Example to use Kheops with Foreman
ex4_foreman_hostgroups:

  #Query keys:
  # profile,role,class:
  # profiles,roles,classes:

  lookups:
    - path: site/loc_{foreman_location} # If list, it's auto expanded like in bash
      engine: jerakia
    - path: site/org_{foreman_organization} # If list, it's auto expanded like in bash
      engine: jerakia

    - path: site/{foreman_hostgroups}/{ansible_fqdn} # If list, it's auto expanded like in bash
      engine: jerakia
      hier:
        data: foreman_hostgroup
        var: foreman_hostgroups
        reversed: False

    - nodes/{ansible_fqdn}


# Kheops supports namespaces (for performance reasons) and includes to make your code DRY
ex5_ns_include:
  ns1:
    lookups:
      - include: common.yml
      - path: ns1_only/{node_fqdn}
  ns2:
    lookups:
      - include: common.yml
      - include: ns2_only.yml


# Kheops allows customizing which keys are merged together, and how
# Explain how to merge the keys
ex6_rules:
  lookups:
    ...
  rules:
    - key: "my_key"
    - key: ".*"
    - key: ".*_db"
    - key: "prefix_.*"
      continue: True
  merge:
    last: No merge
    basic: basic python merge, assemble list and combine dict 1 level.
    schema: jsonmerge, heavy in resources
    first: Why ?
    smart_lists: support +-!~ key identifiers ...
    smart_dict: support +-!~ key identifiers ...


# Real and complete example
ex7_data:
  config:
    app: {}
    # prefix: ./ext/ansible-tree/tree
    #totot: titi

  lookups:
    #prefix: ./ext/ansible-tree/tree
    prefix: ext/tiger-jerakia/data/data
    engine: file.auto
    sufix: ansible.yaml
    default_engine: jerakia
    default_item:
      engine: jerakia

  # File lookup
  lookups:
    - common/deprecated
    - common/all
    - "common/dist_{ansible_dist_name}"
    - "common/dist_{ansible_dist_name}{ansible_dist_version}"
    # Default structure

    # Profile management
    - path: profiles/{item}
      loop:
        data: tiger_profiles

    # This is not DRY, use include to make code DRY

    - path: site/loc_{foreman_location} # If list, it's auto expanded like in bash
      engine: jerakia
    - path: site/org_{foreman_organization} # If list, it's auto expanded like in bash
      engine: jerakia

    - path: site/{foreman_hostgroups}/{ansible_fqdn} # If list, it's auto expanded like in bash
      engine: jerakia
      hier:
        data: foreman_hostgroup
        var: foreman_hostgroups
        reversed: False
      # loop:
      #   var: aa_env
      #   data:
      #     - prod
      #     - preprod
      #     - devel

    # - path: site/{foreman_hostgroup}/{ansible_fqdn}
    #   engine: jerakia

    - nodes/{ansible_fqdn}
    - common/override

  rules:

    - rule: 'profilesss'
      strategy: first
      schema:
        "$schema": http://json-schema.org/draft-04/schema#
        type: array
        mergeStrategy: append

    - rule: '(.*)_item'
      schema: generic item schema
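These namespaces can also be queried from Python. The class and method names below appear in `kheops/app.py` later in this diff, but the exact call signature is an assumption, so treat this as a sketch rather than documented API:

```python
# Sketch of a programmatic lookup against examples/kheops.yml.
# Kheops(...) and .lookup(...) come from kheops/app.py in this change set;
# the exact keyword arguments are assumptions based on that diff.
from kheops.app import Kheops

app = Kheops(config="examples/kheops.yml", namespace="ex1_enc")
result = app.lookup(
    keys=["profile"],
    scope={"node": "web.infra.net", "role": "web", "env": "dev"},
)
print(result["profile"])
```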
167
kheops/app.py
@ -11,89 +11,50 @@ from pathlib import Path
|
||||
import anyconfig
|
||||
from diskcache import Cache
|
||||
|
||||
import kheops.plugin as KheopsPlugins
|
||||
from kheops.controllers import QueryProcessor
|
||||
from kheops.utils import schema_validate
|
||||
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
|
||||
CONF_SCHEMA = {
|
||||
"$schema": "http://json-schema.org/draft-07/schema#",
|
||||
"type": "object",
|
||||
"additionalProperties": False,
|
||||
"default": {},
|
||||
"$def": {
|
||||
"backends_items": {},
|
||||
"backends_config": {},
|
||||
"rules_items": {},
|
||||
"rules_config": {},
|
||||
},
|
||||
# "patternProperties": {
|
||||
# ".*": {
|
||||
# "type": "object",
|
||||
# "optional": True,
|
||||
# "additionalProperties": False,
|
||||
"required": ["config"],
|
||||
#"$def": {
|
||||
# "backends_items": {},
|
||||
# "backends_config": {},
|
||||
# "rules_items": {},
|
||||
# "rules_config": {},
|
||||
#},
|
||||
"properties": {
|
||||
"lookups": {
|
||||
"type": "array",
|
||||
"default": [],
|
||||
"items": {
|
||||
"type": "object",
|
||||
#"properties": {"$ref": "#/$defs/backends_items"},
|
||||
},
|
||||
},
|
||||
"rules": {
|
||||
"type": "array",
|
||||
"default": [],
|
||||
# "arrayItem": { "$ref": "#/$defs/rules_items" },
|
||||
},
|
||||
|
||||
"config": {
|
||||
"type": "object",
|
||||
"default": {},
|
||||
"additionalProperties": True,
|
||||
#"required": ["app"],
|
||||
"properties": {
|
||||
"app": {
|
||||
"type": "object",
|
||||
"default": {},
|
||||
"additionalProperties": False,
|
||||
"properties": {
|
||||
"root": {
|
||||
"default": None,
|
||||
"oneOf": [
|
||||
{
|
||||
"type": "null",
|
||||
"description": "Application current working directory is the `kheops.yml` directory",
|
||||
},
|
||||
{
|
||||
"type": "string",
|
||||
"description": "Application working directory. If a relative path is used, it will be depending on `kheops.yml` directory",
|
||||
},
|
||||
],
|
||||
},
|
||||
"cache": {
|
||||
"default": "kheops_cache",
|
||||
"oneOf": [
|
||||
{
|
||||
"type": "null",
|
||||
"description": "Disable cache",
|
||||
},
|
||||
{
|
||||
"type": "string",
|
||||
"description": "Path of the cache directory",
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
},
|
||||
# OLD
|
||||
"tree": {
|
||||
# "additionalProperties": False,
|
||||
"type": "object",
|
||||
"default": {},
|
||||
"deprecated": True,
|
||||
"properties": {
|
||||
"prefix": {
|
||||
"default": None,
|
||||
"oneOf": [
|
||||
{
|
||||
"type": "null",
|
||||
"description": "Disable prefix, all files are lookup up from the app root dir.",
|
||||
},
|
||||
{
|
||||
"type": "string",
|
||||
"description": """Add a path prefix before all paths. This is quite useful to store your YAML data in a dedicated tree.""",
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
},
|
||||
"lookups": {
|
||||
# "additionalProperties": False,
|
||||
@ -121,30 +82,7 @@ CONF_SCHEMA = {
|
||||
},
|
||||
},
|
||||
},
|
||||
"tree": {
|
||||
"type": "array",
|
||||
"default": [],
|
||||
"items": {
|
||||
"type": "object",
|
||||
"properties": {"$ref": "#/$defs/backends_items"},
|
||||
},
|
||||
},
|
||||
"lookups": {
|
||||
"type": "array",
|
||||
"default": [],
|
||||
"items": {
|
||||
"type": "object",
|
||||
"properties": {"$ref": "#/$defs/backends_items"},
|
||||
},
|
||||
},
|
||||
"rules": {
|
||||
"type": "array",
|
||||
"default": [],
|
||||
# "arrayItem": { "$ref": "#/$defs/rules_items" },
|
||||
},
|
||||
},
|
||||
# },
|
||||
# },
|
||||
}
|
||||
|
||||
|
||||
@ -184,18 +122,17 @@ class KheopsNamespace(GenericInstance, QueryProcessor):
|
||||
:type config: Any
|
||||
"""
|
||||
|
||||
config = schema_validate(config, CONF_SCHEMA)
|
||||
super().__init__(config)
|
||||
|
||||
self.name = name
|
||||
self.config = config or {}
|
||||
self.app = app
|
||||
self.run = dict(app.run)
|
||||
|
||||
# Validate configuration
|
||||
self.config = schema_validate(self.config, CONF_SCHEMA)
|
||||
|
||||
self.run["path_ns"] = str(Path(app.run["config_src"]).parent.resolve())
|
||||
|
||||
|
||||
|
||||
class Kheops(GenericInstance):
|
||||
"""
|
||||
Kheops Application Class
|
||||
@ -228,6 +165,7 @@ class Kheops(GenericInstance):
|
||||
raise Exception("Need a valid config")
|
||||
|
||||
self.ns_name = namespace
|
||||
self.namespaces = {}
|
||||
self.raw_config = self.parse_conf(config)
|
||||
|
||||
def parse_conf(self, config="kheops.yml"):
|
||||
@ -244,7 +182,10 @@ class Kheops(GenericInstance):
|
||||
|
||||
# Load config
|
||||
if isinstance(config, str):
|
||||
try:
|
||||
dict_conf = anyconfig.load(config)
|
||||
except Exception as err:
|
||||
raise Exception ("Can't load kheops configuration, got: %s", err)
|
||||
source = f"file:{config}"
|
||||
elif isinstance(config, dict):
|
||||
dict_conf = config
|
||||
@ -260,10 +201,14 @@ class Kheops(GenericInstance):
|
||||
explain=False,
|
||||
validate_schema=False,
|
||||
namespace=None,
|
||||
namespace_prefix=False,
|
||||
):
|
||||
"""
|
||||
Lookup a key in hierarchy
|
||||
|
||||
For a given lookup:
|
||||
* keys= [<namespace>:<key>]
|
||||
|
||||
:param keys: List of keys to query.
|
||||
:type keys: list[str]
|
||||
|
||||
@ -278,7 +223,7 @@ class Kheops(GenericInstance):
|
||||
key_def = key_def or ""
|
||||
|
||||
# Identify namespace and key
|
||||
parts = key_def.split(":")
|
||||
parts = key_def.split("/")
|
||||
ns_name = namespace or self.ns_name
|
||||
if len(parts) > 1:
|
||||
ns_name = parts[0]
|
||||
@ -287,17 +232,29 @@ class Kheops(GenericInstance):
|
||||
key_name = parts[0]
|
||||
|
||||
# Load namespace
|
||||
if ns_name in self.namespaces:
|
||||
ns_config = self.namespaces[ns_name]
|
||||
else:
|
||||
try:
|
||||
ns_config = self.raw_config[ns_name]
|
||||
except KeyError as err:
|
||||
raise Exception(f"Unknown kheops namespace: {ns_name}")
|
||||
|
||||
ns = KheopsNamespace(self, ns_name, ns_config)
|
||||
|
||||
# Get result
|
||||
result = ns.query(key=key_name, scope=scope, explain=explain)
|
||||
|
||||
# Prepare output
|
||||
_key = key_name
|
||||
if namespace_prefix == True:
|
||||
_key = key_def
|
||||
ret[_key] = result
|
||||
|
||||
# TODO: This may lead to inconsistent output format :/
|
||||
# Return result
|
||||
#if len(keys) > 1:
|
||||
# log.debug("Append '%s' to results", key_name)
|
||||
ret[key_name] = result
|
||||
#else:
|
||||
# log.debug("Return '%s' result", key_name)
|
||||
# return result
|
||||
@ -311,33 +268,7 @@ class Kheops(GenericInstance):
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
# def DEPRECATED_lookup(
|
||||
# self,
|
||||
# keys=None,
|
||||
# policy=None,
|
||||
# scope=None,
|
||||
# trace=False,
|
||||
# explain=False,
|
||||
# validate_schema=False,
|
||||
# ):
|
||||
# """Lookup a key in hierarchy"""
|
||||
# log.debug("Lookup key %s with scope: %s", keys, scope)
|
||||
# assert isinstance(keys, list), f"Got {keys}"
|
||||
|
||||
# query = Query(app=self)
|
||||
# ret = {}
|
||||
# for key in keys:
|
||||
# ret[key] = query.exec(
|
||||
# key=key,
|
||||
# scope=scope,
|
||||
# policy=policy,
|
||||
# trace=trace,
|
||||
# explain=explain,
|
||||
# validate_schema=validate_schema,
|
||||
# )
|
||||
# return ret
|
||||
# To clean/implement
|
||||
|
||||
# def DEPRECATED_dump_schema(self):
|
||||
# """Dump configuration schema"""
|
||||
|
||||
@ -1,3 +1,9 @@
|
||||
"""
|
||||
Kheops controller
|
||||
|
||||
Main Kheops model classes
|
||||
"""
|
||||
|
||||
import json
|
||||
import logging
|
||||
|
||||
@ -7,35 +13,45 @@ from pathlib import Path
|
||||
from prettytable import PrettyTable
|
||||
|
||||
import kheops.plugin as KheopsPlugins
|
||||
from kheops.utils import render_template, render_template_python, str_ellipsis
|
||||
from kheops.utils import render_template_python, str_ellipsis
|
||||
|
||||
from pprint import pprint
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
tracer = logging.getLogger(f"{__name__}.explain")
|
||||
|
||||
|
||||
|
||||
# Helper classes
|
||||
# ------------------------
|
||||
|
||||
class LoadPlugin:
|
||||
"""Generic class to load plugins"""
|
||||
"""Kheops plugins loader
|
||||
|
||||
This plugin loader is a helper to load a python module (Kheops Plugin) from
|
||||
a plugin kind and name.
|
||||
"""
|
||||
|
||||
def __init__(self, plugins):
|
||||
self.plugins = plugins
|
||||
|
||||
def load(self, kind, name):
|
||||
"""
|
||||
Load a plugin
|
||||
"""
|
||||
|
||||
assert isinstance(name, str), f"Got: {name}"
|
||||
|
||||
# Get plugin kind
|
||||
try:
|
||||
plugins = getattr(self.plugins, kind)
|
||||
except Exception as err:
|
||||
raise Exception(f"Unknown module kind '{kind}': {err}")
|
||||
except AttributeError as err:
|
||||
raise Exception(f"Unknown module kind '{kind}': {err}") from err
|
||||
|
||||
# Get plugin class
|
||||
try:
|
||||
plugin_cls = getattr(plugins, name)
|
||||
except Exception as err:
|
||||
raise Exception(f"Unknown module '{kind}.{name}': {err}")
|
||||
except AttributeError as err:
|
||||
raise Exception(f"Unknown module '{kind}.{name}': {err}") from err
|
||||
|
||||
assert hasattr(
|
||||
plugin_cls, "Plugin"
|
||||
@ -44,11 +60,29 @@ class LoadPlugin:
|
||||
# Return plugin class
|
||||
return plugin_cls.Plugin
|
||||
|
||||
class BackendCandidate():
|
||||
"""Backend Candidate
|
||||
|
||||
plugin_loader = LoadPlugin(KheopsPlugins)
|
||||
This object represents a backend candidate. It holds the value of the
|
||||
requested key, but also its source path, status and some other metadata.
|
||||
"""
|
||||
|
||||
def __init__(self, path=None, data=None, run=None, status=None):
|
||||
assert isinstance(run, dict)
|
||||
self.path = path
|
||||
self.status = status or "unparsed"
|
||||
self.run = run or {}
|
||||
self.data = data or None
|
||||
|
||||
def __repr__(self):
|
||||
return f"Status: {self.status}, Path: {self.path} => {self.data}"
|
||||
|
||||
|
||||
class Query:
|
||||
"""Query object
|
||||
|
||||
Object that holds the key and scope.
|
||||
"""
|
||||
|
||||
key = None
|
||||
scope = None
|
||||
@ -60,17 +94,25 @@ class Query:
|
||||
self.rule = None
|
||||
|
||||
|
||||
# class QueryController():
|
||||
|
||||
|
||||
# def exec(self, key=None, scope=None):
|
||||
# query = Query(key, scope)
|
||||
# result = self.processor.exec(query)
|
||||
# return result
|
||||
|
||||
# Query Processor class
|
||||
# ------------------------
|
||||
|
||||
class QueryProcessor:
|
||||
"""QueryProcessor class provides all the methods to be able to make queries"""
|
||||
"""QueryProcessor
|
||||
|
||||
This class helps to do queries for a given key and scope. It provides a single
|
||||
public method. It also implements an explain mechanism to help troubleshoot query
|
||||
lookup issues.
|
||||
|
||||
The query process consists of:
|
||||
* Create a new query with the key and the scope
|
||||
* Fetch and expand the lookup list (_exec_assemble_lookups)
|
||||
* Fetch the rule that match the key (_exec_get_rule)
|
||||
* Fetch the strategy that match the key
|
||||
* Query all backends with lookup list (_exec_backend_plugins)
|
||||
* Return result
|
||||
|
||||
"""
|
||||
|
||||
default_match_rule = {
|
||||
"key": None,
|
||||
@ -84,19 +126,17 @@ class QueryProcessor:
|
||||
"continue": True,
|
||||
}
|
||||
|
||||
# def __init__(self, app):
|
||||
# self.app = app
|
||||
|
||||
# self.config = app.conf2['config'] or {}
|
||||
# self.lookups = app.conf2['lookups'] or []
|
||||
# self.rules = app.conf2['rules'] or []
|
||||
|
||||
def CHILDREN_INIT(self, config):
|
||||
def __init__(self, config):
|
||||
self.plugin_loader = LoadPlugin(KheopsPlugins)
|
||||
self.config = config
|
||||
pass
|
||||
|
||||
# def exec(self, key=None, scope=None):
|
||||
# Query methods
|
||||
# ------------------------
|
||||
|
||||
def query(self, key=None, scope=None, explain=False):
|
||||
"""Query key with scope
|
||||
|
||||
"""
|
||||
|
||||
if explain:
|
||||
tracer.setLevel(logging.DEBUG)
|
||||
@ -109,27 +149,23 @@ class QueryProcessor:
|
||||
# Assemble if more than one and merge when continue.
|
||||
# Got the Matched rule (RULE CACHE)
|
||||
# We'll need strategy, and it's selector field: matched/first/last/all
|
||||
# key_rule = self._get_key_rule(key) or {}
|
||||
# key_strategy = key_rule.get('strategy', None)
|
||||
key_rule = self._exec_get_rule(query)
|
||||
log.info("Matched rule for key '%s': %s", query.key, key_rule)
|
||||
|
||||
# Build the lookups [] => []
|
||||
# Fetch static config from app (for include and NS:includes ...)
|
||||
# Loop over lookups and process each lookup with ScopePlugins
|
||||
lookups = self.config.get("lookups", {}).copy()
|
||||
lookups = self.config["lookups"].copy()
|
||||
parsed_lookups = self._exec_assemble_lookups(lookups, query)
|
||||
|
||||
# Generate explain report
|
||||
if explain:
|
||||
self._explain_lookups(parsed_lookups)
|
||||
|
||||
# FEtch the module
|
||||
# Fetch the module
|
||||
# Retrieve the module instance
|
||||
# Get it's match policy
|
||||
# TODO
|
||||
plugin_name = key_rule.get("strategy", None)
|
||||
strategy_plugin = plugin_loader.load("strategy", plugin_name)(self)
|
||||
strategy_plugin = self.plugin_loader.load("strategy", plugin_name)(self)
|
||||
|
||||
# Get the data (strategy.selector)
|
||||
# For each entry, ask the backend to return the data: file, http, consul ...
|
||||
@ -151,94 +187,29 @@ class QueryProcessor:
|
||||
|
||||
return result
|
||||
|
||||
def _explain_lookups(self, parsed_lookups):
|
||||
"""Explain list of lookups"""
|
||||
|
||||
table = PrettyTable()
|
||||
for item in parsed_lookups:
|
||||
col1 = json.dumps(
|
||||
{k: v for k, v in item.items() if k not in ["_run"]},
|
||||
default=lambda o: "<not serializable>",
|
||||
indent=2,
|
||||
)
|
||||
col2 = json.dumps(
|
||||
item["_run"], default=lambda o: "<not serializable>", indent=2
|
||||
)
|
||||
table.add_row(
|
||||
[
|
||||
"\nConfig:" + str_ellipsis(col1, 60),
|
||||
"\nRuntime:" + str_ellipsis(col2, 60),
|
||||
]
|
||||
)
|
||||
table.field_names = ["Config", "Runtime"]
|
||||
table.align = "l"
|
||||
tracer.info("Explain lookups:\n" + str(table))
|
||||
# Query parts methods
|
||||
# ------------------------
|
||||
|
||||
def _explain_candidates(self, candidates, query):
|
||||
"""Explain list of candidates"""
|
||||
def _exec_get_rule(self, query, mode="match"):
|
||||
|
||||
# TOFIX: query is not needed here !
|
||||
key = query.key
|
||||
rules = self.config["rules"] or {}
|
||||
|
||||
table = PrettyTable()
|
||||
for item_obj in candidates:
|
||||
item = item_obj.__dict__
|
||||
item["rel_path"] = str(Path(item["path"]).relative_to(Path.cwd()))
|
||||
|
||||
col1 = json.dumps(
|
||||
{k: v for k, v in item.items() if k not in ["run", "data"]},
|
||||
default=lambda o: "<not serializable>",
|
||||
indent=2,
|
||||
)
|
||||
col2 = json.dumps(
|
||||
item["run"]["_run"], default=lambda o: "<not serializable>", indent=2
|
||||
)
|
||||
col3 = (
|
||||
item_obj.data.get(query.key, "NOT FOUND")
|
||||
if query.key is not None
|
||||
else item_obj.data
|
||||
)
|
||||
col3 = json.dumps(col3, default=lambda o: "<not serializable>", indent=2)
|
||||
table.add_row(
|
||||
[
|
||||
"\nStatus:" + str_ellipsis(col1, 80),
|
||||
"\nRuntime:" + str_ellipsis(col2, 60),
|
||||
"\nKey:" + str_ellipsis(col3, 60),
|
||||
]
|
||||
)
|
||||
|
||||
table.field_names = ["Status", "Runtime", "Key Value"]
|
||||
table.align = "l"
|
||||
tracer.info("Explain candidates:\n" + str(table))
|
||||
|
||||
def _exec_backend_plugins(self, lookups, selector="matched"):
|
||||
selector = "matched"
|
||||
assert selector in ["last", "first", "all", "matched"]
|
||||
assert isinstance(lookups, list)
|
||||
# lookups = self.config.get("lookups", {}).copy()
|
||||
|
||||
plugins = {}
|
||||
ret = []
|
||||
for index, lookup_def in enumerate(lookups):
|
||||
|
||||
# Update object
|
||||
lookup_def["_run"]["backend_index"] = index
|
||||
|
||||
# Load plugin
|
||||
plugin_name = lookup_def["backend"]
|
||||
if plugin_name in plugins:
|
||||
plugin = plugins[plugin_name]
|
||||
if mode == "match":
|
||||
rule = dict(self.default_match_rule)
|
||||
rules = [i for i in rules if i.get("key", None) == key]
|
||||
if len(rules) > 0:
|
||||
match = rules[0]
|
||||
rule.update(match)
|
||||
else:
|
||||
plugin = plugin_loader.load("backend", plugin_name)(namespace=self)
|
||||
log.debug("Applying default rule for key '%s'", key)
|
||||
rule = self.default_match_rule
|
||||
else:
|
||||
raise Exception(f"Mode '{mode}' is not implemented")
|
||||
|
||||
# Get candidates
|
||||
candidates = plugin.fetch_data(lookup_def)
|
||||
return rule
|
||||
|
||||
# Apply selector
|
||||
for candidate in candidates:
|
||||
if candidate.status == "found" or selector == "all":
|
||||
ret.append(candidate)
|
||||
|
||||
return ret
|
||||
|
||||
def _exec_assemble_lookups(self, lookups, query):
|
||||
|
||||
@ -248,10 +219,10 @@ class QueryProcessor:
|
||||
# Init the scope list
|
||||
new_lookups1 = []
|
||||
for index, lookup_def in enumerate(lookups):
|
||||
shortform = False
|
||||
#shortform = False
|
||||
|
||||
if isinstance(lookup_def, str):
|
||||
shortform = True
|
||||
#shortform = True
|
||||
lookup_def = {
|
||||
"path": lookup_def,
|
||||
}
@@ -279,7 +250,7 @@ class QueryProcessor:
            plugin_name = plugin_def.get("module", None)

            if plugin_name:
                plugin = plugin_loader.load("scope", plugin_name)(namespace=self)
                plugin = self.plugin_loader.load("scope", plugin_name)(namespace=self)
                ret = plugin.process_items(ret, plugin_def)

            new_lookups2.extend(ret)
@@ -295,25 +266,99 @@ class QueryProcessor:
                lookup["path"] = new_path
                new_lookups3.append(lookup)
            else:
                log.info("Ignore because of missing scope vars: '%s'", path)
                log.warning("Ignore lookup item because of missing scope vars: '%s'", path)

        return new_lookups3

    def _exec_get_rule(self, query, mode="match"):

        key = query.key
        rules = self.config["rules"] or {}
    def _exec_backend_plugins(self, lookups, selector="matched"):
        selector = "matched"
        assert selector in ["last", "first", "all", "matched"]
        assert isinstance(lookups, list)
        # lookups = self.config.get("lookups", {}).copy()

        if mode == "match":
            rule = dict(self.default_match_rule)
            rules = [i for i in rules if i.get("key", None) == key]
            if len(rules) > 0:
                match = rules[0]
                rule.update(match)
        plugins = {}
        ret = []
        for index, lookup_def in enumerate(lookups):

            # Update object
            lookup_def["_run"]["backend_index"] = index

            # Load plugin
            plugin_name = lookup_def["backend"]
            if plugin_name in plugins:
                plugin = plugins[plugin_name]
            else:
                log.debug("Applying default rule for key '%s'", key)
                rule = self.default_match_rule
        else:
            raise Exception(f"Mode '{mode}' is not implemented")
                plugin = self.plugin_loader.load("backend", plugin_name)(namespace=self)

        return rule
            # Get candidates
            candidates = plugin.fetch_data(lookup_def)

            # Apply selector
            for candidate in candidates:
                if candidate.status == "found" or selector == "all":
                    ret.append(candidate)

        return ret
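    # Illustrative note on the selector semantics above: with selector="matched"
    # only candidates whose status is "found" are returned, while selector="all"
    # also keeps "not_found" or "broken" candidates (presumably for the explain
    # output below).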

    # Explain methods
    # ------------------------

    def _explain_lookups(self, parsed_lookups):
        """Explain list of lookups"""

        table = PrettyTable()
        for item in parsed_lookups:
            col1 = json.dumps(
                {k: v for k, v in item.items() if k not in ["_run"]},
                default=lambda o: "<not serializable>",
                indent=2,
            )
            col2 = json.dumps(
                item["_run"], default=lambda o: "<not serializable>", indent=2
            )
            table.add_row(
                [
                    "\nConfig:" + str_ellipsis(col1, 60),
                    "\nRuntime:" + str_ellipsis(col2, 60),
                ]
            )
        table.field_names = ["Config", "Runtime"]
        table.align = "l"
        tracer.info("Explain lookups:\n%s", str(table))

    def _explain_candidates(self, candidates, query):
        """Explain list of candidates"""

        # TOFIX: query is not needed here !

        table = PrettyTable()
        for item_obj in candidates:
            item = item_obj.__dict__
            item["rel_path"] = str(Path(item["path"]).relative_to(Path.cwd()))

            col1 = json.dumps(
                {k: v for k, v in item.items() if k not in ["run", "data"]},
                default=lambda o: "<not serializable>",
                indent=2,
            )
            col2 = json.dumps(
                item["run"]["_run"], default=lambda o: "<not serializable>", indent=2
            )
            col3 = (
                item_obj.data.get(query.key, "NOT FOUND")
                if query.key is not None and isinstance(item_obj.data, dict)
                else item_obj.data
            )
            col3 = json.dumps(col3, default=lambda o: "<not serializable>", indent=2)
            table.add_row(
                [
                    "\nStatus:" + str_ellipsis(col1, 80),
                    "\nRuntime:" + str_ellipsis(col2, 60),
                    "\nKey:" + str_ellipsis(col3, 60),
                ]
            )

        table.field_names = ["Status", "Runtime", "Key Value"]
        table.align = "l"
        tracer.info("Explain candidates:\n%s", str(table))

@@ -2,40 +2,75 @@

import os
import logging
from pathlib import Path
# from pprint import pprint

import anyconfig

from kheops.utils import render_template, glob_files, render_template_python
from anyconfig.common.errors import BaseError as AnyConfigBaseError
from kheops.plugin.common import BackendPlugin, BackendCandidate

from pprint import pprint

log = logging.getLogger(__name__)


# class FileCandidate(Candidate):
#     path = None
#
#     def _report_data(self):
#         data = {
#             # "rule": self.config,
#             "value": self.engine._plugin_value,
#             "data": self.data,
#             "path": str(self.path.relative_to(Path.cwd())),
#         }
#         data = dict(self.config)
#         return super()._report_data(data)


# class Plugin(PluginEngineClass, PluginFileGlob):
class Plugin(BackendPlugin):
    """Generic Plugin Class"""
    """File Backend Plugin

    This backend allows looking up data in a file hierarchy. Files can be in any
    of the formats supported by the anyconfig python library.
    """

    plugin_name = "file"
    extensions = {
        ".yml": "yaml",
        ".yaml": "yaml",
        #".toml": "toml",
        #".ini": "ini",
        #".json": "json",
    }

    _schema_config = {
        "backend_file": {
            "title": "File Backend",
            "description": "This backend will look for data inside a file hierarchy.",
            "type": "object",
            "properties": {
                "extensions": {
                    "title": "File formats",
                    "description": """
                        This object describes which parser is assigned to which extension.
                        Adding more formats has a performance impact, because the backend
                        will probe every configured extension. It is better to keep this
                        list as small as possible.
                        """,
                    "type": "object",
                    "default": extensions,
                    "additionalProperties": {
                        "title": "Parser name associated with this extension",
                        "type": "string"
                    }
                },
                "path_prefix": {
                    "title": "Prefix string prepended to the final path",
                    "description": """
                        String added at the beginning of the resolved path. This is useful
                        to change the root of the hierarchy.
                        """,
                    "type": "string"
                },
                "path_suffix": {
                    "title": "Suffix string appended to the final path",
                    "description": """
                        String added at the end of the resolved path. This is useful to
                        provide Hiera or Jerakia support.""",
                    "type": "string",
                    "examples": [
                        { "path_suffix": "/ansible" },
                    ]
                },
            }
        }
    }
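    # Illustrative configuration sketch (assumed values, following the schema above):
    #
    #   backend_file:
    #     extensions:
    #       .yml: yaml
    #       .yaml: yaml
    #     path_prefix: "data"
    #     path_suffix: "/ansible"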

    _plugin_name = "file"

    _plugin_engine = "file"
    # _schema_props_files = {
    _schema_props_new = {
        "path": {
            "anyOf": [
@@ -66,19 +101,17 @@ class Plugin(BackendPlugin):
            },
        }

    extensions = {".yml": "yaml", ".yaml": "yaml"}

    def _init(self):

        # Guess top path
        # Build file prefix
        top_path = self.ns.run["path_config"]
        path_prefix = self.ns.config["config"].get("file_path_prefix", None)
        path_prefix = self.config.get("path_prefix", None)
        if path_prefix:
            top_path = os.path.join(top_path, path_prefix)
        self.top_path = top_path

        # Fetch module config
        path_suffix = self.ns.config["config"].get("file_path_suffix", "auto")
        # Build file suffix
        path_suffix = self.config.get("path_suffix", "")
        if path_suffix == "auto":
            path_suffix = f"/{self.ns.name}"
        self.path_suffix = path_suffix
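        # Illustrative example (assumed values): with path_config="/etc/kheops",
        # path_prefix="data" and a namespace named "ansible", top_path becomes
        # "/etc/kheops/data" and an "auto" suffix resolves to "/ansible".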
@@ -91,18 +124,24 @@ class Plugin(BackendPlugin):

        raw_data = None
        status = "not_found"
        for ext, parser in self.extensions.items():
        extensions = self.config.get("extensions", self.extensions)
        for ext, parser in extensions.items():
            new_path = os.path.join(self.top_path, path + ext)
            log.debug("Looking into %s", new_path)
            if os.path.isfile(new_path):
                status = "found"
                try:
                    log.info("Found file: %s", new_path)
                    raw_data = anyconfig.load(new_path, ac_parser=parser)
                except Exception:
                except AnyConfigBaseError as err:
                    status = "broken"
                    raw_data = None
                    log.warning("Could not parse file %s: %s", new_path, err)

                # Stop the extension loop once a result has been found.
                break

            log.debug("Skip absent file: %s", new_path)

        ret = BackendCandidate(
            path=new_path,
            status=status,
@@ -19,13 +19,24 @@ log = logging.getLogger(__name__)
# BackendPlugin[1]


# Generic classes
# Generic Plugin classes
# -------------------------

class KheopsPlugin:
    plugin_name = None
    plugin_type = None
    plugin_kind = None

    def __init__(self):
        assert isinstance(self.plugin_name, str), f"Missing name attribute in plugin: {self.__class__}"
        assert isinstance(self.plugin_kind, str)

        config_key = f"{self.plugin_kind}_{self.plugin_name}"
        self.config = self.ns.config["config"].get(config_key, {})
        self.config_key = config_key

        #if self.config:
        #    log.debug("Load plugin configuration in config with key '%s', got: %s", config_key, self.config)
        self._init()
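        # For example, a backend plugin named "file" reads its settings from the
        # "backend_file" key of the configuration, since config_key is built as
        # "<plugin_kind>_<plugin_name>".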

    def _init(self):
@@ -47,20 +58,60 @@ class KheopsItemPlugin(KheopsPlugin):
        pass


# Other classes
class BackendCandidate:
    def __init__(self, path=None, data=None, run=None, status=None):
        assert isinstance(run, dict)
        self.path = path
        self.status = status or "unparsed"
        self.run = run or {}
        self.data = data or None
# Plugin classes
# -------------------------

    def __repr__(self):
        return f"Status: {self.status}, Path: {self.path} => {self.data}"
class BackendPlugin(KheopsItemPlugin):
    plugin_kind = "backend"

    schema_prop = {
        "backend": {},  # GENERIC, String
        "file": {},
        "glob": {},
        "http": {},
        "consul": {},
        "vault": {},
    }

    def fetch_data(self, config) -> list:
        raise Exception("Not implemented")

    def __init__(self, namespace):
        self.ns = namespace
        super().__init__()


class StrategyPlugin(KheopsItemPlugin):
    plugin_kind = "strategy"
    schema_prop = {
        "_strategy": {},  # GENERIC, String
        "merge": {},
        "first": {},
        "last": {},
        "smart": {},
        "schema": {},
    }

    def merge_results(self, candidates, rule) -> list:
        pass

    def __init__(self, namespace):
        self.ns = namespace
        super().__init__()


class OutPlugin(KheopsItemPlugin):
    plugin_kind = "out"
    schema_prop = {
        "_out": {},  # GENERIC, List of dict
        "toml": {},
        "validate": {},
    }

    def process_item(self, item) -> list:
        pass


# Specific classes
class ConfPlugin(KheopsListPlugin):
    plugin_kind = "conf"
    schema_prop = {
@@ -89,6 +140,22 @@ class ScopePlugin(KheopsListPlugin):
        self.ns = namespace
        super().__init__()

# Helper classes
# -------------------------

class BackendCandidate():
    """Represent a backend candidate"""
    def __init__(self, path=None, data=None, run=None, status=None):
        assert isinstance(run, dict)
        self.path = path
        self.status = status or "unparsed"
        self.run = run or {}
        self.data = data or None

    def __repr__(self):
        return f"Status: {self.status}, Path: {self.path} => {self.data}"



class ScopeExtLoop:
    """This Scope Extension allows looping over a lookup"""
@@ -165,55 +232,9 @@ class ScopeExtLoop:
        return ret


class BackendPlugin(KheopsItemPlugin):
    plugin_kind = "backend"

    schema_prop = {
        "backend": {},  # GENERIC, String
        "file": {},
        "glob": {},
        "http": {},
        "consul": {},
        "vault": {},
    }

    def fetch_data(self, lookups) -> list:
        raise Exception("Not implemented")

    def __init__(self, namespace):
        self.ns = namespace
        super().__init__()


class StrategyPlugin(KheopsItemPlugin):
    plugin_kind = "strategy"
    schema_prop = {
        "_strategy": {},  # GENERIC, String
        "merge": {},
        "first": {},
        "last": {},
        "smart": {},
        "schema": {},
    }

    def merge_results(self, candidates, rule) -> list:
        pass

    def __init__(self, namespace):
        self.ns = namespace
        super().__init__()


class OutPlugin(KheopsItemPlugin):
    plugin_kind = "out"
    schema_prop = {
        "_out": {},  # GENERIC, List of dict
        "toml": {},
        "validate": {},
    }

    def process_item(self, item) -> list:
        pass
# To clean/implement


# # Candidate Classes

@@ -1,20 +1,18 @@
"""Hierarchy backend plugin"""


import logging
#from pprint import pprint

from kheops.plugin.common import ScopePlugin, ScopeExtLoop
from kheops.utils import path_assemble_hier

log = logging.getLogger(__name__)

from pprint import pprint


class Plugin(ScopePlugin, ScopeExtLoop):
    """Hierarchy plugin"""

    _plugin_name = "hier"
    plugin_name = "hier"
    _schema_props_new = {
        "hier": {
            "default": None,

@@ -10,9 +10,9 @@ log = logging.getLogger(__name__)


class Plugin(ScopePlugin, ScopeExtLoop):
    """Hierarchy plugin"""
    """Loop plugin"""

    _plugin_name = "hier"
    plugin_name = "loop"
    _schema_props_new = {
        "hier": {
            "default": None,

@@ -15,7 +15,7 @@ class Plugin(StrategyPlugin):
    """Last strategy plugin"""


    _plugin_name = "merge_schema"
    plugin_name = "merge_schema"
    _schema_props_new = None

    selector = "matched"

@@ -90,30 +90,21 @@ def render_template_python(text, params, ignore_missing=True):
# Schema Methods
# =====================


def _extend_with_default(validator_class):
    validate_properties = validator_class.VALIDATORS["properties"]

    def set_defaults(validator, properties, instance, schema):

        for prop, subschema in properties.items():
        for property, subschema in properties.items():
            if "default" in subschema:
                instance.setdefault(prop, subschema["default"])
                instance.setdefault(property, subschema["default"])

        try:
            for error in validate_properties(
                validator,
                properties,
                instance,
                schema,
                validator, properties, instance, schema,
            ):
                continue
        except Exception as err:
            log.debug("Jsonschema validation error: %s", err)
            yield error

    return validators.extend(
        validator_class,
        {"properties": set_defaults},
        validator_class, {"properties" : set_defaults},
    )
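# Illustrative usage sketch (assumes the stock jsonschema Draft7Validator):
#
#   from jsonschema import Draft7Validator
#   FillDefaults = _extend_with_default(Draft7Validator)
#   conf = {}
#   FillDefaults({"properties": {"path_suffix": {"default": ""}}}).validate(conf)
#   # conf is now {"path_suffix": ""}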

625
poetry.lock
generated
File diff suppressed because it is too large
@@ -1,9 +1,9 @@
[tool.poetry]
name = "kheops"
version = "0.1.0"
description = "Data trees for Ansible"
version = "0.2.0"
description = "Hierarchical key/value store"
authors = ["Robin Cordier"]
license = "GNU"
license = "Apache License"

[tool.poetry.dependencies]
python = "^3.8"
@@ -16,6 +16,7 @@ dpath = "^2.0.5"
diskcache = "^5.4.0"
mergedeep = "^1.3.4"
typer = "^0.4.0"
Jinja2 = "^3.0.3"

[tool.poetry.dev-dependencies]
json-schema-for-humans = "^0.40"
@@ -29,6 +30,10 @@ bash_kernel = "^0.7.2"
jupyter = "^1.0.0"
pandoc = "^2.0.1"
pytest = "^6.2.5"
pytest-icdiff = "^0.5"
pytest-instafail = "^0.4.2"
pytest-cov = "^3.0.0"
sphinx-rtd-theme = "^1.0.0"

[build-system]
requires = ["poetry-core>=1.0.0"]