Compare commits

...

No commits in common. "master" and "dev" have entirely different histories.
master ... dev

49 changed files with 1857 additions and 2451 deletions

View File

@ -1,13 +0,0 @@
# Build files
/build-fx
/build-cg
/*.g1a
/*.g3a
# Python bytecode
__pycache__/
# Common IDE files
*.sublime-project
*.sublime-workspace
.vscode

View File

@ -1,92 +0,0 @@
#! /usr/bin/make -f
#
# variable definition
#
# color definition, for swagg :D
# (ANSI escape sequences consumed by the pretty-printing `printf` calls below)
red := \033[1;31m
green := \033[1;32m
blue := \033[1;34m
white := \033[1;37m
nocolor := \033[1;0m
# Every C / assembly source below src/, hidden directories skipped.
src := $(foreach path,\
$(shell find src -not -path "*/\.*" -type d), \
$(wildcard $(path)/*.c) \
$(wildcard $(path)/*.S) \
$(wildcard $(path)/*.s))
# Source -> object mapping: '/' is flattened to '_' so every object lands
# directly under $(VXSDK_PREFIX_BUILD); the object rule below reverses this
# substitution to recover the source path.
obj := $(patsubst src_%,$(VXSDK_PREFIX_BUILD)/%.o,$(subst /,_,$(src)))
# Generated asset sources (one .c per asset) are compiled as well.
obj += $(patsubst \
$(VXSDK_ASSETS_SRC)/%,\
$(VXSDK_ASSETS_BUILD)/%.o,\
$(wildcard $(VXSDK_ASSETS_SRC)/*.c) \
)
# Freestanding SuperH (sh4-nofpu, big-endian) build flags; the VXSDK_*
# variables are expected to be exported by the vxSDK build driver.
cflags := -ffreestanding -nostdlib -m4-nofpu -fPIE -O1
cflags += -mb -fstrict-volatile-bitfields
cflags += $(VXSDK_CFLAGS_INCLUDE) -I.. -Iinclude
# debug vars
# VERBOSE=true echoes the full compiler command lines instead of a short tag.
VERBOSE ?= false
#
# build rules
#
# Link every object with the vhex runtime through the fxcg50 linker script,
# dump the link map, then wrap the ELF into an add-in via `vxsdk conv addin`.
vxaddin: $(obj)
@ printf "$(blue)Create $(red)$@$(nocolor)\n"
sh-elf-vhex-gcc \
-T $(VXSDK_PREFIX_LIB)/fxcg50-dynamic.ld -Wl,-q -Wl,-M \
$(VXSDK_CFLAGS_LINK) \
-o $@ $^ \
-lvhex-fxcg50 -lc -lgcc \
> $(VXSDK_PREFIX_BUILD)/map.txt
vxsdk conv addin -b $@ -n vxaddin -o /tmp/vxaddin
version:
@echo "$(VXSDK_PKG_VERSION)"
# NOTE(review): this help text lists rules (all/clean/fclean/re/install/
# uninstall) that are not defined in this Makefile - confirm intended.
help:
@ echo 'Rules listing:'
@ echo '... all the default, if no target is provided'
@ echo '... clean remove build object'
@ echo '... fclean remove all generated object'
@ echo '... re same as `make fclean all`'
@ echo '... version display version'
@ echo '... install install the library'
@ echo '... uninstall uninstall the library'
.PHONY: help version
#
# Object rules
#
# The source path is recovered from the flattened object name by reversing
# the '_' substitution done for `obj` above.
# NOTE(review): these pattern rules declare no prerequisites, so an object
# that already exists is never rebuilt when its source changes - confirm
# this is intended (the vxSDK driver presumably cleans the build prefix).
$(VXSDK_PREFIX_BUILD)%.o:
ifeq ($(VERBOSE),true)
@ mkdir -p $(dir $@)
sh-elf-vhex-gcc \
$(cflags) -D FXCG50 \
-o $@ \
-c $(addprefix src/,$(subst _,/,$(notdir $(basename $@))))
else
@ mkdir -p $(dir $@)
@ printf "$(green)>$(nocolor) $(white)$@$(nocolor)\n"
@ sh-elf-vhex-gcc \
$(cflags) -D FXCG50 \
-o $@ \
-c $(addprefix src/,$(subst _,/,$(notdir $(basename $@))))
endif
$(VXSDK_ASSETS_BUILD)%.o: $(VXSDK_ASSETS_SRC)/%
ifeq ($(VERBOSE),true)
@ mkdir -p $(dir $@)
sh-elf-vhex-gcc $(cflags) -D FXCG50 -o $@ -c $<
else
@ mkdir -p $(dir $@)
@ printf "$(green)>$(nocolor) $(white)$@$(nocolor)\n"
@ sh-elf-vhex-gcc $(cflags) -D FXCG50 -o $@ -c $<
endif

View File

@ -1,14 +0,0 @@
#include <vhex/display.h>
#include <vhex/keyboard.h>
/* Entry point: draw a static greeting once, then idle forever. */
int main(void)
{
    /* Render the sample screen. */
    dclear(C_WHITE);
    dtext(1, 1, C_BLACK, "Sample fxSDK add-in.");
    dupdate();

    /* Idle loop: the `sleep` instruction halts the CPU until the next
       interrupt, keeping power usage low. This loop never exits. */
    for (;;)
        __asm__("sleep");

    /* unreachable */
    return 1;
}

View File

@ -1,8 +0,0 @@
[project]
name = 'vxaddin'
[dependencies]
vxKernel = 'dev'
[build]
build = 'make'

View File

@ -1,5 +1,6 @@
#! /usr/bin/env bash
projdir="$(cd -- "$(dirname -- "${BASH_SOURCE[0]}")" &> /dev/null && pwd)"
prefix="$HOME/.local"
VERSION='0.12.0'
@ -15,10 +16,11 @@ Usage $0 [ACTION]
Actions:
bootstrap Try to bootstrap the vxSDK itself [default]
install Try to install the VsSDK
uninstall Try to uninstall the VsSDK
install Install the VxSDK
uninstall Uninstall the VxSDK
Options:
--force Force reinstallation
--prefix=<PREFIX> Installation prefix (default is ~/.local)
-h, --help Display this help
OEF
@ -32,6 +34,7 @@ OEF
#
target='install'
force_reinstall='false'
for arg; do case "$arg" in
--help | -h)
@ -39,6 +42,9 @@ for arg; do case "$arg" in
-v | --version)
echo "$VERSION"
exit 0;;
--force)
force_reinstall='true'
;;
install)
target='install';;
@ -46,6 +52,8 @@ for arg; do case "$arg" in
target='update';;
uninstall)
target='uninstall';;
uninstall-cached)
target='uninstall-cached';;
*)
echo "error: unreconized argument '$arg', giving up." >&2
exit 1
@ -59,82 +67,131 @@ esac; done
if [[ "$target" = "install" ]]
then
if [[ -d "$prefix/lib/vxsdk/vxsdk" ]]; then
if [[ -d "$prefix/lib/vxsdk/vxsdk" ]]
then
echo 'warning : vxsdk is already installed !' >&2
read -n 1 -p 'Do you whant to re-install the vxSDK (package will not be removed) [y/N] ? ' reinstall
read -n 1 -p 'Do you whant to re-install the vxSDK [y/N] ? ' -r reinstall
[[ "$reinstall" != 'y' ]] && exit 1
echo ''
./install.sh uninstall
"$projdir/install.sh" uninstall-cached
fi
install -d $prefix/lib/vxsdk/vxsdk
cp -r requirements.txt assets vxsdk $prefix/lib/vxsdk/vxsdk
install -d "$prefix/lib/vxsdk/vxsdk"
cp -r \
"$projdir/requirements.txt" \
"$projdir/assets" \
"$projdir/vxsdk" \
"$prefix/lib/vxsdk/"
install -d $prefix/bin
echo '#! /usr/bin/env bash' > $prefix/bin/vxsdk
echo '' >> $prefix/bin/vxsdk
echo "source $prefix/lib/vxsdk/vxsdk/venv/bin/activate" >> $prefix/bin/vxsdk
echo "python3 $prefix/lib/vxsdk/vxsdk/vxsdk \$@" >> $prefix/bin/vxsdk
echo 'deactivate' >> $prefix/bin/vxsdk
chmod +x $prefix/bin/vxsdk
install -d "$prefix/bin"
echo '#! /usr/bin/env bash' > "$prefix/bin/vxsdk"
echo '' >> "$prefix/bin/vxsdk"
echo "source $prefix/lib/vxsdk/venv/bin/activate" >> "$prefix/bin/vxsdk"
echo "python3 $prefix/lib/vxsdk/vxsdk \$@" >> "$prefix/bin/vxsdk"
echo 'deactivate' >> "$prefix/bin/vxsdk"
chmod +x "$prefix/bin/vxsdk"
build_date=$(date '+%Y-%m-%d')
build_hash=$(git rev-parse --short HEAD)
f="$prefix/lib/vxsdk/vxsdk/vxsdk/__main__.py"
sed -e "s*%VERSION%*$VERSION*; s*%BUILD_HASH%*$build_hash*; s*%BUILD_DATE%*$build_date*" vxsdk/__main__.py > $f
f="$prefix/lib/vxsdk/vxsdk/__main__.py"
sed \
-e "s*%VERSION%*$VERSION*" \
-e "s*%BUILD_HASH%*$build_hash*" \
-e "s*%BUILD_DATE%*$build_date*" \
"$projdir/vxsdk/__main__.py" \
> "$f"
mkdir -p $prefix/share/vxsdk
mkdir -p "$prefix/share/vxsdk"
cd $prefix/lib/vxsdk/vxsdk
python3 -m venv venv
source venv/bin/activate
pip install --upgrade pip 2>&1 > /dev/null
pip install -r requirements.txt
deactivate
exit 0
fi
if [[ "$target" = "update" ]]
then
git clone git@github.com:Vhex-org/vxSDK.git --depth=1 /tmp/vxSDK > /dev/null 2>&1 || exit 84
cd /tmp/vxSDK
if [[ "$(./install.sh --version)" == "$VERSION" ]]
cd "$prefix/lib/vxsdk" || exit 1
if [[ ! -d venv ]] || [[ "$force_reinstall" == 'true' ]]
then
rm -rf /tmp/vxSDK
echo 'already up to date !'
exit 0
set -x; python3 -m venv venv; set +x
source venv/bin/activate
set -x
{ pip install --upgrade pip > /dev/null ; } 2>&1
pip install -r requirements.txt
set +x
deactivate
else
source venv/bin/activate
set -x; pip install -r requirements.txt; set +x
deactivate
fi
_check=$(echo -e "$(./install.sh --version)\n$VERSION" | sort -V | head -n1)
if [[ "$_check" != "$VERSION" ]]; then
rm -rf /tmp/vxSDK
echo 'already up to date !'
exit 0
fi
echo "update $VERSION -> $(./install.sh --version)"
./install.sh uninstall
./install.sh install
rm -rf /tmp/vxSDK
exit 0
fi
# same as uninstall but do not remove the venv
if [[ "$target" = 'uninstall-cached' ]]
then
set -x
rm "$prefix/bin/vxsdk"
rm -rf "$prefix/lib/vxsdk/vxsdk"
rmdir "$prefix/share/vxsdk" 2>/dev/null || exit 0
echo 'vxSDK has been partially removed'
exit 0
fi
if [[ "$target" = "uninstall" ]]
if [[ "$target" = 'uninstall' ]]
then
rm $prefix/bin/vxsdk
rm -rf $prefix/lib/vxsdk
rmdir $prefix/share/vxsdk 2>/dev/null || exit 0
echo 'note: repositories cloned by vxSDK have not been removed'
# TODO : rm -rf dependencies too
# TODO : add confirmation input
rm "$prefix/bin/vxsdk"
rm -rf "$prefix/lib/vxsdk"
rmdir "$prefix/share/vxsdk" 2>/dev/null || exit 0
echo 'vxSDK has been removed'
exit 0
fi
#if [[ "$target" = "update" ]]
#then
#
# git \
# clone \
# git@github.com:Vhex-org/vxSDK.git \
# --depth=1 \
# /tmp/vxSDK \
# > /dev/null 2>&1 || exit 84
# cd /tmp/vxSDK
#
# if [[ "$(./install.sh --version)" == "$VERSION" ]]
# then
# rm -rf /tmp/vxSDK
# echo 'already up to date !'
# exit 0
# fi
#
# _check=$(echo -e "$(./install.sh --version)\n$VERSION" | sort -V | head -n1)
#
# if [[ "$_check" != "$VERSION" ]]; then
# rm -rf /tmp/vxSDK
# echo 'already up to date !'
# exit 0
# fi
#
# echo "update $VERSION -> $(./install.sh --version)"
#
# ./install.sh uninstall
# ./install.sh install
#
# rm -rf /tmp/vxSDK
#
#fi
#
#
#
#if [[ "$target" = "uninstall" ]]
#then
#
# rm $prefix/bin/vxsdk
# rm -rf $prefix/lib/vxsdk
# rmdir $prefix/share/vxsdk 2>/dev/null || exit 0
# echo 'note: repositories cloned by vxSDK have not been removed'
# exit 0
#
#fi

View File

@ -0,0 +1,312 @@
"""
exposed_func - checker for vxnorm
This file does not expose an explicit VxChecker object declaration (to avoid
extra dependency handling); you just need to provide:
======================= ===============================================
parse_file() Parse the source file
======================= ===============================================
"""
#---
# Private
#---
## checker rules functions
def _checker_rule_obj(name, checker, mfile, token):
""" mutual class/def checker
"""
if len(token) <= 1:
checker.notify(mfile.line, f"malformated {name} declaraction")
return ''
objname = token[1]
if (has_parenthesis := objname.find('(')) > 0:
objname = token[1][:has_parenthesis]
return objname
def _checker_rule_class(checker, mfile, token):
    """ handle `class <>` formalism

    Select the 'class' rule on the checker, then delegate name extraction
    and malformation reporting to _checker_rule_obj().
    """
    checker.select_rule('class')
    return _checker_rule_obj('class', checker, mfile, token)
def _checker_rule_def(checker, mfile, token):
    """ handle `def <>(<>)` formalism

    Select the 'function' rule on the checker, then delegate name
    extraction and malformation reporting to _checker_rule_obj().
    """
    checker.select_rule('function')
    return _checker_rule_obj('def', checker, mfile, token)
def _checker_rule_from_import(_, __, token):
""" handle `from <>` formalism
"""
return token[0]
def _checker_rule_context(checker, mfile, token):
    """ handle special context indication formalism

    A context banner looks like:
        #---
        # Public        (or '# Internals')
        #---
    The opening '#---' line has already been consumed by the caller; this
    function reads the next two lines and returns the context name
    ('Public' or 'Internals'), or '' when the banner is malformed or is
    not a context switch at all.
    """
    checker.select_rule('context')
    # `token` (the '#---' token list) is deliberately shadowed: only the
    # following raw lines matter here
    if not (token := mfile.readline()):
        checker.notify(mfile.line, 'malformated context switch')
        return ''
    if token not in ['# Public\n', '# Internals\n']:
        # some other comment banner: silently ignore
        return ''
    # strip the leading '# ' and the trailing newline
    context = token[2:-1]
    # the banner must be closed right away by '#' or '#---'
    # NOTE(review): the message speaks of an "empty line" but the check is
    # for the closing comment line - confirm the intended wording
    if (token := mfile.readline()) not in ['#\n', '#---\n']:
        checker.notify(
            mfile.line,
            "context switch should have empty line between context and desc."
        )
    return context
def _checker_rule_all(checker, mfile, line_token):
""" handle __all__ formalism
"""
checker.select_rule('__all__')
if line_token != ['__all__', '=', '[']:
checker.notify(mfile.line, 'malformated __all__ declaration')
return ''
funclist = []
for line in mfile.getlines():
for token in line.split():
if token == ']':
return funclist
if token[0] not in ['\'', '"']:
checker.notify(mfile.line, 'malformated function declaraction')
continue
if token[0] == '"':
checker.notify(
mfile.line,
'function declaraction should start with single quote'
)
funclist.append(token[1: (-1 + (token[-1] != ',')) - 1])
return []
## checker layout functions
def _checker_layout_import(checker, info, token):
""" check from / import position
@rules
> any import must be placed before __all__ declaration
> any import must be performed before any active context
> 'import' must be place before 'from'
"""
if info['dall']:
checker.notify(
token['line'],
'__all__ declaration should be placed after any import'
)
if token['tag'] == 'import':
if info['dfrom']:
checker.notify(
token['line'],
'\'import\' should be placed before any \'from\''
)
else:
info['dfrom'] = True
def _checker_layout_all(checker, info, token):
""" check __all__ position
@rules
> '__all__' must be declared only one time
"""
if info['dall']:
checker.notify(
token['line'],
'multiple definition of __all__ declaration'
)
info['sym_exposed'] += token['data']
info['dall'] = True
def _checker_layout_context(checker, info, token):
""" check context layout
@rules
> 'Public' layout must be placed after 'Private'
> multiple context switch not allowed (Privte -> Public or Public only)
"""
if info['context']:
if info['context'] == token['data']:
checker.notify(
token['line'],
f"multiple '{token['data']}' context definition"
)
return
if info['context'] == 'Public':
checker.notify(
token['line'],
'switching between Public -> Private context'
)
return
info['context'] = token['data']
def _checker_layout_class(checker, info, token):
    """ check class definition
    @rules
    > [~] handle default context if no one is specified
    > if 'Public' context -> must not start with '_'
    > if 'Private' context -> must start with '_'
    > name must start with capital letter
    """
    # no explicit context banner seen yet: default to 'Public' and warn
    if not info['context']:
        checker.notify(
            token['line'],
            'missing explicit context selection, switch to \'Public\''
        )
        info['context'] = 'Public'
    if info['context'] == 'Public':
        # public class: no leading underscore, capitalized name
        if token['data'][0] == '_':
            checker.notify(
                token['line'],
                'public class must not start with underscore'
            )
            return
        if not token['data'][0].isupper():
            checker.notify(
                token['line'],
                'class must start its name with capital letter'
            )
        # only public classes are recorded for the __all__ cross-check
        info['symbols'].append(token['data'])
    else:
        # private class: leading underscore, then a capitalized name
        if token['data'][0] != '_':
            checker.notify(
                token['line'],
                'private class must start with underscore'
            )
            return
        # NOTE(review): token['data'][1] raises IndexError when a private
        # class is named exactly '_' - confirm whether a guard is needed
        if not token['data'][1].isupper():
            checker.notify(
                token['line'],
                'class must start its name with capital letter'
            )
def _checker_layout_def(checker, info, token):
""" check function name
@rules
> no capital letters
> no numeric caracters
> [~] handle default context if no one is specified
> in Private -> must start with one underscore
> in Public -> must start with the modulename and no capital letter
"""
if any(char.isupper() for char in token['data']):
checker.notify(
token['line'],
'function must no contain capital letters'
)
if not info['context']:
checker.notify(
token['line'],
'missing explicit context selection, switch to \'Public\''
)
info['context'] = 'Public'
if info['context'] != 'Public':
if token['data'][0] != '_':
checker.notify(
token['line'],
'private function must start with underscore'
)
return
if token['data'][1] == '_':
checker.notify(
token['line'],
'private function must only have one underscore'
)
else:
info['symbols'].append(token['data'])
if not info['modname'] or info['special']:
return
if token['data'].find(info['modname']) != 0:
checker.notify(
token['line'],
'public function must start with the module name '
f"({info['modname']})"
)
#---
# Public
#---
def parse_file(checker, mfile, pathinfo):
    """ parse the mapped file

    Two passes are performed:
      1. scan every top-level line and tokenise the interesting keywords
         (class / def / __all__ / '#---' context banner / from / import)
         into `layout`
      2. replay `layout` through the per-keyword layout checkers, then
         cross-check the collected public symbols against __all__

    The file is mapped using mmap() and seeked through offset 0 to avoid too
    many I/O operations with classical file primitive.

    @args
    > checker (VxChecker) - current checker instance for this file
    > mfile (mmap) - mmap instance of the file, seeked at 0
    > pathname (str) - file pathname

    @return
    > Nothing
    """
    # (keyword, tokeniser, layout checker) dispatch table used by both passes
    table = [
        ('__all__', _checker_rule_all, _checker_layout_all),
        ('class', _checker_rule_class, _checker_layout_class),
        ('def', _checker_rule_def, _checker_layout_def),
        ('#---', _checker_rule_context, _checker_layout_context),
        ('from', _checker_rule_from_import, _checker_layout_import),
        ('import', _checker_rule_from_import, _checker_layout_import),
    ]
    layout = []
    checker.select_rule('expofunc')
    # pass 1: only top-level statements matter, indented lines are skipped
    for line in mfile.getlines():
        if line[0].isspace():
            continue
        if not (token := line.split()):
            continue
        for keyword in table:
            if token[0] != keyword[0]:
                continue
            # record the line number before the tokeniser consumes more lines
            line = mfile.line
            # a falsy datum means "malformed/irrelevant": nothing recorded
            if not (data := keyword[1](checker, mfile, token)):
                continue
            layout.append({
                'tag' : keyword[0],
                'data' : data,
                'line' : line
            })
    # public functions must be prefixed with the module name; an empty
    # modname disables that check for package __init__ files
    modname = pathinfo['filename'][:-3]
    if pathinfo['filename'] == '__init__.py':
        modname = ''
    # special behaviour for CLI exposition file
    info = {
        'modname' : modname,
        'dall' : False,
        'dfrom' : False,
        'sym_exposed' : [],
        'context' : '',
        'symbols' : [],
        'special' : 'cli' in pathinfo['dirs']
    }
    find = False
    checker.select_rule('layout')
    # pass 2: feed each recorded token to its layout checker
    for token in layout:
        find = False
        for keyword in table:
            if token['tag'] != keyword[0]:
                continue
            keyword[2](checker, info, token)
            find = True
            break
        if not find:
            checker.notify('internal', f"unknown token '{token['tag']}'")
    if not info['symbols'] or info['special']:
        return
    # every collected public symbol must be exposed through __all__, in order
    if info['symbols'] != info['sym_exposed']:
        text = '__all__ = [\n    '
        text += ',\n    '.join(info['symbols'])
        checker.notify(
            0,
            f"mismatch exposed function, you should use:\n{text}\n]"
        )

View File

@ -0,0 +1,35 @@
"""
line_len - checker for vxnorm
This file does not expose an explicit VxChecker object declaration (to avoid
extra dependency handling); you just need to provide:
======================= ===============================================
parse_file() Parse the source file
======================= ===============================================
"""
#---
# Public
#---
def parse_file(checker, mfile, _):
    """ parse the mapped file

    Enforce the 80-column limit on every line of the file; comment lines
    (starting with '#') are exempt.

    The file is mapped using mmap() and seeked through offset 0 to avoid too
    many I/O operations with classical file primitive.

    @args
    > checker (VxChecker) - current checker instance for this file
    > mfile (mmap) - mmap instance of the file, seeked at 0
    > _ (unused) - path information, not needed by this checker

    @return
    > Nothing
    """
    checker.select_rule('linelen')
    for counter, line in enumerate(mfile.getlines(), start=1):
        # measure the visible length: the trailing newline kept by
        # getlines() must not count toward the 80-column budget
        # (otherwise an exactly-80-column line was wrongly flagged)
        length = len(line.rstrip('\n'))
        if length <= 80 or line[0] == '#':
            continue
        checker.notify(f"{counter}", f"too long line ({length}/80)")

View File

@ -0,0 +1,61 @@
"""
module_docstring - checker for vxnorm
This file does not expose an explicite VxChecker object declaration to avoid
dependencies handling, you just need to provide:
======================= ===============================================
parse_file() Parse the source file
======================= ===============================================
"""
#---
# Public
#---
def parse_file(checker, mfile, pathinfo):
    """ parse the mapped file

    Validate the module-level docstring: its first line must be of the
    form `<full.module.name> - <description>`, followed by a blank line or
    the docstring terminator.

    The file is mapped using mmap() and seeked through offset 0 to avoid too
    many I/O operations with classical file primitive.

    @args
    > checker (VxChecker) - current checker instance for this file
    > mfile (VxMmap) - custom mmap instance of the file, seeked at 0
    > pathinfo (dict) - path information

    @return
    > Nothing
    """
    # the CLI entry file is exempt from the docstring rule
    if pathinfo['filename'] == '__main__.py':
        return
    # skip the leading comment block (shebang, header, ...)
    line = ''
    for line in mfile.getlines():
        if line[0] != '#':
            break
    if line[0] == '#':
        # file contains only comments: nothing to check
        return
    checker.select_rule('modocstr')
    # NOTE(review): '""""\n' holds four quotes - looks like a typo for the
    # three-quote opener '"""\n'; as written this branch can hardly match
    if line == '""""\n':
        checker.notify(0, 'missing docstring')
    # first docstring line: '<module.name> - <description>'
    # NOTE(review): modinfo[1] raises IndexError when that line holds a
    # single word - confirm whether a guard is needed
    modinfo = mfile.readline().split(' ', 2)
    # expected module name, derived from the file's path
    modname = f"{'.'.join(pathinfo['dirs'])}"
    if pathinfo['filename'] != '__init__.py':
        modname += f".{pathinfo['filename'][:-3]}"
    if modinfo[0] != modname:
        checker.notify(
            0,
            f"malformated module name ({modinfo[0]} != {modname})"
        )
        return
    if modinfo[1] != '-':
        checker.notify(0, 'missing separator')
        return
    # the declaration line must be followed by a blank line or the
    # docstring terminator
    if mfile.readline() not in ['\n', '"""\n']:
        checker.notify(
            0,
            'missing empty line between declaration and docstring end'
        )

View File

@ -0,0 +1,49 @@
"""
pylint - checker for vxnorm
This file does not expose an explicite VxChecker object declaration to avoid
dependencies handling, you just need to provide:
======================= ===============================================
parse_file() Parse the source file
======================= ===============================================
"""
import subprocess
#---
# Public
#---
def parse_file(checker, _, pathinfo):
    """ parse the mapped file

    Delegate the real work to an external `pylint` run and convert its
    report lines into checker notifications.

    @args
    > checker (VxChecker) - current checker instance for this file
    > _ (unused) - mmap instance of the file (pylint reads from disk)
    > pathinfo (dict) - path information (only 'filepath' is used)

    @return
    > Nothing
    """
    # check=False: a non-zero pylint status is the interesting case here,
    # not an error worth raising on
    status = subprocess.run(
        ['pylint', pathinfo['filepath']], capture_output=True, check=False
    )
    if status.returncode == 0:
        return
    for line in status.stdout.decode('utf8').split('\n'):
        if not line:
            continue
        if line[0] == '*':
            # '************* Module ...' banner line
            continue
        if line[0] == '-':
            # '------...' start of the final score section: stop here
            break
        # expected report shape (presumably pylint's default text format):
        # '<path>:<line>:<col>: <CODE>: <message>' - TODO confirm; lines
        # with fewer than three fields would raise IndexError below
        if not (line := line.split(' ', 2)):
            continue
        checker.notify(
            line[0].split(':', 1)[1][:-1],
            f"[{line[1][:-1]}] {line[2]}",
            'pylint'
        )

View File

@ -0,0 +1,44 @@
"""
trainling_spaces - checker for vxnorm
This file does not expose an explicit VxChecker object declaration (to avoid
extra dependency handling); you just need to provide:
======================= ===============================================
parse_file() Parse the source file
======================= ===============================================
"""
#---
# Public
#---
def parse_file(checker, mfile, _):
    """ parse the mapped file

    Flag trailing whitespace at the end of every line.

    The file is mapped using mmap() and seeked through offset 0 to avoid too
    many I/O operations with classical file primitive.

    @args
    > checker (VxChecker) - current checker instance for this file
    > mfile (mmap) - mmap instance of the file, seeked at 0
    > _ (unused) - path information, not needed by this checker

    @return
    > Nothing
    """
    checker.select_rule('trailing')
    for counter, line in enumerate(mfile.getlines(), start=1):
        # work on the line content without its trailing newline
        body = line[:-1] if line.endswith('\n') else line
        if not body:
            continue
        trimmed = body.rstrip()
        if len(trimmed) == len(body):
            continue
        # i is the index of the last non-space character (-1 when the
        # whole line is whitespace). The previous backwards scan never
        # inspected index 0, so a single trailing space after a one-char
        # line ("a \n") and all-whitespace lines were never reported.
        i = len(trimmed) - 1
        line_end = len(body) - 1
        checker.notify(
            f"{counter}:{i+1}",
            f"trailing space at ({i+1} != {line_end})",
        )

View File

@ -3,12 +3,14 @@ vxSDK is a suitable of tools used in conjunction with the Vhex Operating System
to develop game, add-in, abstract build step with dependencies resolver and
more.
"""
import sys
import os
import sys
from core.logger import log
#---
# Internals
#---
# Program version (inserted at compile-time)
__VXSDK_VERSION__ = "%VERSION%"
@ -20,18 +22,18 @@ __VXSDK_HELP__ = r"""
Vhex' Software Developement Kit
USAGE:
vxsdk [+toolchain] [OPTIONS] [SUBCOMMAND]
vxsdk [OPTIONS] [SUBCOMMAND]
OPTIONS:
-v, --version Print version info and exit
--list List installed command
--update Try to update the ymtools
--update Try to update the vxSDK
-h, --help Print helps information
Default sub-commands used:
project Project abstraction
pkg Package manager
build Build abstraction
project Project abstraction
See `vxsdk <sub-command> --help` for more information on a specific command
""".strip()
@ -41,14 +43,15 @@ def _list_modules():
try:
mod = __import__(f"cli.{name.split('.')[0]}", fromlist=[
'__VXSDK_MODULE_META__',
'cli_parse'
'cli_validate'
'cli_parse',
])
if not hasattr(mod, '__VXSDK_MODULE_META__'):
continue
if not hasattr(mod, 'cli_validate'):
continue
if not hasattr(mod, 'cli_parse'):
continue
if not hasattr(mod, '__VXSDK_MODULE_META__'):
continue
yield mod
except ImportError as err:
log.warn(f"[vxsdk] module '{name}' cannot be imported")
@ -66,6 +69,7 @@ def _subcommand_list():
mod_info = mod.__VXSDK_MODULE_META__[1]
args = str(mod.__VXSDK_MODULE_META__[0]).strip('[]')
log.user(f" {args}".ljust(32) + f"{mod_info}")
return 0
def _main(argv):
if not argv:
@ -84,8 +88,8 @@ def _main(argv):
sys.exit(0)
if argv[0] == '--update':
return os.system(
os.path.dirname(__file__) + '/../install.sh update'
sys.exit(
os.system(f"{os.path.dirname(__file__)}/../install.sh update")
)
if argv[0] in ['-v', '-vv', '-vvv']:
@ -96,7 +100,11 @@ def _main(argv):
sys.exit(84)
if len(argv) == 1 and argv[0] == '--list':
return _subcommand_list()
return _subcommand_parse(argv)
sys.exit(_subcommand_list())
sys.exit(_subcommand_parse(argv))
#---
# Public
#---
_main(sys.argv[1:])

View File

@ -1,12 +1,10 @@
"""vxsdk-build modules
This module provides package abstraction and build core function for the Vhex
Operating system project.
"""
cli.build - vxSDK build interface
"""
from core.logger import log
from cli.build.default import build_default_cli
from cli.build.doctor import build_doctor_cli
from cli.build.default import default_build_cli
from cli.build.doctor import doctor_build_cli
__all__ = [
@ -23,114 +21,32 @@ __VXSDK_MODULE_META__ = (
Build System Abstraction
USAGE:
vxsdk build(-<platform>) [OPTIONS]
vxsdk build(-<target>) [OPTIONS]
DESCRIPTION:
Compile Vhex project.
Compile a Vhex project.
NOTES:
The Vex build system is extremely powerful and polyvalent. It allows the
user to totally ignore the build part and the dependencies management.
The Vhex build system is extremely powerful and polyvalent. It allows the
user to totally ignore the build part and the dependencies management of
the project.
All Vhex projects use a <.vxsdk.toml> file wich should be stored at the root
of the project directory (you cam generate a project template using the
`vxsdk project new <project>`). This file uses the TOML language to control
the build step of a project.
```
# Default meta-data information for the pacakge manager.
[project]
name = 'myaddin' # required
version = '1.0.0' # required
type = 'addin' # optional ('addin' or 'app')
description = ''' # optional
An old-school demo-scene !
Written by Yatis :)
'''
# Dependencies information for the build manager. (optional)
#
# The version can target <tag> or branch name.
#
# Note that you can use a powerfull operations for managing version of
# dependencies if the 'version' is detected to respect the correct
# semantic versioning like caret (^), tilde (~) and wildcard (*):
#
# Caret requirements (^)
# ^1.2.3 := >=1.2.3, <2.0.0
# ^1.2 := >=1.2.0, <2.0.0
# ^1 := >=1.0.0, <2.0.0
# ^0.2.3 := >=0.2.3, <0.3.0
# ^0.2 := >=0.2.0, <0.3.0
# ^0.0.3 := >=0.0.3, <0.0.4
# ^0.0 := >=0.0.0, <0.1.0
# ^0 := >=0.0.0, <1.0.0
#
# Tilde requirements (~)
# ~1.2.3 := >=1.2.3, <1.3.0
# ~1.2 := >=1.2.0, <1.3.0
# ~1 := >=1.0.0, <2.0.0
#
# Wildcard requirements (*)
# * := >=0.0.0
# 1.* := >=1.0.0, <2.0.0
# 1.2.* := >=1.2.0, <1.3.0
#
# Note that it's possible that a package can have two same versions (one
# for branch name and another for a tag), by default, the tag is always
# selected, but here the display information will explicitly describe if
# the version is a tag and / or a branch.
[dependencies]
vxkernel = 'master' # recommanded
sh-elf-vhex = 'master' # recommanded
custom-lib = '^1.5' # exemple of carret (^) operation
# Manual build indication (option)
#
# Note that if this section is specified then only this build indication
# will be executed, no default step will be used
#
# All building steop are, in order:
# > configure
# > build
# > install
# And during all of theses building step, the vxSDk setup some
# environment variable:
# > VXSDK_PKG_NAME - project name
# > VXSDK_PKG_VERSION - project version
# > VXSDK_PREFIX_BUILD - project build prefix (for object files)
# > VXSDK_PREFIX_INSTALL - project installation prefix
# > VXSDK_PREFIX_LIB - prefix for all stored librairy
# > VXSDK_CFLAGS_INCLUDE - Include flags for GCC
# > VXSDK_CFLAGS_LINK - Linker flags for GCC
# > VXSDK_ASSETS_SRC - Assets sources file directory
# > VXSDK_ASSETS_BUILD - Assets build directory (for object)
[build]
configure = 'mkdir -p build && cd build && ../configure --verbose'
build = 'cd build && make'
# Dependencies Hook (optional)
#
# You need to create a section wich is named like [extra.<package_name>]
# and you can "hook" some step of particular dependencies (here
# vxkernel for exemple).
#
# A hook is simply additional string that will be send to the
# appropriate step of the build.
[extra.vxkernel]
configure = '--static --verbose'
```
Above is a exemple of a defaut project configuration file that can be used
for a new project.
All Vhex projects use a <vxsdk.toml> file which should be stored at the
root of the project directory (you can generate a project template using
the `vxsdk project new <project>`). This file uses the TOML language to
control the build behaviour.
This is an exemple of a project description. We recommand you to see the
vxKernel one because it uses a lot of advanced feature like target and
environement configuration. You can also check the wiki to have a complet
documentation about this file.
OPTIONS:
-v, --verbose Disable threading and display log information
-v, --verbose Display more information during the build
-r, --rebuild Force rebuild the project
-u, --update Update dependencies
--extra-conf [ARG]... Add extra configuration flags
--extra-build [ARG]... Add extra build flags
-h, --help Display this help message
ACTIONS:
doctor Display all information about one package
@ -152,5 +68,5 @@ def cli_parse(argv):
log.user(__VXSDK_MODULE_META__[2])
return 0
if argv[1] == 'doctor':
return build_doctor_cli(argv[2:])
return build_default_cli(argv)
return doctor_build_cli(argv[2:])
return default_build_cli(argv)

View File

@ -1,5 +1,5 @@
"""
Vhex default build management
cli.build.default - Vhex default build management
"""
import sys
@ -7,16 +7,18 @@ from core.build.project import VxProject
from core.logger import log
__all__ = [
'build_default_cli'
'default_build_cli'
]
def build_default_cli(argv):
"""Parse CLI arguments"""
board_target = None
if argv[0].find('build-') == 0:
board_target = argv[0][6:]
#---
# Public
#---
path = None
def default_build_cli(argv):
"""Parse CLI arguments"""
target = argv[0][6:] if argv[0].find('build-') == 0 else ''
path = ''
verbose = False
extra_conf = {
'configure' : '',
@ -43,7 +45,4 @@ def build_default_cli(argv):
sys.exit(85)
path = arg
return VxProject(path, extra_conf=extra_conf).build(
board_target,
verbose
)
return VxProject(path, target=target, extra_conf=extra_conf).build(verbose)

View File

@ -1,20 +1,25 @@
"""
Display pacakge build information
cli.build.doctor - inspect package's build step
Display all package information (path, name, ...) and it will try to display
the dependencies information (if dependencies have been found, dependencies
graph, ...)
"""
import sys
from core.logger import log
from core.build.project import VxProject
#from core.build.project import VxProject
__all__ = [
'build_doctor_cli'
'doctor_build_cli'
]
def build_doctor_cli(argv):
r""" Package doctor
#---
# Public
#---
This function will display all package information (path, name, ...) and it
will try to display the dependencies information (if dependencies have been
found, dependencies graph, ...).
"""
log.user(VxProject(None if len(argv) <= 0 else argv[0]))
return 0
def doctor_build_cli(_):
""" Argument handling """
#log.user(VxProject(None if len(argv) <= 0 else argv[0]))
log.user("build doctor not implemented")
sys.exit(84)

View File

@ -1,11 +1,19 @@
from core.config import config_set, config_get
"""
cli.config - vxSDK configuration interface
"""
from core.logger import log
from core.config import config
__all__ = [
'__VXSDK_MODULE_META__',
'cli_validate',
'cli_validate',
'cli_parse',
]
#---
# Public
#---
__VXSDK_MODULE_META__ = (
['config'],
"vxSDK configuration module",
@ -20,27 +28,22 @@ DESCRIPTION:
"""
)
# TODO: list all key available
# TODO: level selection : 'local', 'global'
def cli_validate(name):
""" validate the module name """
return name in __VXSDK_MODULE_META__[0]
def cli_parse(argv):
""" Config subcommand entry """
if '--help' in argv or '-h' in argv:
logger(LOG_USER, __VXSDK_MODULE_META__[2])
log.user(__VXSDK_MODULE_META__[2])
return 0
if len(argv) == 2:
logger(LOG_USER, config_get(argv[1]))
log.user(config.get(argv[1]))
return 0
if len(argv) == 3:
if old := config_set(argv[1], argv[2]):
logger(LOG_USER, f"previous value = {old}")
if old := config.set(argv[1], argv[2]):
log.user(f"previous value = {old}")
return 0
logger(LOG_EMERG, __VXSDK_MODULE_META__[2])
log.error(__VXSDK_MODULE_META__[2])
return 84

View File

@ -1,61 +0,0 @@
"""vxsdk-converter modules
This package provides conversion abstraction (image -> source code, ELF ->
addin, ...) for the Vhex project.
"""
from core.logger import log
from cli.conv.doctor import conv_doctor_cli_parse
from cli.conv.asset import conv_asset_cli_parse
from cli.conv.addin import conv_addin_cli_parse
__all__ = [
'__VXSDK_MODULE_META__',
'cli_validate',
'cli_parse',
]
__VXSDK_MODULE_META__ = (
['conv'],
'assets converter',
r"""vxsdk-conv
Project assets conv
USAGE:
vxsdk conv(-<ACTION>) [OPTIONS] ...
DESCRIPTION:
Convert vhex project assets (or binary) into various form. By default, if no
action is specified, the "asset" conversion is selected.
ACTIONS:
doctor try to display assets and addin information (debug)
asset convert asset into source file or binary file
addin convert binary into addin file for vxOS
See `vxsdk conv <action> --help` for more information on a specific action
"""
)
def cli_validate(name):
    """ validate the module name

    Accept any sub-command whose name begins with 'conv' (e.g. 'conv',
    'conv-asset', 'conv-addin').
    """
    return name.startswith('conv')
def cli_parse(argv):
    """ Build subcommand entry

    Dispatch `vxsdk conv(-<action>)` to the matching converter action
    ('doctor', 'asset' or 'addin'); 'asset' is the default action when no
    explicit 'conv-<action>' form is used.
    """
    if '--help' in argv or '-h' in argv:
        log.user(__VXSDK_MODULE_META__[2])
        return 0
    # default to the asset converter
    # NOTE(review): argv[0] is rewritten in place, so the caller's list is
    # mutated - confirm this side effect is intended
    if argv[0].find('conv-') != 0:
        argv[0] = 'conv-asset'
    # strip the 'conv-' prefix to obtain the action name
    action = argv[0][5:]
    if action == 'doctor':
        return conv_doctor_cli_parse(argv[1:])
    if action == 'asset':
        return conv_asset_cli_parse(argv[1:])
    if action == 'addin':
        return conv_addin_cli_parse(argv[1:])
    log.error(f"unable to find action '{action}'")
    return 84

View File

@ -1,47 +0,0 @@
from core.conv.addin import generate_addin
__all__ = [
'conv_addin_cli_parse'
]
__HELP__ = r"""vxsdk-converter-addin
Converte binary file into Vhex OS addin.
USAGE:
vxsdk conv addin -b BINARY ...
DESCRIPTION:
Convert a binary file into an application for the Vhex operating system.
OPTIONS:
-b <binary path> ELF binary file (no check is performed in this file)
-i <icon path> 92x62 pixel image path
-o <output path> output path for the generated addin
-n <internal name> internal addin name
-v <internal version> internal addin version
"""
def conv_addin_cli_parse(argv):
    """Process CLI arguments

    Parse the `vxsdk conv addin` command line and forward the collected
    information to the addin generator.

    @args
    > argv (list) - command line arguments (without the action word)

    @return
    > 0 on help display, 84 when the mandatory binary path is missing,
    > otherwise the `generate_addin()` return value
    """
    # bugfix: the original called the undefined `logger()` helper with the
    # undefined LOG_USER / LOG_ERR constants (NameError at runtime); use
    # the project logging facade, as the sibling conv modules do
    from core.logger import log
    if '-h' in argv or '--help' in argv:
        log.user(__HELP__)
        return 0
    # info slots: [binary, icon, name, output, version]
    flag_slot = {'-b': 0, '-i': 1, '-n': 2, '-o': 3, '-v': 4}
    action = None
    info = [None, None, None, None, None]
    for arg in argv:
        # the previous argument was a flag: consume this one as its value
        if action:
            info[flag_slot[action]] = arg
            action = None
            continue
        if arg in flag_slot:
            action = arg
            continue
        # unknown positional arguments are silently ignored (original
        # behaviour preserved)
    if info[0] is None:
        log.error('converter: need binary path !')
        return 84
    return generate_addin(info[0], info[1], info[2], info[3], info[4])

View File

@ -1,108 +0,0 @@
"""
Vhex asset converter user interface
"""
import os
from core.logger import log
from core.conv import assets_generate
__all__ = [
'conv_asset_cli_parse'
]
__HELP__ = r"""vxsdk-converter-asset
Convert all assets file in the project directory.
USAGE:
vxsdk conv-asset [project path] [OPTIONS]
DESCRIPTION:
Convert all assets file in the asset directory. This part of the converter
module will scan the provided folder (or the current working directory) and
will try to find all `vxconv.txt` file, which describe all assets that
should be converted.
If no argument is provided, then the current working directory is used as
asset prefix and a storag for all generated source file. You can modify this
behaviour using OPTIONS.
The vxconv.txt file is structured like basic key/value file:
```
<exposed_symbols_name>:
type: <image type> (font, bitmap) - required
path: <image path> - required
...
<next_exposed_symbols_name>:
...
```
Each asset file description should have at least type and name information,
and each type have potentially its own requierements.
type = bitmap:
================================== =========================================
Keys name and value type Description
================================== =========================================
profile: <name> Select the bitmap pixel profile
| rgb4 | RGB 4 (indexed)
| rgb4a | RGBA 4 (indexed)
| rgb8 | RGB 8 (indexed)
| rgb8a | RGBA 8 (indexed)
| rgb16 | RGB 16 (5:R, 6:G, 5:B)
| rgb16a | RGBA 16 (5:R, 5:G, 5:B, 1:A)
================================== =========================================
type = font:
================================== =========================================
Keys name and value type Description
================================== =========================================
grid_size: 8x9 (widthxheight) caracter size in pixel
grid_padding: <pixel> space between caracter
grig_border: <pixel> space around grid
proportional: <true,false> caracter are cropped
line_height: <pixel> caracter line alignement
charset: <default,unicode> charset specification
char_spacing <pixel> space between character
================================== =========================================
OPTIONS:
-o <output prefix> The prefix for source file that will be generated
-h, --help Display this help
"""
def conv_asset_cli_parse(argv):
    """Process CLI arguments

    Scan the command line for an optional positional asset prefix and an
    optional output prefix (`-o <path>`), defaulting both to the current
    working directory, then start the asset generation.

    @args
    > argv (list) - command line arguments (without the action word)

    @return
    > 0 on help display, otherwise the `assets_generate()` return value
    """
    # check obvious flags
    if '-h' in argv or '--help' in argv:
        log.user(__HELP__)
        return 0
    # fetch user indication
    manual_output = False
    prefix_output = None
    prefix_asset = None
    for arg in argv:
        if arg == '-o':
            manual_output = True
            continue
        if manual_output:
            prefix_output = arg
            # bugfix: consume exactly one argument after '-o'; the flag was
            # never reset, so any later positional argument silently
            # overwrote the output prefix instead of becoming the asset path
            manual_output = False
            continue
        if prefix_asset:
            log.warn(f"warning: previous path ({prefix_asset}) dropped")
        prefix_asset = arg
    # check indication
    if not prefix_asset:
        prefix_asset = os.getcwd()
    if not prefix_output:
        prefix_output = os.getcwd()
    prefix_asset = os.path.abspath(prefix_asset)
    prefix_output = os.path.abspath(prefix_output)
    # generate asset information
    return assets_generate(prefix_asset, prefix_output)

View File

@ -1,13 +0,0 @@
__all__ = [
'conv_doctor_cli_parse'
]
def conv_doctor_cli_parse(argv):
    """Process CLI handling

    Debug action, not implemented yet; always succeeds after warning.

    TODO:
    > give asset file description to check error
    > try to display asset and addin information based on the project type

    @args
    > argv (list) - command line arguments (currently unused)

    @return
    > always 0
    """
    # bugfix: the original called the undefined `logger()` helper with the
    # undefined LOG_WARN constant (NameError at runtime); use the project
    # logging facade instead
    from core.logger import log
    log.warn('conv: doctor action not implemented yet')
    return 0

View File

@ -1,4 +1,5 @@
"""vxsdk-pkg modules
"""
cli.pkg - package abstraction
This module provides package management utilities that track installed Vhex
packages. It features : dependency support, package groups, install and
@ -7,15 +8,24 @@ to automatically upgrade packages.
"""
from core.logger import log
from cli.pkg.search import pkg_search_cli_parse
from cli.pkg.update import pkg_update_cli_parse
from cli.pkg.clone import pkg_clone_cli_parse
from cli.pkg.search import search_pkg_cli
from cli.pkg.update import update_pkg_cli
from cli.pkg.clone import clone_pkg_cli
__all__ = [
'__VXSDK_MODULE_META__',
'cli_validate',
'cli_parse'
]
#---
# Public
#---
__VXSDK_MODULE_META__ = (
['pkg'],
"package manager for Vhex's project",
r"""vxsdk package
"""vxsdk package
Package manager for Vhex
SYNOPSIS:
@ -38,11 +48,11 @@ def cli_parse(argv):
""" Vhex package CLI parser entry """
if len(argv) > 2:
if argv[1] == 'search':
return pkg_search_cli_parse(argv[2:])
return search_pkg_cli(argv[2:])
if argv[1] == 'clone':
return pkg_clone_cli_parse(argv[2:])
return clone_pkg_cli(argv[2:])
if argv[1] == 'update':
return pkg_update_cli_parse(argv[2:])
return update_pkg_cli(argv[2:])
if '-h' in argv or '--help' in argv:
log.user(__VXSDK_MODULE_META__[2])
return 0

View File

@ -1,19 +1,20 @@
"""
Vhex package cloning user interface
cli.pkg.clone - Vhex package cloning user interface
"""
import os
import sys
from core.logger import log
import core.pkg
from core.logger import log
__all__ = [
'pkg_clone_cli_parse'
'clone_pkg_cli'
]
#---
# Internals
#---
__HELP__ = r"""
__HELP__ = """
vxsdk-pkg-clone
Package manager cloning part
@ -28,8 +29,8 @@ DESCRIPTION:
This will allow the searching to try to match package with specific version
information. This part is very powerfull because If the 'version' is
detected to respect the correct semantic versioning, you can perform version
operations in the version target:
detected to respect the correct semantic versioning, you can perform
version operations in the version target:
Caret requirements (^)
^1.2.3 := >=1.2.3, <2.0.0
@ -58,15 +59,17 @@ DESCRIPTION:
OPTIONS:
-y, --yes Do not ask for interactive confirmation
-n, --no-build Do not build the project, just clone
-c, --confirm Ask for interactive confirmation
-o, --output=<path> Clone (or link) at <path>
--bare Do not clone the package in the global storage
-h, --help Print this help and exit
""".strip()
#---
# Entry point of the module
# Public
#---
def pkg_clone_cli_parse(argv):
def clone_pkg_cli(argv):
""" Clone a particular package """
if not argv:
log.error(__HELP__)
@ -75,15 +78,41 @@ def pkg_clone_cli_parse(argv):
log.user(__HELP__)
sys.exit(0)
output = ''
bare = False
confirm = True
workaround = False
package_target = ''
for arg in argv:
if workaround:
output = arg
workaround = False
continue
if arg == '-o':
workaround = True
continue
if arg in ['-y', '--yes', '-c', '--confirm']:
confirm = arg in ['-c', '--confirm']
continue
core.pkg.clone(
arg.split('@')[0],
None if len(arg.split('@')) != 2 else arg.split('@')[1],
os.getcwd(),
confirm
)
return 0
if arg.find('--output=') == 0:
output = arg[9:]
continue
if arg == '--bare':
bare = True
continue
if not package_target:
package_target = arg.split('@')
if len(package_target) < 2:
package_target = [arg, '']
continue
log.error(f"argument '{arg}' unrecognized")
sys.exit(84)
path = core.pkg.clone(
package_target[0],
None if len(package_target) != 2 else package_target[1],
output,
confirm,
bare
)
return 0 if path else -1

View File

@ -1,18 +1,20 @@
"""
Vhex package searching user interface
cli.pkg.search - Vhex package searching user interface
"""
import sys
from core.logger import log
import core.pkg
from core.logger import log
__all__ = [
'pkg_search_cli_parse'
'search_pkg_cli'
]
#---
# Internals
#---
__HELP__ = r"""
__HELP__ = """
vxsdk-pkg-search
Package manager : search commands
@ -28,8 +30,8 @@ DESCRIPTION:
This will allow the searching to try to match package with specific version
information. This part is very powerfull because If the 'version' is
detected to respect the correct semantic versioning, you can perform version
operations in the version target:
detected to respect the correct semantic versioning, you can perform
version operations in the version target:
Caret requirements (^)
^1.2.3 := >=1.2.3, <2.0.0
@ -59,17 +61,12 @@ DESCRIPTION:
OPTIONS:
-a, --all Display all package found
-l, --local Performs search in "local" and local
-L, --local-only Performs search only in "local"
-r, --remote Performs search only in "remote"
-i, --info Print extra information for each repositories
-s, --short Print short information for each repositories (default)
-h, --help Print this help and exit
""".strip()
#---
# Internals
#---
def _pkg_list_display(pkg_list, version, display_extra_info=False):
for pkg in pkg_list:
indent = ' '
@ -112,7 +109,7 @@ def _pkg_list_display(pkg_list, version, display_extra_info=False):
# Public
#---
def pkg_search_cli_parse(argv):
def search_pkg_cli(argv):
""" Search command handling """
if not argv:
log.notice(__HELP__)
@ -140,6 +137,9 @@ def pkg_search_cli_parse(argv):
# - handle search exception when all package a requested
if arg in ['-a', '--all']:
pkg_list = core.pkg.find(None, None, local, remote)
if not pkg_list:
log.user("No package found")
continue
else:
pkg_list = core.pkg.find(
arg.split('@')[0],
@ -147,9 +147,9 @@ def pkg_search_cli_parse(argv):
local,
remote
)
if not pkg_list:
log.warn(f"{arg}: package not found, skipped")
continue
if not pkg_list:
log.warn(f"{arg}: package not found, skipped")
continue
# display package information
_pkg_list_display(
@ -157,5 +157,4 @@ def pkg_search_cli_parse(argv):
None if len(arg.split('@')) != 2 else arg.split('@')[1],
display_extra_info
)
return 0

View File

@ -1,15 +1,19 @@
"""
Vhex's packages updater subcommand
cli.pkg.update - Vhex packages updater subcommand
"""
import sys
from core.logger import log
__all__ = [
'pkg_update_cli_parse'
'update_pkg_cli'
]
def pkg_update_cli_parse(_):
#---
# Public
#---
def update_pkg_cli(_):
""" Vhex package CLI parser entry """
log.critical("pacakge updater not implemented yet o(x_x)o")
sys.exit(85)

View File

@ -1,34 +0,0 @@
import sys
from core.project import project_new
__VXSDK_MODULE_META__ = (
['p', 'project'],
"project abstraction",
r"""vxsdk project
Abstract project manipulation
USAGE:
vxsdk project <COMMAND> [OPTIONS]
OPTIONS:
--list List installed command
-h, --help Print helps information
Common used commands:
n, new Create a new project
See `vxsdk project help <action>` for more information on a specific command
"""
)
def cli_parse(_, argv):
    """ Project subcommand entry

    Handle the `vxsdk project` command line: 'n'/'new' creates one project
    per remaining path argument, the help flags display the module help,
    anything else logs the help text as an error.

    @args
    > _           - unused
    > argv (list) - command line arguments (without the 'project' word)
    """
    # bugfix: the original called the undefined `logger()` helper with the
    # undefined LOG_USER / LOG_EMERG constants (NameError at runtime); use
    # the project logging facade instead
    from core.logger import log
    if argv:
        if argv[0] in ('n', 'new'):
            for path in argv[1:]:
                project_new(path)
            sys.exit(0)
    if '-h' in argv or '--help' in argv:
        log.user(__VXSDK_MODULE_META__[2])
        sys.exit(0)
    log.error(__VXSDK_MODULE_META__[2])

70
vxsdk/core/build/cmake.py Normal file
View File

@ -0,0 +1,70 @@
"""
core.build.cmake - CMake abstraction
"""
import os
import sys
import subprocess
__all__ = [
'cmake_configure',
'cmake_build',
'cmake_install',
'cmake_uninstall',
]
#---
# Public
#---
def cmake_configure(pkg_meta, _, env):
    """ Abstract cmake configuration

    Run the CMake configure step (`cmake -B <build> -S <source>`). When
    the VXSDK_HOOK_CMAKE_TOOLCHAIN hook is present in the environment, it
    is forwarded through -DCMAKE_TOOLCHAIN_FILE.

    @args
    > pkg_meta (obj) - package meta information (name, path, build_prefix)
    > _              - unused (kept for the common build-backend signature)
    > env (dict)     - generated package environment

    @return
    > the `cmake` process exit code (0 on success)
    """
    source_dir = f"{pkg_meta.path}"
    build_dir = f"{pkg_meta.build_prefix}/build/{pkg_meta.name}"
    toolchain = env.get('VXSDK_HOOK_CMAKE_TOOLCHAIN')
    flag = f"-DCMAKE_TOOLCHAIN_FILE={toolchain}" if toolchain else ''
    command = f"cmake {flag} -B {build_dir} -S {source_dir}"
    return subprocess.run(command.split(), check=False).returncode
def cmake_build(pkg_meta, verbose, _):
    """ Abstract cmake build

    Run `cmake --build` on the package build tree.

    @args
    > pkg_meta (obj) - package meta information (name, build_prefix)
    > verbose (bool) - forward `--verbose` to cmake
    > _              - unused (kept for the common build-backend signature)

    @return
    > the `cmake` process exit code (0 on success)
    """
    build_dir = f"{pkg_meta.build_prefix}/build/{pkg_meta.name}"
    command = f"cmake --build {build_dir}"
    if verbose:
        command = f"{command} --verbose"
    return subprocess.run(command.split(), check=False).returncode
def cmake_install(pkg_meta, verbose, _):
    """ Abstract cmake installation

    Run `cmake --install` on the package build tree.

    @args
    > pkg_meta (obj) - package meta information (name, build_prefix)
    > verbose (bool) - forward `--verbose` to cmake
    > _              - unused (kept for the common build-backend signature)

    @return
    > the `cmake` process exit code (0 on success)
    """
    build_dir = f"{pkg_meta.build_prefix}/build/{pkg_meta.name}"
    command = f"cmake --install {build_dir}"
    if verbose:
        command = f"{command} --verbose"
    return subprocess.run(command.split(), check=False).returncode
def cmake_uninstall(pkg_meta, verbose, _):
    """ Abstract cmake uninstall

    CMake does not offer an easy way to uninstall a project, but its
    install step generates `install_manifest.txt`, which lists every
    installed pathname. Remove each listed file that still exists.

    @args
    > pkg_meta (obj) - package meta information (name, build_prefix)
    > verbose (bool) - display each removed pathname
    > _              - unused (kept for the common build-backend signature)

    @return
    > 0 on success, -1 if the manifest does not exist (project not
    > installed), otherwise minus the number of files that could not be
    > removed
    """
    prefix_build = f"{pkg_meta.build_prefix}/build/{pkg_meta.name}"
    manifile = f"{prefix_build}/install_manifest.txt"
    if not os.path.exists(manifile):
        print('project not installed')
        return -1
    retcode = 0
    with open(manifile, 'r', encoding='utf8') as manifest:
        for pathname in manifest.readlines():
            pathname = pathname.strip()
            if not os.path.exists(pathname):
                continue
            if verbose:
                # bugfix: was a plain string literal missing the f-prefix,
                # so the literal text "{pathname}" was printed
                print(f"-- Removing {pathname}")
            ret = subprocess.run(f"rm {pathname}".split(), check=False)
            if ret.returncode == 0:
                continue
            # include the offending pathname so failures are actionable
            print(
                f"warning : error during removing file '{pathname}'",
                file=sys.stderr
            )
            retcode -= 1
    return retcode

View File

@ -1,48 +1,37 @@
"""
Compilation handling using the dependency DAG graph
core.build.compile - Compilation handling using the dependency DAG graph
"""
from core.logger import log
from core.build.rules import project_rules_exec
import core.conv
from core.build.rules import rules_project_exec
__all__ = [
'project_compile'
'compile_project'
]
#---
# Internals
#---
def __dep_generate_assets(dep_info, _, __):
log.user(f"[{dep_info['meta'].name}] generate assets...")
core.conv.assets_generate(
f"{dep_info['meta'].path}/assets/",
f"{dep_info['meta'].parent_path}/.vxsdk/converter/{dep_info['meta'].name}/src"
)
return 0
def __dep_build_sources(dep_info, env_extra, verbose):
def _dep_build_sources(dep_info, verbose):
log.user(f"[{dep_info['meta'].name}] build sources...")
return project_rules_exec(
return rules_project_exec(
dep_info['meta'],
dep_info['target'],
['configure', 'build'],
verbose,
env_extra
dep_info['env'],
verbose
)
def __dep_install(dep_info, env_extra, verbose):
def _dep_install(dep_info, verbose):
log.user(f"[{dep_info['meta'].name}] install...")
return project_rules_exec(
return rules_project_exec(
dep_info['meta'],
dep_info['target'],
['intall'],
verbose,
env_extra
['install'],
dep_info['env'],
verbose
)
def __compile_dependency(dep, env_extra, verbose):
def _compile_dependency(dep, verbose):
""" Compile dependency
@args
@ -52,43 +41,20 @@ def __compile_dependency(dep, env_extra, verbose):
@return
> 0 on success, negative value otherwise
"""
if __dep_generate_assets(dep['info'], env_extra, verbose) != 0:
log.error(f"[{dep['info']['meta'].name}] error during asset generation")
dep_meta = dep['info']['meta']
if _dep_build_sources(dep['info'], verbose) != 0:
log.error(f"[{dep_meta.name}] error during source build")
return -1
if __dep_build_sources(dep['info'], env_extra, verbose) != 0:
log.error(f"[{dep['info']['meta'].name}] error during source build")
if _dep_install(dep['info'], verbose) != 0:
log.error(f"[{dep_meta.name}] error during installation")
return -2
if __dep_install(dep['info'], env_extra, verbose) != 0:
log.error(f"[{dep['info']['meta'].name}] error during installation")
return -3
return 0
def __env_extra_fetch(dep_graph):
""" Generate extra environement information
@args
> dep_graph (list) : DAG graph
@return
> a dictionary with all common env export
"""
env_extra = {}
for dep in dep_graph:
dep_meta = dep['info']['meta']
dep_env_extra = dep_meta.get_env_extra(dep['info']['target'])
for key in dep_env_extra:
if key.upper() != key:
log.warn(f"[{dep_meta.name}] : {key} : env key must be upper")
if key in env_extra:
log.warn(f"[{dep_meta.name}] : {key} : already set, overrided")
env_extra[key] = dep_env_extra[key]
return env_extra
#---
# Public
#---
def project_compile(dep_graph, verbose=False):
def compile_project(dep_graph, verbose=False):
r""" Build the entire project
@args
@ -98,10 +64,6 @@ def project_compile(dep_graph, verbose=False):
Return:
> 0 for succes, negative vale otherwise
"""
# generate "extra" environement configuration
env_extra = __env_extra_fetch(dep_graph)
# main build loop
while True:
completed = True
for dep in dep_graph:
@ -125,8 +87,7 @@ def project_compile(dep_graph, verbose=False):
enable_verbose = True
# build the package
error = __compile_dependency(dep, env_extra, enable_verbose)
if error != 0:
if (error := _compile_dependency(dep, enable_verbose)) != 0:
return error
# mark as completed

View File

@ -1,21 +1,26 @@
"""
Dependencies resolver
core.build.dependency - Dependencies resolver
"""
import os
import sys
import core.pkg
from core.logger import log
from core.build.meta import VxProjectMeta
import core.pkg
from core.build.env import env_config_merge, env_pkg_configure
__all__ = [
'project_dependency_clone'
'dependency_project_clone'
]
#---
# Internals
#---
def __dep_graph_update(dep_graph, dep_parent_id, pkg_info):
## DAG struct handling
def _dep_graph_update(dep_graph, dep_parent_id, pkg_info):
"""
Update dependency graph
"""
@ -32,70 +37,87 @@ def __dep_graph_update(dep_graph, dep_parent_id, pkg_info):
dep_graph[dep_parent_id]['dependencies'].append(dep_id)
return dep_id
def __recurs_clone(parent_path, dep_info, pkg_info, prefix, dep_stack):
def _recurs_clone(build_prefix, dep_info, pkg_info, prefix):
"""Clone all dependency and generate dependency graph
@args
> dep_graph (list) - list dependencies
> dep_parent_id (int) - parent index in `dep_graph`
> pkg_info (dict) - package information
> prefix (str) - prefix for package cloning
> build_prefix (str) - build prefix information
> dep_info (list) - contains dependencies list, stack and parent ID
> pkg_info (dict) - package information
> prefix (str) - prefix for package cloning
@return
> 0 if success, negative value otherwise
> The current public env of the package
"""
# fetch info
dep_graph = dep_info[0]
dep_parent_id = dep_info[1]
dep_graph, dep_parent_id, dep_stack = dep_info
# check circular dependency and update the stack
# check circular dependency error and update the stack
for dep in dep_stack:
if dep != pkg_info:
continue
log.error(f"circular dependency with '{pkg_info['name']}' detected")
return -1
sys.exit(84)
dep_stack.append(pkg_info)
# try to clone the package
pkg_path = core.pkg.clone(
pkg_info['name'],
pkg_info['version'],
prefix
f"{prefix}/"
)
if not pkg_path:
log.error(
'unable to clone the dependency '
f"{pkg_info['name']}@{pkg_info['version']}"
)
sys.exit(84)
log.debug(f"pkg path = {pkg_path}")
# check pacakge validity
# @todo
# Find a way to not be dependent of VxProjectMeta to avoid spaghetti code
target = pkg_info['target']
pkg_meta = VxProjectMeta(pkg_path, parent_path, pkg_info['extra_conf'])
if target not in pkg_meta.target_support:
log.error(f"[{pkg_meta.name}] target '{target}' not supported")
return -2
pkg_meta = VxProjectMeta(
pkg_path,
build_prefix,
pkg_info['target'],
pkg_info['extra_conf']
)
# generate dependency information
pkg_dep_id = __dep_graph_update(
dep_graph,
dep_parent_id,
{
'meta' : pkg_meta,
'target' : pkg_info['target']
}
)
for dep in pkg_meta.get_dependencies(target):
__recurs_clone(
parent_path,
(dep_graph, pkg_dep_id),
pkg_env_config = pkg_meta.get_env_config()
pkg_depinfo = {
'meta' : pkg_meta,
'target' : pkg_info['target'],
'env' : None
}
pkg_dep_id = _dep_graph_update(dep_graph, dep_parent_id, pkg_depinfo)
# Handle dependencies and update the ENV information
dep_env_commit = {}
for dep in pkg_meta.get_dependencies():
dep_env_commit[dep['name']] = _recurs_clone(
build_prefix,
(dep_graph, pkg_dep_id, dep_stack.copy()),
dep,
prefix,
dep_stack.copy()
prefix
)
return 0
# Generate env configuration and return the "current" package env
# configuration
log.debug(f"[{pkg_meta.name}] dep pre-env = {pkg_env_config['public']}")
pkg_depinfo['env'] = env_pkg_configure(
pkg_meta,
pkg_env_config['private'],
dep_env_commit
)
env_config_merge(pkg_env_config['public'], dep_env_commit)
log.debug(f"[{pkg_meta.name}] dep post-env = {pkg_env_config['public']}")
return pkg_env_config['public']
#---
# Public
#---
def project_dependency_clone(pkg, target):
def dependency_project_clone(pkg):
r""" Clone dependencies of package and generate a DAG graph
This function will clone all dependency of a package optimised for a
@ -104,7 +126,6 @@ def project_dependency_clone(pkg, target):
@args
> pkg (dict) - first package information
> target (str) - build target
@return
> A list with package and dependency information : [
@ -120,31 +141,33 @@ def project_dependency_clone(pkg, target):
# @note
# All dependencies will be cloned in "global" path, only symbolic link will
# be generated here
prefix = f"{pkg.parent_path}/.vxsdk/dependencies"
prefix = f"{pkg.build_prefix}/dependencies/"
if not os.path.exists(prefix):
os.makedirs(prefix)
# clone all dependencies and generate a DAG graph
# generate the root DAG node
dep_graph = []
pkg_env_config = pkg.get_env_config()
pkg_depinfo = {'meta' : pkg, 'target' : pkg.target, 'env' : None}
dep_origin_id = _dep_graph_update(dep_graph, -1, pkg_depinfo)
# clone all dependencies and update the DAG
# @note
# we need to manualy bootstrap the current project as a dependency to
# facilitate graph generation with a unified data structure
dep_graph = []
dep_origin_id = __dep_graph_update(
dep_graph,
-1,
{
'meta' : pkg,
'target' : target,
}
)
for dep in pkg.get_dependencies(target):
__recurs_clone(
pkg.parent_path,
(dep_graph, dep_origin_id),
dep_env_commit = {}
for dep in pkg.get_dependencies():
dep_env_commit[dep['name']] = _recurs_clone(
pkg.build_prefix,
(dep_graph, dep_origin_id, []),
dep,
prefix,
[]
prefix
)
# generate package env configuration
env = env_pkg_configure(pkg, pkg_env_config['private'], dep_env_commit)
log.debug(f"[{pkg.name}] generated env = {env}")
pkg_depinfo['env'] = env
# return the DAG
return dep_graph

269
vxsdk/core/build/env.py Normal file
View File

@ -0,0 +1,269 @@
"""
core.build.env - Handle env configuration
"""
import os
from core.logger import log
from core.config import config
__all__ = [
'env_config_merge',
'env_config_evaluate',
'env_pkg_header',
'env_pkg_configure'
]
#---
# Internals
#---
def _env_config_patch(env, commit, env_key_bitmap):
""" Patch missing key information
@args
> env (dict) - { <KEY> : <value>, ... }
> commit (dict) - { <dep_name> : { <KEY> : <value>, ...}, ... }
> env_key_bitmap (list) - already handled key information
@return
> Nothing
"""
for dep_name in commit.keys():
for dep_item in commit[dep_name].items():
if dep_item[0] in env_key_bitmap:
continue
if dep_item[0] not in env:
env[dep_item[0]] = dep_item[1]
continue
env[dep_item[0]] += dep_item[1]
def _env_config_preprocess(env, commit):
    """ Handle special '@<dep_name>@' information

    Each value list in `env` may contain placeholder entries: an explicit
    '@<dep_name>@' entry is replaced by that dependency's value for the
    same key, and '@COMMON@' is replaced by the values of every dependency
    not consumed by an explicit entry. Handled keys are recorded so that
    `_env_config_patch()` can fold in the remaining dependency keys.

    NOTE(review): the explicit-dependency branch uses `append()` while the
    '@COMMON@' branch uses `+=`; if commit values are lists, the first one
    nests a whole list inside the value list -- confirm this is intended.

    @args
    > env (dict)    - { <KEY> : <value>, ... }
    > commit (dict) - { <dep_name> : { <KEY> : <value>, ...}, ... }

    @return
    > A new env dictionary with placeholders processed and missing keys
    > patched in
    """
    env_real = {}
    env_key_bitmap = []
    for env_key, env_data_list in env.items():
        # handle all preprocessing except for @COMMON@, that should be
        # performed later
        env_dep_bitmap = []
        env_data_almost = []
        for env_data in env_data_list:
            # keep '@COMMON@' markers verbatim for the second pass below
            if env_data == '@COMMON@':
                env_data_almost.append(env_data)
                continue
            # plain data (not wrapped in '@...@') is kept as-is
            if env_data[0] != '@' or env_data[-1] != '@':
                env_data_almost.append(env_data)
                continue
            # reject the degenerate '@@' (empty target) marker
            if not (env_data_target := env_data[1:-1]):
                log.warn("empty preprocessing indication")
                continue
            if env_data_target not in commit.keys():
                log.warn(f"unable to find env target '{env_data_target}'")
                continue
            # the dependency exists but does not export this key: drop it
            if env_key not in commit[env_data_target]:
                continue
            env_data_almost.append(commit[env_data_target][env_key])
            env_dep_bitmap.append(env_data_target)
        # generate @COMMON@ data list with the rest (dependencies that were
        # not explicitly referenced above)
        env_data_common = []
        for dep_name in commit.keys():
            if dep_name in env_dep_bitmap or env_key not in commit[dep_name]:
                continue
            env_data_common += commit[dep_name][env_key]
        # replace all @COMMON@ indication: only the first marker receives
        # the data, subsequent markers are dropped with a warning
        env_data_real = []
        for env_data in env_data_almost:
            if env_data != '@COMMON@':
                env_data_real.append(env_data)
                continue
            if not env_data_common:
                log.warn('multiple @COMMON@ indication, skipped')
                continue
            env_data_real += env_data_common
            env_data_common = []
        # save the processed data
        env_real[env_key] = env_data_real
        # indicate which key has been handled. This information will be
        # used to "patch" missing key information
        env_key_bitmap.append(env_key)
    # return patched env
    _env_config_patch(env_real, commit, env_key_bitmap)
    return env_real
#---
# Public
#---
def env_config_merge(env, commit):
    """ Merge the current env configuration with a commit

    A commit maps each dependency name to its "public" env configuration.
    `env` is updated in place because the dictionary is shared with the
    DAG internal information.

    @args
    > env (dict)    - { <KEY> : <value>, ... }
    > commit (dict) - { <dep_name> : { <KEY> : <value>, ...}, ... }

    @return
    > Nothing, `env` is modified in place
    """
    log.debug(f"[env merge] env = {env} && commit = {commit}")
    merged = _env_config_preprocess(env, commit)
    env.update(merged)
def env_config_evaluate(env, glossary=None):
    """ Evaluate vars' content

    Expand every '{KEY}' placeholder found in the values of `env`, in
    place. Replacement values are looked up in `glossary` when provided,
    otherwise in `env` itself.

    NOTE(review): a provided `glossary` fully shadows `env` lookups, and
    an `env` self-lookup yields a list (not a string) -- confirm both
    behaviours are intended.

    @args
    > env (dict)      - { '<KEY>' : [ <value0>, <value1>, ...] }
    > glossary (dict) - optional replacement table

    @return
    > Nothing, `env` is modified in place
    """
    expanded = {}
    for key, value_list in env.items():
        expanded[key] = []
        for remaining in value_list:
            resolved = ''
            while True:
                start = remaining.find('{')
                end = remaining.find('}')
                if start < 0 or end < 0:
                    # no (more) placeholders: keep the tail verbatim
                    resolved += remaining
                    break
                resolved += remaining[:start]
                placeholder = remaining[start + 1:end]
                replacement = env.get(placeholder)
                if glossary:
                    replacement = glossary.get(placeholder)
                if replacement:
                    resolved += replacement
                else:
                    log.warn(f" - '{key}' : unable to find")
                remaining = remaining[end + 1:]
            expanded[key].append(resolved)
    env.update(expanded)
def env_pkg_header(pkg_meta):
    """ Default exported variable information

    =============================== =======================================
    Variable                        Description
    =============================== =======================================
    VXSDK_PKG_NAME                  project name
    VXSDK_PKG_TARGET                project target
    VXSDK_PKG_VERSION               project version
    VXSDK_PKG_IS_ORIGINAL           'true' if the package is the original
    VXSDK_PREFIX_BUILD              project build prefix (for object files)
    VXSDK_PREFIX_INSTALL            project installation prefix
    VXSDK_ASSETS_PREFIX_SRC         assets "build" directory
    VXSDK_BUILD_CFLAGS              include flags for GCC
    VXSDK_BUILD_LDFLAGS             include flags for GCC
    VXSDK_CURRENT_SOURCE_DIR        current package path
    VXSDK_PREFIX_SYSROOT            potential sysroot path for the package
    =============================== =======================================

    @arg
    > pkg_meta (obj) - package meta information

    @return
    > Dictionary with default information
    """
    build_root = pkg_meta.build_prefix
    # sysroot candidate: <configured sysroot>/<package name>
    sysroot_dir = os.path.expanduser(config.get('path.sysroot'))
    sysroot_dir = f"{sysroot_dir}/{pkg_meta.name}"
    # object-file prefix, created on demand
    objects_dir = f"{build_root}/build/{pkg_meta.name}"
    if not os.path.exists(objects_dir):
        os.makedirs(objects_dir)
    # installation prefix: applications install to the user binary path
    install_dir = f"{build_root}/install"
    if pkg_meta.type == 'app':
        install_dir = os.path.expanduser(config.get('path.bin'))
    if not os.path.exists(install_dir):
        os.makedirs(install_dir)
    # assets "build" directory (generated sources)
    assets_dir = f"{build_root}/converter"
    # workaround to avoid CMake error with non-standard symver; the real
    # version is deliberately discarded for now
    # (fixme/E8A7) : support version export
    exported_version = pkg_meta.version
    exported_version = '0.0.0'
    # generate "dependence-specific" env
    # (todo/97EA) : remove CFLAGS
    # (todo/97EA) : remove LDFLAGS
    return {
        'VXSDK_PKG_NAME' : pkg_meta.name,
        'VXSDK_PKG_TARGET' : pkg_meta.target,
        'VXSDK_PKG_VERSION' : exported_version,
        'VXSDK_PKG_IS_ORIGINAL' : str(pkg_meta.is_original),
        'VXSDK_PREFIX_BUILD' : objects_dir,
        'VXSDK_PREFIX_INSTALL' : install_dir,
        'VXSDK_ASSETS_PREFIX_SRC' : assets_dir,
        'VXSDK_BUILD_CFLAGS' : '',
        'VXSDK_BUILD_LDFLAGS' : '',
        'VXSDK_CURRENT_SOURCE_DIR' : pkg_meta.path,
        'VXSDK_PREFIX_SYSROOT' : sysroot_dir
    }
def env_pkg_configure(pkg_meta, pkg_env, commit):
    """ Generate the complete package env configuration

    Merge the dependencies' public env (`commit`) into the package env,
    generate the default "header" variables, fold both together, then
    normalise every value into a ';'-separated list (CMake friendly).

    @args
    > pkg_meta (obj) - package meta information
    > pkg_env (dict) - package env information, updated in place
    > commit (dict)  - { <dep_name> : { <KEY> : <value>, ...}, ... }

    @return
    > Dictionary with all env variable setuped
    """
    # performs preproc pass
    log.debug(f"[{pkg_meta.name}] env_pkg_conf() -> commit = {commit}")
    env_config_merge(pkg_env, commit)
    # generate package "header" env information
    # (todo/8DC1) : isolate the PKG ENV information (shouldn't be edited)
    envp = env_pkg_header(pkg_meta)
    # merge extra env configuration on top of the header variables
    for key, value in pkg_env.items():
        if isinstance(value, (tuple, list)):
            value = ' '.join(value)
        if key in envp:
            envp[key] += f' {value}'
        else:
            envp[key] = value
    # clean list for cmake (';' separated, no duplicated whitespace)
    for key, value in envp.items():
        envp[key] = ';'.join(value.split())
    log.debug(f"[{pkg_meta.name}] generated env = {envp}")
    return envp

View File

@ -1,16 +1,25 @@
"""
vxsdk.toml document parser
core.build.meta - vxsdk.toml document parser
"""
import os
import toml
from core.logger import log
from core.pkg.version import version_get
from core.build.env import env_config_evaluate, env_pkg_header
__all__ = [
'VxProjectMetaException',
'VxProjectMeta'
]
#---
# Public
#---
class VxProjectMetaException(Exception):
""" Simple exception handling """
class VxProjectMeta():
r""" Represente the project meta-information
@ -19,21 +28,22 @@ class VxProjectMeta():
This class exposes:
================================== =========================================
================================== =======================================
Property Description
================================== =========================================
================================== =======================================
name (str) Project name
version (str) Project versions
path (str) Project path
description (str) Project description
type (str) Project type
target_support (list,str) List of all supported target
================================== =========================================
================================== =======================================
Methods Description
================================== =========================================
================================== =======================================
get_dependencies (list,dict) List of all dependencies information
get_build_rules (dict) Building rules information
================================== =========================================
get_env_config (dict) List all env configuration var
================================== =======================================
For the file to be valid, it should expose *at least* the 'project' section
with the name of the project. Other information like the project type have
@ -71,13 +81,13 @@ class VxProjectMeta():
'superh.path.sysroot_test' = '{superh.path.sysroot}/test/yes'
```
"""
def __init__(self, path, parent_path=None, extra_conf=None):
def __init__(self, path, build_prefix='', target='', extra_conf=None):
""" Open and parse the vxsdk.toml file
@args
> path (str) : project path which we can find `vxsdk.toml` file
> parent_path (str) : project parent path
> extra_conf (dict) : extra flags information about particular steps
> path (str) : project path which we can find `vxsdk.toml` file
> build_prefix (str) : project build prefix
> extra_conf (dict) : extra flags information about particular steps
@todo
> proper handle exception
@ -88,17 +98,46 @@ class VxProjectMeta():
self._toml = toml.loads(file.read())
# check mandatory information
# @notes
# - 'project' section
# - project name information
if 'project' not in self._toml or 'name' not in self._toml['project']:
raise Exception(f"'{self._path}': missing project name information")
raise VxProjectMetaException(
f"'{self._path}': missing project name information"
)
# check if the requested target is valid
target_support = []
if 'target' in self._toml['project']:
target_support = self._toml['project']['target']
if target_support:
error = 0
if not target:
log.error(
f"[{self.name}] no target specified, "
" you should specify a target between:"
)
error = -1
if target not in target_support:
log.error(
f"[{self.name}] target specified not supported, "
" you should specify a target between:"
)
error = -2
if error:
for target_name in target_support:
log.error(f" - {target_name}")
raise VxProjectMetaException(
f"[{self.name}] target {target} not supported"
)
# setup information and cache
self._type = None
self._target_support = None
self._parent_path = parent_path
self._type = ''
self._target = target
self._extra_conf = extra_conf
self._asset_prefix_list = []
# handle build prefix
self._build_prefix = build_prefix
if not self._build_prefix:
self._build_prefix = f"{self._path}/.vxsdk/{self._target}"
def __str__(self):
""" Display project information """
@ -106,7 +145,7 @@ class VxProjectMeta():
content += '\n'
content += 'project meta\n'
content += ' - path:'.ljust(16) + f'{self.path}\n'
content += ' - parent:'.ljust(16) + f'{self.parent_path}\n'
content += ' - build:'.ljust(16) + f'{self.build_prefix}\n'
content += ' - type:'.ljust(16) + f'{self.type}\n'
content += '\n'
content += 'Build information\n'
@ -125,7 +164,7 @@ class VxProjectMeta():
# content += ' No dependencies for this project\n'
#else:
# for dep in self.dependencies:
# content += ' - ' + f'{dep.name}'.ljust(16) + f'({dep.version})\n'
# content += f" - {dep.name:<16}({dep.version})\n"
return content
def __repr__(self):
@ -153,14 +192,14 @@ class VxProjectMeta():
return ''
@property
def parent_path(self):
"""<property> Return project parent path """
return self._parent_path if self._parent_path else self._path
def build_prefix(self):
"""<property> Return project build path """
return self._build_prefix
@property
def is_original(self):
"""<property> Return if the project is the target or a dependency"""
return not self._parent_path
return self._build_prefix == f"{self._path}/.vxsdk"
@property
def extra_conf(self):
@ -182,30 +221,23 @@ class VxProjectMeta():
)
return self._type
@property
def target_support(self):
"""<property> get supported target list"""
if not self._target_support:
self._target_support = []
if 'target' in self._toml['project']:
self._target_support = self._toml['project']['target']
return self._target_support
@property
def version(self):
"""<property> get current package version"""
return version_get(self.path)
@property
def target(self):
"""<property> Return build target """
return self._target
#---
# Methods
#---
def get_dependencies(self, target=None):
def get_dependencies(self):
""" Get project dependency list for a particular board target
@args
> (none - the project target is taken from the instance)
@return
> Return a list of dependencies information : [
{
@ -217,12 +249,6 @@ class VxProjectMeta():
...
]
"""
# check mandatory target requirement
if self.target_support:
if not target or target not in self.target_support:
log.warn(f"[{self.name}] target '{target}' not supported")
return []
# help particular section dump
def section_dep_fetch(section, target):
if 'dependencies' not in section:
@ -245,12 +271,15 @@ class VxProjectMeta():
return dep_list
# fetch dependencies information in common and target-specific section
dep_list = section_dep_fetch(self._toml, target)
if target and target in self._toml:
dep_list += section_dep_fetch(self._toml[target], target)
dep_list = section_dep_fetch(self._toml, self._target)
if self._target and self._target in self._toml:
dep_list += section_dep_fetch(
self._toml[self._target],
self._target
)
return dep_list
def get_build_rules(self, target=None):
def get_build_rules(self):
""" Get project building rules
@args
@ -265,13 +294,10 @@ class VxProjectMeta():
}
"""
def section_rules_fetch(section):
if 'build' not in section:
log.warn(f"[{self.name}] no building rules")
return {}
rules = {}
for rule in section['build']:
for rule in section:
if rule in ['configure', 'build', 'install', 'uninstall']:
rules[rule] = section['build'][rule]
rules[rule] = section[rule]
continue
log.warn(
f"[{self.name}] building rule '{rule}' doesn't exists"
@ -279,10 +305,83 @@ class VxProjectMeta():
return rules
# check if we need to fetch common rules or target-specific one
if target:
if self.target_support and target not in self.target_support:
log.error(f"[{self.name}] not supported target '{target}'")
return {}
if target in self._toml:
return section_rules_fetch(self._toml[target])
return section_rules_fetch(self._toml)
if self._target \
and self._target in self._toml \
and 'build' in self._toml[self._target]:
return section_rules_fetch(self._toml[self._target]['build'])
if 'build' in self._toml:
return section_rules_fetch(self._toml['build'])
return {}
def get_env_config(self):
""" Get environment configuration
@args
> (none - the project target is taken from the instance)
@return
> a dictionary with public and private environment variables
"""
def _key_update(env, key, value):
if key in env:
env[key] += value
return
env[key] = value
def _dump_section_env_conf(env_config, section):
for item in section.items():
if item[0].find('VXSDK_COMMON') == 0:
key_name = f"VXSDK_{item[0][13:]}"
_key_update(env_config['public'], key_name, item[1])
_key_update(env_config['private'], key_name, item[1])
elif item[0].find('VXSDK_PUBLIC') == 0:
key_name = f"VXSDK_{item[0][13:]}"
_key_update(env_config['public'], key_name, item[1])
elif item[0].find('VXSDK_PRIVATE') == 0:
key_name = f"VXSDK_{item[0][14:]}"
_key_update(env_config['private'], key_name, item[1])
else:
log.warn(f"Unable to convert {item[0]}, skipped")
env_config = {
'public' : {},
'private' : {}
}
if self._target \
and self._target in self._toml \
and 'env' in self._toml[self._target]:
_dump_section_env_conf(env_config, self._toml[self._target]['env'])
if 'env' in self._toml:
_dump_section_env_conf(env_config, self._toml['env'])
log.debug(f"[{self.name}] get_env_config() pre-eval = {env_config}")
# evaluate env content
# (todo/8DC1) : isolate the PKG ENV information (shouldn't be edited)
pkg_env_head = env_pkg_header(self)
env_config_evaluate(env_config['public'], pkg_env_head)
env_config_evaluate(env_config['private'], pkg_env_head)
# return env information
log.debug(f"[{self.name}] get_env_config() post-eval = {env_config}")
return env_config
def get_assets_prefix_list(self):
""" Fetch all assets prefix
@return
> a list with all assets path
"""
if self._asset_prefix_list:
return self._asset_prefix_list
if 'converter' not in self._toml:
return [f"{self._path}/assets"]
if 'assets_prefix' not in self._toml['converter']:
return [f"{self._path}/assets"]
self._asset_prefix_list = []
for asset in self._toml['converter']['assets_prefix']:
asset_prefix = f"{self._path}/{asset}"
if not os.path.exists(asset_prefix):
log.warn(f"{self.name}: assets path '{asset}' doesn't exists")
continue
self._asset_prefix_list.append(asset_prefix)
return self._asset_prefix_list

View File

@ -1,16 +1,13 @@
r""" build.core - Core building function
This module exports:
VxProject - a class representing a "Vhex" project
"""
core.build.project - Core building function
"""
import os
from core.logger import log
from core.build.meta import VxProjectMeta
from core.build.dependency import project_dependency_clone
from core.build.compile import project_compile
from core.build.rules import project_rules_exec
from core.build.dependency import dependency_project_clone
from core.build.compile import compile_project
from core.build.rules import rules_project_exec
__all__ = [
'VxProject'
@ -18,19 +15,19 @@ __all__ = [
#---
# Public object
# Public
#---
class VxProject(VxProjectMeta):
r"""Represent a Vhex project
A Vhex project is a folder which contains a 'vxsdk.toml' file. This file
describe information about the project, like its name, version, description,
and many other information.
describes information about the project, like its name, version,
description, and other metadata.
This class will handle this particular file, and will abstract some complex
tasks like build abstraction, assets conversion, manage project dependencies
and more.
tasks like build abstraction, assets conversion, project dependency
management and more.
All project information and file generation (file objects, build logs,
assets conversion, ...) will be stored in a '.vxsdk' folder at the root of
@ -38,34 +35,34 @@ class VxProject(VxProjectMeta):
The VxProject exposes:
================================== =========================================
================================== =======================================
Property VxprojectMeta Description
================================== =========================================
================================== =======================================
name (str) * Holds name
type (str) * Holds type (lib, app or addin)
path (str) * Holds project path
dependencies (list) * Holds project dependencies
description (str) * Holds description
build_rules (dict) * Holds custom build information
board_support (list,str) * List of all supported board (None if all)
board_support (list,str) * List of all supported board
extra_conf (dict) Holds extra rules information
parent (str) Holds project parent path
version (str) Holds version
assets (list) Holds project assets paths
is_original (bool) Return True if its the original package
================================== =========================================
================================== =======================================
================================== =========================================
================================== =======================================
Method Description
================================== =========================================
================================== =======================================
update() Try to update project's dependencies
install() Install the project
uninstall() Uninstall the project
build() Build the entiere project (dep, src, ...)
build() Build the entiere project (dep,src,...)
rebuild() Rebuild the entiere project
================================== =========================================
================================== =======================================
"""
def __init__(self, path=None, parent_path=None, extra_conf=None):
def __init__(self, path='', build_prefix='', target='', extra_conf=None):
r""" Try to read the TOML project file of a project
This constructor will simply try to read the vxsdk.toml file stored at
@ -74,11 +71,11 @@ class VxProject(VxProjectMeta):
current working directory.
The `parent` path is used to know if the project is the original one or
a dependency. If it a dependency, its build genereted files will be done
in the `.vxsdk/*` directory of the original one.
a dependency. If it is a dependency, its build generated files will be
placed in the `.vxsdk/*` directory of the original one.
Extra information for each building steps can be provided in the form of
dictionary. Like this:
Extra information for each building step can be provided in the form
of a dictionary, like this:
```
{
@ -89,7 +86,7 @@ class VxProject(VxProjectMeta):
@args
> path (str) - the project path
> parent_path (str) - the project parent path
> build_prefix (str) - the project build prefix
> extra (dict) - extra flags information about particular steps
@raise
@ -97,7 +94,8 @@ class VxProject(VxProjectMeta):
"""
super().__init__(
os.path.abspath(path) if path else os.getcwd(),
parent_path,
build_prefix,
target,
extra_conf
)
@ -106,7 +104,7 @@ class VxProject(VxProjectMeta):
#---
def update(self):
r"""Update project's dependencies.
"""Update project's dependencies.
This method will try to bump all dependencies to the possible newest
version, always validating the dependencies versioning match
@ -118,47 +116,39 @@ class VxProject(VxProjectMeta):
log.error('dependencies update not implemented yet')
return -1
def install(self, target, verbose=False):
r""" Install the project.
def install(self, verbose=False):
""" Install the project.
@args
> target (str) : targeted board for operation (fxcg50, SDL, ...)
> verbose (bool) : display project logs or just strict minimum
@return
> True if no error, False otherwise
"""
if target and self.target_support and target not in self.target_support:
log.error(f"[{self.name}] target '{target}' not supported")
return -1
return project_rules_exec(
return rules_project_exec(
self,
target,
['install'],
self.get_env_config()['public'],
verbose
)
def uninstall(self, target, verbose=False):
def uninstall(self, verbose=False):
""" Uninstall the project
@args
> target (str) : targted board for operation (fxcg50, SDL, ...)
> verbose (bool) : display project logs or just strict minimum
@return
> True if no error, False otherwise
"""
if target and self.target_support and target not in self.target_support:
log.error(f"[{self.name}] target '{target}' not supported")
return -1
return project_rules_exec(
return rules_project_exec(
self,
target,
['uninstall'],
self.get_env_config()['public'],
verbose
)
def build(self, target=None, verbose=False):
def build(self, verbose=False):
"""Build the entire project
Args:
@ -170,26 +160,11 @@ class VxProject(VxProjectMeta):
"""
log.user(f"[{self.name}] start building package...")
# check target availability
if target and self.target_support and target not in self.target_support:
log.error(f"[{self.name}] target '{target}' not supported")
return -1
# check if the project is compatible with the board
build_rules = self.get_build_rules(target)
if not 'config' in build_rules \
and not 'build' in build_rules \
and not 'install' in build_rules:
log.error(f"[{self.name}] project not compatible for '{target}'")
return -2
# clone dependencies
log.debug(f"target test = {target}")
dep_graph = project_dependency_clone(self, target)
if not dep_graph:
log.error(f"[{self.name}] unable to perform dependeincy relovation")
return -3
if not (dep_graph := dependency_project_clone(self)):
log.error(f"[{self.name}] unable to perform dependency resolution")
return -1
log.debug(f"dep_graph = {dep_graph}")
# compile the entire project
log.debug(f"dep_graph = {dep_graph}")
return project_compile(dep_graph, verbose)
return compile_project(dep_graph, verbose)

View File

@ -1,106 +1,58 @@
"""
Vhex build shell command abstraction
core.build.rules - Vhex build shell command abstraction
"""
import os
import subprocess
from core.logger import log
from core.config import config_get
from core.build.cmake import (
cmake_configure,
cmake_build,
cmake_install,
cmake_uninstall
)
__all__ = [
'project_rules_exec'
'rules_project_exec'
]
#---
# Internals
#---
def __project_generate_env(env_extra, pkg_meta, target):
r""" Generate environment variables
def _exec_default_rules(pkg_info, rules, verbose, env):
""" Execute default rules when none are defined by the project
================================ =======================================
Type Description
================================ =======================================
VXSDK_PKG_NAME project name
VXSDK_PKG_VERSION project version
VXSDK_PKG_IS_ORIGINAL 'true' if the package is the original
VXSDK_PREFIX_BUILD project build prefix (for object files)
VXSDK_PREFIX_INSTALL project installation prefix
VXSDK_PREFIX_LIB prefix for all stored library
VXSDK_GCC_CFLAGS include flags for GCC
VXSDK_ASSETS_SRC assets sources file directory
VXSDK_ASSETS_BUILD assets build directory
VXSDK_PATH_SYSROOT_LIB sysroot library path
VXSDK_PATH_SYSROOT_INCLUDE sysroot include path
================================ =======================================
@args
> pkg_info (obj) - package information
> rules (list) - list of rules
> verbose (bool) - enable verbose or not
> env (dict) - can contain VXSDK_HOOK_* information
@arg
> board_target (str) - targeted board
@return
> 0 if success, negative value otherwise
"""
prefix = f"{pkg_meta.parent_path}/.vxsdk"
# generate VXSDK_PREFIX_BUILD information
pkg_prefix_build = f'{prefix}/build/{pkg_meta.name}'
if not os.path.exists(pkg_prefix_build):
os.makedirs(pkg_prefix_build)
# generate VXSDK_PREFIX_INSTALL information
pkg_prefix_install = f'{prefix}/lib'
if pkg_meta.type == 'app':
pkg_prefix_install = os.path.expanduser(config_get('path.bin'))
if not os.path.exists(pkg_prefix_install):
os.makedirs(pkg_prefix_install)
# generate VXSDK_PREFIX_LIB information
pkg_prefix_lib = f'{prefix}/lib'
if not os.path.exists(pkg_prefix_lib):
os.makedirs(pkg_prefix_lib)
# generate VXSDK_GCC_CFLAGS information
#@todo : compiler specific !
#pkg_gcc_cflags = f'-L. -Llib/ -L{prefix}/lib/'
#pkg_gcc_cflags += f' -I. -Iinclude/ -I{prefix}/lib/include/'
# generate VXSDK_ASSETS_* information
pkg_assets_src = f'{prefix}/converter/{pkg_meta.name}/src'
pkg_assets_obj = f'{prefix}/converter/{pkg_meta.name}/obj'
# generate VXSDK_PKG_* information
pkg_name = pkg_meta.name
pkg_is_original = str(pkg_meta.is_original)
pkg_version = pkg_meta.version
pkg_target = target
# generate "dependence-specific" env
envp = {
'VXSDK_PKG_NAME' : pkg_name,
'VXSDK_PKG_TARGET' : pkg_target,
'VXSDK_PKG_VERSION' : pkg_version,
'VXSDK_PKG_IS_ORIGINAL' : pkg_is_original,
'VXSDK_PREFIX_BUILD' : pkg_prefix_build,
'VXSDK_PREFIX_INSTALL' : pkg_prefix_install,
'VXSDK_PREFIX_LIB' : pkg_prefix_lib,
'VXSDK_ASSETS_SRC' : pkg_assets_src,
'VXSDK_ASSETS_BUILD' : pkg_assets_obj,
'VXSDK_CURRENT_SOURCE_DIR' : pkg_meta.path
default_rules = {
'configure' : cmake_configure,
'build' : cmake_build,
'install' : cmake_install,
'uninstall' : cmake_uninstall
}
# merge extra env configuration
for key in env_extra:
if key in envp:
log.warn(f"[{pkg_name}] extra env key '{key}' already exist")
envp[key] += f' {env_extra[key]}'
# update env
log.debug(f"{envp}")
os.environ.update(envp)
for rule in rules:
if rule not in default_rules:
log.warn(f"rule '{rule}' not recognized, skipped")
continue
log.debug(f"default rule '{rule}' : with env = {env}")
if (ret := default_rules[rule](pkg_info, verbose, env)) != 0:
return ret
return 0
#---
# Public
#---
def project_rules_exec(pkg_meta, target, rule_list, env_extra, verbose):
""" Walk through project rules and performs target operation if needed
def rules_project_exec(pkg_meta, rule_list, env_config, verbose):
""" Walk through project rules
This method will build the project source using the custom build
information set by the author in the TOML description.
@ -110,10 +62,10 @@ def project_rules_exec(pkg_meta, target, rule_list, env_extra, verbose):
(see more information at 'VxProject.extra')
@args
> board_target (str) : targeted board for operation (fxcg50, SDL, ...)
> rules (array,str) : list of wanted operations to performs
> extra (dict,str) : user extra information about operations
> verbose (bool) : capture (verbose=False) or not the operation print
> pkg_meta (obj) - package information
> rule_list (list,str) - list of wanted operations to performs
> env_config (dict) - "extra" env configuration
> verbose (bool) - capture (verbose=False) or not the operation print
@return
> 0 if success, negative value otherwise
@ -125,8 +77,13 @@ def project_rules_exec(pkg_meta, target, rule_list, env_extra, verbose):
# move to the project path
os.chdir(pkg_meta.path)
# fetch target specific rules
project_rules = pkg_meta.get_build_rules(target)
# update ENV
os.environ.update(env_config)
# fetch build rules
project_rules = pkg_meta.get_build_rules()
if not project_rules:
return _exec_default_rules(pkg_meta, rule_list, verbose, env_config)
# main loop. Fetch operation, check if available, generate env
# information, and perform said operation
@ -138,11 +95,13 @@ def project_rules_exec(pkg_meta, target, rule_list, env_extra, verbose):
cmd = project_rules[rule].strip()
if pkg_meta.extra_conf and rule in pkg_meta.extra_conf:
cmd += ' ' + pkg_meta.extra_conf[rule]
__project_generate_env(env_extra, pkg_meta, target)
log.debug(f"[{pkg_meta.name}] rule : {rule} -> cmd : ${cmd}$")
log.debug(
f"[{pkg_meta.name}] rule : {rule} -> cmd : ${cmd}$ "
f"(with env = {env_config})"
)
ret = subprocess.run(
cmd.split(),
capture_output=verbose,
capture_output=False,
check=False
)
if ret.returncode != 0:

View File

@ -1,5 +1,5 @@
"""
Configuration file wrapper
core.config - Configuration file wrapper
"""
import os
import toml
@ -7,12 +7,14 @@ import toml
from core.logger import log
__all__ = [
'config_get',
'config_set',
'config_set_default'
'config'
]
DEFAULT_CONFIG_KEYVAL = [
#---
# Internals
#---
__DEFAULT_CONFIG_KEYVAL = [
('path.config', '~/.config/vxsdk/config.toml'),
('path.sysroot', '~/.local/share/vxsdk/sysroot'),
('path.packages', '~/.local/share/vxsdk/packages'),
@ -26,143 +28,152 @@ DEFAULT_CONFIG_KEYVAL = [
('pkg.local_storage', '{path.packages}')
]
__CACHED_CONFIG_FILE = None
__CACHED_CONFIG_PATH = None
class _VxConfigure():
""" configuration abstraction class """
def __init__(self, default_info):
self._conf_file = {}
self._conf_path = ''
self._set_default(default_info)
#---
# Internals
#---
#---
# Internals
#---
def __setitem_dots(dictionary, key, value, path=""):
if "." not in key:
old = dictionary[key] if key in dictionary else None
dictionary[key] = value
def _setitem_dots(self, dictionary, key, value, path=""):
if "." not in key:
old = dictionary[key] if key in dictionary else None
dictionary[key] = value
return old
group, key = key.split(".", 1)
if group in dictionary and not isinstance(dictionary[group], dict):
raise ValueError(f"cannot assign {value} into value {path+group}")
if group not in dictionary:
dictionary[group] = {}
return self._setitem_dots(
dictionary[group], key, value, path + group + "."
)
def _config_control(self, name, value=None):
""" Common configuration file manipulation
If the `value` parameter is not set, only a read operation will be
performed, otherwise the complete file will be updated.
@args
> name (str) - dot-separated key (ex : `default.board.name`)
> value (str) - value for the key
"""
# check if config file information are cached
# @notes
# - create the configuration file folder if needed
# - load the TOML content
# - cache pathname to avoid path manipulation
if not self._conf_path:
self._conf_path = os.path.expanduser(
'~/.config/vxsdk/config.toml'
)
if not self._conf_file:
if os.path.exists(self._conf_path):
with open(self._conf_path, 'r', encoding='utf8') as cfg:
self._conf_file = toml.loads(cfg.read())
# check "read-only" request (just fetch value)
if not value:
conf = self._conf_file
targets = name.split('.')
while targets:
if not targets[0] in conf:
log.debug(f"[config] unable to find target '{name}'")
return None
conf = conf[targets[0]]
targets = targets[1:]
return conf
# perform "write-only" request (update the configuration file)
old = self._setitem_dots(self._conf_file, name, value)
if not os.path.exists(self._conf_path):
os.makedirs(os.path.dirname(self._conf_path))
with open(self._conf_path, 'w+', encoding='utf-8') as file:
file.write(toml.dumps(self._conf_file))
# return the previous information of the update field (if available)
return old
group, key = key.split(".", 1)
if group in dictionary and not isinstance(dictionary[group], dict):
raise ValueError(f"cannot assign {value} into value {path+group}")
if group not in dictionary:
dictionary[group] = {}
return __setitem_dots(dictionary[group], key, value, path + group + ".")
def __config_control(name, value=None):
""" Common configuration file manipulation
If the `value` parameter is not set, only read operation will be performed,
otherwise the complete file will be updated.
@args
> name (str) - dot-separated ker (ex : `default.board.name`)
> value (str) - value for the key
"""
global __CACHED_CONFIG_PATH
global __CACHED_CONFIG_FILE
# check if config file information are cached
# @notes
# - create the configuration file folder if needed
# - load the TOML content
# - cache pathname to avoid path manipulation
if not __CACHED_CONFIG_PATH:
__CACHED_CONFIG_PATH = os.path.expanduser('~/.config/vxsdk/config.toml')
if not __CACHED_CONFIG_FILE:
cache_basename = os.path.basename(__CACHED_CONFIG_PATH)
if not os.path.exists(cache_basename):
os.makedirs(cache_basename)
with open(__CACHED_CONFIG_PATH, 'r+', encoding='utf-8') as file:
__CACHED_CONFIG_FILE = toml.loads(file.read())
# check "read-only" request (just fetch value)
if not value:
conf = __CACHED_CONFIG_FILE
targets = name.split('.')
while targets:
if not targets[0] in conf:
log.debug(f"[config] unable to find target '{name}'")
return None
conf = conf[targets[0]]
targets = targets[1:]
return conf
# perform "write-only" request (update the configuration file)
old = __setitem_dots(__CACHED_CONFIG_FILE, name, value)
with open(__CACHED_CONFIG_PATH, "w", encoding='utf-8') as file:
file.write(toml.dumps(__CACHED_CONFIG_FILE))
# return the previous information of the update field (if available)
return old
def _generate_value(name, val):
if not val:
return None
while val.find('{') >= 0:
if val.find('}') < 0:
break
key = val[val.find('{') + 1: val.find('}')]
res = __config_control(key)
if not res:
log.warn(f"[config] {name} = {val} : unable to find '{key}'")
def _generate_value(self, name, val):
if not val:
return None
val = val[:val.find('{')] + res + val[val.find('}') + 1:]
return val
while val.find('{') >= 0:
if val.find('}') < 0:
break
key = val[val.find('{') + 1: val.find('}')]
res = self._config_control(key)
if not res:
log.warn(f"[config] {name} = {val} : unable to find '{key}'")
return None
val = val[:val.find('{')] + res + val[val.find('}') + 1:]
return val
def _set_default(self, list_of_keyval):
""" Set default key / value
This function will set up all default key/value pairs if the key does
not exist. This is useful to centralize all default user configuration
information in this file instead of in all project files.
@arg
> list_of_keyval: list of tuple - [(key, value), ...]
"""
for key, value in list_of_keyval:
if not self._config_control(key):
self._config_control(key, value)
#---
# Public methods
#---
def get(self, key, default_value = None):
""" Get configuration key/value
This function will try to find the value of `key`. If the key does
not exist then None will be returned. You can specify a default value
if the key does not exist.
@args
> key: string - the key name
> default_value: string - the key default value if not found
@return
> return the key value or None if not found nor default value set
"""
if ret := self._config_control(key):
return self._generate_value(key, ret)
default_value = self._generate_value(key, default_value)
if default_value:
self._config_control(key, default_value)
return default_value
def set(self, key, value):
""" Set configuration key = value
This function will try to update the user vxSDK configuration file to
add key / value information. Note that the `value` can have placeholders
in its content like `{path.sysroot}/superh` which will fetch the
'path.sysroot' configuration key.
@args
> key: string - the key name
> name: string - the key value
@return
> the old key value or None if new
"""
return self._config_control(key, value)
#---
# Public functions
# Publics
#---
def config_get(key: str, default_value: str = None) -> str:
""" Get configuration key/value
This function will try to find the key value of `key`. If the key doest not
exists then None will be returned. You can specify a default value if the
key doest not exist.
@args
> key: string - the key name
> default_value: string - the key default value if not found
@return
> return the key value or None if not found nor default value set
"""
if ret := __config_control(key):
return _generate_value(key, ret)
default_value = _generate_value(key, default_value)
if default_value:
__config_control(key, default_value)
return default_value
def config_set(key: str, value: str) -> str:
""" Set configuration key = value
This function will try to update the user vxSDK configuration file to add
key / value information. Note that the `value` can have placeholder in its
content like `{path.sysroot}/superh` wich will fetch the 'path.sysroot'
configuration key.
@args
> key: string - the key name
> name: string - the key value
@return
> the old key value or None if new
"""
return __config_control(key, value)
def config_set_default(list_of_keyval: list):
""" Set default key / value
This function will setup all default key value if the key doest not exists.
This is usefull to centralise all default user configuration information in
this file instead of in all project files.
@arg
> list_of_keyval: list of tuple - [(key, value), ...]
"""
for key, value in list_of_keyval:
if not __config_control(key):
__config_control(key, value)
# workaround
config_set_default(DEFAULT_CONFIG_KEYVAL)
config = _VxConfigure(__DEFAULT_CONFIG_KEYVAL)

View File

@ -1,68 +0,0 @@
"""
Vhex converter module
"""
from core.conv.asset import conv_assets_generate
__all__ = [
'assets_generate'
]
def assets_generate(prefix_assets, prefix_src):
r"""Generate Vhex assets.
This function abstract the asset convertion for the Vhex Operating System.
It will walk througt the `<source_prefix>` folder and will try to find some
files named 'vxconv.txt' wich discribe assets information of a potential
project. Then it will use them to convert assets into an appropriate source
file in C without using any Vhex-specific function (so, you can use this
converter for other project).
The vxconv.txt file is structured like basic key/value file:
```
<exposed_symbols_name>:
type: <image type> (font, bitmap) - required
path: <image path> - required
...
<next_exposed_symbols_name>:
...
```
Each asset file description should have at least type and name information,
and each type have potentially its own requierements.
type = bitmap:
================================== =========================================
Keys name and value type Description
================================== =========================================
profile: <name> Select the bitmap pixel profile
| rgb4 | RGB 4 (indexed)
| rgb4a | RGBA 4 (indexed)
| rgb8 | RGB 8 (indexed)
| rgb8a | RGBA 8 (indexed)
| rgb16 | RGB 16 (5:R, 6:G, 5:B)
| rgb16a | RGBA 16 (5:R, 5:G, 5:B, 1:A)
================================== =========================================
type = font:
================================== =========================================
Keys name and value type Description
================================== =========================================
grid.size: 8x9 (widthxheight) caracter size in pixel
grid.padding: <pixel> space between caracter
grig.border: <pixel> space around grid
proportional: <true,false> caracter are cropped
line_height: <pixel> caracter line alignement
charset: <print,unicode> charset specification
================================== =========================================
@args:
> path (str) - the path to find assets
> source_prefix (str) - the path to generate image source file
@return:
> a list of string which represents all assets sources files path
"""
return conv_assets_generate(prefix_assets, prefix_src)

View File

@ -1,67 +0,0 @@
from core.conv.pixel import rgba8conv
from PIL import Image
import os
__all__ = [
'generate_addin'
]
def generate_addin(binary, icon=None, name=None, output=None, version=None):
    r"""Build a Vhex OS addin file from a compiled binary.

    Output layout: the magic ``b'VHEX'``, the addin name as NUL-terminated
    UTF-8, a 92x64 icon (one byte per pixel, all zeros when no icon is
    given), then the raw binary content.

    The addin name is optional: when omitted an empty name is written (the
    OS falls back on the internal name). The output path defaults to the
    binary path with a '.vxos' suffix appended.

    Args:
    > binary (str) - binary path (must exist, otherwise exit(84))
    > icon (str) - addin icon path, 92x64 expected (optional)
    > name (str) - addin name displayed in the menu (optional)
    > output (str) - output path for the generated addin (optional)
    > version (str) - currently unused

    Returns 0 on success.
    """
    # validate inputs before touching the output file
    if not os.path.exists(binary):
        logger(LOG_ERR, 'binary path is invalid')
        sys.exit(84)
    if icon and not os.path.exists(icon):
        logger(LOG_WARN, f'{icon}: icon does not exists, ignored')
        icon = None
    name = name if name else ''
    output = output if output else binary + '.vxos'
    bitmap = None
    if icon:
        bitmap = Image.open(icon)
        if bitmap.size != (92, 64):
            logger(
                LOG_ERR,
                f'{icon}:icon size does not match {bitmap.size} != (92, 64)',
                exit=84
            )
    # emit header, name, icon then the binary payload
    with open(binary, 'rb') as src, open(output, 'wb') as dst:
        dst.write(b'VHEX')
        dst.write(name.encode('utf8'))
        dst.write(b'\x00')
        if icon:
            for pixel in bitmap.getdata():
                dst.write(rgba8conv(pixel).to_bytes(1, 'big'))
        else:
            # blank 92x64 icon, one byte per pixel
            dst.write(bytes(92*64))
        dst.write(src.read())
    return 0

View File

@ -1,126 +0,0 @@
"""
Vhex assets converter
"""
import os
import toml
from core.logger import log
from core.conv.type.font import conv_font_generate
from core.conv.type.image import conv_image_generate
__all__ = [
'conv_assets_generate'
]
#---
# Private
#---
class _VxAsset():
    """Represent an asset object.

    Internal class which carries asset information with helper methods to
    abstract conversion and file-type manipulation (asset type 'font' and
    asset type 'image').

    Also note that this class is private because we use a tricky optimization
    to parse the `vxconv.txt` file, this is why we have no "private" property
    with setter and getter, and why this class is "hidden".

    Some important methods to note:
    ================================== =========================================
    Name                               Description
    ================================== =========================================
    generate_source_file()             Generate the source file (C)
    ================================== =========================================
    """
    def __init__(self, prefix, name, meta):
        # validate mandatory keys before touching the filesystem
        if 'path' not in meta:
            raise Exception(f"[{name}] missing required path information")
        if 'type' not in meta:
            raise Exception(f"[{name}] missing required type information")
        if meta['type'] not in ['font', 'image']:
            # bugfix: was `meta[type]` (the builtin as key -> KeyError)
            raise Exception(f"asset type '{meta['type']}' is not known")
        self._name = name
        self._meta = meta
        self._type = meta['type']
        self._path = prefix + '/' + meta['path']
        if not os.path.exists(self.path):
            # bugfix: missing f-prefix printed the braces literally
            raise Exception(f"asset path '{self._path}' cannot be opened")
    def __repr__(self):
        return f'<_VxAssetObj, {self.name}>'
    def __str__(self):
        content = f"[{self.name}]\n"
        content += f" - type: {self.type}\n"
        content += f" - path: {self.path}\n"
        return content
    #---
    # Getter
    #---
    @property
    def path(self):
        """<property> path (str) - resolved asset path on disk"""
        return self._path
    @property
    def name(self):
        """<property> name (str) - exposed symbol name"""
        return self._name
    @property
    def type(self):
        """<property> type (str) - 'font' or 'image'"""
        return self._type
    @property
    def meta(self):
        """<property> meta (dict) - raw key/value info from vxconv.toml"""
        return self._meta
    #---
    # Public method
    #---
    def generate_source_file(self, prefix_output):
        """Generate the C source file and return its pathname."""
        if self.type == 'font':
            return conv_font_generate(self, prefix_output)
        return conv_image_generate(self, prefix_output)
#---
# Public
#---
def conv_assets_generate(prefix_assets, prefix_output):
    """ Walk through the assets prefix and generate all source files

    Recursively searches `prefix_assets` for `vxconv.toml` files and converts
    every asset they describe into a C source file under `prefix_output`.

    @args
    > prefix_assets (str) - prefix used to recursively search for asset info
    > prefix_output (str) - prefix used for the output of generated files

    @return
    > a list of all generated source pathnames
    """
    if not os.path.exists(prefix_output):
        os.makedirs(prefix_output)
    generated = []
    for root, _, files in os.walk(prefix_assets):
        if 'vxconv.toml' not in files:
            continue
        with open(root + '/vxconv.toml', "r", encoding='utf-8') as inf:
            content = toml.loads(inf.read())
        for asset_name in content:
            log.user(f"converting {asset_name}...")
            asset = _VxAsset(root, asset_name, content[asset_name])
            # bugfix: `generated += <str>` extended the list with the
            # pathname's individual characters; append the pathname instead
            # (conversion failures return '', which is skipped as before)
            pathname = asset.generate_source_file(prefix_output)
            if pathname:
                generated.append(pathname)
    return generated

View File

@ -1,45 +0,0 @@
""" Pixel converter utilities
This file expose many 32 bits RGBA into various pixel format
"""
__all__ = [
'rgb1conv',
'rgb8conv',
'rgba8conv',
'rgb16conv',
'rgba16conv'
]
def rgb24to16(rgb):
    """Pack a 24-bit RGB triplet into RGB565 (5:R, 6:G, 5:B)."""
    red = (rgb[0] & 0xff) >> 3
    green = (rgb[1] & 0xff) >> 2
    blue = (rgb[2] & 0xff) >> 3
    return (red << 11) | (green << 5) | blue
def rgb1conv(pixel):
    """Convert a pixel to 1-bit: True only for an exact black RGB triplet.

    Note: a 4-tuple (RGBA) never equals the 3-tuple black, so RGBA input
    always yields False — behavior preserved from the original.
    """
    black = (0, 0, 0)
    return pixel == black
def rgb8conv(pixel):
    """Pack an RGB triplet into the 8-bit profile layout.

    NOTE(review): the green field (0..4 << 3) reaches bit 5, which is also
    the red field's lowest bit, so the fields overlap (white -> 231, not
    255). Looks like a latent bug but the intended layout is unclear —
    behavior preserved as-is; confirm against the display driver.
    """
    red = int((pixel[0] * 7) / 255) << 5
    green = int((pixel[1] * 4) / 255) << 3
    blue = int((pixel[2] * 7) / 255) << 0
    return red | green | blue
def rgba8conv(pixel):
    """Pack an RGB(A) pixel into 8 bits (2:R, 3:G, 2:B, 1:A).

    The alpha bit (bit 0) is set only when a fourth channel is present and
    fully transparent (pixel[3] == 0); a 3-tuple is treated as opaque.
    """
    red = int((pixel[0] * 4) / 256) << 6
    green = int((pixel[1] * 8) / 256) << 3
    blue = int((pixel[2] * 4) / 256) << 1
    transparent = len(pixel) >= 4 and pixel[3] == 0
    return red | green | blue | transparent
def rgb16conv(pixel):
    """Pack an RGB triplet into RGB565 by rescaling each channel."""
    red = int((pixel[0] * 31) / 255)
    green = int((pixel[1] * 63) / 255)
    blue = int((pixel[2] * 31) / 255)
    return (red << 11) | (green << 5) | (blue << 0)
def rgba16conv(pixel):
    """Pack an RGBA pixel into RGBA 5:5:5:1 (R<<11, G<<6, B<<1, A at bit 0).

    Bugfix: green was rescaled to 6 bits (``* 63``) — copy-pasted from
    rgb16conv — which, once shifted by 6, overlapped the red field at
    bit 11. The documented layout is 5:5:5:1, so every color channel is
    rescaled to 5 bits.

    NOTE(review): the alpha bit is set for *opaque* pixels (pixel[3] != 0),
    the opposite convention of rgba8conv — confirm which one the OS expects.
    """
    return int((pixel[0] * 31) / 255) << 11 \
        | int((pixel[1] * 31) / 255) << 6 \
        | int((pixel[2] * 31) / 255) << 1 \
        | (pixel[3] != 0)

View File

@ -1,466 +0,0 @@
"""
Vhex font converter
"""
from PIL import Image
from core.logger import log
__all__ = [
'conv_font_generate'
]
#---
# Internal vxconv.tmol handling routines
#---
def __font_fetch_info(asset):
    """ Check and fetch font information

    Builds the font description dictionary from the asset meta-data,
    applying defaults for every key the user did not provide.

    @arg
    > asset (VxAsset) - asset information
    @return
    > dictionary with font information, or None on error
    """
    meta = asset.meta
    # defaults for the user-tunable keys, plus cache slots filled later by
    # the conversion step (kept so the Vhex OS avoids recurrent computation
    # at render time)
    font_info = {
        'charset' : 'normal',
        'grid_size_x' : 0,
        'grid_size_y' : 0,
        'grid_padding' : 1,
        'grid_border' : 1,
        'is_proportional' : False,
        'line_height' : 0,
        'char_spacing' : 1,
        'glyph_size' : 0,
        'glyph_height' : 0,
        'font_size' : 0,
        'data' : []
    }
    # charset is restricted to a known set
    if 'charset' in meta:
        if meta['charset'] not in ['default', 'unicode']:
            log.error(f"Unknown charset '{meta['charset']}', abord")
            return None
        font_info['charset'] = meta['charset']
    # the grid size ("<width>x<height>") is the only mandatory key
    if 'grid_size' not in meta:
        log.error("Missing critical grid size information, abord")
        return None
    grid = meta['grid_size'].split('x')
    font_info['grid_size_x'] = int(grid[0])
    font_info['grid_size_y'] = int(grid[1])
    if 'grid_padding' in meta:
        font_info['grid_padding'] = int(meta['grid_padding'])
    if 'grid_border' in meta:
        font_info['grid_border'] = int(meta['grid_border'])
    if 'proportional' in meta:
        font_info['is_proportional'] = meta['proportional']
    # the line height defaults to the glyph grid height
    font_info['line_height'] = font_info['grid_size_y']
    if 'line_height' in meta:
        font_info['line_height'] = meta['line_height']
    if 'char_spacing' in meta:
        font_info['char_spacing'] = meta['char_spacing']
    font_info['glyph_height'] = font_info['grid_size_y']
    return font_info
#---
# Internal glyph routines
#---
def __glyph_get_wgeometry(geometry_info, img_raw, img_size, pos, grid_size):
    """ Generate glyph width geometry information

    Scans one glyph cell and records in `geometry_info` the first
    (`wstart`) and one-past-last (`wend`) non-white columns, i.e. the
    cropped width of the glyph used by proportional fonts.

    @args
    > geometry_info (dict) - geometry information (mutated in place)
    > img_raw (list) - list of all pixel of the image (flat, row-major)
    > img_size (tuple) - image width and image height
    > pos (tuple) - glyph position (X in pixel, Y as a flat row offset)
    > grid_size (tuple) - glyph grid size information (width and height)
    @return
    > Nothing
    """
    # -1 acts as a "not found yet" sentinel for both bounds
    geometry_info['wstart'] = -1
    geometry_info['wend'] = -1
    _px = pos[0]
    _py = pos[1]
    log.debug(f'[geometry] X:{pos[0]} Y:{int(pos[1]/img_size[0])}')
    log.debug(f' - grid_size = {grid_size}')
    for _ in range(0, grid_size[1]):
        for offx in range(0, grid_size[0]):
            # white pixels are background: skip them
            if img_raw[_py + (_px + offx)][:3] == (255, 255, 255):
                continue
            if geometry_info['wstart'] < 0 or offx < geometry_info['wstart']:
                geometry_info['wstart'] = offx
            # NOTE(review): the `wstart < 0` guard below looks copy-pasted
            # (`wend < 0` was probably intended); it is harmless because
            # `offx > wend` alone covers the first hit (wend starts at -1)
            if geometry_info['wstart'] < 0 or offx > geometry_info['wend']:
                geometry_info['wend'] = offx
        # advance one image row (flat offset)
        _py += img_size[0]
    # make `wend` exclusive (one past the last used column)
    geometry_info['wend'] += 1
    log.debug(f' - geometry = {geometry_info}')
def __glyph_encode(data_info, img_info, geometry, posx, posy):
    """ Encode glyph bitmap

    Packs the glyph pixels, row by row, into 32-bit words: a black pixel
    sets the next bit, any other color clears it. `data_info` acts as a
    cursor (word index + bit shift) shared across successive calls so the
    whole font forms one contiguous bitstream.

    @args
    > data_info (dict) - internal data information (list, index and shift)
    > img_info (dict) - image-related information (object and raw content)
    > geometry (dict) - geometry information (wstart/wend/hstart/hend)
    > posx (int) - X-axis position in pixel
    > posy (int) - Y-axis position as a flat row offset (already scaled
    >              by the image width)
    @return
    > Nothing (data_info['idx'] and data_info['shift'] are updated in place)
    """
    # fetch information
    img = img_info['obj']
    img_raw = img_info['raw']
    data = data_info['table']
    data_idx = data_info['idx']
    data_shift = data_info['shift']
    wstart = geometry['wstart']
    wend = geometry['wend']
    # encode the glyph
    yoff = 0
    log.debug(f'[encode] X:{posx} Y:{int(posy/img.size[0])}')
    for _h in range(geometry['hstart'], geometry['hend']):
        for _w in range(wstart, wend):
            # black pixel -> set the current bit, otherwise clear it
            if img_raw[(posy + yoff) + (posx + _w)][:3] == (0, 0, 0):
                log.debug('#', end='')
                data[data_idx] |= 0x80000000 >> data_shift
            else:
                log.debug('.', end='')
                data[data_idx] &= ~(0x80000000 >> data_shift)
            # advance the bit cursor, spilling into the next 32-bit word
            if (data_shift := data_shift + 1) >= 32:
                data_shift = 0
                data_idx += 1
        log.debug('')
        # move one image row down (flat offset)
        yoff += img.size[0]
    # commit modification
    data_info['idx'] = data_idx
    data_info['shift'] = data_shift
#---
# Intenal font conversion
#---
def __font_convert_proportional(packed_info):
    """ Generate proportional font

    Proportional font means that each character has its own width (but a
    common height): every glyph is first measured (cropped width), its
    width/index/shift triplet is recorded so the renderer can locate it in
    the packed bitstream, then the glyph is encoded.

    @args
    > packed_info (tuple) - (font_info, img_info, glyph_info, grid_info,
    >                        data_info, geometry_info) — see __font_convert()
    @return
    > 0 if success, negative value otherwise
    """
    # unpack information
    font_info = packed_info[0]
    img_info = packed_info[1]
    glyph_info = packed_info[2]
    data_info = packed_info[4]
    geometry_info = packed_info[5]
    # isolate needed information
    img = img_info['obj']
    img_raw = img_info['raw']
    nb_col = packed_info[3][0]
    nb_row = packed_info[3][1]
    gwidth = glyph_info[0]
    gheight = glyph_info[1]
    # main loop, walk glyph per glyph
    # (_py is a flat offset: one row down == +img.size[0])
    _py = (font_info['grid_border'] + font_info['grid_padding']) * img.size[0]
    for _ in range(0, nb_row):
        _px = font_info['grid_border'] + font_info['grid_padding']
        for _ in range(0, nb_col):
            # generate width geometry information
            __glyph_get_wgeometry(
                geometry_info,
                img_raw,
                img.size,
                (_px, _py),
                (font_info['grid_size_x'], font_info['grid_size_y'])
            )
            # save critical glyph geometry information that will be encoded in
            # the final C source file
            font_info['glyph_props'].append((
                geometry_info['wend'] - geometry_info['wstart'],
                data_info['idx'],
                data_info['shift']
            ))
            # encode glyph information
            __glyph_encode(data_info, img_info, geometry_info, _px, _py)
            # update loop information
            font_info['glyph_count'] += 1
            _px += gwidth
        _py += gheight * img.size[0]
    return 0
def __font_convert_monospaced(packed_info):
    """ Generate monospaced font

    Monospaced font means that every character shares the same width and
    height, so each glyph cell is encoded as-is with the full (default)
    grid geometry — no per-glyph measurement is needed.
    (Docstring fixed: it was a copy-paste of the proportional converter's.)

    @args
    > packed_info (tuple) - (font_info, img_info, glyph_info, grid_info,
    >                        data_info, geometry_info) — see __font_convert()
    @return
    > 0 if success, negative value otherwise
    """
    # unpack information
    font_info = packed_info[0]
    img_info = packed_info[1]
    glyph_info = packed_info[2]
    grid_info = packed_info[3]
    data_info = packed_info[4]
    geometry_info = packed_info[5]
    # isolate needed information
    img = img_info['obj']
    nb_row = grid_info[1]
    nb_col = grid_info[0]
    gwidth = glyph_info[0]
    gheight = glyph_info[1]
    # main loop, walk glyph per glyph
    # (_py is a flat offset: one row down == +img.size[0])
    _py = (font_info['grid_border'] + font_info['grid_padding']) * img.size[0]
    for _ in range(0, nb_row):
        _px = font_info['grid_border'] + font_info['grid_padding']
        for _ in range(0, nb_col):
            __glyph_encode(data_info, img_info, geometry_info, _px, _py)
            font_info['glyph_count'] += 1
            _px += gwidth
        _py += gheight * img.size[0]
    return 0
def __font_convert(asset, font_info):
    """ Generate font information

    Drives the whole conversion: opens the bitmap, derives glyph/grid
    geometry, allocates the packed data buffer, then delegates to the
    monospaced or proportional converter.

    @args
    > asset (VxAsset) - asset information
    > font_info (dict) - font information (mutated in place)
    @return
    > 0 if success, negative value otherwise
    """
    # generate image information
    img = Image.open(asset.path)
    img_raw = img.getdata()
    img_info = {
        'obj' : img,
        'raw' : img_raw
    }
    # pre-calculate the "real" glyph width and height using padding information
    glyph_info = [0, 0]
    glyph_info[0] = font_info['grid_size_x'] + font_info['grid_padding']
    glyph_info[1] = font_info['grid_size_y'] + font_info['grid_padding']
    gheight = glyph_info[1]
    gwidth = glyph_info[0]
    log.debug(f"gwidth = {gwidth} && gheight = {gheight}")
    # pre-calculate the number of row and column of the font
    grid_info = [0, 0]
    grid_info[0] = int((img.size[0] - (font_info['grid_border'] * 2)) / gwidth)
    grid_info[1] = int((img.size[1] - (font_info['grid_border'] * 2)) / gheight)
    nb_col = grid_info[0]
    nb_row = grid_info[1]
    log.debug(f"nb_row = {nb_row} && nb_col = {nb_col}")
    # pre-calculate and prepare per-glyph information
    # @note
    # The generated data is designed for 4-alignement padding. This to have
    # speed-up on drawing function.
    font_info['glyph_size'] = font_info['grid_size_x'] * font_info['grid_size_y']
    font_info['font_size'] = font_info['glyph_size'] * nb_row * nb_col
    font_info['glyph_count'] = 0
    font_info['glyph_props'] = []
    # one bit per pixel, rounded up to whole 32-bit words
    font_info['data'] = [0] * int((font_info['font_size'] + 31) / 32)
    log.debug(f"data original = {id(font_info['data'])}")
    # generate data information
    # (shared bit cursor: the converters advance idx/shift in place, and
    # 'table' aliases font_info['data'] so writes land in the final buffer)
    data_info = {
        'table' : font_info['data'],
        'idx' : 0,
        'shift' : 0
    }
    log.debug(f"data packed = {id(data_info['table'])}")
    # generate geometry information
    # (defaults cover the full grid; the proportional converter narrows the
    # width bounds per glyph)
    geometry_info = {
        'hstart' : 0,
        'hend' : font_info['grid_size_y'],
        'wstart' : 0,
        'wend' : font_info['grid_size_x'],
    }
    # select the converter
    converter = __font_convert_monospaced
    if font_info['is_proportional']:
        converter = __font_convert_proportional
    # convert font
    converter((
        font_info,
        img_info,
        glyph_info,
        grid_info,
        data_info,
        geometry_info
    ))
    log.debug(f"data packed end = {id(data_info['table'])}")
    return 0
#---
# Source file generation
#---
def __font_generate_unicode_source(_):
    """Unicode charset handler: not implemented yet.

    Logs an error and returns an empty content string so the caller emits
    a truncated (but non-crashing) source file.
    """
    log.error("unicode conversion not implemented yet o(x_x)o")
    return ''
def __font_generate_normal_source(font_info):
    """Generate the C initializer body for a printable-charset font.

    Emits the `.glyph = {...}` aggregate (bitmap words, glyph count, and
    either per-glyph proportional properties or the monospaced geometry)
    plus an empty `.unicode` aggregate.

    Bugfixes:
    - the proportional `.prop` array was never closed (unbalanced braces
      in the generated C);
    - the monospaced branch emitted `"{,"` (invalid C);
    - `font_info['glyph_width']` is never set anywhere (KeyError on every
      monospaced font) — the grid width is used instead, which IS the
      glyph width for monospaced fonts (TODO confirm padding handling).

    @args
    > font_info (dict) - hold font information
    @return
    > C content string
    """
    content = "\t.glyph = {\n"
    content += f"\t\t.height = {font_info['glyph_height']},\n"
    content += f"\t\t.line_height = {font_info['line_height']},\n"
    # encode font bitmap, four 32-bit words per line
    line = 0
    log.debug(f"data = {font_info['data']}")
    content += "\t\t.data = (uint32_t[]){\n"
    for pixel in font_info['data']:
        if line == 0:
            content += '\t\t\t'
        if line >= 1:
            content += ' '
        content += f"{pixel:#010x},"
        if (line := line + 1) == 4:
            content += '\n'
            line = 0
    if line != 0:
        content += '\n'
    content += '\t\t},\n'
    # indicate the number of glyph in the bitmap
    content += f"\t\t.count = {font_info['glyph_count']},\n"
    # encode proportional information if needed
    if font_info['is_proportional']:
        content += '\t\t.prop = (struct __workaround[]){\n'
        for prop in font_info['glyph_props']:
            content += "\t\t\t{\n"
            content += f"\t\t\t\t.width = {prop[0]},\n"
            content += f"\t\t\t\t.index = {prop[1]},\n"
            content += f"\t\t\t\t.shift = {prop[2]},\n"
            content += "\t\t\t},\n"
        # bugfix: close the .prop array
        content += "\t\t},\n"
    else:
        # bugfix: was "{,\n" (invalid C)
        content += "\t\t.mono = {\n"
        # bugfix: 'glyph_width' was never set; use the grid width
        content += f"\t\t\t.width = {font_info['grid_size_x']},\n"
        content += f"\t\t\t.size = {font_info['glyph_size']},\n"
        content += "\t\t},\n"
    content += "\t},\n"
    # skip unicode struct
    content += "\t.unicode = {\n"
    content += "\t\t.blocks = NULL,\n"
    content += "\t\t.block_count = 0,\n"
    content += "\t}\n"
    return content
def __font_generate_source_file(asset, font_info):
    """Generate the complete C source file content for a font asset.

    @args
    > asset (VxAsset) - asset information
    > font_info (dict) - hold font information
    @return
    > file C content string
    """
    parts = []
    # basic header and struct opening
    parts.append("#include <vhex/display/font.h>\n")
    parts.append("\n")
    parts.append(f"/* {asset.name} - Vhex asset\n")
    parts.append(" This object has been converted by using the vxSDK ")
    parts.append("converter */\n")
    parts.append(f"struct font const {asset.name} = " + "{\n")
    parts.append(f"\t.name = \"{asset.name}\",\n")
    # shape information (only the proportional flag is computed)
    parts.append("\t.shape = {\n")
    parts.append("\t\t.bold = 0,\n")
    parts.append("\t\t.italic = 0,\n")
    parts.append("\t\t.serif = 0,\n")
    parts.append("\t\t.mono = 0,\n")
    parts.append(f"\t\t.prop = {int(font_info['is_proportional'])},\n")
    parts.append("\t},\n")
    # manage display indication
    parts.append(f"\t.char_spacing = {font_info['char_spacing']},\n")
    # charset-specific body
    if font_info['charset'] == 'unicode':
        parts.append(__font_generate_unicode_source(font_info))
    else:
        parts.append(__font_generate_normal_source(font_info))
    # closure and return
    parts.append('};\n')
    return ''.join(parts)
#---
# Public
#---
def conv_font_generate(asset, prefix_output):
    """ Convert a font asset to a C source file

    (Docstring fixed: it previously said "image asset".)

    @args
    > asset (_VxAsset) - minimal asset information
    > prefix_output (str) - prefix for source file generation
    @return
    > pathname of the generated file ('' on error)
    """
    # gather and validate font information, then convert the bitmap
    font_info = __font_fetch_info(asset)
    if not font_info:
        return ''
    if __font_convert(asset, font_info) != 0:
        return ''
    # write the generated C content to disk
    asset_src = f'{prefix_output}/{asset.name}_vxfont.c'
    with open(asset_src, "w", encoding='utf8') as file:
        file.write(__font_generate_source_file(asset, font_info))
    log.debug(f"source file generated at {asset_src}")
    return asset_src

View File

@ -1,397 +0,0 @@
"""
Vhex image converter
"""
from PIL import Image
from core.logger import log
from core.conv.pixel import rgb24to16
__all__ = [
'conv_image_generate'
]
#---
# Private profile color management
#---
def __profile_gen(profile, name, palette=None, alpha=None):
    r""" Build an image profile description dictionary

    Keys of the returned dictionary:
    ================================== =========================================
    Key                                Description
    ================================== =========================================
    profile (str)                      profile format name (vhex API)
    name (str)                         short profile name (vxconv key)
    has_alpha (bool)                   indicate if the profile has alpha
    alpha (int)                        alpha index in the palette (or mask)
    is_indexed (bool)                  indicate if the profile is indexed
    palette (None)                     placeholder, filled at conversion time
    palette_base (int) *               base index for color inserting
    palette_color_count (int) *        number of colors in the palette
    palette_trim (bool) *              indicate if the palette may be trimmed
    ================================== =========================================
    (*) present only for indexed profiles (palette argument provided)
    """
    info = {
        'profile' : profile,
        'name' : name,
        'has_alpha' : alpha is not None,
        'alpha' : alpha,
        'is_indexed': palette is not None,
        'palette' : None
    }
    if palette is not None:
        info['palette_base'] = palette[0]
        info['palette_color_count'] = palette[1]
        info['palette_trim'] = palette[2]
    return info
# all supported profile information
VX_PROFILES = [
    __profile_gen('IMAGE_RGB565', "p16"),
    __profile_gen('IMAGE_RGB565A', "p16a", alpha=0x0001),
    __profile_gen('IMAGE_P8_RGB565', "p8", palette=(0,256,True)),
    __profile_gen('IMAGE_P8_RGB565A', "p8a", palette=(1,256,True), alpha=0),
    __profile_gen('IMAGE_P4_RGB565', "p4", palette=(0,16,False)),
    __profile_gen('IMAGE_P4_RGB565A', "p4a", palette=(1,16,False), alpha=0),
]
def __profile_find(name):
    """Return the profile description matching the short name, or None."""
    return next(
        (profile for profile in VX_PROFILES if profile['name'] == name),
        None
    )
#---
# Private image manipulation
#---
def __image_isolate_alpha(info):
    """ Isolate alpha color of the image

    Vhex use a particular handling for alpha color and this information should
    use a strict encoding way. Things that Pillow don't do properly. So, lets
    manually setup our alpha isolation and patch Pillow alpha palette handling.

    @args
    > info (dict) - contains all needed information (image, data, ...)
    @return
    > Nothing (info['img'] and the two pixel-access maps are set in place)
    """
    # fetch needed information
    img = info['img']
    profile = info['profile']
    # Save the alpha channel and make it 1-bit. We need to do this because
    # the alpha value is handled specialy in Vhex and the image conversion
    # to palette-oriented image is weird : the alpha colors is also converted
    # in the palette
    if profile['has_alpha']:
        alpha_channel = img.getchannel("A").convert("1", dither=Image.NONE)
    else:
        # no alpha in the profile: treat every pixel as opaque
        alpha_channel = Image.new("1", img.size, 1)
    alpha_pixels = alpha_channel.load()
    img = img.convert("RGB")
    # Transparent pixels have random values on the RGB channels, causing
    # them to use up palette entries during quantization. To avoid that, set
    # their RGB data to a color used somewhere else in the image.
    pixels = img.load()
    # pick the first opaque pixel's color (falls back on black)
    bg_color = next(
        (
            pixels[x,y]
            for x in range(img.width)
            for y in range(img.height)
            if alpha_pixels[x,y] > 0
        ),
        (0,0,0)
    )
    for _y in range(img.height):
        for _x in range(img.width):
            if alpha_pixels[_x, _y] == 0:
                pixels[_x, _y] = bg_color
    # update external information
    info['img'] = img
    info['img_pixel_list_alpha'] = alpha_pixels
    info['img_pixel_list_clean'] = pixels
def __image_encode_palette(info):
    """ Generate palette information

    This routine is involved only if the targeted profile is indexed. We need to
    generate (and isolate) color palette.

    @args
    > info (dict) - contains all needed information (image, data, ...)
    @return
    > Nothing (palette data/map and the quantized pixel map are stored in info)
    """
    # fetch needed information
    img = info['img']
    profile = info['profile']
    # convert image into palette format
    # note: we remove one color slot in the palette for the alpha one
    color_count = profile['palette_color_count'] - int(profile['has_alpha'])
    img = img.convert(
        'P',
        dither=Image.NONE,
        palette=Image.ADAPTIVE,
        colors=color_count
    )
    # The palette format is a list of N triplets ([r, g, b, ...]). But,
    # sometimes, colors after img.convert() are not numbered 0 to
    # `color_count`, because the palette don't need to be that big. So,
    # we calculate the "palette size" by walking through the bitmap and
    # by saving the biggest index used.
    pixels = img.load()
    nb_triplet = 1 + max(
        pixels[x,y]
        for y in range(img.height)
        for x in range(img.width)
    )
    palette = img.getpalette()[:3 * nb_triplet]
    # regroup the flat [r, g, b, r, g, b, ...] list into (r, g, b) triplets
    palette = list(zip(palette[::3], palette[1::3], palette[2::3]))
    # For formats with transparency, add an "unused" palette slot which
    # will be used as pink/purple in case a bad application tries to use
    # this value anyway
    if profile['has_alpha']:
        palette = [(255, 0, 255)] + palette
        nb_triplet += 1
    # Also keep track of how to remap indices from the values generated
    # by img.convert() into the palette, which is shifted by 1 due to
    # alpha and also starts at profile.palette_base.
    #
    # Note: profile.palette_base already starts 1 value later for
    # formats with alpha.
    palette_map = [
        (profile['palette_base'] + i) % profile['palette_color_count']
        for i in range(nb_triplet)
    ]
    # Encode the palette (RGB565 words), trimmed or full-size per profile
    palette_color_count = nb_triplet
    if not profile['palette_trim']:
        palette_color_count = profile['palette_color_count']
    palette_data = [0] * palette_color_count
    for i, rgb24 in enumerate(palette):
        palette_data[i] = rgb24to16(rgb24)
    # update internal information
    info['palette_map'] = palette_map
    info['palette_data'] = palette_data
    info['palette_color_count'] = palette_color_count
    info['nb_triplet'] = nb_triplet
    info['img_pixel_list_clean'] = pixels
def __image_encode_bitmap(info):
    """ Encode the bitmap

    This routine will generate the main data list which will contains the bitmap
    using Vhex-specific encoding.

    Bugfixes:
    - non-indexed profiles (p16/p16a) never go through
      __image_encode_palette(), so `info['palette_map']` raised a KeyError
      for them — fetched with .get() since the p16 branch never uses it;
    - the p16 alpha condition was inverted (opaque pixels received the
      alpha constant, transparent ones a real color), the opposite of the
      indexed branches;
    - `info['data_size']` was stored twice.

    @args
    > info (dict) - contains all needed information (image, data, ...)
    @return
    > Nothing (info['data'], info['data_size'], info['nb_stride'] are set)
    """
    # fetch needed information
    img = info['img']
    profile = info['profile']
    alpha_pixels = info['img_pixel_list_alpha']
    pixels = info['img_pixel_list_clean']
    palette_map = info.get('palette_map')
    # generate profile-specific geometry information
    if profile['name'] in ['p16', 'p16a']:
        # Preserve alignment between rows by padding to 4 bytes
        # NOTE(review): the p16 indexing below uses img.width rather than
        # nb_stride, and data_size looks oversized — confirm the intended
        # p16 stride semantics (units: pixels vs bytes)
        nb_stride = ((img.width + 1) // 2) * 4
        data_size = (nb_stride * img.height) * 2
    elif profile['name'] in ['p8', 'p8a']:
        nb_stride = img.width
        data_size = img.width * img.height
    else:
        # Pad whole bytes (two 4-bit pixels per byte)
        nb_stride = (img.width + 1) // 2
        data_size = nb_stride * img.height
    # Generate the real data map
    data = [0] * data_size
    # encode the bitmap
    for _y in range(img.height):
        for _x in range(img.width):
            # get alpha information about this pixel (non-zero == opaque)
            _a = alpha_pixels[_x, _y]
            if profile['name'] in ['p16', 'p16a']:
                # If c lands on the alpha value, flip its lowest bit to avoid
                # ambiguity with alpha (bugfix: condition was inverted)
                _c = rgb24to16(pixels[_x, _y]) & ~1 if _a else profile['alpha']
                data[(img.width * _y) + _x] = _c
            elif profile['name'] in ['p8', 'p8a']:
                _c = palette_map[pixels[_x, _y]] if _a > 0 else profile['alpha']
                data[(img.width * _y) + _x] = _c
            else:
                # p4: pack two 4-bit indices per byte, high nibble first
                _c = palette_map[pixels[_x, _y]] if _a > 0 else profile['alpha']
                offset = (nb_stride * _y) + (_x // 2)
                if _x % 2 == 0:
                    data[offset] |= (_c << 4)
                else:
                    data[offset] |= _c
    # update external information
    info['data'] = data
    info['data_size'] = data_size
    info['nb_stride'] = nb_stride
def __image_convert(asset, profile_name):
    """ Image asset conversion

    @args
    > asset (_VxAsset) - asset information
    > profile_name (str) - profile name information
    @return
    > a dictionary with all image information, or None on error
    """
    # generate critical information and check possible error
    img_info = {
        'img' : Image.open(asset.path),
        'profile' : __profile_find(profile_name)
    }
    if not img_info['img']:
        log.error(f"unable to open the asset '{asset.path}', abord")
        return None
    if not img_info['profile']:
        log.error(f"unable to find the color profile '{profile_name}', abord")
        return None
    # alpha isolation first, then palette (indexed profiles only), then the
    # bitmap itself
    __image_isolate_alpha(img_info)
    if img_info['profile']['is_indexed']:
        __image_encode_palette(img_info)
    __image_encode_bitmap(img_info)
    return img_info
#---
# Internal source file content generation
#---
def __display_array(array, prefix='\t\t'):
    """ Render a list of 16-bit values as C source text, 8 values per line. """
    content = ''
    column = 0
    for value in array:
        content += prefix if column == 0 else ' '
        content += f'{value:#06x},'
        column += 1
        if column >= 8:
            content += '\n'
            column = 0
    # terminate a partially-filled last line
    if column != 0:
        content += '\n'
    return content
def __image_generate_source_file(asset, info):
    """Generate image source file

    Bugfixes:
    - the element-type and per-pixel formatting tests compared
      `profile['profile']` (which holds 'IMAGE_RGB565', ...) against the
      short names ('p16', ...), so they never matched and every profile
      fell through to the 4-bit formatting; the short name lives under
      `profile['name']` (as used by __image_encode_bitmap);
    - `profile['palette_color_count']` does not exist for non-indexed
      profiles (p16/p16a) and raised a KeyError; default to 0.

    @args
    > asset (VxAsset) - asset information
    > info (dict) - hold image information
    @return
    > file C content string
    """
    img = info['img']
    profile = info['profile']
    # generate basic header
    content = "#include <vhex/display/image/types.h>\n"
    content += "\n"
    content += f"/* {asset.name} - Vhex asset\n"
    content += " This object has been converted by using the vxSDK "
    content += "converter */\n"
    content += "const image_t " + f"{asset.name} = " + "{\n"
    content += f"\t.format = {profile['profile']},\n"
    content += "\t.flags = IMAGE_FLAGS_RO | IMAGE_FLAGS_OWN,\n"
    content += f"\t.color_count = {profile.get('palette_color_count', 0)},\n"
    content += f"\t.width = {img.width},\n"
    content += f"\t.height = {img.height},\n"
    content += f"\t.stride = {info['nb_stride']},\n"
    # encode bitmap table (16-bit cells for p16 profiles, bytes otherwise)
    encode = 16 if profile['name'] in ['p16', 'p16a'] else 8
    content += f"\t.data = (void*)(const uint{encode}_t [])" + "{\n"
    for _y in range(img.height):
        content += '\t\t'
        for _x in range(info['nb_stride']):
            pixel = info['data'][(_y * info['nb_stride']) + _x]
            if profile['name'] in ['p16', 'p16a']:
                content += f'{pixel:#06x},'
            elif profile['name'] in ['p8', 'p8a']:
                content += f'{pixel:#04x},'
            else:
                content += f'{pixel:3},'
        content += '\n'
    content += '\t},\n'
    # add palette information (indexed profiles only)
    if 'palette_data' in info:
        content += "\t.palette = (void*)(const uint16_t []){\n"
        content += __display_array(info['palette_data'])
        content += "\t},\n"
    else:
        content += "\t.palette = NULL,\n"
    # closure and return
    content += '};'
    return content
#---
# Public
#---
def conv_image_generate(asset, prefix_output):
    """ Convert an image asset to a C source file

    @args
    > asset (_VxAsset) - minimal asset information
    > prefix_output (str) - prefix for source file generation
    @return
    > pathname of the generated file ('' on error)
    """
    # check critical requirement
    if 'profile' not in asset.meta:
        log.error(f"[{asset.name}] missing profile information!")
        return ''
    # convert the image, then generate the source file content
    img_info = __image_convert(asset, asset.meta['profile'])
    if not img_info:
        return ''
    # write the generated content to disk
    asset_src = f'{prefix_output}/{asset.name}_vximage.c'
    with open(asset_src, "w", encoding='utf8') as file:
        file.write(__image_generate_source_file(asset, img_info))
    return asset_src

View File

@ -1,5 +1,5 @@
"""
Log wrapper
core.logger - Log wrapper
"""
import sys
@ -7,23 +7,23 @@ __all__ = [
'log'
]
LOG_DEBUG = 7
LOG_INFO = 6
LOG_NOTICE = 5
LOG_USER = 4
LOG_WARN = 3
LOG_ERR = 2
LOG_CRIT = 1
LOG_EMERG = 0
#---
# Internals
#---
_LOG_DEBUG = 7
_LOG_INFO = 6
_LOG_NOTICE = 5
_LOG_USER = 4
_LOG_WARN = 3
_LOG_ERR = 2
_LOG_CRIT = 1
_LOG_EMERG = 0
class _VxLogger():
def __init__(self, logfile=None):
self._logfile = logfile
self._level = LOG_USER
self._level = _LOG_USER
self._indent = 0
#---
@ -33,7 +33,7 @@ class _VxLogger():
def _print(self, level, text, skip_indent, fileno):
if self._level < level:
return 0
if not skip_indent and self._level == LOG_DEBUG and self._indent > 0:
if not skip_indent and self._level == _LOG_DEBUG and self._indent > 0:
text = ('>>> ' * self._indent) + text
print(text, file=fileno, end='', flush=True)
return len(text) + 1
@ -50,19 +50,19 @@ class _VxLogger():
@level.setter
def level(self, level):
""" <property> handle print level """
if level < LOG_EMERG or level > LOG_DEBUG:
if level < _LOG_EMERG or level > _LOG_DEBUG:
print(f"[log] level update to {level} is not possible, ignored")
return
self._level = level
@property
def indent(self):
""" <property> handle indentation level for LOG_DEBUG """
""" <property> handle indentation level for __LOG_DEBUG """
return self._indent
@indent.setter
def indent(self, indent):
""" <property> handle indentation level for LOG_DEBUG """
""" <property> handle indentation level for __LOG_DEBUG """
if indent < 0:
print(f"[log] indent update to {indent} is not possible, ignored")
return
@ -74,39 +74,49 @@ class _VxLogger():
def debug(self, text, end='\n', skip_indent=False):
""" print debug log """
return self._print(LOG_DEBUG, text + end, skip_indent, sys.stdout)
return self._print(
_LOG_DEBUG, f"[DEBUG] {text}{end}", skip_indent, sys.stdout
)
def info(self, text, end='\n', skip_indent=False):
""" print info log """
return self._print(LOG_INFO, text + end, skip_indent, sys.stdout)
return self._print(
_LOG_INFO, f"[INFO] {text}{end}", skip_indent, sys.stdout
)
def notice(self, text, end='\n', skip_indent=False):
""" print notice log """
return self._print(LOG_NOTICE, text + end, skip_indent, sys.stdout)
return self._print(
_LOG_NOTICE, f"[NOTICE] {text}{end}", skip_indent, sys.stdout
)
def user(self, text, end='\n', skip_indent=False):
""" print user log """
return self._print(LOG_USER, text + end, skip_indent, sys.stdout)
return self._print(_LOG_USER, f"{text}{end}", skip_indent, sys.stdout)
def warn(self, text, end='\n', skip_indent=False):
""" print warning log """
return self._print(LOG_WARN, text + end, skip_indent, sys.stderr)
return self._print(
_LOG_WARN, f"[WARN] {text}{end}", skip_indent, sys.stderr
)
def error(self, text, end='\n', skip_indent=False):
""" print error log """
return self._print(LOG_ERR, text + end, skip_indent, sys.stderr)
return self._print(
_LOG_ERR, f"[ERROR] {text}{end}", skip_indent, sys.stderr
)
def critical(self, text, end='\n', skip_indent=False):
""" print critical log """
return self._print(LOG_CRIT, text + end, skip_indent, sys.stderr)
return self._print(
_LOG_CRIT, f"[CRITICAL] {text}{end}", skip_indent, sys.stderr
)
def emergency(self, text, end='\n', skip_indent=False):
""" print emergency log """
return self._print(LOG_EMERG, text + end, skip_indent, sys.stderr)
return self._print(
_LOG_EMERG, f"[EMERGENCY] {text}{end}", skip_indent, sys.stderr
)
#---
# Public functions

View File

@ -1,8 +1,8 @@
"""
Provide package primitives (mainly for syntax sugar)
core.pkg - Provide package primitives (mainly for syntax sugar)
"""
from core.pkg.find import pkg_find
from core.pkg.clone import pkg_clone
from core.pkg.find import find_pkg
from core.pkg.clone import clone_pkg
__all__ = [
'find',
@ -34,9 +34,9 @@ def find(name, version=None, local=True, remote=True):
]
> None if error
"""
return pkg_find(name, version, local, remote)
return find_pkg(name, version, local, remote)
def clone(name, version=None, prefix=None, confirm=False):
def clone(name, version=None, prefix=None, confirm=False, bare=False):
r""" Clone package with appropriate version
This function will try to find the wanted package with the appropriate
@ -48,12 +48,13 @@ def clone(name, version=None, prefix=None, confirm=False):
described in <vxsdk/core/pkg/version.py>
@args
> prefix (str) - clone path prefix
> name (str) - exact valid package name
> version (str) - version query string
> confirm (bool) - display user input to confirm the clone
> prefix (str) - clone path prefix
> name (str) - exact valid package name
> version (str) - version query string
> confirm (bool) - display user input to confirm the clone
> bare (bool) - do not clone the package to the global storage
@return
> the package path if successfully cloned, None otherwise
"""
return pkg_clone(name, version, prefix, confirm)
return clone_pkg(name, version, prefix, confirm, bare)

View File

@ -1,7 +1,8 @@
"""Remote backend constructor
"""
core.pkg.backend - Remote backend constructor
This package will exposes the major important object for the package core remote
part of the vxsdk.
This package will exposes the major important object for the package core
remote part of the vxsdk.
=========================== ============================================
Object name Description
@ -26,16 +27,27 @@ import sys
from core.pkg.backend.local import VxBackendLocal
from core.logger import log
from core.config import config_get
from core.config import config
__all__ = [
'PKG_CORE_BACKEND_REMOTE',
'PKG_CORE_BACKEND_LOCAL',
]
backend_remote_name = config_get('pkg.backend.name')
backend_remote_url = config_get('pkg.backend.url')
backend_local_url = os.path.expanduser(config_get('pkg.local_storage'))
#---
# Internals
#---
class _VxPKGBackendException(Exception):
""" custom backend exception wrapper """
#---
# Public
#---
backend_remote_name = config.get('pkg.backend.name')
backend_remote_url = config.get('pkg.backend.url')
backend_local_url = os.path.expanduser(config.get('pkg.local_storage'))
PKG_CORE_BACKEND_REMOTE = None
PKG_CORE_BACKEND_LOCAL = None
@ -45,7 +57,7 @@ try:
fromlist=['VxBackendRemote']
)
if not hasattr(mod, 'VxBackendRemote'):
raise Exception(
raise _VxPKGBackendException(
f"backend '{backend_remote_name}' doesn't expose "
"VxBackendRemote class"
)

View File

@ -1,5 +1,5 @@
"""
Vhex package core abstract class
core.pkg.backend.core - Vhex package core abstract class
"""
import abc
@ -9,25 +9,29 @@ __all__ = [
'VxRemoteBackendCore'
]
#---
# Public
#---
class VxRemoteBackendCore(abc.ABC):
r"""Represent a remote backend 'core' class.
This class is a simple abstract class that should be used by all internal
'backend' wich should expose some common methods and property:
================================== =========================================
================================== =======================================
Property Description
================================== =========================================
================================== =======================================
name (str) Backend name (for debug)
url (str) Backend URL (for internal use)
================================== =========================================
================================== =======================================
================================== =========================================
================================== =======================================
Method Description
================================== =========================================
================================== =======================================
find() find packages
clone() clone a package
================================== =========================================
================================== =======================================
"""
def __init__(self, url):
self._url = url
@ -78,7 +82,7 @@ class VxRemoteBackendCore(abc.ABC):
'full_name' (str) : full named package (author+name)
'description' (str) : package description
'versions' (list) : [
List of :obj:VxVersion that match the `version` argument
List of :obj:VxVersion that match `version` argument
]
},
...

View File

@ -1,5 +1,5 @@
"""
Vhex core backend for Gitea instance
core.pkg.backend.gitea - Vhex core backend for Gitea instance
"""
import os
import subprocess
@ -8,13 +8,16 @@ import requests
from core.pkg.backend.core import VxRemoteBackendCore
from core.pkg.version import VxVersion
from core.logger import log
from core.config import config_get
from core.config import config
__all__ = [
'VxBackendRemote'
]
#---
# Public
#---
class VxBackendRemote(VxRemoteBackendCore):
"""
Vhex Gitea backend class
@ -49,7 +52,8 @@ class VxBackendRemote(VxRemoteBackendCore):
params={
'q' : 'vxsdk',
'topic': 'True'
}
},
timeout=10
)
if not resp.ok:
log.warn(
@ -75,6 +79,7 @@ class VxBackendRemote(VxRemoteBackendCore):
'updated' : pkg['updated_at'].split('T')[0],
'author' : pkg['owner']['login'],
'default_branch' : pkg['default_branch'],
'storage' : 'remote',
'versions' : []
})
return self._pkg_list
@ -96,7 +101,7 @@ class VxBackendRemote(VxRemoteBackendCore):
# request branches information
pkg['versions'] = []
url = f"{self.url}/api/v1/repos/{pkg['full_name']}"
resp = requests.get(f'{url}/branches')
resp = requests.get(f'{url}/branches', timeout=10)
if not resp.ok:
log.warn(
f'[pkg]: backend: gitea: branches requests error\n'
@ -110,7 +115,7 @@ class VxBackendRemote(VxRemoteBackendCore):
)
# request tag information
resp = requests.get(f'{url}/tags')
resp = requests.get(f'{url}/tags', timeout=10)
if not resp.ok:
log.warn(
f'[pkg]: backend: gitea: tags requests error\n'
@ -126,28 +131,26 @@ class VxBackendRemote(VxRemoteBackendCore):
# Public methods
#---
def package_clone(self, pkg, _=None):
def package_clone(self, pkg, prefix=''):
""" Clone the package in global storage
@args
> pkg (dict) - package information returned by package_find()
> prefix (str) - clone path prefix
@return
> Complet path for the package (str), or None if error
"""
# fetch global storage prefix
# @notes
# - create it if its doesn't exists
prefix = os.path.expanduser(config_get('path.packages'))
if not os.path.exists(prefix):
os.makedirs(prefix)
# generate clone information
# @note
# - create clone folder if not exists
# generate clonning information and handle cache
pkg_ver = pkg['version']
pkg_name = f"{pkg['author']}@{pkg['name']}@{pkg_ver.name}@{pkg_ver.type}"
pkg_path = f"{prefix}/{pkg_name}"
pkg_path = f"{prefix}/{pkg['name']}"
if not prefix:
prefix = os.path.expanduser(config.get('path.packages'))
if not os.path.exists(prefix):
os.makedirs(prefix)
pkg_name = f"{pkg['author']}@{pkg['name']}"
pkg_name = f"{pkg_name}@{pkg_ver.name}@{pkg_ver.type}"
pkg_path = f"{prefix}/{pkg_name}"
if os.path.exists(pkg_path):
log.warn(f"[clone]: {pkg_name} already exists, skipped")
return pkg_path
@ -156,15 +159,17 @@ class VxBackendRemote(VxRemoteBackendCore):
cmd = [
'git',
'-c', 'advice.detachedHead=false',
'clone', '--branch', pkg_ver.name,
pkg['url'], pkg_path,
'clone', pkg['url'],
pkg_path,
'--depth=1'
]
if pkg_ver.name:
cmd += ['--branch', pkg_ver.name]
log.debug(f"[gitea] {cmd}")
status = subprocess.run(cmd, capture_output=True, check=False)
if status.returncode != 0:
log.error(f"[clone] : unable to clone {pkg_name}, abord")
return []
log.error(f"[clone] : unable to clone {pkg['name']}, abord")
return ''
# return the package path
return pkg_path

View File

@ -1,7 +1,8 @@
"""
Vhex core backend for local package
core.pkg.backend.local - Vhex core backend for local package
"""
import os
import subprocess
from core.logger import log
from core.pkg.backend.core import VxRemoteBackendCore
@ -11,6 +12,10 @@ __all__ = [
'VxBackendLocal'
]
#---
# Public
#---
class VxBackendLocal(VxRemoteBackendCore):
"""
Vhex backend local package class
@ -57,6 +62,7 @@ class VxBackendLocal(VxRemoteBackendCore):
'updated' : None,
'author' : file.split('@')[0],
'default_branch' : None,
'storage' : 'local',
'versions' : [
VxVersion(file.split('@')[2], file.split('@')[3], 'local')
]
@ -74,16 +80,31 @@ class VxBackendLocal(VxRemoteBackendCore):
# Public methods
#---
def package_clone(self, pkg, prefix=None):
if not prefix:
prefix = os.getcwd()
if not os.path.exists(prefix):
os.makedirs(prefix)
if not os.path.exists(f"{prefix}/{pkg['name']}"):
log.debug(f"[local] link '{pkg['url']}' > '{prefix}/{pkg['name']}'")
os.symlink(
pkg['url'],
f"{prefix}/{pkg['name']}",
target_is_directory=True
)
return f"{prefix}/{pkg['name']}"
def package_clone(self, pkg, prefix='', bare=False):
if prefix:
output_path = prefix
if prefix[-1] == '/':
output_path = f"{prefix}{pkg['name']}"
if not os.path.isdir(prefix):
log.error(
f"package '{pkg['name']}' cannot be cloned at "
f"'{prefix}' because this path is not a directory"
)
return ''
else:
if not os.path.exists(prefix := os.getcwd()):
os.makedirs(prefix)
output_path = f"{prefix}/{pkg['name']}"
if not os.path.exists(output_path):
if not bare:
log.debug(f"[local] link '{pkg['url']}' > '{output_path}'")
os.symlink(pkg['url'], output_path, target_is_directory=True)
else:
log.debug(f"[local] bare cpy '{pkg['url']}' > '{output_path}'")
subprocess.run(
f"cp -r {pkg['url']} {output_path}".split(),
check=False
)
else:
log.debug(f"[local] file '{output_path}' already exists, skipped")
return output_path

View File

@ -1,49 +1,58 @@
"""
Package clone backend abstraction
core.pkg.clone - Package clone backend abstraction
"""
import os
from core.pkg.backend import PKG_CORE_BACKEND_REMOTE, PKG_CORE_BACKEND_LOCAL
from core.pkg.version import VxVersion
from core.pkg.find import pkg_find
from core.pkg.find import find_pkg
from core.logger import log
__all__ = [
'pkg_clone'
'clone_pkg'
]
#---
# Internals
#---
def _pkg_clone_core(pkg, prefix):
"""
def _pkg_clone_core(pkg, prefix, bare):
""" Perform remote clone if needed then local clone if needed too
@args
> pkg (dict) - package information
> pkg (dict) - package information
> prefix (str) - clone path prefix
> bare (bool) - do not performs local installation
@return
> the package path if successfully cloned, None otherwise
"""
version = pkg['version']
# handle prefix. If not bare clone is required then force the "remote"
# cloning operation to be performed in the global storage
remote_prefix = prefix if bare else ''
# be sure that the package target is in the global storage
log.user(f"cloning package {pkg['name']}...")
pkg_path = PKG_CORE_BACKEND_REMOTE.package_clone(pkg)
if not pkg_path:
log.error(
f"{pkg['name']}@{version.name}: unable to clone the package, abord",
)
return None
if pkg['storage'] != 'local':
log.user(f"cloning package {pkg['name']}...")
version = pkg['version']
pkg_path = PKG_CORE_BACKEND_REMOTE.package_clone(pkg, remote_prefix)
if not pkg_path:
log.error(
f"{pkg['name']}@{version.name}: unable to clone the package",
)
return ''
if bare:
return pkg_path
pkg['url'] = pkg_path
# "clone" the package (create a symbolic link)
pkg['url'] = pkg_path
return PKG_CORE_BACKEND_LOCAL.package_clone(pkg, prefix)
return PKG_CORE_BACKEND_LOCAL.package_clone(pkg, prefix, bare)
#---
# Public
#---
def pkg_clone(name, version, prefix, confirm=False):
def clone_pkg(name, version, prefix='', confirm=False, bare=False):
r""" Clone the package
This function will try to clone the package with the exact selected
@ -51,19 +60,26 @@ def pkg_clone(name, version, prefix, confirm=False):
information about the process.
@args
> name (str) - exact valid package name
> version (str) - version query string
> prefix (str) - clone path prefix
> confirm (bool) - display user input to confirm the clone
> name (str) - exact valid package name
> version (str) - version query string
> prefix (str) - clone path prefix
> confirm (bool) - display user input to confirm the clone
> bare (bool) - do not clone the package to the global storage
@return
> the package path if successfully cloned, None otherwise
"""
# if no output prefix is provided, then use the current working directory
if not prefix:
prefix = f"{os.getcwd()}/"
# try to find the package anywhere that the vxSDK allow
pkg_list = pkg_find(name, version, local=True, remote=True)
pkg_list = find_pkg(name, version, local=True, remote=False)
if not pkg_list:
pkg_list = find_pkg(name, version, local=False, remote=True)
if not pkg_list:
log.error("[pkg] pacakge find error")
return None
return ''
if len(pkg_list) != 1:
log.warn("[pkg] multiple package found, other will be ignored")
@ -72,18 +88,19 @@ def pkg_clone(name, version, prefix, confirm=False):
if not pkg_info['version']:
if version:
log.error(f"{name}@{version}: unable to find the version")
return None
return ''
pkg_info['version'] = VxVersion(pkg_info['default_branch'], 'branch')
# wait user interaction if needed
if confirm and not 'local' in pkg_info['version'].sources:
if confirm and pkg_info['storage'] != 'local':
log.user(
f"Do you want to install '{pkg_info['full_name']}'? (Y/n) ",
end = ''
)
valid = input()
if valid and not valid in ['Y', 'y', 'yes', 'Yes']:
return None
return ''
# "real" clone the package
return _pkg_clone_core(pkg_info, prefix)
test = _pkg_clone_core(pkg_info, prefix, bare)
return test

View File

@ -1,12 +1,12 @@
"""
Package find backend abstraction
core.pkg.find - Package find backend abstraction
"""
from core.logger import log
from core.pkg.backend import PKG_CORE_BACKEND_REMOTE, PKG_CORE_BACKEND_LOCAL
__all__ = [
'pkg_find'
'find_pkg'
]
#---
@ -63,7 +63,7 @@ def _pkg_find_select_best(pkg_remote, pkg_local):
# Public
#---
def pkg_find(name, version=None, local=False, remote=True):
def find_pkg(name, version=None, local=False, remote=True):
r""" Find the most appropriate package information
This function will request to the remote backend the list of all version of

View File

@ -1,3 +1,6 @@
"""
core.pkg.version - version abstraction
"""
import os
import re
import subprocess
@ -8,8 +11,12 @@ __all__ = [
'version_get'
]
class VxVersion(object):
r"""Represent a Version object.
#---
# Public
#---
class VxVersion():
"""Represent a Version object.
This version mecanism is a strong part of the package managing because If
the 'version' is detected to respect the correct semantic versioning, you
@ -45,8 +52,8 @@ class VxVersion(object):
> Use 'releases' section too ?
> Define explicitely version type (tag, branch, releases ?)
"""
def __init__(self, name, type=None, source=None):
self._type = type
def __init__(self, name, vertype=None, source=None):
self._type = vertype
self._name = name
self._source_list = [source]
@ -55,12 +62,15 @@ class VxVersion(object):
@property
def name(self):
""" version name """
return self._name
@property
def type(self):
""" version type """
return self._type
@property
def sources(self):
""" version source list """
return self._source_list
@ -73,7 +83,8 @@ class VxVersion(object):
Note that the pattern can be None, in this case, the validation step is
always success. Beside, the pattern can be a non-valid version semantic
request, in this case the version name should match the entiere pattern.
request, in this case the version name should match the entiere
pattern.
Args:
> pattern (None, str) - pattern used to check if the version is valid
@ -83,7 +94,7 @@ class VxVersion(object):
"""
return version_is_valid(self.name, pattern)
def compare(self, version):
def compare(self, vers):
r"""Compare two version
This methods will compare versionning inforation and return an interger
@ -94,24 +105,30 @@ class VxVersion(object):
> a positive value - self is greater than 'version'
"""
try:
s1 = tuple([int(d) for d in self.name.split(".") if d] + [0,0,0])[:3]
s2 = tuple([int(d) for d in version.name.split(".") if d] + [0,0,0])[:3]
ver1 = tuple([int(d) for d in self.name.split(".") if d] + [0,0,0])
ver2 = tuple([int(d) for d in vers.name.split(".") if d] + [0,0,0])
if s1[0] - s2[0] != 0: return s1[0] - s2[0]
if s1[1] - s2[1] != 0: return s1[1] - s2[1]
return s1[2] - s2[2]
except Exception as _:
ver1 = ver1[:3]
ver2 = ver2[:3]
if ver1[0] - ver2[0] != 0:
return ver1[0] - ver2[0]
if ver1[1] - ver2[1] != 0:
return ver1[1] - ver2[1]
return ver1[2] - ver2[2]
except ValueError:
return 0
def addSource(self, source):
def add_source(self, source):
""" add source """
self._source_list.append(source)
def version_is_valid(version, pattern):
r"""Check if the version validate a pattern
"""Check if the version validate a pattern
If the 'version' is detected to respect the correct semantic versioning, you
can perform version operations: caret (^), tilde (~) and wildcard (*)
If the 'version' is detected to respect the correct semantic versioning,
you can perform version operations: caret (^), tilde (~) and wildcard (*)
Caret requirements (^)
^1.2.3 := >=1.2.3, <2.0.0
@ -152,13 +169,13 @@ def version_is_valid(version, pattern):
return tuple(digits + [0,0,0])[:3]
# Index of first nonzero component
def _first_nonzero(v, default):
return next((i for i, value in enumerate(v) if value), default)
def _first_nonzero(ver, default):
return next((i for i, value in enumerate(ver) if value), default)
# Increment at specified position
def _increment_at(v, position):
padding = len(v) - position - 1
return tuple(list(v[:position]) + [v[position]+1] + [0] * padding)
def _increment_at(ver, position):
padding = len(ver) - position - 1
return tuple(list(ver[:position]) + [ver[position]+1] + [0] * padding)
# Parse a spec like ^1.2.3, ~1.2 or 1.2.* into a min/max pair
def _parse_spec(spec):
@ -167,21 +184,20 @@ def version_is_valid(version, pattern):
return None
spec_length = spec.count(".") + 1
v = _parse_version(
ver = _parse_version(
spec.replace("^","").replace("~","").replace("*","")
)
M, m, p = v
_maj, _min, _fix = ver
if spec[0] == '^':
return v, _increment_at(v, _first_nonzero(v, spec_length-1))
elif spec[0] == '~':
return v, _increment_at(v, min(spec_length-1, 1))
elif spec == "*":
return ver, _increment_at(ver, _first_nonzero(ver, spec_length-1))
if spec[0] == '~':
return ver, _increment_at(ver, min(spec_length-1, 1))
if spec == "*":
return ((0, 0, 0), None)
elif "*" in spec:
return v, _increment_at(v, spec_length-2)
else:
return (M,m,p), (M,m,p+1)
if "*" in spec:
return ver, _increment_at(ver, spec_length-2)
return (_maj, _min, _fix), (_maj, _min, _fix + 1)
# Check if version is between requested bounds
def _version_is_suitable(version, min_version, max_version):
@ -195,10 +211,11 @@ def version_is_valid(version, pattern):
return version == pattern
fmt_version = _parse_version(version)
return _version_is_suitable(fmt_version, pair[0], pair[1])
except:
except ValueError:
return False
def version_get(path):
""" fetch project version """
saved_pwd = os.getcwd()
os.chdir(path)
ret = subprocess.run(

View File

@ -1,20 +0,0 @@
import os
import shutil
from core.logger import log
__all__ = [
'project_new'
]
#TODO: change internal project name
def project_new(project_path):
if os.path.exists(project_path):
logger(LOG_WARN, f'The path {project_path} already exists !')
return True
origin_path = os.path.dirname(__file__)
shutil.copytree(
origin_path + '/../../assets/project/',
project_path
)
logger(LOG_USER, f"project '{project_path}' successfully created !")