sanitycheck: move all related files to sanity_chk
Signed-off-by: Anas Nashif <anas.nashif@intel.com>
This commit is contained in:
parent
63d418f0da
commit
db3d55ff1b
6 changed files with 4 additions and 4 deletions
249
scripts/sanity_chk/expr_parser.py
Normal file
249
scripts/sanity_chk/expr_parser.py
Normal file
|
@ -0,0 +1,249 @@
|
|||
#!/usr/bin/env python3
|
||||
#
|
||||
# Copyright (c) 2016 Intel Corporation.
|
||||
#
|
||||
# SPDX-License-Identifier: Apache-2.0
|
||||
|
||||
import sys
|
||||
import os
|
||||
import copy
|
||||
import threading
|
||||
import re
|
||||
|
||||
try:
|
||||
import ply.lex as lex
|
||||
import ply.yacc as yacc
|
||||
except ImportError:
|
||||
print("PLY library for Python 3 not installed.")
|
||||
print("Please install the python3-ply package using your workstation's")
|
||||
print("package manager or the 'pip' tool.")
|
||||
sys.exit(1)
|
||||
|
||||
reserved = {
|
||||
'and' : 'AND',
|
||||
'or' : 'OR',
|
||||
'not' : 'NOT',
|
||||
'in' : 'IN',
|
||||
}
|
||||
|
||||
tokens = [
|
||||
"HEX",
|
||||
"STR",
|
||||
"INTEGER",
|
||||
"EQUALS",
|
||||
"NOTEQUALS",
|
||||
"LT",
|
||||
"GT",
|
||||
"LTEQ",
|
||||
"GTEQ",
|
||||
"OPAREN",
|
||||
"CPAREN",
|
||||
"OBRACKET",
|
||||
"CBRACKET",
|
||||
"COMMA",
|
||||
"SYMBOL",
|
||||
"COLON",
|
||||
] + list(reserved.values())
|
||||
|
||||
def t_HEX(t):
    r"0x[0-9a-fA-F]+"
    # Canonicalize the hex literal to a decimal string so every number
    # reaches the parser in one uniform representation.
    as_int = int(t.value, 16)
    t.value = str(as_int)
    return t
|
||||
|
||||
def t_INTEGER(t):
    r"\d+"
    # Round-trip through int() so leading zeros are stripped and the
    # value is stored in canonical decimal-string form.
    numeric = int(t.value)
    t.value = str(numeric)
    return t
|
||||
|
||||
def t_STR(t):
    r'\"([^\\\n]|(\\.))*?\"|\'([^\\\n]|(\\.))*?\''
    # Drop the surrounding quote characters (single or double).
    quoted = t.value
    t.value = quoted[1:-1]
    return t
|
||||
|
||||
# Simple tokens are declared as plain regex strings.  PLY sorts these
# rules by decreasing regex length, so two-character operators such as
# "<=" are tried before their one-character prefixes like "<".
t_EQUALS = r"=="

t_NOTEQUALS = r"!="

t_LT = r"<"

t_GT = r">"

t_LTEQ = r"<="

t_GTEQ = r">="

t_OPAREN = r"[(]"

t_CPAREN = r"[)]"

t_OBRACKET = r"\["

t_CBRACKET = r"\]"

t_COMMA = r","

t_COLON = ":"
|
||||
|
||||
def t_SYMBOL(t):
    r"[A-Za-z_][0-9A-Za-z_]*"
    # Keywords ("and", "or", "not", "in") are carved out of the generic
    # identifier rule via the reserved-word table.
    if t.value in reserved:
        t.type = reserved[t.value]
    else:
        t.type = "SYMBOL"
    return t
|
||||
|
||||
# Whitespace (including newlines) carries no meaning in expressions.
t_ignore = " \t\n"

def t_error(t):
    # Any character not matched by a token rule aborts the whole parse;
    # t.value here is the remaining unscanned input.
    raise SyntaxError("Unexpected token '%s'" % t.value)
|
||||
|
||||
# Build the lexer from the t_* definitions above.
lex.lex()

# Operator precedence, lowest to highest.  Comparisons are declared
# non-associative so chains like "a == b == c" are rejected.
precedence = (
    ('left', 'OR'),
    ('left', 'AND'),
    ('right', 'NOT'),
    ('nonassoc' , 'EQUALS', 'NOTEQUALS', 'GT', 'LT', 'GTEQ', 'LTEQ', 'IN'),
)
|
||||
|
||||
# NOTE: in PLY the docstring of each p_* function *is* the grammar
# production, so the docstrings below are grammar, not documentation,
# and must stay exactly as written.

def p_expr_or(p):
    'expr : expr OR expr'
    p[0] = ("or", p[1], p[3])

def p_expr_and(p):
    'expr : expr AND expr'
    p[0] = ("and", p[1], p[3])

def p_expr_not(p):
    'expr : NOT expr'
    p[0] = ("not", p[2])

def p_expr_parens(p):
    'expr : OPAREN expr CPAREN'
    # Parentheses only group; they add no AST node of their own.
    p[0] = p[2]
|
||||
|
||||
def p_expr_eval(p):
    """expr : SYMBOL EQUALS const
       | SYMBOL NOTEQUALS const
       | SYMBOL GT number
       | SYMBOL LT number
       | SYMBOL GTEQ number
       | SYMBOL LTEQ number
       | SYMBOL IN list
       | SYMBOL COLON STR"""
    # Comparison nodes are (operator, symbol-name, right-hand value);
    # ast_expr() below dispatches on the operator string.
    p[0] = (p[2], p[1], p[3])

def p_expr_single(p):
    """expr : SYMBOL"""
    # A bare symbol tests whether the symbol has a truthy value.
    p[0] = ("exists", p[1])
|
||||
|
||||
def p_list(p):
    """list : OBRACKET list_intr CBRACKET"""
    p[0] = p[2]

def p_list_intr_single(p):
    """list_intr : const"""
    p[0] = [p[1]]

def p_list_intr_mult(p):
    """list_intr : list_intr COMMA const"""
    # Copy so the list owned by the shorter production is not mutated
    # in place when the longer one extends it.
    p[0] = copy.copy(p[1])
    p[0].append(p[3])
|
||||
|
||||
def p_const(p):
    """const : STR
       | number"""
    p[0] = p[1]

def p_number(p):
    """number : INTEGER
       | HEX"""
    # Numbers stay as (decimal) strings here; ast_expr() converts when
    # a numeric comparison actually needs an int.
    p[0] = p[1]

def p_error(p):
    # p is None when the input ended mid-production.
    if p:
        raise SyntaxError("Unexpected token '%s'" % p.value)
    else:
        raise SyntaxError("Unexpected end of expression")

# Build the module-level parser from the p_* rules above.
parser = yacc.yacc()
|
||||
|
||||
def ast_sym(ast, env):
    """Return the value of symbol *ast* in *env* as a string, "" if unset."""
    if ast not in env:
        return ""
    return str(env[ast])
|
||||
|
||||
def ast_sym_int(ast, env):
    """Return the value of symbol *ast* in *env* as an integer.

    Accepts decimal strings and 0x/0X-prefixed hex strings.  Non-string
    environment values (e.g. a plain int, as in the local_env test data
    at the bottom of this file) are coerced through str() first instead
    of crashing on .startswith().  Returns 0 when the symbol is unset.
    """
    if ast not in env:
        return 0
    # str() so integer-valued environments work too; the original code
    # assumed every value was already a string.
    v = str(env[ast])
    if v.startswith(("0x", "0X")):
        return int(v, 16)
    return int(v, 10)
|
||||
|
||||
def ast_expr(ast, env):
    """Recursively evaluate AST tuple *ast* against dict *env*.

    ast[0] is the node kind; the remaining elements are operands.
    Returns a truthy/falsy result (bool for most node kinds).
    """
    if ast[0] == "not":
        return not ast_expr(ast[1], env)
    elif ast[0] == "or":
        return ast_expr(ast[1], env) or ast_expr(ast[2], env)
    elif ast[0] == "and":
        return ast_expr(ast[1], env) and ast_expr(ast[2], env)
    elif ast[0] == "==":
        # Equality compares the *string* form of the environment value.
        return ast_sym(ast[1], env) == ast[2]
    elif ast[0] == "!=":
        return ast_sym(ast[1], env) != ast[2]
    elif ast[0] == ">":
        # Ordered comparisons are numeric; the grammar guarantees the
        # right-hand side is an INTEGER/HEX (stored as a decimal string).
        return ast_sym_int(ast[1], env) > int(ast[2])
    elif ast[0] == "<":
        return ast_sym_int(ast[1], env) < int(ast[2])
    elif ast[0] == ">=":
        return ast_sym_int(ast[1], env) >= int(ast[2])
    elif ast[0] == "<=":
        return ast_sym_int(ast[1], env) <= int(ast[2])
    elif ast[0] == "in":
        return ast_sym(ast[1], env) in ast[2]
    elif ast[0] == "exists":
        # Bare symbol: true when the value is a non-empty string.
        return True if ast_sym(ast[1], env) else False
    elif ast[0] == ":":
        # "SYMBOL : STR" treats the string as a regex matched against
        # the start of the symbol's value.
        return True if re.compile(ast[2]).match(ast_sym(ast[1], env)) else False
|
||||
|
||||
# The generated parser keeps global state, so access is serialized.
mutex = threading.Lock()

def parse(expr_text, env):
    """Given a text representation of an expression in our language,
    use the provided environment to determine whether the expression
    is true or false"""

    # Like its C counterpart, the state machine is not thread-safe.
    with mutex:
        syntax_tree = parser.parse(expr_text)

    return ast_expr(syntax_tree, env)
|
||||
|
||||
# Just some test code
if __name__ == "__main__":

    # Sample environment; note "E" is an int while the others are
    # strings -- NOTE(review): ast_sym_int() assumes string values, so
    # an integer comparison against "E" would crash; confirm intended.
    local_env = {
        "A" : "1",
        "C" : "foo",
        "D" : "20",
        "E" : 0x100,
        "F" : "baz"
    }

    # Treat each line of the file named on the command line as one
    # expression: dump its token stream, its raw AST, and finally its
    # evaluated result against local_env.
    for line in open(sys.argv[1]).readlines():
        lex.input(line)
        for tok in iter(lex.token, None):
            print(tok.type, tok.value)

        # Rebuilding the parser per line is redundant (a module-level
        # parser already exists) but harmless in test code.
        parser = yacc.yacc()
        print(parser.parse(line))

        print(parse(line, local_env))
|
||||
|
||||
|
||||
|
||||
|
37
scripts/sanity_chk/ini2yaml.py
Executable file
37
scripts/sanity_chk/ini2yaml.py
Executable file
|
@ -0,0 +1,37 @@
|
|||
#!/usr/bin/env python
# Convert a legacy .ini testcase file into the YAML format consumed by
# sanitycheck.  Python 2 only (uses ConfigParser and readfp).
#
# usage: ini2yaml.py <input.ini> <output-base-name>
#
# The output is written next to the input as <output-base-name>.yaml.
# Passing the literal base name "sample" also emits a placeholder
# 'sample' section (name/description to be filled in by hand).

import ConfigParser, os
import yaml
import sys


sample = False
in_file = sys.argv[1]
if sys.argv[2] == 'sample':
    sample = True

out_file = os.path.join(os.path.dirname(in_file), sys.argv[2] + ".yaml")

config = ConfigParser.ConfigParser()
config.readfp(open(sys.argv[1]))
y = {'tests': 'tests'}

tests = []
# Each ini section becomes one single-key map {section-name: options}.
for section in config.sections():
    tc = {}
    for opt in config.options(section):
        value = config.get(section, opt)
        # ini stores booleans as the strings 'true'/'false'; convert
        # them to real YAML booleans, everything else stays a string.
        if value in ['false', 'true']:
            tc[opt] = True if value == 'true' else False
        else:
            tc[opt] = value

    test = { section : tc}
    tests.append(test)

y['tests'] = tests
if sample:
    y['sample'] = { 'name': "TBD", 'description': "TBD" }

with open(out_file, "w") as f:
    yaml.dump(y, f, width=50, indent=4, default_flow_style=False)
|
47
scripts/sanity_chk/sanitycheck-platform-schema.yaml
Normal file
47
scripts/sanity_chk/sanitycheck-platform-schema.yaml
Normal file
|
@ -0,0 +1,47 @@
|
|||
#
|
||||
# Schema to validate a YAML file describing a Zephyr test platform
|
||||
#
|
||||
# We load this with pykwalify
|
||||
# (http://pykwalify.readthedocs.io/en/unstable/validation-rules.html),
|
||||
# a YAML structure validator, to validate the YAML files that describe
|
||||
# Zephyr test platforms
|
||||
#
|
||||
# The original spec comes from Zephyr's sanitycheck script
|
||||
#
|
||||
|
||||
type: map
|
||||
mapping:
|
||||
"identifier":
|
||||
type: str
|
||||
"name":
|
||||
type: str
|
||||
"type":
|
||||
type: str
|
||||
enum: [ 'mcu', 'qemu', 'sim' ]
|
||||
"arch":
|
||||
type: str
|
||||
"toolchain":
|
||||
type: seq
|
||||
seq:
|
||||
-
|
||||
type: str
|
||||
enum: [ 'gccarmemb', 'issm', 'xcc', 'zephyr', 'espressif']
|
||||
"ram":
|
||||
type: int
|
||||
"flash":
|
||||
type: int
|
||||
"supported":
|
||||
type: seq
|
||||
seq:
|
||||
-
|
||||
type: str
|
||||
"testing":
|
||||
type: map
|
||||
mapping:
|
||||
"default":
|
||||
type: bool
|
||||
"ignore_tags":
|
||||
type: seq
|
||||
seq:
|
||||
-
|
||||
type: str
|
102
scripts/sanity_chk/sanitycheck-tc-schema.yaml
Normal file
102
scripts/sanity_chk/sanitycheck-tc-schema.yaml
Normal file
|
@ -0,0 +1,102 @@
|
|||
#
|
||||
# Schema to validate a YAML file describing a Zephyr test platform
|
||||
#
|
||||
# We load this with pykwalify
|
||||
# (http://pykwalify.readthedocs.io/en/unstable/validation-rules.html),
|
||||
# a YAML structure validator, to validate the YAML files that describe
|
||||
# Zephyr test platforms
|
||||
#
|
||||
# The original spec comes from Zephyr's sanitycheck script
|
||||
#
|
||||
type: map
|
||||
mapping:
|
||||
# The sample descriptor, if present
|
||||
"sample":
|
||||
type: map
|
||||
required: no
|
||||
mapping:
|
||||
"name":
|
||||
type: str
|
||||
required: yes
|
||||
"description":
|
||||
type: str
|
||||
required: no
|
||||
"platforms":
|
||||
type: str
|
||||
required: no
|
||||
  # The list of testcases.  Note this is a sequence of single-key maps
  # (one per testcase) rather than a plain map of testcases -- possibly
  # just an artifact of the original format.
|
||||
"tests":
|
||||
type: seq
|
||||
required: yes
|
||||
sequence:
|
||||
- type: map
|
||||
matching-rule: "any"
|
||||
mapping:
|
||||
# The key for the testname is any, so
|
||||
# regex;(([a-zA-Z0-9_]+)) for this to work, note below we
|
||||
# make it required: no
|
||||
regex;(([a-zA-Z0-9_]+)):
|
||||
type: map
|
||||
          # has to be not-required, otherwise the parser gets
          # confused and thinks it never found it
|
||||
required: no
|
||||
mapping:
|
||||
"arch_exclude":
|
||||
type: str
|
||||
required: no
|
||||
"arch_whitelist":
|
||||
type: str
|
||||
required: no
|
||||
"build_only":
|
||||
type: bool
|
||||
required: no
|
||||
"build_on_all":
|
||||
type: bool
|
||||
required: no
|
||||
"depends_on":
|
||||
type: str
|
||||
required: no
|
||||
"extra_args":
|
||||
type: str
|
||||
required: no
|
||||
"extra_sections":
|
||||
type: str
|
||||
required: no
|
||||
"filter":
|
||||
type: str
|
||||
required: no
|
||||
"min_ram":
|
||||
type: int
|
||||
required: no
|
||||
"min_flash":
|
||||
type: int
|
||||
required: no
|
||||
"platform_exclude":
|
||||
type: str
|
||||
required: no
|
||||
"platform_whitelist":
|
||||
type: str
|
||||
required: no
|
||||
"tags":
|
||||
type: str
|
||||
required: yes
|
||||
"timeout":
|
||||
type: int
|
||||
required: no
|
||||
"toolchain_exclude":
|
||||
type: str
|
||||
required: no
|
||||
"toolchain_whitelist":
|
||||
type: str
|
||||
required: no
|
||||
"type":
|
||||
type: str
|
||||
enum: [ 'unit' ]
|
||||
"skip":
|
||||
type: bool
|
||||
required: no
|
||||
"slow":
|
||||
type: bool
|
||||
required: no
|
72
scripts/sanity_chk/scl.py
Normal file
72
scripts/sanity_chk/scl.py
Normal file
|
@ -0,0 +1,72 @@
|
|||
#! /usr/bin/python
|
||||
#
|
||||
# Zephyr's Sanity Check library
|
||||
#
|
||||
# Set of code that other projects can also import to do things on
|
||||
# Zephyr's sanity check testcases.
|
||||
|
||||
import logging
|
||||
import os
|
||||
import yaml
|
||||
|
||||
log = logging.getLogger("scl")
|
||||
|
||||
#
|
||||
#
|
||||
def yaml_load(filename):
    """
    Safely load a YAML document.

    Follows recommendations from
    https://security.openstack.org/guidelines/dg_avoid-dangerous-input-parsing-libraries.html.

    :param str filename: filename to load
    :raises yaml.scanner.ScannerError: on YAML scan issues
    :raises: any other exception on file access errors
    :return: dictionary representing the YAML document
    """
    try:
        with open(filename, 'r') as f:
            # safe_load() refuses arbitrary-object construction, so a
            # hostile document cannot execute code.
            return yaml.safe_load(f)
    except yaml.scanner.ScannerError as e:	# For errors parsing schema.yaml
        # Log both the problem location and its context location before
        # re-raising, so the caller still sees the original exception.
        mark = e.problem_mark
        cmark = e.context_mark
        log.error("%s:%d:%d: error: %s (note %s context @%s:%d:%d %s)",
                  mark.name, mark.line, mark.column, e.problem,
                  e.note, cmark.name, cmark.line, cmark.column, e.context)
        raise
|
||||
|
||||
# If pykwalify is installed, then the validate function will work --
# otherwise, it is a stub and we warn about it.
try:
    import pykwalify.core
    # Don't print error messages yourself, let us do it
    logging.getLogger("pykwalify.core").setLevel(50)

    def _yaml_validate(data, schema):
        # A null/empty schema disables validation entirely.
        if not schema:
            return
        c = pykwalify.core.Core(source_data = data, schema_data = schema)
        c.validate(raise_exception = True)

except ImportError as e:
    log.warning("can't import pykwalify; won't validate YAML (%s)", e)
    # Stub with the same signature so callers need not care whether
    # pykwalify is available.
    def _yaml_validate(data, schema):
        pass
|
||||
|
||||
def yaml_load_verify(filename, schema):
    """
    Safely load a testcase/sample yaml document and validate it
    against the YAML schema, returning in case of success the YAML data.

    :param str filename: name of the file to load and process
    :param dict schema: loaded YAML schema (can load with :func:`yaml_load`)

    :raises yaml.scanner.ScannerError: on YAML parsing error
    :raises pykwalify.errors.SchemaError: on Schema violation error
    """
    # 'document.yaml' contains a single YAML document.
    y = yaml_load(filename)
    # No-op when pykwalify is not installed (see _yaml_validate above).
    _yaml_validate(y, schema)
    return y
|
Loading…
Add table
Add a link
Reference in a new issue