Commit 5e3a533d authored by canterburym's avatar canterburym

Merge branch 'feature/linter' into 'rel16'

Feature/linter

See merge request !12
parents 8f69f5cb 3dbbca04
Pipeline #3211 passed with stage
in 16 seconds
......@@ -12,6 +12,12 @@ checkASN1:
script:
- python3 testing/check_asn1.py
lintASN1:
stage: Check Schemas
script:
- python3 testing/lint_asn1.py
allow_failure: true
checkXSD:
stage: Check Schemas
script:
......
from asn1tools import parse_files, ParseError
import sys
import logging
from asn1tools import parse_files, compile_dict, ParseError, CompileError
from glob import glob
from pathlib import Path
from pprint import pprint
def parseASN1File (asnFile):
    """Parse a single ASN.1 schema file.

    Returns a list of ParseError exceptions raised while parsing
    (empty when the file parses cleanly).
    """
    try:
        parse_files(asnFile)
        return []
    except ParseError as problem:
        return [problem]
def parseASN1Files (fileList):
    """Parse each file in fileList.

    Returns a dict mapping each filename to its list of parse errors
    (an empty list means the file parsed OK).
    """
    if not fileList:
        logging.warning ("No files specified")
        return {}
    logging.info("Parsing files...")
    results = {}
    for path in fileList:
        problems = parseASN1File(path)
        if problems:
            logging.info (f" {path}: Failed - {problems!r}")
        else:
            logging.info (f" {path}: OK")
        results[path] = problems
    return results
def compileASN1Files (fileList):
    """Compile all the given ASN.1 files together with asn1tools.

    Returns a list of the CompileError / ParseError exceptions raised
    (empty when compilation succeeds).
    """
    logging.info("Compiling files...")
    errors = []
    try:
        d = parse_files(fileList)
        for modulename, module in d.items():
            # Weird fix because the compiler doesn't like RELATIVE-OID as a type
            # Not sure if the on-the-wire encoding would be affected or not
            # but for most checking purposes this doesn't matter
            module['types']["RELATIVE-OID"] = {'type' : 'OBJECT IDENTIFIER'}
        compile_dict(d)
        # Bug fix: "Compiled OK" was previously logged unconditionally,
        # even when a compile/parse error had just been recorded.
        logging.info ("Compiled OK")
    except CompileError as ex:
        logging.info (f"Compiler error: {ex}")
        errors.append(ex)
    except ParseError as ex:
        logging.info (f"Parse error: {ex}")
        errors.append(ex)
    return errors
def validateASN1Files (fileList):
    """Parse and then compile fileList.

    Returns a tuple (parseErrors, compileErrors) where parseErrors is a
    dict of filename -> [ParseError] and compileErrors is a flat list.
    """
    parseErrors = parseASN1Files(fileList)
    # Implements the previously commented-out (and buggy:
    # "len(parseErrors > 0)") guard: compiling files that failed to
    # parse only repeats the same failure with less context.
    if any(errs for errs in parseErrors.values()):
        logging.info ("Abandoning compile due to parse errors")
        return parseErrors, []
    compileErrors = compileASN1Files(fileList)
    return parseErrors, compileErrors
def validateAllASN1FilesInPath (path):
    """Validate every *.asn1 file found directly under path."""
    globPattern = str(Path(path)) + '/*.asn1'
    logging.info("Searching: " + globPattern)
    matches = glob(globPattern, recursive=True)
    return validateASN1Files(matches)
if __name__ == '__main__':
    parseErrors, compileErrors = validateAllASN1FilesInPath("./")

    parseErrorCount = 0
    print ("ASN.1 Parser checks:")
    print ("-----------------------------")
    for filename, errors in parseErrors.items():
        if len(errors) > 0:
            parseErrorCount += len(errors)
            # Bug fix: a literal "(unknown)" placeholder was printed here
            # instead of the filename being iterated over.
            print (f"{filename}: {len(errors)} errors")
            for error in errors:
                print (" " + str(error))
        else:
            print (f"{filename}: OK")
    print ("-----------------------------")
    print ("ASN.1 Compilation:")
    print ("-----------------------------")
    if len(compileErrors) > 0:
        for error in compileErrors:
            print (" " + str(error))
    else:
        print ("Compilation OK")
    print ("-----------------------------")
    print (f"{parseErrorCount} parse errors, {len(compileErrors)} compile errors")
    # Non-zero exit status (the total error count) fails the CI job.
    sys.exit (parseErrorCount + len(compileErrors))
import logging
import glob
import sys
from pathlib import Path
from pprint import pprint
if __name__ == '__main__':
from lxml import etree
from xml.etree.ElementTree import ParseError
from xmlschema import XMLSchema, XMLSchemaParseError
def BuildSchemaDictonary (fileList):
    """Build (namespace, resolved path) location pairs for each schema file.

    Files that fail to parse are logged and skipped.
    """
    if not fileList:
        logging.info("No schema files provided")
        return []

    logging.info("Schema locations:")
    schemaLocations = []
    for schemaFile in fileList:
        try:
            schema = XMLSchema(schemaFile, validation='skip')
            location = (schema.default_namespace, str(Path(schemaFile).resolve()))
            schemaLocations.append(location)
            logging.info(" [ {0} -> {1} ]".format(schema.default_namespace, schemaFile))
        except ParseError as ex:
            logging.warning (" [ {0} failed to parse: {1} ]".format(schemaFile, ex))
    return schemaLocations
def BuildSchema (coreFile, fileList = None):
    """Build an XMLSchema for coreFile, resolving imports via fileList.

    fileList is an optional collection of supporting schema files.
    """
    haveSupportingSchemas = fileList and len(fileList) > 0
    schemaLocations = BuildSchemaDictonary(fileList) if haveSupportingSchemas else []
    return XMLSchema(str(Path(coreFile)), locations=schemaLocations)
# Abort early on interpreters too old for this tool's syntax (f-strings etc.).
if sys.version_info <= (3, 5):
    sys.exit('ERROR: You need at least Python 3.5 to run this tool')

# lxml is a third-party dependency; fail with an actionable message if absent.
try:
    from lxml import etree
except ImportError:
    sys.exit('ERROR: You need to install the Python lxml library')
def ValidateXSDFiles (fileList):
    """Validate each XSD in fileList against the combined schema set.

    Returns a dict mapping each filename to its list of
    XMLSchemaParseError instances (empty list = OK). Errors that
    originate in an imported schema rather than the file under test are
    logged and suppressed so each failure is reported only once.
    """
    if not fileList:
        logging.info("No schema files provided")
        return {}

    schemaLocations = BuildSchemaDictonary(fileList)
    errors = {}

    logging.info("Schema validation:")
    for schemaFile in fileList:
        try:
            XMLSchema(schemaFile, locations = schemaLocations)
            logging.info(schemaFile + ": OK")
            errors[schemaFile] = []
        except XMLSchemaParseError as ex:
            logging.warning(schemaFile + ": Failed validation ({0})".format(ex.message))
            if (ex.schema_url) and (ex.schema_url != ex.origin_url):
                logging.warning(" Error comes from {0}, suppressing".format(ex.schema_url))
                # Bug fix: record an empty entry so the file still shows
                # up in the final report instead of vanishing from it.
                errors[schemaFile] = []
            else:
                errors[schemaFile] = [ex]
    return errors
schemaFiles = glob.glob('*.xsd')
def ValidateAllXSDFilesInPath (path):
    """Validate every *.xsd file found directly under path."""
    globPattern = str(Path(path)) + '/*.xsd'
    logging.info("Searching: " + globPattern)
    matches = glob.glob(globPattern, recursive=True)
    return ValidateXSDFiles(matches)
def ValidateInstanceDocuments (coreFile, supportingSchemas, instanceDocs):
    """Validate instance documents against coreFile (+ supporting schemas).

    Returns the list of validation exceptions encountered (empty when
    every document passes).
    """
    if (instanceDocs is None) or len(instanceDocs) == 0:
        logging.warning ("No instance documents provided")
        return []

    schema = BuildSchema(coreFile, supportingSchemas)
    errors = []
    for instanceDoc in instanceDocs:
        try:
            schema.validate(instanceDoc)
            logging.info ("{0} passed validation".format(instanceDoc))
        except Exception as ex:
            logging.error ("{0} failed validation: {1}".format(instanceDoc, ex))
            # Bug fix: failures were logged but never appended, so the
            # function always returned an empty list.
            errors.append(ex)
    return errors
if __name__ == '__main__':
    results = ValidateAllXSDFilesInPath("./")

    print ("XSD validation checks:")
    print ("-----------------------------")
    errorCount = 0
    for fileName, errors in results.items():
        if len(errors) > 0:
            errorCount += len(errors)
            print (f" {fileName}: {len(errors)} errors")
            for error in errors:
                # Parse errors carry a pre-formatted message; anything
                # else is stringified.
                if isinstance(error, XMLSchemaParseError):
                    print (error.msg)
                else:
                    print (f" {str(error)}")
        else:
            print (f" {fileName}: OK")
    print ("-----------------------------")
    print (f"{errorCount} errors detected")
    # Non-zero exit status (the error count) fails the CI job.
    sys.exit(errorCount)
import logging
from asn1tools import parse_files, compile_dict, ParseError, CompileError
from glob import glob
from pathlib import Path
import string
from pprint import pprint
import functools
# Registries populated by the lintingTest decorator; the lint driver runs
# each list against the matching granularity of the parsed schema.
moduleLevelTests = []
typeLevelTests = []
fileLevelTests = []

def lintingTest (testName, testKind, testDescription):
    """Decorator factory: register a linting test.

    The wrapped test returns a list of failure dicts; the wrapper stamps
    the test's name, kind and description onto each failure before
    returning it. testKind selects which registry ("file", "module" or
    "type") the test is added to.
    """
    registries = {
        "type" : typeLevelTests,
        "module" : moduleLevelTests,
        "file" : fileLevelTests,
    }
    def decorate (func):
        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            logging.debug (f" Running test {testName}")
            failures = func(*args, **kwargs)
            for failure in failures:
                failure['testName'] = testName
                failure['testKind'] = testKind
                failure['testDescription'] = testDescription
            return failures
        if testKind in registries:
            registries[testKind].append(wrapper)
        return wrapper
    return decorate
def formatFailure(f):
    """Render one failure dict as 'testName: message'."""
    return "{0}: {1}".format(f['testName'], f['message'])
def appendFailure(failures, context, newFailure):
    """Merge context into newFailure, log the result, append to failures."""
    combinedFailure = dict(context)
    combinedFailure.update(newFailure)
    logging.info (f"Test Failure: {combinedFailure}")
    failures.append(combinedFailure)
#--------------------------------------------------------------------
# File level tests
#--------------------------------------------------------------------
@lintingTest(testName = "D.4.9",
             testKind = "file",
             testDescription = "Fields, tags, types and flags are space aligned")
def D41 (fileLines, context):
    """Flag any line containing a tab character.

    Bug fix: line numbers are now reported 1-based, consistent with the
    D.4.11 test and with editor conventions; previously the 0-based
    enumerate index was reported.
    """
    errors = []
    for lineNumber, line in enumerate(fileLines):
        if '\t' in line:
            appendFailure(errors, context, { "line" : lineNumber + 1,
                                             "message" : f"Line {lineNumber + 1} contains tab characters"})
    return errors
@lintingTest(testName = "D.4.11",
             testKind = "file",
             testDescription = "Braces are given their own line")
def D41 (fileLines, context):
    """Flag lines where a brace shares the line with other content.

    OID-style lines ("itu-t(0)", OBJECT IDENTIFIER, RELATIVE-OID) keep
    braces inline by convention and are exempted.
    """
    errors = []
    for lineNumber, line in enumerate(fileLines):
        stripped = line.strip().replace(",", "")
        openOK = ('{' not in line) or (stripped == '{')
        closeOK = ('}' not in line) or (stripped == '}')
        if openOK and closeOK:
            continue
        if ("itu-t(0)" in line) or ("OBJECT IDENTIFIER" in line) or ("RELATIVE-OID" in line):
            continue
        appendFailure(errors, context, { "line" : lineNumber + 1,
                                         "message" : f"Line {lineNumber + 1} contains a brace but also other characters ('{line}')"})
    return errors
#--------------------------------------------------------------------
# Module level tests
#--------------------------------------------------------------------
@lintingTest(testName = "D.4.1",
             testKind = "module",
             testDescription = "EXTENSIBILITY IMPLIED directive set")
def D41 (module, context):
    """Fail unless the module header sets EXTENSIBILITY IMPLIED."""
    errors = []
    present = 'extensibility-implied' in module.keys()
    if (not present) or (module['extensibility-implied'] == False):
        appendFailure(errors, context, {"message" : "EXTENSIBILITY IMPLIED directive not set"})
    return errors
@lintingTest(testName = "D.4.2",
             testKind = "module",
             testDescription = "AUTOMATIC TAGS not used")
def D42(module, context):
    """Fail when the module header uses the AUTOMATIC TAGS directive."""
    errors = []
    tagMode = module['tags']
    if tagMode == 'AUTOMATIC':
        appendFailure(errors, context, {"message" : "AUTOMATIC TAGS directive used"})
    return errors
#--------------------------------------------------------------------
# Type level tests
#--------------------------------------------------------------------
@lintingTest(testName = "D.3.4",
             testKind = "type",
             testDescription = "Field names only contain characters A-Z, a-z, 0-9")
def D34(t, context):
    """Check each member name against the allowed alphanumeric alphabet."""
    if not 'members' in t.keys():
        logging.debug (f" D34 ignoring {context['module']} '{context['type']}' as it has no members")
        return []
    errors = []
    allowed = set(string.ascii_letters) | set(string.digits)
    for member in t['members']:
        logging.debug (f" D34 checking member {member}")
        badLetters = list(set(member['name']) - allowed)
        if len(badLetters) > 0:
            appendFailure (errors, context, { "field" : member['name'],
                                              "message" : f"Field '{member['name']}' contains disallowed characters {badLetters!r}"})
    return errors
@lintingTest(testName = "D.4.3",
             testKind = "type",
             testDescription = "Tag numbers start at one")
def D43 (t, context):
    """Check that a SEQUENCE / CHOICE's first member is tagged [1].

    NOTE(review): the check and the failure message both enforce a
    starting tag of 1, but the original description said "start at
    zero"; the description has been aligned with the enforced behaviour
    — confirm against the drafting rules.
    """
    errors = []
    if (t['type'] == 'SEQUENCE') or (t['type'] == 'CHOICE'):
        members = t.get('members', [])
        # Guard: empty types or untagged members previously raised
        # IndexError/KeyError and aborted the whole lint run.
        if members and 'tag' in members[0]:
            firstTag = members[0]['tag']['number']
            if firstTag != 1:
                appendFailure (errors, context, {"message" : f"Tag numbers for {context['type']} start at {firstTag}, not 1"})
    return errors
@lintingTest(testName = "D.4.4",
             testKind = "type",
             testDescription = "Enumerations start at one")
def D44 (t, context):
    """Check that an ENUMERATED type's first value is 1.

    NOTE(review): as with D.4.3, the check and message enforce 1 while
    the original description said "start at zero"; description aligned
    with the enforced behaviour — confirm against the drafting rules.
    """
    errors = []
    if t['type'] == 'ENUMERATED':
        values = t.get('values', [])
        # Guard: an empty value list previously raised IndexError.
        if values and values[0][1] != 1:
            appendFailure(errors, context, { "message" : f"Enumerations for {context['type']} start at {values[0][1]}, not 1"})
    return errors
@lintingTest(testName = "D.4.5",
             testKind = "type",
             testDescription = "No anonymous types")
def checkD45 (t, context):
    """Fail for members declared as inline (anonymous) constructed types."""
    if not 'members' in t:
        logging.debug (f" D45: No members in type {context['type']}, ignoring")
        return []
    errors = []
    anonymousKinds = ('ENUMERATED', 'SEQUENCE', 'CHOICE', 'SET')
    for member in t['members']:
        if member['type'] in anonymousKinds:
            appendFailure(errors, context, { "field" : member['name'],
                                             "message" : f"Field '{member['name']}' in {context['type']} is an anonymous {member['type']}"})
    return errors
def lintASN1File (asnFile):
    """Run every registered lint test against one ASN.1 file.

    Returns a list of failure dicts (as produced by appendFailure /
    lintingTest). A file that fails to parse yields a single failure
    dict describing the parse error.
    """
    errors = []
    context = {'file' : asnFile}
    try:
        logging.info ("Checking file {0}...".format(asnFile))
        # File-level tests operate on the raw text lines.
        with open(asnFile) as f:
            s = f.read().splitlines()
            for test in fileLevelTests:
                errors += test(s, context)
        # Module- and type-level tests operate on the parsed schema.
        d = parse_files(asnFile)
        for moduleName, module in d.items():
            logging.info (" Checking module {0}".format(moduleName))
            for test in moduleLevelTests:
                context['module'] = moduleName
                errors += test(module, context)
            for typeName, typeDef in module['types'].items():
                context['type'] = typeName
                context['module'] = moduleName
                for test in typeLevelTests:
                    errors += test(typeDef, context)
    except ParseError as ex:
        logging.error("ParseError: {0}".format(ex))
        # Bug fix: previously returned a bare string, which made the
        # caller's formatFailure() (dict indexing) raise TypeError.
        return [{'file' : asnFile,
                 'testName' : 'ParseError',
                 'testKind' : 'file',
                 'testDescription' : 'File parses as valid ASN.1',
                 'message' : "ParseError: {0}".format(ex)}]
    return errors
def lintASN1Files (fileList):
    """Lint each file in fileList.

    Returns a dict mapping filename -> list of failures (an empty list
    when fileList itself is empty, matching the original behaviour).
    """
    if not fileList:
        logging.warning ("No files specified")
        return []
    logging.info("Checking files...")
    return {path : lintASN1File(path) for path in fileList}
def lintAllASN1FilesInPath (path):
    """Lint every *.asn1 file found directly under path."""
    globPattern = str(Path(path)) + '/*.asn1'
    logging.info("Searching: " + globPattern)
    matches = glob(globPattern, recursive=True)
    return lintASN1Files(matches)
if __name__ == '__main__':
    result = lintAllASN1FilesInPath("./")

    totalErrors = 0
    print ("Drafting rule checks:")
    print ("-----------------------------")
    for filename, results in result.items():
        verdict = "OK" if len(results) == 0 else "{0} errors detected".format(len(results))
        print ("{0}: {1}".format(filename, verdict))
        for error in results:
            print(" " + formatFailure(error))
        totalErrors += len(results)
    print ("-----------------------------")
    print ("{0} non-compliances detected".format(totalErrors))
    # Non-zero exit status (the non-compliance count) fails the CI job.
    exit(totalErrors)
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment