# See the License for the specific language governing permissions and
# limitations under the License.
#
-
-import re
import os
from sdkconfig import SDKConfig
from pyparsing import alphanums
from pyparsing import Word
from pyparsing import alphas
-from pyparsing import ParseBaseException
+from pyparsing import ParseFatalException
from pyparsing import Suppress
from pyparsing import Group
from pyparsing import Literal
from pyparsing import ZeroOrMore
from pyparsing import Optional
from pyparsing import originalTextFor
-from common import LdGenFailure
+from pyparsing import Forward
+from pyparsing import indentedBlock
+from collections import namedtuple
+import abc
+
+
+KeyGrammar = namedtuple("KeyGrammar", "grammar min max required")
-class FragmentFileModel():
+class FragmentFile():
"""
Fragment file internal representation. Parses and stores instances of the fragment definitions
contained within the file.
"""
- def __init__(self, fragment_file):
- path = os.path.realpath(fragment_file.name)
-
- sections = Sections.get_fragment_grammar()
- scheme = Scheme.get_fragment_grammar()
- mapping = Mapping.get_fragment_grammar()
+ def __init__(self, fragment_file, sdkconfig):
+ try:
+ fragment_file = open(fragment_file, "r")
+ except TypeError:
+ pass
- # Each fragment file is composed of sections, scheme or mapping fragments. The grammar
- # for each of those objects are defined it the respective classes
- parser = OneOrMore(sections | scheme | mapping)
+ path = os.path.realpath(fragment_file.name)
- # Set any text beginnning with # as comment
- parser.ignore("#" + restOfLine)
+ indent_stack = [1]
+
+ class parse_ctx:
+ fragment = None # current fragment
+ key = "" # current key
+ keys = list() # list of keys parsed
+ key_grammar = None # current key grammar
+
+ @staticmethod
+ def reset():
+                parse_ctx.fragment = None
+ parse_ctx.key = ""
+ parse_ctx.keys = list()
+ parse_ctx.key_grammar = None
+
+ def fragment_type_parse_action(toks):
+ parse_ctx.reset()
+ parse_ctx.fragment = FRAGMENT_TYPES[toks[0]]() # create instance of the fragment
+ return None
+
+ def expand_conditionals(toks, stmts):
+ try:
+ stmt = toks["value"]
+ stmts.append(stmt)
+ except KeyError:
+ try:
+ conditions = toks["conditional"]
+ for condition in conditions:
+ try:
+ _toks = condition[1]
+ _cond = condition[0]
+ if sdkconfig.evaluate_expression(_cond):
+ expand_conditionals(_toks, stmts)
+ break
+ except IndexError:
+ expand_conditionals(condition[0], stmts)
+ except KeyError:
+ for tok in toks:
+ expand_conditionals(tok, stmts)
+
+ def key_body_parsed(pstr, loc, toks):
+ stmts = list()
+ expand_conditionals(toks, stmts)
+
+ if parse_ctx.key_grammar.min and len(stmts) < parse_ctx.key_grammar.min:
+ raise ParseFatalException(pstr, loc, "fragment requires at least %d values for key '%s'" %
+ (parse_ctx.key_grammar.min, parse_ctx.key))
+
+ if parse_ctx.key_grammar.max and len(stmts) > parse_ctx.key_grammar.max:
+ raise ParseFatalException(pstr, loc, "fragment requires at most %d values for key '%s'" %
+ (parse_ctx.key_grammar.max, parse_ctx.key))
+
+ try:
+ parse_ctx.fragment.set_key_value(parse_ctx.key, stmts)
+ except Exception as e:
+                raise ParseFatalException(pstr, loc, "unable to add key '%s'; %s" % (parse_ctx.key, str(e)))
+ return None
+
+ key = Word(alphanums + "_") + Suppress(":")
+ key_stmt = Forward()
+
+ condition_block = indentedBlock(key_stmt, indent_stack)
+ key_stmts = OneOrMore(condition_block)
+ key_body = Suppress(key) + key_stmts
+ key_body.setParseAction(key_body_parsed)
+
+ condition = originalTextFor(SDKConfig.get_expression_grammar()).setResultsName("condition")
+ if_condition = Group(Suppress("if") + condition + Suppress(":") + condition_block)
+ elif_condition = Group(Suppress("elif") + condition + Suppress(":") + condition_block)
+ else_condition = Group(Suppress("else") + Suppress(":") + condition_block)
+ conditional = (if_condition + Optional(OneOrMore(elif_condition)) + Optional(else_condition)).setResultsName("conditional")
+
+ def key_parse_action(pstr, loc, toks):
+ key = toks[0]
+
+ if key in parse_ctx.keys:
+                raise ParseFatalException(pstr, loc, "duplicate key '%s' value definition" % key)
+
+ parse_ctx.key = key
+ parse_ctx.keys.append(key)
+
+ try:
+ parse_ctx.key_grammar = parse_ctx.fragment.get_key_grammars()[key]
+ key_grammar = parse_ctx.key_grammar.grammar
+ except KeyError:
+ raise ParseFatalException(pstr, loc, "key '%s' is not supported by fragment" % key)
+ except Exception as e:
+                raise ParseFatalException(pstr, loc, "unable to parse key '%s'; %s" % (key, str(e)))
+
+ key_stmt << (conditional | Group(key_grammar).setResultsName("value"))
+
+ return None
+
+ def name_parse_action(pstr, loc, toks):
+ parse_ctx.fragment.name = toks[0]
+
+ key.setParseAction(key_parse_action)
+
+ ftype = Word(alphas).setParseAction(fragment_type_parse_action)
+ fid = Suppress(":") + Word(alphanums + "_.").setResultsName("name")
+ fid.setParseAction(name_parse_action)
+ header = Suppress("[") + ftype + fid + Suppress("]")
+
+ def fragment_parse_action(pstr, loc, toks):
+ key_grammars = parse_ctx.fragment.get_key_grammars()
+ required_keys = set([k for (k,v) in key_grammars.items() if v.required])
+ present_keys = required_keys.intersection(set(parse_ctx.keys))
+ if present_keys != required_keys:
+ raise ParseFatalException(pstr, loc, "required keys %s for fragment not found" %
+ list(required_keys - present_keys))
+ return parse_ctx.fragment
+
+ fragment_stmt = Forward()
+ fragment_block = indentedBlock(fragment_stmt, indent_stack)
+
+ fragment_if_condition = Group(Suppress("if") + condition + Suppress(":") + fragment_block)
+ fragment_elif_condition = Group(Suppress("elif") + condition + Suppress(":") + fragment_block)
+ fragment_else_condition = Group(Suppress("else") + Suppress(":") + fragment_block)
+ fragment_conditional = (fragment_if_condition + Optional(OneOrMore(fragment_elif_condition)) +
+ Optional(fragment_else_condition)).setResultsName("conditional")
+
+ fragment = (header + OneOrMore(indentedBlock(key_body, indent_stack, False))).setResultsName("value")
+ fragment.setParseAction(fragment_parse_action)
+ fragment.ignore("#" + restOfLine)
+
+ fragment_stmt << (Group(fragment) | Group(fragment_conditional))
+
+ def fragment_stmt_parsed(pstr, loc, toks):
+ stmts = list()
+ expand_conditionals(toks, stmts)
+ return stmts
+
+ parser = ZeroOrMore(fragment_stmt)
+ parser.setParseAction(fragment_stmt_parsed)
- try:
- self.fragments = parser.parseFile(fragment_file, parseAll=True)
- except ParseBaseException as e:
- # the actual parse error is kind of useless for normal users, so just point to the location of
- # the error
- raise LdGenFailure("Parse error in linker fragment %s: error at line %d col %d (char %d)" % (
- fragment_file.name, e.lineno, e.column, e.loc))
+ self.fragments = parser.parseFile(fragment_file, parseAll=True)
for fragment in self.fragments:
fragment.path = path
-class Fragment:
+class Fragment():
+ __metaclass__ = abc.ABCMeta
"""
Encapsulates a fragment as defined in the generator syntax. Sets values common to all fragment and performs processing
such as checking the validity of the fragment name and getting the entry values.
IDENTIFIER = Word(alphas + "_", alphanums + "_")
ENTITY = Word(alphanums + ".-_$")
- def __init__(self, name, entries):
- self.path = None
- self.name = name
- self.entries = entries
+ @abc.abstractmethod
+ def set_key_value(self, key, parse_results):
+ pass
+ @abc.abstractmethod
+ def get_key_grammars(self):
+ pass
-class Sections(Fragment):
- def __init__(self, name, entries):
- Fragment.__init__(self, name, entries)
- self._process_entries()
+class Sections(Fragment):
- def _process_entries(self):
- # Quietly ignore duplicate entries
- self.entries = set(self.entries)
- self.entries = list(self.entries)
+ grammars = {
+ "entries": KeyGrammar(Word(alphanums + "+.").setResultsName("section"), 1, None, True)
+ }
"""
Utility function that returns a list of sections given a sections fragment entry,
else:
return (sections_entry, None)
- @staticmethod
- def get_fragment_grammar():
- name = Fragment.IDENTIFIER
- header = Suppress("[") + Suppress("sections") + Suppress(":") + name.setResultsName("name") + Suppress("]")
- entry = Word(alphanums + "+" + ".")
- entries = Suppress("entries") + Suppress(":") + Group(OneOrMore(entry)).setResultsName("entries")
-
- sections = Group(header + entries)
-
- sections.setParseAction(lambda t: Sections(t[0].name, t[0].entries))
-
- sections.ignore("#" + restOfLine)
+ def set_key_value(self, key, parse_results):
+ if key == "entries":
+ self.entries = set()
+ for result in parse_results:
+ self.entries.add(result["section"])
- return sections
+ def get_key_grammars(self):
+ return self.__class__.grammars
class Scheme(Fragment):
Encapsulates a scheme fragment, which defines what target input sections are placed under.
"""
- def __init__(self, name, items):
- Fragment.__init__(self, name, items)
- self._process_entries()
-
- def _process_entries(self):
- processed = set()
-
- # Store entries as a set of tuples. Quietly ignores duplicate entries.
- for entry in self.entries:
- processed.add((entry.sections, entry.target))
-
- self.entries = processed
-
- @staticmethod
- def get_fragment_grammar():
- name = Fragment.IDENTIFIER
- header = Suppress("[") + Suppress("scheme") + Suppress(":") + name.setResultsName("name") + Suppress("]")
-
- # Scheme entry in the form 'sections -> target'
- sections = Fragment.IDENTIFIER
- target = Fragment.IDENTIFIER
- entry = Group(sections.setResultsName("sections") + Suppress("->") + target.setResultsName("target"))
-
- entries = Suppress("entries") + Suppress(":") + Group(OneOrMore(entry)).setResultsName("entries")
-
- scheme = Group(header + entries)
+ grammars = {
+ "entries": KeyGrammar(Fragment.IDENTIFIER.setResultsName("sections") + Suppress("->") +
+ Fragment.IDENTIFIER.setResultsName("target"), 1, None, True)
+ }
- scheme.setParseAction(lambda t: Scheme(t[0].name, t[0].entries))
+ def set_key_value(self, key, parse_results):
+ if key == "entries":
+ self.entries = set()
+ for result in parse_results:
+ self.entries.add((result["sections"], result["target"]))
- scheme.ignore("#" + restOfLine)
-
- return scheme
+ def get_key_grammars(self):
+ return self.__class__.grammars
class Mapping(Fragment):
Encapsulates a mapping fragment, which defines what targets the input sections of mappable entties are placed under.
"""
- # Name of the default condition entry
- DEFAULT_CONDITION = "default"
MAPPING_ALL_OBJECTS = "*"
- def __init__(self, archive, entries):
- self.archive = archive
-
- # Generate name from archive value by replacing all non-alphanumeric
- # characters with underscore
- name = Mapping.get_mapping_name_from_archive(self.archive)
- Fragment.__init__(self, name, entries)
-
- self._process_entries()
+ def set_key_value(self, key, parse_results):
+ if key == "archive":
+ self.archive = parse_results[0]["archive"]
+ elif key == "entries":
+ self.entries = set()
- def _create_mappings_set(self, mappings):
- mapping_set = set()
-
- for mapping in mappings:
- obj = mapping.object
- symbol = mapping.symbol
- scheme = mapping.scheme
-
- if symbol == "":
+ for result in parse_results:
+ obj = None
symbol = None
+ scheme = None
- # Quietly handle duplicate definitions under the same condition
- mapping_set.add((obj, symbol, scheme))
-
- return mapping_set
+ try:
+ obj = result["object"]
+ except KeyError:
+ pass
- def _process_entries(self):
- processed = []
-
- for normal_group in self.entries[0]:
- # Get the original string of the condition
- condition = next(iter(normal_group.condition.asList())).strip()
- mappings = self._create_mappings_set(normal_group[1])
-
- processed.append((condition, mappings))
-
- default_group = self.entries[1]
-
- if len(default_group) > 1:
- mappings = self._create_mappings_set(default_group[1])
- else:
- mappings = self._create_mappings_set(default_group[0])
+ try:
+ symbol = result["symbol"]
+ except KeyError:
+ pass
- processed.append(("default", mappings))
+ try:
+ scheme = result["scheme"]
+ except KeyError:
+ pass
- self.entries = processed
-
- @staticmethod
- def get_mapping_name_from_archive(archive):
- return re.sub(r"[^0-9a-zA-Z]+", "_", archive)
-
- @staticmethod
- def get_fragment_grammar():
-
- # Match header [mapping]
- header = Suppress("[") + Suppress("mapping") + Suppress("]")
+ self.entries.add((obj, symbol, scheme))
+ def get_key_grammars(self):
# There are three possible patterns for mapping entries:
# obj:symbol (scheme)
# obj (scheme)
symbol = Suppress(":") + Fragment.IDENTIFIER.setResultsName("symbol")
scheme = Suppress("(") + Fragment.IDENTIFIER.setResultsName("scheme") + Suppress(")")
- pattern1 = Group(obj + symbol + scheme)
- pattern2 = Group(obj + scheme)
- pattern3 = Group(Literal(Mapping.MAPPING_ALL_OBJECTS).setResultsName("object") + scheme)
-
- mapping_entry = pattern1 | pattern2 | pattern3
-
- # To simplify parsing, classify groups of condition-mapping entry into two types: normal and default
- # A normal grouping is one with a non-default condition. The default grouping is one which contains the
- # default condition
- mapping_entries = Group(ZeroOrMore(mapping_entry)).setResultsName("mappings")
-
- normal_condition = Suppress(":") + originalTextFor(SDKConfig.get_expression_grammar())
- default_condition = Optional(Suppress(":") + Literal(Mapping.DEFAULT_CONDITION))
-
- normal_group = Group(normal_condition.setResultsName("condition") + mapping_entries)
- default_group = Group(default_condition + mapping_entries).setResultsName("default_group")
-
- normal_groups = Group(ZeroOrMore(normal_group)).setResultsName("normal_groups")
+ pattern1 = obj + symbol + scheme
+ pattern2 = obj + scheme
+ pattern3 = Literal(Mapping.MAPPING_ALL_OBJECTS).setResultsName("object") + scheme
- # Any mapping fragment definition can have zero or more normal group and only one default group as a last entry.
- archive = Suppress("archive") + Suppress(":") + Fragment.ENTITY.setResultsName("archive")
- entries = Suppress("entries") + Suppress(":") + (normal_groups + default_group).setResultsName("entries")
+ entry = pattern1 | pattern2 | pattern3
- mapping = Group(header + archive + entries)
+ grammars = {
+ "archive": KeyGrammar(Fragment.ENTITY.setResultsName("archive"), 1, 1, True),
+ "entries": KeyGrammar(entry, 1, None, True)
+ }
- mapping.setParseAction(lambda t: Mapping(t[0].archive, t[0].entries))
+ return grammars
- mapping.ignore("#" + restOfLine)
- return mapping
+FRAGMENT_TYPES = {
+ "sections": Sections,
+ "scheme": Scheme,
+ "mapping": Mapping
+}