
Updating KLL 0.5c lexer

- Added more complicated Animation definition expression
- Added 0.3d compiler lexer ignores (so it will still parse the 0.3d compatible sections of KLL 0.5)
- Improved funcparserlib/parser.py to include next token (if available) on NoParseError
This commit is contained in:
Jacob Alexander 2016-10-09 22:00:30 -07:00
parent d7b7752dc1
commit 3cd55cf5a4
7 changed files with 44 additions and 19 deletions

View File

@@ -27,7 +27,7 @@ kll <kll files>
 ### Kiibohd Controller Usage
 ```bash
-kll.py <misc kll files> --config <config/capability kll files> --base <basemap kll files) --default <default layer kll files> --partial <partial layer 1 kll files> --partial <partial layer 2 kll files>
+kll <misc kll files> --config <config/capability kll files> --base <basemap kll files) --default <default layer kll files> --partial <partial layer 1 kll files> --partial <partial layer 2 kll files>
 ```
 See `kll --help` for the most up to date documentation

View File

@@ -864,7 +864,7 @@ class OperationSpecificsStage( Stage ):
 ( 'AnimationStart', ( r'A\[', ) ),
 ( 'CodeBegin', ( r'\[', ) ),
 ( 'CodeEnd', ( r'\]', ) ),
-( 'Position', ( r'r?[xyz]:[0-9]+(.[0-9]+)?', ) ),
+( 'Position', ( r'r?[xyz]:-?[0-9]+(.[0-9]+)?', ) ),
 ( 'Comma', ( r',', ) ),
 ( 'Dash', ( r'-', ) ),
@@ -887,7 +887,7 @@ class OperationSpecificsStage( Stage ):
 ( 'ScanCodeStart', ( r'S\[', ) ),
 ( 'CodeBegin', ( r'\[', ) ),
 ( 'CodeEnd', ( r'\]', ) ),
-( 'Position', ( r'r?[xyz]:[0-9]+(.[0-9]+)?', ) ),
+( 'Position', ( r'r?[xyz]:-?[0-9]+(.[0-9]+)?', ) ),
 ( 'PixelOperator', ( r'(\+:|-:|>>|<<)', ) ),
 ( 'String', ( r'"[^"]*"', ) ),
@@ -897,7 +897,7 @@ class OperationSpecificsStage( Stage ):
 ( 'Dash', ( r'-', ) ),
 ( 'Plus', ( r'\+', ) ),
 ( 'Parenthesis', ( r'\(|\)', ) ),
-( 'Percent', ( r'0|([1-9][0-9]*)%', ) ),
+( 'Percent', ( r'(0|([1-9][0-9]*))%', ) ),
 ( 'Number', ( r'-?(0x[0-9a-fA-F]+)|(0|([1-9][0-9]*))', ) ),
 ( 'Name', ( r'[A-Za-z_][A-Za-z_0-9]*', ) ),
 ]
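
The two pattern fixes above are easy to sanity-check outside the compiler. A minimal sketch with plain `re` (standalone and illustrative only): the old `Percent` pattern only tied the `%` to the non-zero branch, so `0%` lexed as a bare `0`, while the new pattern requires the suffix in both branches; `Position` now also accepts negative coordinates.

```python
import re

# Old vs. new 'Percent' pattern from OperationSpecificsStage (standalone check).
percent_old = re.compile( r'0|([1-9][0-9]*)%' )
percent_new = re.compile( r'(0|([1-9][0-9]*))%' )

print( percent_old.match( '0%' ).group( 0 ) )   # '0'  -- the '%' was left behind
print( percent_new.match( '0%' ).group( 0 ) )   # '0%' -- whole token consumed

# 'Position' now tolerates negative coordinates such as y:-12.5
position = re.compile( r'r?[xyz]:-?[0-9]+(.[0-9]+)?' )
print( bool( position.match( 'y:-12.5' ) ) )    # True (no match before the '-?' was added)
```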

View File

@@ -1,6 +1,6 @@
 #!/usr/bin/env python3
 '''
-KLL Kiibohd .h File Emitter
+KLL Kiibohd .h/.c File Emitter
 '''
 # Copyright (C) 2016 by Jacob Alexander
@@ -238,6 +238,8 @@ class Kiibohd( Emitter, TextEmitter ):
 variables = full_context.query( 'AssignmentExpression', 'Variable' )
 for dkey, dvalue in sorted( defines.data.items() ):
 if dvalue.name in variables.data.keys():
+# TODO Handle arrays
+if not isinstance( variables.data[ dvalue.name ].value, list ):
 self.fill_dict['Defines'] += "\n#define {0} {1}".format(
 dvalue.association,
 variables.data[ dvalue.name ].value.replace( '\n', ' \\\n' ),
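
The new guard skips variables whose value arrives as a Python list (array-type KLL variables): a list has no `.replace()`, so it cannot be folded into a single `#define` yet, hence the TODO. A minimal sketch of the distinction, with illustrative names and values rather than the emitter's real data structures:

```python
# Illustrative values only; the list mirrors Array[] = a b c "b c" 3; from the test file below.
scalar_value = '1.5ms'
array_value  = [ 'a', 'b', 'c', 'b c', '3' ]

for name, value in ( ( 'Variable_define', scalar_value ), ( 'Array_define', array_value ) ):
    if not isinstance( value, list ):
        # Scalar variables become a single #define, with newlines escaped for C.
        print( "#define {0} {1}".format( name, value.replace( '\n', ' \\\n' ) ) )
    # Array variables are skipped for now (see the TODO above); they need per-element handling.
```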

View File

@@ -3,3 +3,7 @@ Array[] = a b c "b c" 3;
 Index[5] = "this text" thing; # Single element
 Index[6] = moar;
+Variable => Variable_define;
+Array => Array_define;
+Index => Index_define;

View File

@@ -54,6 +54,10 @@ A[BLEEdsing2, 8] <= U"B"(40,50,0x60);
 A[BLEEdsing2, 9] <= S120(40,50,0x60);
 A[BLEEdsing2, 10] <= S[0x10](40,50,0x60);
+A[RainbowFillInterp] <= start, interp:on;
+A[RainbowFillInterp, 1] <= P[c:20%](255,255,0), P[c:40%](255,0,0), P[c:60%](127,0,255), P[c:80%](0,0,255);
+A[RainbowFillInterp, 2] <= P[c:0%](0,255,0), P[c:20%](255,255,0), P[c:40%](255,0,0), P[c:60%](127,0,255), P[c:80%](0,0,255);
 # Animation Triggers
 myCapability => myFunc( myArg1 : 1, myArg2 : 4 );
 A[BLEEdsing, 3] : myCapability( 0x8, 0x25 );
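
These are the "more complicated Animation definition" expressions from the commit message: a named animation with an `interp:on` modifier and frames that address pixels by column percentage. As a rough illustration of how a pixel-position argument lexes once the `Percent` fix above is applied, here is a hedged sketch using funcparserlib's `make_tokenizer` with a trimmed, hypothetical spec list (token names and ordering are simplified, not the compiler's full table):

```python
from funcparserlib.lexer import make_tokenizer

# Trimmed, illustrative spec list -- just enough to lex 'P[c:20%]'.
specs = [
    ( 'PixelStart', ( r'P\[', ) ),
    ( 'CodeEnd', ( r'\]', ) ),
    ( 'Percent', ( r'(0|([1-9][0-9]*))%', ) ),
    ( 'Name', ( r'[A-Za-z_][A-Za-z_0-9]*', ) ),
    ( 'Colon', ( r':', ) ),
]
tokenize = make_tokenizer( specs )
print( [ ( t.type, t.value ) for t in tokenize( 'P[c:20%]' ) ] )
# Illustrative result: PixelStart 'P[', Name 'c', Colon ':', Percent '20%', CodeEnd ']'
```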

View File

@@ -151,7 +151,7 @@ class Parser(object):
 tok = tokens[max]
 else:
 tok = '<EOF>'
-raise NoParseError('%s: %s' % (e.msg, tok), e.state)
+raise NoParseError('%s: %s' % (e.msg, tok), e.state, tok)
 def __add__(self, other):
 """Parser(a, b), Parser(a, c) -> Parser(a, _Tuple(b, c))
@@ -265,9 +265,10 @@ class State(object):
 class NoParseError(Exception):
-def __init__(self, msg='', state=None):
+def __init__(self, msg='', state=None, token=None):
 self.msg = msg
 self.state = state
+self.token = token # Next token
 def __str__(self):
 return self.msg
@@ -294,7 +295,7 @@ def finished(tokens, s):
 if s.pos >= len(tokens):
 return None, s
 else:
-raise NoParseError('should have reached <EOF>', s)
+raise NoParseError('should have reached <EOF>', s, tokens[s.pos])
 finished.name = 'finished'
@@ -344,7 +345,7 @@ def some(pred):
 else:
 if debug:
 log.debug('failed "%s", state = %s' % (t, s))
-raise NoParseError('got unexpected token', s)
+raise NoParseError('got unexpected token', s, t)
 _some.name = '(some)'
 return _some
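
With the offending token threaded through, callers can report what the parser stopped on rather than only where. A minimal sketch against the bundled funcparserlib, using a toy grammar (purely illustrative):

```python
from funcparserlib.lexer import make_tokenizer
from funcparserlib.parser import some, finished, skip, NoParseError

tokenize = make_tokenizer( [ ( 'Number', ( r'[0-9]+', ) ), ( 'Space', ( r'[ ]+', ) ) ] )
number = some( lambda t: t.type == 'Number' )
parser = number + skip( finished )   # accepts exactly one Number

try:
    parser.parse( [ t for t in tokenize( '1 2' ) if t.type != 'Space' ] )
except NoParseError as err:
    print( err )         # message, e.g. "should have reached <EOF>: ..."
    print( err.token )   # the offending token itself, newly available after this change
```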

kll.py
View File

@@ -149,10 +149,14 @@ def tokenize( string ):
 ( 'LedCodeStart', ( r'LED\[', ) ),
 ( 'ScanCode', ( r'S((0x[0-9a-fA-F]+)|([0-9]+))', ) ),
 ( 'ScanCodeStart', ( r'S\[', ) ),
+( 'PixelCodeStart', ( r'P\[.*', ) ), # Discarded, needs KLL 0.5
+( 'AnimationStart', ( r'A\[.*', ) ), # Discarded, needs KLL 0.5
+( 'CodeStart', ( r'\[', ) ),
 ( 'CodeEnd', ( r'\]', ) ),
 ( 'String', ( r'"[^"]*"', ) ),
 ( 'SequenceString', ( r"'[^']*'", ) ),
-( 'Operator', ( r'=>|:\+|:-|::|:|=', ) ),
+( 'Position', ( r'r?[xyz]:-?[0-9]+(.[0-9]+)?', ) ),
+( 'Operator', ( r'<=|=>|:\+|:-|::|:|=', ) ),
 ( 'Number', ( r'(-[ \t]*)?((0x[0-9a-fA-F]+)|(0|([1-9][0-9]*)))', VERBOSE ) ),
 ( 'Comma', ( r',', ) ),
 ( 'Dash', ( r'-', ) ),
@@ -167,6 +171,9 @@ def tokenize( string ):
 # Tokens to filter out of the token stream
 useless = ['Space', 'Comment']
+# Discarded expresssions (KLL 0.4+)
+useless.extend( ['PixelCodeStart', 'AnimationStart'] )
 tokens = make_tokenizer( specs )
 return [x for x in tokens( string ) if x.type not in useless]
@@ -596,6 +603,7 @@ sysCode = tokenType('SysCode') >> make_sysCode
 none = tokenType('None') >> make_none
 name = tokenType('Name')
 number = tokenType('Number') >> make_number
+position = tokenType('Position')
 comma = tokenType('Comma')
 dash = tokenType('Dash')
 plus = tokenType('Plus')
@@ -606,6 +614,7 @@ seqString = tokenType('SequenceString') >> make_seqString
 unseqString = tokenType('SequenceString') >> make_unseqString # For use with variables
 # Code variants
+code_start = tokenType('CodeStart')
 code_end = tokenType('CodeEnd')
 # Scan Codes
@@ -669,7 +678,7 @@ resultCode_outerList = ( ( capFunc_sequence >> optionExpansion ) | none ) >>
 #| <variable> = <variable contents>;
 variable_contents = name | content | string | number | comma | dash | unseqString
-variable_expression = name + skip( operator('=') ) + oneplus( variable_contents ) + skip( eol ) >> set_variable
+variable_expression = name + skip( maybe( code_start + maybe( number ) + code_end ) ) + skip( operator('=') ) + oneplus( variable_contents ) + skip( eol ) >> set_variable
 #| <capability name> => <c function>;
 capability_arguments = name + skip( operator(':') ) + number + skip( maybe( comma ) )
@@ -683,11 +692,14 @@ operatorTriggerResult = operator(':') | operator(':+') | operator(':-') | operat
 scanCode_expression = triggerCode_outerList + operatorTriggerResult + resultCode_outerList + skip( eol ) >> map_scanCode
 usbCode_expression = triggerUSBCode_outerList + operatorTriggerResult + resultCode_outerList + skip( eol ) >> map_usbCode
+### Ignored expressions
+ignore_expression = scanCode_expanded | scanCode + operator('<=') + oneplus( position + maybe( skip( comma ) )) + eol
 def parse( tokenSequence ):
 """Sequence(Token) -> object"""
 # Top-level Parser
-expression = scanCode_expression | usbCode_expression | variable_expression | capability_expression | define_expression
+expression = ignore_expression | scanCode_expression | usbCode_expression | variable_expression | capability_expression | define_expression
 kll_text = many( expression )
 kll_file = maybe( kll_text ) + skip( finished )
@@ -708,6 +720,8 @@ def processKLLFile( filename ):
 try:
 tree = parse( tokenSequence )
 except (NoParseError, KeyError) as err:
+# Ignore data association expressions KLL 0.4+ required
+if err.token.value != '<=':
 print ( "{0} Parsing error in '{1}' - {2}".format( ERROR, filename, err ) )
 sys.exit( 1 )
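
Taken together, the kll.py changes let the 0.3d parser skim over KLL 0.4/0.5 content: whole `P[...]`/`A[...]` lines lex into tokens that are filtered straight out, scancode position assignments match the new `ignore_expression`, and a parse failure whose offending token is `<=` no longer takes the error-and-exit path. A rough sketch of the intended lexer behaviour (assuming kll.py is importable as a module and using its module-level `tokenize()`; the expected outputs are illustrative):

```python
from kll import tokenize   # assumption: the 0.3d kll.py is on the module path

# An animation definition is swallowed whole by the new AnimationStart pattern
# and then filtered out along with the other 'useless' token types.
print( tokenize( 'A[RainbowFillInterp] <= start, interp:on;' ) )
# Expected (illustrative): []

# A physical position assignment now lexes into real tokens that
# ignore_expression can consume instead of raising NoParseError.
print( [ t.type for t in tokenize( 'S100 <= x:20, y:-15.5;' ) ] )
# Expected (illustrative): ScanCode, Operator, Position, Comma, Position, EndOfLine
```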