This project is archived. Its data is read-only.
nomad-lab / parser-fplo / Commits / 00da1578
Commit 00da1578, authored Nov 4, 2016 by Henning Glawe

mark token.match as protected by relabeling to token._match
parent dc44b46f

Showing 1 changed file with 16 additions and 12 deletions

parser/parser-fplo/FploInputParser.py  +16 −12

--- a/parser/parser-fplo/FploInputParser.py
+++ b/parser/parser-fplo/FploInputParser.py
@@ -34,16 +34,20 @@ class token(object):
     def __init__(self, line, pos_in_line):
         """
         token constructor takes re.match object as arg
         """
-        self.match = self.regex.match(line, pos_in_line)
-        if self.match is None:
+        match = self.regex.match(line, pos_in_line)
+        if match is None:
             raise TokenMatchError
+        self._match = match
         self.value = self.match2value()
     def highlighted(self):
         """
         return ANSI-highlighted token
         """
-        m = self.cRE_end_newline.match(self.match.group(0))
+        m = self.cRE_end_newline.match(self._match.group(0))
         return self.highlight_start + m.group(1) + self.highlight_end + m.group(2)
+    def match_end(self):
+        return self._match.end()
+
     def match2value(self):
         return None
@@ -75,7 +79,7 @@ class token_literal(token):
     )

     def match2value(self):
-        match = self.match
+        match = self._match
         if match.group('str_d') is not None:
             return match.group('str_d')
         if match.group('str_s') is not None:
@@ -102,7 +106,7 @@ class token_datatype(token):
     subtype_dict = {}

     def match2value(self):
-        value_index = self.subtype_dict.get(self.match.group(1), None)
+        value_index = self.subtype_dict.get(self._match.group(1), None)
         if value_index is None:
             raise TokenMatchError
         self.value_index = value_index
@@ -126,7 +130,7 @@ class token_keyword(token):
     subtype_dict = {}

     def match2value(self):
-        value_index = self.subtype_dict.get(self.match.group(1), None)
+        value_index = self.subtype_dict.get(self._match.group(1), None)
         if value_index is None:
             raise TokenMatchError
         self.value_index = value_index
@@ -145,7 +149,7 @@ class token_identifier(token):
     regex = re.compile(r'\s*([a-zA-Z_][a-zA-Z0-9_]*)')

     def match2value(self):
-        return self.match.group(1)
+        return self._match.group(1)


 class token_subscript_begin(token):
@@ -160,7 +164,7 @@ class token_operator(token):
     regex = re.compile(r'\s*(\+=|\-=|=|,|-|\+|/|\*)')

     def match2value(self):
-        return self.match.group(1)
+        return self._match.group(1)


 class token_block_begin(token):
@@ -179,7 +183,7 @@ class token_line_comment(token):
     regex = re.compile(r'\s*(?:(//|#)|(/\*))(?P<comment>.*)')

     def match2value(self):
-        return self.match.group('comment')
+        return self._match.group('comment')

 class token_trailing_whitespace(token):
     regex = re.compile(r'\s+$')
@@ -188,13 +192,13 @@ class token_bad_input(token):
     regex = re.compile('(.+)$')

     def match2value(self):
-        return self.match.group(1)
+        return self._match.group(1)

 class token_flag_value(token):
     regex = re.compile(r'\(([+-])\)')

     def match2value(self):
-        if self.match.group(1) == '+':
+        if self._match.group(1) == '+':
             return True
         else:
             return False
@@ -638,7 +642,7 @@ class FploInputParser(object):
             self.current_concrete_statement.append(this_token)
         else:
             raise Exception("Unhandled token type " + this_token.__class__.__name__)
-        return this_token.match.end()
+        return this_token.match_end()
     def onBad_input(self):
         """
         hook: called at the end of parsing if there was any bad input
         """
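
The rename follows the usual Python convention of flagging an attribute as internal with a leading underscore and giving callers a narrow accessor instead of the raw re.Match object; the last hunk accordingly switches FploInputParser from this_token.match.end() to the new this_token.match_end(). Below is a minimal, self-contained sketch of the same pattern, not the project's actual code (the Token name, the \s*(\S+) regex, and ValueError standing in for TokenMatchError are illustrative assumptions):

import re

class Token:
    """Toy token that keeps its re.Match object private."""
    regex = re.compile(r'\s*(\S+)')

    def __init__(self, line, pos_in_line):
        # bind the match locally, then store it under a leading-underscore
        # name so external code is discouraged from poking at it
        match = self.regex.match(line, pos_in_line)
        if match is None:
            # stand-in for the parser's TokenMatchError
            raise ValueError("no token at position %d" % pos_in_line)
        self._match = match
        self.value = match.group(1)

    def match_end(self):
        # the one detail callers need from the match: where to resume scanning
        return self._match.end()

# usage: tokenize a line without ever touching the private _match
line = "nspin = 2"
pos = 0
while pos < len(line):
    tok = Token(line, pos)
    print(tok.value)       # prints: nspin, =, 2
    pos = tok.match_end()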