@@ -21,7 +21,9 @@ use super::tokenizer::*;
21
21
use std:: error:: Error ;
22
22
use std:: fmt;
23
23
24
+ #[ cfg( feature = "cst" ) ]
24
25
use crate :: builder;
26
+
25
27
use crate :: { cst, cst:: SyntaxKind as SK } ;
26
28
27
29
#[ derive( Debug , Clone , PartialEq ) ]
@@ -41,6 +43,7 @@ macro_rules! parser_err {
41
43
/// A snapshot of the parser's position, handed out by `start()` and
/// consumed by `reset()` (to backtrack) or `complete()` (to wrap the
/// tokens consumed since the snapshot into a CST node).
pub struct Marker {
    /// position in the token stream (`parser.index`)
    index: usize,
    /// Checkpoint into the green-tree builder so a node can be started
    /// retroactively at this position (only with the `cst` feature).
    #[cfg(feature = "cst")]
    builder_checkpoint: builder::Checkpoint,
}
46
49
@@ -83,13 +86,16 @@ pub struct Parser {
83
86
tokens : Vec < Token > ,
84
87
/// The index of the first unprocessed token in `self.tokens`
85
88
index : usize ,
89
+
90
+ #[ cfg( feature = "cst" ) ]
86
91
builder : builder:: GreenNodeBuilder < ' static > ,
87
92
88
93
// TBD: the parser currently provides an API to move around the token
89
94
// stream without restrictions (`next_token`/`prev_token`), while the
90
95
// `builder` does not. To work around this, we keep a list of "pending"
91
96
// tokens which have already been processed via `next_token`, but may
92
97
// be put back via `prev_token`.
98
+ #[ cfg( feature = "cst" ) ]
93
99
pending : Vec < ( cst:: SyntaxKind , rowan:: SmolStr ) > ,
94
100
}
95
101
@@ -112,16 +118,21 @@ macro_rules! ret {
112
118
impl Parser {
113
119
/// Parse the specified tokens
114
120
/// Create a parser over the given token stream.
///
/// With the `cst` feature enabled this also opens the root CST node
/// (`SK::ROOT`) in the green-tree builder.
pub fn new(tokens: Vec<Token>) -> Self {
    // Without the `cst` feature the builder call below compiles away
    // and `p` is never mutated, hence the allow.
    #[allow(unused_mut)]
    let mut p = Parser {
        tokens,
        index: 0,
        #[cfg(feature = "cst")]
        builder: builder::GreenNodeBuilder::new(),
        #[cfg(feature = "cst")]
        pending: Vec::new(),
    };
    #[cfg(feature = "cst")]
    p.builder.start_node(SK::ROOT.into());
    p
}
124
134
135
+ #[ cfg( feature = "cst" ) ]
125
136
pub fn syntax ( mut self ) -> cst:: SyntaxNode {
126
137
if self . peek_token ( ) . is_some ( ) {
127
138
// Not at end-of-file: either some extraneous tokens left after
@@ -807,6 +818,7 @@ impl Parser {
807
818
self . flush_pending_buffer ( ) ;
808
819
Marker {
809
820
index : self . index ,
821
+ #[ cfg( feature = "cst" ) ]
810
822
builder_checkpoint : self . builder . checkpoint ( ) ,
811
823
}
812
824
}
@@ -826,19 +838,25 @@ impl Parser {
826
838
827
839
pub fn reset ( & mut self , m : Marker ) {
828
840
self . index = m. index ;
841
+ #[ cfg( feature = "cst" ) ]
829
842
self . pending . truncate ( 0 ) ;
843
+ #[ cfg( feature = "cst" ) ]
830
844
self . builder . reset ( m. builder_checkpoint ) ;
831
845
}
832
846
847
+ #[ allow( unused_variables) ]
833
848
pub fn complete < T > ( & mut self , m : Marker , kind : cst:: SyntaxKind , rv : T ) -> T {
834
849
self . flush_pending_buffer ( ) ;
850
+ #[ cfg( feature = "cst" ) ]
835
851
self . builder
836
852
. start_node_at ( m. builder_checkpoint , kind. into ( ) ) ;
853
+ #[ cfg( feature = "cst" ) ]
837
854
self . builder . finish_node ( ) ;
838
855
rv
839
856
}
840
857
841
858
pub fn flush_pending_buffer ( & mut self ) {
859
+ #[ cfg( feature = "cst" ) ]
842
860
for ( kind, s) in self . pending . drain ( ..) {
843
861
self . builder . token ( kind. into ( ) , s) ;
844
862
}
@@ -886,8 +904,11 @@ impl Parser {
886
904
self . index += 1 ;
887
905
#[ allow( clippy:: let_and_return) ]
888
906
let token = self . tokens . get ( self . index - 1 ) ;
889
- if let Some ( t) = token {
890
- self . pending . push ( ( t. kind ( ) , t. to_string ( ) . into ( ) ) ) ;
907
+ #[ cfg( feature = "cst" ) ]
908
+ {
909
+ if let Some ( t) = token {
910
+ self . pending . push ( ( t. kind ( ) , t. to_string ( ) . into ( ) ) ) ;
911
+ }
891
912
}
892
913
token
893
914
}
@@ -900,10 +921,13 @@ impl Parser {
900
921
assert ! ( self . index > 0 ) ;
901
922
self . index -= 1 ;
902
923
903
- if !self . pending . is_empty ( ) {
904
- self . pending . pop ( ) ;
905
- } else {
906
- assert ! ( self . index >= self . tokens. len( ) ) ; // past EOF
924
+ #[ cfg( feature = "cst" ) ]
925
+ {
926
+ if !self . pending . is_empty ( ) {
927
+ self . pending . pop ( ) ;
928
+ } else {
929
+ assert ! ( self . index >= self . tokens. len( ) ) ; // past EOF
930
+ }
907
931
}
908
932
909
933
if let Some ( Token :: Whitespace ( _) ) = self . tokens . get ( self . index ) {
@@ -913,6 +937,7 @@ impl Parser {
913
937
// There may be only one non-whitespace token `pending` as by
914
938
// convention, backtracking (i.e. going more than one token back)
915
939
// is done via `start`/`reset` instead.
940
+ #[ cfg( feature = "cst" ) ]
916
941
for tok in & self . pending {
917
942
assert ! ( tok. 0 == SK :: Whitespace ) ;
918
943
}
@@ -941,9 +966,12 @@ impl Parser {
941
966
Some ( Token :: Word ( ref k) ) if expected. eq_ignore_ascii_case ( & k. keyword ) => {
942
967
self . next_token ( ) ;
943
968
// TBD: a hack to change the "kind" of the token just processed
944
- let mut p = self . pending . pop ( ) . unwrap ( ) ;
945
- p. 0 = SK :: KW . into ( ) ;
946
- self . pending . push ( p) ;
969
+ #[ cfg( feature = "cst" ) ]
970
+ {
971
+ let mut p = self . pending . pop ( ) . unwrap ( ) ;
972
+ p. 0 = SK :: KW ;
973
+ self . pending . push ( p) ;
974
+ }
947
975
true
948
976
}
949
977
_ => false ,
0 commit comments