-
-
Notifications
You must be signed in to change notification settings - Fork 1
Expand file tree
/
Copy pathanalyzer.rs
More file actions
73 lines (65 loc) · 2.45 KB
/
analyzer.rs
File metadata and controls
73 lines (65 loc) · 2.45 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
use super::shared::DataType;
use crate::ast::Module;
use crate::common::default::default;
use crate::mem::manager::{Dealloc, Manager};
use crate::tokenizer::{create_transition_maps, JsonToken, TokenizerState, TransitionMaps};
/// Configuration supplied when constructing an [`AnalyzerState`].
///
/// `Default` gives a parameter set with `data_type: DataType::default()`.
#[derive(Default)]
pub struct AnalyzerParameters {
    // NOTE(review): not read anywhere in this file yet — presumably consumed
    // by later analysis stages; confirm against callers.
    data_type: DataType,
}
/// A single diagnostic produced during analysis.
///
/// Currently only the success variant exists; `Default` yields `OK`.
#[derive(Default)]
pub enum AnalyzerDiagnostic {
    /// No problem detected (the default variant).
    #[default]
    OK,
    // TODO: add error, warning diagnostics.
}
/// The final output of an analysis session, returned by `AnalyzerState::end`.
pub struct AnalyzerResults<D: Dealloc> {
    /// The module produced by the analysis.
    pub module: Module<D>,
    /// Every diagnostic accumulated over the whole session.
    pub diagnostics: Vec<AnalyzerDiagnostic>,
}
/// Incremental analyzer: feed input one character at a time with `push_mut`
/// and finish with `end` to obtain the [`AnalyzerResults`].
pub struct AnalyzerState<M: Manager> {
    // NOTE(review): stored but not read anywhere in this file yet — presumably
    // used by future analysis logic; confirm.
    parameters: AnalyzerParameters,
    /// State of the underlying tokenizer that converts characters to `JsonToken`s.
    tokenizer_state: TokenizerState<M::Dealloc>,
    /// Tokenizer transition tables, built once in `new` via `create_transition_maps`.
    tokenizer_maps: TransitionMaps<M>,
    /// `diagnostics.len()` as of the previous `push_mut` call; lets `push_mut`
    /// report how many diagnostics the latest character added.
    diagnostics_len: usize,
    // TODO: add line number, column number tracking fields (needed for diagnostics).
    /// The module being built up during analysis.
    module: Module<M::Dealloc>,
    /// Diagnostics accumulated so far.
    diagnostics: Vec<AnalyzerDiagnostic>,
}
impl<M: Manager + 'static> AnalyzerState<M> {
    /// Creates a new analyzer starting state. The caller should check `diagnostics` for errors
    /// immediately after creation (since the `parameters` value can be inconsistent).
    pub fn new(parameters: AnalyzerParameters) -> Self {
        Self {
            parameters,
            tokenizer_state: default(),
            tokenizer_maps: create_transition_maps(),
            diagnostics_len: 0,
            module: default(),
            diagnostics: default(),
        }
    }
    /// Feeds the next input character into the analyzer and returns how many
    /// new entries were appended to `diagnostics` as a result. The caller is
    /// responsible for inspecting the tail of `diagnostics` to decide whether
    /// any of the additions are fatal for the current parsing session.
    pub fn push_mut(&mut self, manager: M, c: char) -> usize {
        // Tokenize the character first; each produced token is handed to the
        // (currently stubbed) token processor.
        let tokens = self
            .tokenizer_state
            .push_mut(manager, c, &self.tokenizer_maps);
        for token in tokens {
            self.process_token(token);
        }
        // Report the growth of `diagnostics` since the previous call.
        let previous_len = self.diagnostics_len;
        self.diagnostics_len = self.diagnostics.len();
        self.diagnostics_len - previous_len
    }
    /// Completes the analysis, consuming the state and yielding the results.
    pub fn end(self) -> AnalyzerResults<M::Dealloc> {
        // TODO: in case the current state is not a valid end state, add an error to self.diagnostics.
        let Self {
            module,
            diagnostics,
            ..
        } = self;
        AnalyzerResults {
            module,
            diagnostics,
        }
    }
    // Token handler; intentionally a no-op for now (module construction TBD).
    fn process_token(&mut self, _token: JsonToken<M::Dealloc>) {}
}