tokenizer.js
var SyntaxError = require('./SyntaxError');

// Character codes treated as whitespace
var TAB = 9;
var N = 10;     // line feed
var F = 12;     // form feed
var R = 13;     // carriage return
var SPACE = 32;

var Tokenizer = function(str) {
    this.str = str;
    this.pos = 0;
};

Tokenizer.prototype = {
    // Char code at an arbitrary position; 0 when past the end of input
    charCodeAt: function(pos) {
        return pos < this.str.length ? this.str.charCodeAt(pos) : 0;
    },
    // Char code at the current position
    charCode: function() {
        return this.charCodeAt(this.pos);
    },
    // Char code one position ahead of the current one
    nextCharCode: function() {
        return this.charCodeAt(this.pos + 1);
    },
    // Char code of the first non-whitespace character at or after `pos`
    nextNonWsCode: function(pos) {
        return this.charCodeAt(this.findWsEnd(pos));
    },
    // Index of the first non-whitespace character at or after `pos`
    findWsEnd: function(pos) {
        for (; pos < this.str.length; pos++) {
            var code = this.str.charCodeAt(pos);
            if (code !== R && code !== N && code !== F && code !== SPACE && code !== TAB) {
                break;
            }
        }

        return pos;
    },
    // Slice from the current position up to `end`, advancing the position to `end`
    substringToPos: function(end) {
        return this.str.substring(this.pos, this.pos = end);
    },
    // Consume the expected char code, or throw a SyntaxError at the current position
    eat: function(code) {
        if (this.charCode() !== code) {
            this.error('Expect `' + String.fromCharCode(code) + '`');
        }

        this.pos++;
    },
    // Return the current character (empty string at end of input) and advance
    peek: function() {
        return this.pos < this.str.length ? this.str.charAt(this.pos++) : '';
    },
    error: function(message) {
        throw new SyntaxError(message, this.str, this.pos);
    }
};

module.exports = Tokenizer;
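
A minimal usage sketch (not part of the file above), assuming the module is saved as tokenizer.js next to the SyntaxError helper it requires. It uses only the methods defined above: skip leading whitespace, scan up to an opening parenthesis, take the scanned text, then consume the `(`.

var Tokenizer = require('./tokenizer');

var tokenizer = new Tokenizer('  foo(bar)');

// Jump past the leading whitespace
tokenizer.pos = tokenizer.findWsEnd(tokenizer.pos);

// Find the end of the name without moving the position
var end = tokenizer.pos;
while (end < tokenizer.str.length && tokenizer.charCodeAt(end) !== 40 /* ( */) {
    end++;
}

var name = tokenizer.substringToPos(end); // 'foo'; position now points at `(`
tokenizer.eat(40);                        // consumes `(`; throws SyntaxError on a mismatch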