
Commit 7aed920

fixed upper cases
1 parent e65a6aa commit 7aed920

File tree: 3 files changed (+58, -1 lines changed)


src/tokenizer.js

Lines changed: 1 addition & 1 deletion
@@ -55,7 +55,7 @@ class Tokenizer {
         && (!lastToken || Tokenizer.SEPARATOR_CHARACTERS.includes(lastToken.buffer.slice(-1)))) {
         tokens.push(new Token(Token.TYPE.USER, buffer, index - buffer.length + 1))
         buffer = ""
-      } else if(!Tokenizer.USER_ALLOWED_CHARACTERS.includes(nextCharacter) && !Tokenizer.CHANNEL_ALLOWED_CHARACTERS.includes(nextCharacter)) {
+      } else if(nextCharacter === null || (!Tokenizer.USER_ALLOWED_CHARACTERS.includes(nextCharacter.toLowerCase()) && !Tokenizer.CHANNEL_ALLOWED_CHARACTERS.includes(nextCharacter.toLowerCase()))) {
         if(lastToken && lastToken.type === Token.TYPE.TEXT) {
           let newBuffer = tokens[tokens.length - 1].buffer + buffer
           let bufferIndex = index - buffer.length - tokens[tokens.length - 1].buffer.length + 1

test/parser.test.js

Lines changed: 27 additions & 0 deletions
@@ -96,6 +96,33 @@ describe("parser", () => {
     assert.deepEqual(message_____.channels, [])
     assert.deepEqual(message_____.users, [])

+
+    const message______ = Parser.parse(`#fFSFSD #dsadsaDas #dFsads @dsaFGDSG`)
+
+    assert.strictEqual(message______.fragments[0].type, "CHANNEL")
+    assert.strictEqual(message______.fragments[0].content, "#fFSFSD")
+
+    assert.strictEqual(message______.fragments[1].type, "TEXT")
+    assert.strictEqual(message______.fragments[1].content, " ")
+
+    assert.strictEqual(message______.fragments[2].type, "CHANNEL")
+    assert.strictEqual(message______.fragments[2].content, "#dsadsaDas")
+
+    assert.strictEqual(message______.fragments[3].type, "TEXT")
+    assert.strictEqual(message______.fragments[3].content, " ")
+
+    assert.strictEqual(message______.fragments[4].type, "CHANNEL")
+    assert.strictEqual(message______.fragments[4].content, "#dFsads")
+
+    assert.strictEqual(message______.fragments[5].type, "TEXT")
+    assert.strictEqual(message______.fragments[5].content, " ")
+
+    assert.strictEqual(message______.fragments[6].type, "USER")
+    assert.strictEqual(message______.fragments[6].content, "@dsaFGDSG")
+
+    assert.deepEqual(message______.channels, ["fFSFSD", "dsadsaDas", "dFsads"])
+    assert.deepEqual(message______.users, ["dsaFGDSG"])
+
   })

 })

test/tokenizer.test.js

Lines changed: 30 additions & 0 deletions
@@ -95,6 +95,36 @@ describe("tokenize", () => {
     assert.strictEqual(tokens_____[0].buffer, "@dsadsa#dsads")
     assert.strictEqual(tokens_____[0].bufferIndex, 0)

+    const tokens______ = Tokenizer.tokenize(`#fFSFSD #dsadsaDas #dFsads @dsaFGDSG`)
+
+    assert.strictEqual(tokens______[0].type, "CHANNEL")
+    assert.strictEqual(tokens______[0].buffer, "#fFSFSD")
+    assert.strictEqual(tokens______[0].bufferIndex, 0)
+
+    assert.strictEqual(tokens______[1].type, "TEXT")
+    assert.strictEqual(tokens______[1].buffer, " ")
+    assert.strictEqual(tokens______[1].bufferIndex, 7)
+
+    assert.strictEqual(tokens______[2].type, "CHANNEL")
+    assert.strictEqual(tokens______[2].buffer, "#dsadsaDas")
+    assert.strictEqual(tokens______[2].bufferIndex, 8)
+
+    assert.strictEqual(tokens______[3].type, "TEXT")
+    assert.strictEqual(tokens______[3].buffer, " ")
+    assert.strictEqual(tokens______[3].bufferIndex, 18)
+
+    assert.strictEqual(tokens______[4].type, "CHANNEL")
+    assert.strictEqual(tokens______[4].buffer, "#dFsads")
+    assert.strictEqual(tokens______[4].bufferIndex, 19)
+
+    assert.strictEqual(tokens______[5].type, "TEXT")
+    assert.strictEqual(tokens______[5].buffer, " ")
+    assert.strictEqual(tokens______[5].bufferIndex, 26)
+
+    assert.strictEqual(tokens______[6].type, "USER")
+    assert.strictEqual(tokens______[6].buffer, "@dsaFGDSG")
+    assert.strictEqual(tokens______[6].bufferIndex, 27)
+
   })

 })
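
The bufferIndex expectations follow from the token lengths in "#fFSFSD #dsadsaDas #dFsads @dsaFGDSG": "#fFSFSD" spans indices 0 to 6, the space after it sits at 7, "#dsadsaDas" starts at 8, and so on. A quick sanity check in plain Node (nothing here comes from the repository):

const input = "#fFSFSD #dsadsaDas #dFsads @dsaFGDSG"
console.log(input.indexOf(" "))          // 7  -> bufferIndex of the first TEXT token
console.log(input.indexOf("#dsadsaDas")) // 8
console.log(input.indexOf("#dFsads"))    // 19
console.log(input.indexOf("@dsaFGDSG"))  // 27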
