/processor/parser/lexer_test.go

https://github.com/boyter/sc · Go

// SPDX-License-Identifier: MIT OR Unlicense
package parser

import (
	"testing"
)

func TestNext(t *testing.T) {
	lex := NewLexer(`test`)
	for lex.Next() != 0 {
		if 0 > 1 { // we just want to ensure this does not crash hence the oddness
			t.Error("wot the")
		}
	}
}

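// Peek, like Next above, is only exercised for safety: calling it repeatedly,
// even well past the end of the input, should never panic.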
func TestPeek(t *testing.T) {
	lex := NewLexer(`test`)
	for i := 0; i < 100; i++ {
		lex.Peek()
	}
}

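// An empty input should immediately produce the END token.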
func TestNextEnd(t *testing.T) {
	lex := NewLexer(``)
	token := lex.NextToken()
	if token.Type != "END" {
		t.Error(`expected END got`, token.Type)
	}
}

func TestNextTokenParenOpen(t *testing.T) {
	lex := NewLexer(`(`)
	token := lex.NextToken()
	if token.Type != "PAREN_OPEN" {
		t.Error(`expected PAREN_OPEN got`, token.Type)
	}
}

func TestNextTokenParenClose(t *testing.T) {
	lex := NewLexer(`)`)
	token := lex.NextToken()
	if token.Type != "PAREN_CLOSE" {
		t.Error(`expected PAREN_CLOSE got`, token.Type)
	}
}

func TestNextTokenParenOpenParenClose(t *testing.T) {
	lex := NewLexer(`()`)
	token := lex.NextToken()
	if token.Type != "PAREN_OPEN" {
		t.Error(`expected PAREN_OPEN got`, token.Type)
	}
	token = lex.NextToken()
	if token.Type != "PAREN_CLOSE" {
		t.Error(`expected PAREN_CLOSE got`, token.Type)
	}
}

func TestNextTokenQuote(t *testing.T) {
	lex := NewLexer(`"`)
	token := lex.NextToken()
	if token.Type != "QUOTED_TERM" {
		t.Error(`expected QUOTED_TERM got`, token.Type)
	}
}

func TestNextTokenMultipleEmptyQuote(t *testing.T) {
	lex := NewLexer(`("")`)
	token := lex.NextToken()
	if token.Type != "PAREN_OPEN" {
		t.Error(`expected PAREN_OPEN got`, token.Type)
	}
	token = lex.NextToken()
	if token.Type != "QUOTED_TERM" {
		t.Error(`expected QUOTED_TERM got`, token.Type)
	}
	token = lex.NextToken()
	if token.Type != "PAREN_CLOSE" {
		t.Error(`expected PAREN_CLOSE got`, token.Type)
	}
}

func TestNextTokenIgnoresSpace(t *testing.T) {
	lex := NewLexer(` (`)
	token := lex.NextToken()
	if token.Type != "PAREN_OPEN" {
		t.Error(`expected PAREN_OPEN got`, token.Type)
	}
}

func TestNextTokenIgnoresSpaceMultiple(t *testing.T) {
	lex := NewLexer(` ( ) `)
	token := lex.NextToken()
	if token.Type != "PAREN_OPEN" {
		t.Error(`expected PAREN_OPEN got`, token.Type)
	}
	token = lex.NextToken()
	if token.Type != "PAREN_CLOSE" {
		t.Error(`expected PAREN_CLOSE got`, token.Type)
	}
}

func TestNextTokenQuotedTerm(t *testing.T) {
	lex := NewLexer(`"test"`)
	token := lex.NextToken()
	if token.Type != "QUOTED_TERM" {
		t.Error(`expected QUOTED_TERM got`, token.Type)
	}
	if token.Value != `test` {
		t.Error("expected test got", token.Value)
	}
}

func TestNextTokenQuotedTermSpace(t *testing.T) {
	lex := NewLexer(`"test things"`)
	token := lex.NextToken()
	if token.Type != "QUOTED_TERM" {
		t.Error(`expected QUOTED_TERM got`, token.Type)
	}
	if token.Value != `test things` {
		t.Error("expected test things got", token.Value)
	}
}

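// A quoted term with no closing quote should still lex as QUOTED_TERM,
// capturing everything up to the end of the input.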
func TestNextTokenQuotedTermNoEnd(t *testing.T) {
	lex := NewLexer(`"test`)
	token := lex.NextToken()
	if token.Type != "QUOTED_TERM" {
		t.Error(`expected QUOTED_TERM got`, token.Type)
	}
	if token.Value != `test` {
		t.Error("expected test got", token.Value)
	}
}

func TestNextTokenQuotedTermMultiple(t *testing.T) {
	lex := NewLexer(`"test" "something"`)
	token := lex.NextToken()
	if token.Type != "QUOTED_TERM" {
		t.Error(`expected QUOTED_TERM got`, token.Type)
	}
	if token.Value != `test` {
		t.Error("expected test got", token.Value)
	}
	token = lex.NextToken()
	if token.Type != "QUOTED_TERM" {
		t.Error(`expected QUOTED_TERM got`, token.Type)
	}
	if token.Value != `something` {
		t.Error("expected something got", token.Value)
	}
}

func TestNextTokenMultipleSomethingQuote(t *testing.T) {
	lex := NewLexer(`("test")`)
	token := lex.NextToken()
	if token.Type != "PAREN_OPEN" {
		t.Error(`expected PAREN_OPEN got`, token.Type)
	}
	token = lex.NextToken()
	if token.Type != "QUOTED_TERM" {
		t.Error(`expected QUOTED_TERM got`, token.Type)
	}
	token = lex.NextToken()
	if token.Type != "PAREN_CLOSE" {
		t.Error(`expected PAREN_CLOSE got`, token.Type)
	}
}

func TestNextTokenMultipleEverythingQuote(t *testing.T) {
	lex := NewLexer(`("test") ("test")`)
	token := lex.NextToken()
	if token.Type != "PAREN_OPEN" {
		t.Error(`expected PAREN_OPEN got`, token.Type)
	}
	token = lex.NextToken()
	if token.Type != "QUOTED_TERM" {
		t.Error(`expected QUOTED_TERM got`, token.Type)
	}
	token = lex.NextToken()
	if token.Type != "PAREN_CLOSE" {
		t.Error(`expected PAREN_CLOSE got`, token.Type)
	}
	token = lex.NextToken()
	if token.Type != "PAREN_OPEN" {
		t.Error(`expected PAREN_OPEN got`, token.Type)
	}
	token = lex.NextToken()
	if token.Type != "QUOTED_TERM" {
		t.Error(`expected QUOTED_TERM got`, token.Type)
	}
	token = lex.NextToken()
	if token.Type != "PAREN_CLOSE" {
		t.Error(`expected PAREN_CLOSE got`, token.Type)
	}
}

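// Input wrapped in forward slashes should lex as REGEX_TERM with the slashes
// stripped from the value; a missing closing slash is tolerated.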
func TestNextTokenRegex(t *testing.T) {
	lex := NewLexer(`/test/`)
	token := lex.NextToken()
	if token.Type != "REGEX_TERM" {
		t.Error(`expected REGEX_TERM got`, token.Type)
	}
	if token.Value != `test` {
		t.Error("expected test got", token.Value)
	}
}

func TestNextTokenRegexNoEnd(t *testing.T) {
	lex := NewLexer(`/test`)
	token := lex.NextToken()
	if token.Type != "REGEX_TERM" {
		t.Error(`expected REGEX_TERM got`, token.Type)
	}
	if token.Value != `test` {
		t.Error("expected test got", token.Value)
	}
}

func TestNextTokenRegexValue(t *testing.T) {
	lex := NewLexer(`/[cb]at/`)
	token := lex.NextToken()
	if token.Type != "REGEX_TERM" {
		t.Error(`expected REGEX_TERM got`, token.Type)
	}
	if token.Value != `[cb]at` {
		t.Error("expected [cb]at got", token.Value)
	}
}

func TestNextTokenTerm(t *testing.T) {
	lex := NewLexer(`something`)
	token := lex.NextToken()
	if token.Type != "TERM" {
		t.Error(`expected TERM got`, token.Type)
	}
	if token.Value != "something" {
		t.Error(`expected something got`, token.Value)
	}
}

func TestNextTokenMultipleTerm(t *testing.T) {
	lex := NewLexer(`something else`)
	token := lex.NextToken()
	if token.Type != "TERM" {
		t.Error(`expected TERM got`, token.Type)
	}
	if token.Value != "something" {
		t.Error(`expected something got`, token.Value)
	}
	token = lex.NextToken()
	if token.Type != "TERM" {
		t.Error(`expected TERM got`, token.Type)
	}
	if token.Value != "else" {
		t.Error(`expected else got`, token.Value)
	}
}

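// The bare keywords AND, OR and NOT should lex as their own operator token
// types rather than as plain TERM tokens.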
func TestNextTokenAnd(t *testing.T) {
	lex := NewLexer(`AND`)
	token := lex.NextToken()
	if token.Type != "AND" {
		t.Error(`expected AND got`, token.Type)
	}
}

func TestNextTokenOr(t *testing.T) {
	lex := NewLexer(`OR`)
	token := lex.NextToken()
	if token.Type != "OR" {
		t.Error(`expected OR got`, token.Type)
	}
}

func TestNextTokenNot(t *testing.T) {
	lex := NewLexer(`NOT`)
	token := lex.NextToken()
	if token.Type != "NOT" {
		t.Error(`expected NOT got`, token.Type)
	}
}

func TestNextTokenMultipleTermOperators(t *testing.T) {
	lex := NewLexer(`something AND else`)
	token := lex.NextToken()
	if token.Type != "TERM" {
		t.Error(`expected TERM got`, token.Type)
	}
	if token.Value != "something" {
		t.Error(`expected something got`, token.Value)
	}
	token = lex.NextToken()
	if token.Type != "AND" {
		t.Error(`expected AND got`, token.Type)
	}
	token = lex.NextToken()
	if token.Type != "TERM" {
		t.Error(`expected TERM got`, token.Type)
	}
	if token.Value != "else" {
		t.Error(`expected else got`, token.Value)
	}
}

func TestNextTokenMultiple(t *testing.T) {
	lex := NewLexer(`(something AND else) OR (other NOT this)`)
	token := lex.NextToken()
	if token.Type != "PAREN_OPEN" {
		t.Error(`expected PAREN_OPEN got`, token.Type)
	}
	token = lex.NextToken()
	if token.Type != "TERM" {
		t.Error(`expected TERM got`, token.Type)
	}
	token = lex.NextToken()
	if token.Type != "AND" {
		t.Error(`expected AND got`, token.Type)
	}
	token = lex.NextToken()
	if token.Type != "TERM" {
		t.Error(`expected TERM got`, token.Type)
	}
	token = lex.NextToken()
	if token.Type != "PAREN_CLOSE" {
		t.Error(`expected PAREN_CLOSE got`, token.Type)
	}
	token = lex.NextToken()
	if token.Type != "OR" {
		t.Error(`expected OR got`, token.Type)
	}
	token = lex.NextToken()
	if token.Type != "PAREN_OPEN" {
		t.Error(`expected PAREN_OPEN got`, token.Type)
	}
	token = lex.NextToken()
	if token.Type != "TERM" {
		t.Error(`expected TERM got`, token.Type)
	}
	token = lex.NextToken()
	if token.Type != "NOT" {
		t.Error(`expected NOT got`, token.Type)
	}
	token = lex.NextToken()
	if token.Type != "TERM" {
		t.Error(`expected TERM got`, token.Type)
	}
	token = lex.NextToken()
	if token.Type != "PAREN_CLOSE" {
		t.Error(`expected PAREN_CLOSE got`, token.Type)
	}
}

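// Tokens should drain the whole input in one call; the query below lexes to
// 13 tokens: 4 parentheses, 4 terms, 4 operators and 1 regex term.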
func TestTokens(t *testing.T) {
	lex := NewLexer(`(something AND else) OR (other NOT this) AND /[cb]at/`)
	tokens := lex.Tokens()
	if len(tokens) != 13 {
		t.Error("expected 13 tokens got", len(tokens))
	}
}