tokeniser.tests.js

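// Tests for the styles parser tokeniser (lib/styles/parser/tokeniser):
// each input string should be tokenised into the expected sequence of tokens.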
var hamjest = require("hamjest");
var assertThat = hamjest.assertThat;
var contains = hamjest.contains;
var hasProperties = hamjest.hasProperties;

var tokenise = require("../../../lib/styles/parser/tokeniser").tokenise;

var test = require("../../test")(module);

test("unknown tokens are tokenised", function() {
    assertTokens("~", [isToken("unrecognisedCharacter", "~")]);
});

test("empty string is tokenised to end of file token", function() {
    assertTokens("", []);
});

test("whitespace is tokenised", function() {
    assertTokens(" \t\t ", [isToken("whitespace")]);
});

test("identifiers are tokenised", function() {
    assertTokens("Overture", [isToken("identifier", "Overture")]);
});

test("integers are tokenised", function() {
    assertTokens("123", [isToken("integer", "123")]);
});

test("strings are tokenised", function() {
    assertTokens("'Tristan'", [isToken("string", "Tristan")]);
});

test("unterminated strings are tokenised", function() {
    assertTokens("'Tristan", [isToken("unterminated-string", "Tristan")]);
});

test("arrows are tokenised", function() {
    assertTokens("=>", [isToken("arrow")]);
});

test("classes are tokenised", function() {
    assertTokens(".overture", [isToken("dot"), isToken("identifier", "overture")]);
});

test("colons are tokenised", function() {
    assertTokens("::", [isToken("colon"), isToken("colon")]);
});

test("greater thans are tokenised", function() {
    assertTokens(">>", [isToken("gt"), isToken("gt")]);
});

test("equals are tokenised", function() {
    assertTokens("==", [isToken("equals"), isToken("equals")]);
});

test("startsWith symbols are tokenised", function() {
    assertTokens("^=^=", [isToken("startsWith"), isToken("startsWith")]);
});

test("open parens are tokenised", function() {
    assertTokens("((", [isToken("open-paren"), isToken("open-paren")]);
});

test("close parens are tokenised", function() {
    assertTokens("))", [isToken("close-paren"), isToken("close-paren")]);
});

test("open square brackets are tokenised", function() {
    assertTokens("[[", [isToken("open-square-bracket"), isToken("open-square-bracket")]);
});

test("close square brackets are tokenised", function() {
    assertTokens("]]", [isToken("close-square-bracket"), isToken("close-square-bracket")]);
});

test("choices are tokenised", function() {
    assertTokens("||", [isToken("choice"), isToken("choice")]);
});

test("can tokenise multiple tokens", function() {
    assertTokens("The Magic Position", [
        isToken("identifier", "The"),
        isToken("whitespace"),
        isToken("identifier", "Magic"),
        isToken("whitespace"),
        isToken("identifier", "Position")
    ]);
});
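
// Tokenises `input` and asserts that the result matches `expectedTokens`
// in order, followed by the "end" token that terminates every token stream.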
function assertTokens(input, expectedTokens) {
    assertThat(
        tokenise(input),
        contains.apply(null, expectedTokens.concat([isToken("end", null)]))
    );
}
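
// Builds a hamjest matcher for a token with the given name and value.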
function isToken(tokenType, value) {
    return hasProperties({
        name: tokenType,
        value: value
    });
}