From 0ac0b7d07020723ef5595817c96ccb08c4ff8cd4 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Lukas=20Backstr=C3=B6m?=
Date: Tue, 28 Jan 2025 15:11:32 +0100
Subject: [PATCH] Add tests for list and binary comprehensions

---
 lib/makeup/lexers/erlang_lexer.ex   |  5 +-
 .../erlang_lexer_tokenizer_test.exs | 52 +++++++++++++++++++
 2 files changed, 56 insertions(+), 1 deletion(-)

diff --git a/lib/makeup/lexers/erlang_lexer.ex b/lib/makeup/lexers/erlang_lexer.ex
index 1cfba55..c3e481e 100644
--- a/lib/makeup/lexers/erlang_lexer.ex
+++ b/lib/makeup/lexers/erlang_lexer.ex
@@ -219,7 +219,10 @@ defmodule Makeup.Lexers.ErlangLexer do
   tuple = many_surrounded_by(parsec(:root_element), "{", "}")
 
   syntax_operators =
-    word_from_list(~W[+ - +? ++ = == -- * / < > /= =:= =/= =< >= ==? <- ! ? ?!], :operator)
+    word_from_list(
+      ~W[+ - +? ++ = == -- * / < > /= =:= =/= =< >= ==? <- <= ! ? ?!],
+      :operator
+    )
 
   record = token(string("#"), :operator)
 
diff --git a/test/makeup/erlang_lexer/erlang_lexer_tokenizer_test.exs b/test/makeup/erlang_lexer/erlang_lexer_tokenizer_test.exs
index 3749609..f8918d6 100644
--- a/test/makeup/erlang_lexer/erlang_lexer_tokenizer_test.exs
+++ b/test/makeup/erlang_lexer/erlang_lexer_tokenizer_test.exs
@@ -231,6 +231,57 @@ defmodule ErlangLexerTokenizer do
     end
   end
 
+  describe "comprehensions" do
+    test "list" do
+      assert lex("[A||A<-B]") == [
+               {:punctuation, %{group_id: "group-1"}, "["},
+               {:name, %{}, "A"},
+               {:punctuation, %{}, "||"},
+               {:name, %{}, "A"},
+               {:operator, %{}, "<-"},
+               {:name, %{}, "B"},
+               {:punctuation, %{group_id: "group-1"}, "]"}
+             ]
+
+      assert lex("[A||A<-B,true]") ==
+               [
+                 {:punctuation, %{group_id: "group-1"}, "["},
+                 {:name, %{}, "A"},
+                 {:punctuation, %{}, "||"},
+                 {:name, %{}, "A"},
+                 {:operator, %{}, "<-"},
+                 {:name, %{}, "B"},
+                 {:punctuation, %{}, ","},
+                 {:string_symbol, %{}, "true"},
+                 {:punctuation, %{group_id: "group-1"}, "]"}
+               ]
+    end
+
+    test "binary" do
+      assert lex("[A||A<=B]") == [
+               {:punctuation, %{group_id: "group-1"}, "["},
+               {:name, %{}, "A"},
+               {:punctuation, %{}, "||"},
+               {:name, %{}, "A"},
+               {:operator, %{}, "<="},
+               {:name, %{}, "B"},
+               {:punctuation, %{group_id: "group-1"}, "]"}
+             ]
+
+      assert lex("<<A||A<=B,true>>") == [
+               {:punctuation, %{group_id: "group-1"}, "<<"},
+               {:name, %{}, "A"},
+               {:punctuation, %{}, "||"},
+               {:name, %{}, "A"},
+               {:operator, %{}, "<="},
+               {:name, %{}, "B"},
+               {:punctuation, %{}, ","},
+               {:string_symbol, %{}, "true"},
+               {:punctuation, %{group_id: "group-1"}, ">>"}
+             ]
+    end
+  end
+
   describe "atoms" do
     test "are tokenized as such" do
       assert lex("atom") == [{:string_symbol, %{}, "atom"}]
@@ -312,6 +363,7 @@ defmodule ErlangLexerTokenizer do
       assert lex("=") == [{:operator, %{}, "="}]
       assert lex("!") == [{:operator, %{}, "!"}]
       assert lex("<-") == [{:operator, %{}, "<-"}]
+      assert lex("<=") == [{:operator, %{}, "<="}]
     end
 
     test "word operators are tokenized as operator" do
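
For context on the operator added above: `<-` is the list/map generator arrow in Erlang
comprehensions, while `<=` is the binary-generator arrow, which is why it now needs to be
tokenized as an operator. A minimal illustrative Erlang sketch of the two comprehension
forms the new tests exercise (not part of the patch itself):

    %% List comprehension: the generator uses '<-' to draw elements from a list.
    Doubles = [X * 2 || X <- [1, 2, 3]].
    %% => [2, 4, 6]

    %% Binary comprehension: the generator uses '<=' to draw segments from a bitstring.
    Incremented = << <<(X + 1)>> || <<X>> <= <<1, 2, 3>> >>.
    %% => <<2, 3, 4>>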