Skip to content

Commit

Permalink
Position refactor (#364)
Browse files Browse the repository at this point in the history
While working on indexing, I discovered a rather large performance issue. Finding references will return a large number of results in many documents that aren't open. These results return ranges, and those ranges need to be converted into their LSP forms. Under the current architecture, this would result in needing to open and read every file that has a result. This would cause a great deal of I/O whenever find references was used.

@zachallaun and I talked about this, and came up with the idea of storing the line record along with each position. This is a small amount of data, and would easily enable conversion to LSP. This has knock-on effects of eliminating the document from conversions to LSP, which simplifies the code somewhat. 

---------

Co-authored-by: Zach Allaun <[email protected]>
  • Loading branch information
scohen and zachallaun authored Sep 14, 2023
1 parent 4de2e9a commit 506c3f0
Show file tree
Hide file tree
Showing 52 changed files with 552 additions and 336 deletions.
2 changes: 1 addition & 1 deletion .github/workflows/elixir.yml
Original file line number Diff line number Diff line change
Expand Up @@ -127,7 +127,7 @@ jobs:
uses: actions/cache@v3
with:
path: "priv/plts"
key: lexical-plts-1-${{ env.DEFAULT_OTP }}-${{ env.DEFAULT_ELIXIR }}-${{ steps.set_mix_lock_hash.outputs.mix_lock_hash }}
key: lexical-plts-2-${{ env.DEFAULT_OTP }}-${{ env.DEFAULT_ELIXIR }}-${{ steps.set_mix_lock_hash.outputs.mix_lock_hash }}

# Step: Download project dependencies. If unchanged, uses
# the cached version.
Expand Down
100 changes: 60 additions & 40 deletions apps/common/lib/lexical/ast.ex
Original file line number Diff line number Diff line change
Expand Up @@ -58,6 +58,7 @@ defmodule Lexical.Ast do
alias Lexical.Document
alias Lexical.Document.Edit
alias Lexical.Document.Position
alias Lexical.Document.Range
alias Sourceror.Zipper

require Logger
Expand Down Expand Up @@ -111,7 +112,7 @@ defmodule Lexical.Ast do
# can't deal with some cases like: `alias Foo.Bar, as: AnotherBar`,
# so we need to add a new line to make sure we can get the parent node of the cursor
%{line: line} = position
added_new_line_position = Position.new(line + 1, 1)
added_new_line_position = Position.new(document, line + 1, 1)
fragment = Document.fragment(document, added_new_line_position)

case do_container_cursor_to_quoted(fragment) do
Expand Down Expand Up @@ -196,7 +197,7 @@ defmodule Lexical.Ast do
) ::
[Macro.t()]
def cursor_path(%Document{} = doc, {line, character}) do
cursor_path(doc, Position.new(line, character))
cursor_path(doc, Position.new(doc, line, character))
end

def cursor_path(%Document{} = document, %Position{} = position) do
Expand All @@ -214,10 +215,16 @@ defmodule Lexical.Ast do
end

@doc """
Traverses the given ast until the given position.
Traverses the given ast until the given end position.
"""
def prewalk_until(ast, acc, prewalk_fn, %Position{} = position) do
range = Document.Range.new(Position.new(0, 0), position)
def prewalk_until(
ast,
acc,
prewalk_fn,
%Position{} = start_position,
%Position{} = end_position
) do
range = Range.new(start_position, end_position)

{_, acc} =
ast
Expand All @@ -228,7 +235,7 @@ defmodule Lexical.Ast do
# we will never receive a callback where we match the end block. Adding
# a cursor node will allow us to place cursors after the document has ended
# and things will still work.
zipper = maybe_insert_cursor(zipper, position)
zipper = maybe_insert_cursor(zipper, end_position)

case Zipper.node(zipper) do
{_, _, _} = element ->
Expand Down Expand Up @@ -276,7 +283,7 @@ defmodule Lexical.Ast do
@spec traverse_line(Document.t(), Position.line(), (Zipper.zipper() -> Zipper.zipper())) ::
{:ok, Zipper.zipper()} | {:error, parse_error()}
def traverse_line(%Document{} = document, line_number, fun) when is_integer(line_number) do
range = one_line_range(line_number)
range = one_line_range(document, line_number)
traverse_in(document, range, fun)
end

Expand All @@ -289,7 +296,7 @@ defmodule Lexical.Ast do
{:ok, Zipper.zipper(), acc} | {:error, parse_error()}
when acc: any()
def traverse_line(%Document{} = document, line_number, acc, fun) when is_integer(line_number) do
range = one_line_range(line_number)
range = one_line_range(document, line_number)
traverse_in(document, range, acc, fun)
end

Expand All @@ -298,11 +305,11 @@ defmodule Lexical.Ast do
Returns `{:ok, edits}` if all patches are valid and `:error` otherwise.
"""
@spec patches_to_edits([patch()]) :: {:ok, [Edit.t()]} | :error
def patches_to_edits(patches) do
@spec patches_to_edits(Document.t(), [patch()]) :: {:ok, [Edit.t()]} | :error
def patches_to_edits(%Document{} = document, patches) do
maybe_edits =
Enum.reduce_while(patches, [], fn patch, edits ->
case patch_to_edit(patch) do
case patch_to_edit(document, patch) do
{:ok, edit} -> {:cont, [edit | edits]}
error -> {:halt, error}
end
Expand All @@ -319,9 +326,11 @@ defmodule Lexical.Ast do
Returns `{:ok, edit}` if valid and `:error` otherwise.
"""
@spec patch_to_edit(patch()) :: {:ok, Edit.t()} | :error
def patch_to_edit(%{change: change, range: %{start: start_pos, end: end_pos}}) do
with {:ok, range} <- patch_to_range(start_pos, end_pos) do
@spec patch_to_edit(Document.t(), patch()) :: {:ok, Edit.t()} | :error
def patch_to_edit(%Document{} = document, %{} = patch) do
%{change: change, range: %{start: start_pos, end: end_pos}} = patch

with {:ok, range} <- patch_to_range(document, start_pos, end_pos) do
{:ok, Edit.new(change, range)}
end
end
Expand Down Expand Up @@ -369,36 +378,47 @@ defmodule Lexical.Ast do
If no aliases can be found, the given alias is returned unmodified.
"""
@spec expand_aliases(alias_segments() | module(), Document.t() | Macro.t(), Position.t()) ::
@spec expand_aliases(
alias_segments() | module(),
Document.t(),
Position.t() | {Position.line(), Position.character()}
) ::
{:ok, module()} | :error
def expand_aliases(module, %Document{} = document, %Position{} = position)
def expand_aliases(module_or_segments, %Document{} = document, %Position{} = position) do
with {:ok, quoted} <- fragment(document, position) do
expand_aliases(module_or_segments, document, quoted, position)
end
end

def expand_aliases(module_or_segments, %Document{} = document, {line, column}) do
expand_aliases(module_or_segments, document, Position.new(document, line, column))
end

@spec expand_aliases(alias_segments() | module(), Document.t(), Macro.t(), Position.t()) ::
{:ok, module()} | :error
def expand_aliases(module, %Document{} = document, quoted_document, %Position{} = position)
when is_atom(module) and not is_nil(module) do
module
|> Module.split()
|> Enum.map(&String.to_atom/1)
|> expand_aliases(document, position)
end

def expand_aliases([_first | _rest] = segments, %Document{} = document, %Position{} = position) do
with {:ok, quoted} <- fragment(document, position) do
expand_aliases(segments, quoted, position)
end
|> expand_aliases(document, quoted_document, position)
end

def expand_aliases([first | rest] = segments, quoted_document, %Position{} = position) do
with {:ok, aliases_mapping} <- Aliases.at(quoted_document, position),
def expand_aliases(
[first | rest] = segments,
%Document{} = document,
quoted_document,
%Position{} = position
) do
with {:ok, aliases_mapping} <- Aliases.at(document, quoted_document, position),
{:ok, resolved} <- Map.fetch(aliases_mapping, first) do
{:ok, Module.concat([resolved | rest])}
else
_ -> {:ok, Module.concat(segments)}
end
end

def expand_aliases(segments, doc, {line, column}) do
expand_aliases(segments, doc, Position.new(line, column))
end

def expand_aliases(_, _, empty) do
def expand_aliases(empty, _, _, _) when empty in [nil, []] do
Logger.warning("Aliases are #{inspect(empty)}, can't expand them")
:error
end
Expand Down Expand Up @@ -435,17 +455,17 @@ defmodule Lexical.Ast do
end
end

defp patch_to_range(start_pos, end_pos) do
with {:ok, start_pos} <- patch_to_position(start_pos),
{:ok, end_pos} <- patch_to_position(end_pos) do
{:ok, Document.Range.new(start_pos, end_pos)}
defp patch_to_range(document, start_pos, end_pos) do
with {:ok, start_pos} <- patch_to_position(document, start_pos),
{:ok, end_pos} <- patch_to_position(document, end_pos) do
{:ok, Range.new(start_pos, end_pos)}
end
end

defp patch_to_position(patch_keyword) do
defp patch_to_position(document, patch_keyword) do
with {:ok, line} <- Keyword.fetch(patch_keyword, :line),
{:ok, column} <- Keyword.fetch(patch_keyword, :column) do
{:ok, Document.Position.new(line, column)}
{:ok, Position.new(document, line, column)}
end
end

Expand Down Expand Up @@ -523,10 +543,10 @@ defmodule Lexical.Ast do
line >= position.line and column >= position.character
end

defp one_line_range(line_number) do
start_pos = Document.Position.new(line_number, 1)
end_pos = Document.Position.new(line_number + 1, 0)
Document.Range.new(start_pos, end_pos)
defp one_line_range(%Document{} = document, line_number) do
start_pos = Position.new(document, line_number, 1)
end_pos = Position.new(document, line_number + 1, 0)
Range.new(start_pos, end_pos)
end

defp node_position(node, {line, column}) do
Expand Down
22 changes: 13 additions & 9 deletions apps/common/lib/lexical/ast/aliases.ex
Original file line number Diff line number Diff line change
Expand Up @@ -281,26 +281,30 @@ defmodule Lexical.Ast.Aliases do
May return aliases even in the event of syntax errors.
"""

@spec at(
Document.t() | Macro.t(),
Position.t() | {Position.line(), Position.character()}
) ::
@spec at(Document.t(), Position.t() | {Position.line(), Position.character()}) ::
{:ok, %{Ast.short_alias() => module()}} | {:error, Ast.parse_error()}
def at(%Document{} = doc, {line, character}) do
at(doc, Position.new(line, character))
at(doc, Position.new(doc, line, character))
end

def at(%Document{} = document, %Position{} = position) do
with {:ok, quoted} <- Ast.fragment(document, position) do
at(quoted, position)
at(document, quoted, position)
end
end

def at(quoted_document, %Position{} = position) do
@spec at(Document.t(), Macro.t(), Position.t() | {Position.line(), Position.character()}) ::
{:ok, %{Ast.short_alias() => module()}}
def at(%Document{} = document, quoted_document, {line, character}) do
at(document, quoted_document, Position.new(document, line, character))
end

def at(%Document{} = document, quoted_document, %Position{} = position) do
start_position = Position.new(document, 0, 0)

aliases =
quoted_document
|> Ast.prewalk_until(Reducer.new(), &collect/2, position)
|> Ast.prewalk_until(Reducer.new(), &collect/2, start_position, position)
|> Reducer.aliases()

{:ok, aliases}
Expand Down
2 changes: 1 addition & 1 deletion apps/common/test/lexical/ast/env_test.exs
Original file line number Diff line number Diff line change
Expand Up @@ -17,7 +17,7 @@ defmodule Lexical.Ast.EnvTest do
stripped_text = strip_cursor(text)
document = Document.new("file://foo.ex", stripped_text, 0)

position = Document.Position.new(line, column)
position = Document.Position.new(document, line, column)

{:ok, env} = new(project, document, position)
env
Expand Down
7 changes: 2 additions & 5 deletions apps/proto/lib/lexical/proto/convert.ex
Original file line number Diff line number Diff line change
Expand Up @@ -3,9 +3,7 @@ defmodule Lexical.Proto.Convert do
alias Lexical.Document

def to_lsp(%_{result: result} = response) do
context_document = Document.Container.context_document(result, nil)

case Convertible.to_lsp(result, context_document) do
case Convertible.to_lsp(result) do
{:ok, converted} ->
{:ok, Map.put(response, :result, converted)}

Expand All @@ -15,8 +13,7 @@ defmodule Lexical.Proto.Convert do
end

def to_lsp(other) do
context_document = Document.Container.context_document(other, nil)
Convertible.to_lsp(other, context_document)
Convertible.to_lsp(other)
end

def to_native(%{lsp: request_or_notification} = original_request) do
Expand Down
Loading

0 comments on commit 506c3f0

Please sign in to comment.