diff --git a/Cargo.lock b/Cargo.lock index 2bf794b9c3..474c7bdb08 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -19,10 +19,11 @@ dependencies = [ [[package]] name = "annotate-snippets" -version = "0.8.0" +version = "0.9.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d78ea013094e5ea606b1c05fe35f1dd7ea1eb1ea259908d040b25bd5ec677ee5" +checksum = "c3b9d411ecbaf79885c6df4d75fff75858d5995ff25385657a28af47e82f9c36" dependencies = [ + "unicode-width", "yansi-term", ] @@ -125,13 +126,13 @@ dependencies = [ [[package]] name = "cargo" -version = "0.62.0" -source = "git+https://github.com/rust-lang/cargo?rev=1ef1e0a12723ce9548d7da2b63119de9002bead8#1ef1e0a12723ce9548d7da2b63119de9002bead8" +version = "0.63.0" +source = "git+https://github.com/rust-lang/cargo?rev=3f052d8eed98c6a24f8b332fb2e6e6249d12d8c1#3f052d8eed98c6a24f8b332fb2e6e6249d12d8c1" dependencies = [ "anyhow", "atty", "bytesize", - "cargo-platform 0.1.2 (git+https://github.com/rust-lang/cargo?rev=1ef1e0a12723ce9548d7da2b63119de9002bead8)", + "cargo-platform 0.1.2 (git+https://github.com/rust-lang/cargo?rev=3f052d8eed98c6a24f8b332fb2e6e6249d12d8c1)", "cargo-util", "clap 3.1.2", "crates-io", @@ -150,7 +151,8 @@ dependencies = [ "humantime 2.1.0", "ignore", "im-rc", - "itertools 0.10.1", + "indexmap", + "itertools", "jobserver", "lazy_static", "lazycell", @@ -160,6 +162,7 @@ dependencies = [ "memchr", "opener", "os_info", + "pathdiff", "percent-encoding 2.1.0", "rustc-workspace-hack", "rustfix", @@ -192,15 +195,15 @@ dependencies = [ [[package]] name = "cargo-platform" version = "0.1.2" -source = "git+https://github.com/rust-lang/cargo?rev=1ef1e0a12723ce9548d7da2b63119de9002bead8#1ef1e0a12723ce9548d7da2b63119de9002bead8" +source = "git+https://github.com/rust-lang/cargo?rev=3f052d8eed98c6a24f8b332fb2e6e6249d12d8c1#3f052d8eed98c6a24f8b332fb2e6e6249d12d8c1" dependencies = [ "serde", ] [[package]] name = "cargo-util" -version = "0.1.2" -source = "git+https://github.com/rust-lang/cargo?rev=1ef1e0a12723ce9548d7da2b63119de9002bead8#1ef1e0a12723ce9548d7da2b63119de9002bead8" +version = "0.1.3" +source = "git+https://github.com/rust-lang/cargo?rev=3f052d8eed98c6a24f8b332fb2e6e6249d12d8c1#3f052d8eed98c6a24f8b332fb2e6e6249d12d8c1" dependencies = [ "anyhow", "core-foundation", @@ -240,12 +243,6 @@ dependencies = [ "jobserver", ] -[[package]] -name = "cfg-if" -version = "0.1.10" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4785bdd1c96b2a846b2bd7cc02e86b6b3dbf14e7e53446c4f54c92a361040822" - [[package]] name = "cfg-if" version = "1.0.0" @@ -275,13 +272,28 @@ checksum = "5177fac1ab67102d8989464efd043c6ff44191b1557ec1ddd489b4f7e1447e77" dependencies = [ "atty", "bitflags", + "clap_derive", "indexmap", + "lazy_static", "os_str_bytes", "strsim 0.10.0", "termcolor", "textwrap 0.14.2", ] +[[package]] +name = "clap_derive" +version = "3.1.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "25320346e922cffe59c0bbc5410c8d8784509efb321488971081313cb1e1a33c" +dependencies = [ + "heck 0.4.0", + "proc-macro-error", + "proc-macro2", + "quote", + "syn", +] + [[package]] name = "clippy_lints" version = "0.1.60" @@ -290,7 +302,7 @@ dependencies = [ "cargo_metadata", "clippy_utils", "if_chain", - "itertools 0.10.1", + "itertools", "pulldown-cmark", "quine-mc_cluskey", "regex-syntax", @@ -366,7 +378,7 @@ checksum = "5827cebf4670468b8772dd191856768aedcb1b0278a04f989f7766351917b9dc" [[package]] name = "crates-io" version = "0.34.0" -source = 
"git+https://github.com/rust-lang/cargo?rev=1ef1e0a12723ce9548d7da2b63119de9002bead8#1ef1e0a12723ce9548d7da2b63119de9002bead8" +source = "git+https://github.com/rust-lang/cargo?rev=3f052d8eed98c6a24f8b332fb2e6e6249d12d8c1#3f052d8eed98c6a24f8b332fb2e6e6249d12d8c1" dependencies = [ "anyhow", "curl", @@ -382,7 +394,7 @@ version = "1.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3825b1e8580894917dc4468cb634a1b4e9745fddc854edad72d9c04644c0319f" dependencies = [ - "cfg-if 1.0.0", + "cfg-if", ] [[package]] @@ -391,7 +403,7 @@ version = "0.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "06ed27e177f16d65f0f0c22a213e17c696ace5dd64b14258b52f9417ccb52db4" dependencies = [ - "cfg-if 1.0.0", + "cfg-if", "crossbeam-utils", ] @@ -401,7 +413,7 @@ version = "0.8.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6455c0ca19f0d2fbf751b908d5c55c1f5cbc65e03c4225427254b46890bdde1e" dependencies = [ - "cfg-if 1.0.0", + "cfg-if", "crossbeam-epoch", "crossbeam-utils", ] @@ -412,7 +424,7 @@ version = "0.9.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4ec02e091aa634e2c3ada4a392989e7c3116673ef0ac5b72232439094d73b7fd" dependencies = [ - "cfg-if 1.0.0", + "cfg-if", "crossbeam-utils", "lazy_static", "memoffset", @@ -425,7 +437,7 @@ version = "0.8.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d82cfc11ce7f2c3faef78d8a684447b40d503d9681acebed6cb728d45940c4db" dependencies = [ - "cfg-if 1.0.0", + "cfg-if", "lazy_static", ] @@ -510,14 +522,23 @@ checksum = "524cbf6897b527295dff137cec09ecf3a05f4fddffd7dfcd1585403449e74198" [[package]] name = "dirs" -version = "2.0.2" +version = "4.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "13aea89a5c93364a98e9b37b2fa237effbb694d5cfe01c5b70941f7eb087d5e3" +checksum = "ca3aa72a6f96ea37bbc5aa912f6788242832f75369bdfdadcb0e38423f100059" dependencies = [ - "cfg-if 0.1.10", "dirs-sys", ] +[[package]] +name = "dirs-next" +version = "2.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b98cf8ebf19c3d1b223e151f99a4f9f0690dca41414773390fc824184ac833e1" +dependencies = [ + "cfg-if", + "dirs-sys-next", +] + [[package]] name = "dirs-sys" version = "0.3.6" @@ -529,6 +550,17 @@ dependencies = [ "winapi", ] +[[package]] +name = "dirs-sys-next" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4ebda144c4fe02d1f7ea1a7d9641b6fc6b580adcfa024ae48797ecdeb6825b4d" +dependencies = [ + "libc", + "redox_users", + "winapi", +] + [[package]] name = "either" version = "1.6.1" @@ -548,19 +580,6 @@ dependencies = [ "termcolor", ] -[[package]] -name = "env_logger" -version = "0.8.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a19187fea3ac7e84da7dacf48de0c45d63c6a76f9490dae389aead16c243fce3" -dependencies = [ - "atty", - "humantime 2.1.0", - "log", - "regex", - "termcolor", -] - [[package]] name = "env_logger" version = "0.9.0" @@ -580,7 +599,7 @@ version = "0.2.15" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "975ccf83d8d9d0d84682850a38c8169027be83368805971cc4f238c2b245bc98" dependencies = [ - "cfg-if 1.0.0", + "cfg-if", "libc", "redox_syscall", "winapi", @@ -592,7 +611,7 @@ version = "1.0.22" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1e6988e897c1c9c485f43b47a529cef42fde0547f9d8d41a7062518f1d8fc53f" 
dependencies = [ - "cfg-if 1.0.0", + "cfg-if", "crc32fast", "libc", "libz-sys", @@ -758,7 +777,7 @@ version = "0.1.16" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8fc3cb4d91f53b50155bdcfd23f6a4c39ae1969c2ae85982b135750cccaf5fce" dependencies = [ - "cfg-if 1.0.0", + "cfg-if", "libc", "wasi 0.9.0+wasi-snapshot-preview1", ] @@ -769,7 +788,7 @@ version = "0.2.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7fcd999463524c52659517fe2cea98493cfe485d10565e7b0fb07dbba7ad2753" dependencies = [ - "cfg-if 1.0.0", + "cfg-if", "libc", "wasi 0.10.2+wasi-snapshot-preview1", ] @@ -835,6 +854,12 @@ dependencies = [ "unicode-segmentation", ] +[[package]] +name = "heck" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2540771e65fc8cb83cd6e8a237f70c319bd5c29f78ed1084ba5d50eeac86f7f9" + [[package]] name = "hermit-abi" version = "0.1.19" @@ -956,16 +981,7 @@ version = "0.1.12" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7a5bbe824c507c5da5956355e86a746d82e0e1464f65d862cc5e71da70e94b2c" dependencies = [ - "cfg-if 1.0.0", -] - -[[package]] -name = "itertools" -version = "0.9.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "284f18f85651fe11e8a991b2adb42cb078325c996ed026d994719efcfca1d54b" -dependencies = [ - "either", + "cfg-if", ] [[package]] @@ -1104,11 +1120,11 @@ dependencies = [ [[package]] name = "kstring" -version = "1.0.6" +version = "2.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8b310ccceade8121d7d77fee406160e457c2f4e7c7982d589da3499bc7ea4526" +checksum = "ec3066350882a1cd6d950d055997f379ac37fd39f81cd4d8ed186032eb3c5747" dependencies = [ - "serde", + "static_assertions", ] [[package]] @@ -1194,7 +1210,7 @@ version = "0.4.14" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "51b9bbe6c47d51fc3e1a9b945965946b4c44142ab8792c50835a980d362c2710" dependencies = [ - "cfg-if 1.0.0", + "cfg-if", ] [[package]] @@ -1316,7 +1332,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0c7ae222234c30df141154f159066c5093ff73b63204dcda7121eb082fc56a95" dependencies = [ "bitflags", - "cfg-if 1.0.0", + "cfg-if", "foreign-types", "libc", "once_cell", @@ -1399,7 +1415,7 @@ version = "0.8.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d76e8e1493bcac0d2766c42737f34458f1c8c50c0d23bcb24ea953affb273216" dependencies = [ - "cfg-if 1.0.0", + "cfg-if", "instant", "libc", "redox_syscall", @@ -1407,6 +1423,12 @@ dependencies = [ "winapi", ] +[[package]] +name = "pathdiff" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8835116a5c179084a830efb3adc117ab007512b535bc1a21c991d3b32a6b44dd" + [[package]] name = "percent-encoding" version = "1.0.1" @@ -1529,8 +1551,6 @@ dependencies = [ [[package]] name = "racer" version = "2.2.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "64954e44fc0d1dcc64e0b9f2b155249ad62849eba25354b76ae1598d1e8f0fa0" dependencies = [ "bitflags", "clap 2.33.3", @@ -1538,10 +1558,28 @@ dependencies = [ "env_logger 0.7.1", "humantime 2.1.0", "lazy_static", + "lazycell", "log", + "racer-cargo-metadata", "rls-span", ] +[[package]] +name = "racer-cargo-metadata" +version = "0.1.2" +dependencies = [ + "racer-interner", + "serde", + "serde_json", +] + +[[package]] +name = "racer-interner" +version = "0.1.0" 
+dependencies = [ + "serde", +] + [[package]] name = "rand" version = "0.7.3" @@ -1721,9 +1759,9 @@ dependencies = [ "difference", "env_logger 0.9.0", "futures 0.3.18", - "heck", + "heck 0.3.3", "home", - "itertools 0.10.1", + "itertools", "jsonrpc-core", "lazy_static", "log", @@ -1764,7 +1802,7 @@ version = "0.18.3" dependencies = [ "derive-new", "fst", - "itertools 0.10.1", + "itertools", "json", "log", "rls-data", @@ -1871,7 +1909,7 @@ dependencies = [ [[package]] name = "rustfmt-config_proc_macro" version = "0.2.0" -source = "git+https://github.com/rust-lang/rustfmt?rev=5056f4cfb311a084420f1828cd58af94d143f5e0#5056f4cfb311a084420f1828cd58af94d143f5e0" +source = "git+https://github.com/rust-lang/rustfmt?rev=8a4c05865be17bac75b8d53eae5be18d749a0f5c#8a4c05865be17bac75b8d53eae5be18d749a0f5c" dependencies = [ "proc-macro2", "quote", @@ -1881,19 +1919,20 @@ dependencies = [ [[package]] name = "rustfmt-nightly" version = "1.4.38" -source = "git+https://github.com/rust-lang/rustfmt?rev=5056f4cfb311a084420f1828cd58af94d143f5e0#5056f4cfb311a084420f1828cd58af94d143f5e0" +source = "git+https://github.com/rust-lang/rustfmt?rev=8a4c05865be17bac75b8d53eae5be18d749a0f5c#8a4c05865be17bac75b8d53eae5be18d749a0f5c" dependencies = [ "annotate-snippets", "anyhow", "bytecount", "cargo_metadata", + "clap 3.1.2", "derive-new", "diff", "dirs", - "env_logger 0.8.4", + "env_logger 0.9.0", "getopts", "ignore", - "itertools 0.9.0", + "itertools", "lazy_static", "log", "regex", @@ -1901,7 +1940,6 @@ dependencies = [ "rustfmt-config_proc_macro", "serde", "serde_json", - "structopt", "term", "thiserror", "toml", @@ -1910,6 +1948,12 @@ dependencies = [ "unicode_categories", ] +[[package]] +name = "rustversion" +version = "1.0.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f2cc38e8fa666e2de3c4aba7edeb5ffc5246c1c2ed0e3d17e560aeeba736b23f" + [[package]] name = "ryu" version = "1.0.5" @@ -2066,6 +2110,12 @@ dependencies = [ "winapi", ] +[[package]] +name = "static_assertions" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a2eb9349b6444b326872e140eb1cf5e7c522154d69e7a0ffb0fb81c06b37543f" + [[package]] name = "strip-ansi-escapes" version = "0.1.1" @@ -2087,30 +2137,6 @@ version = "0.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "73473c0e59e6d5812c5dfe2a064a6444949f089e20eec9a2e5506596494e4623" -[[package]] -name = "structopt" -version = "0.3.25" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "40b9788f4202aa75c240ecc9c15c65185e6a39ccdeb0fd5d008b98825464c87c" -dependencies = [ - "clap 2.33.3", - "lazy_static", - "structopt-derive", -] - -[[package]] -name = "structopt-derive" -version = "0.4.18" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dcb5ae327f9cc13b68763b5749770cb9e048a99bd9dfdfa58d0cf05d5f64afe0" -dependencies = [ - "heck", - "proc-macro-error", - "proc-macro2", - "quote", - "syn", -] - [[package]] name = "syn" version = "1.0.81" @@ -2138,7 +2164,7 @@ version = "3.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "dac1c663cfc93810f88aed9b8941d48cabf856a1b111c29a40439018d870eb22" dependencies = [ - "cfg-if 1.0.0", + "cfg-if", "libc", "rand 0.8.4", "redox_syscall", @@ -2148,11 +2174,12 @@ dependencies = [ [[package]] name = "term" -version = "0.6.1" +version = "0.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"c0863a3345e70f61d613eab32ee046ccd1bcc5f9105fe402c61fcd0c13eeb8b5" +checksum = "c59df8ac95d96ff9bede18eb7300b0fda5e5d8d90960e76f8e14ae765eedbf1f" dependencies = [ - "dirs", + "dirs-next", + "rustversion", "winapi", ] @@ -2278,13 +2305,13 @@ dependencies = [ [[package]] name = "toml_edit" -version = "0.13.4" +version = "0.14.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "744e9ed5b352340aa47ce033716991b5589e23781acb97cad37d4ea70560f55b" +checksum = "5376256e44f2443f8896ac012507c19a012df0fe8758b55246ae51a2279db51f" dependencies = [ "combine", "indexmap", - "itertools 0.10.1", + "itertools", "kstring", "serde", ] @@ -2339,9 +2366,9 @@ checksum = "58dd944fd05f2f0b5c674917aea8a4df6af84f2d8de3fe8d988b95d28fb8fb09" [[package]] name = "unicode-segmentation" -version = "1.8.0" +version = "1.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8895849a949e7845e06bd6dc1aa51731a103c42707010a5b591c0038fb73385b" +checksum = "7e8820f5d777f6224dc4be3632222971ac30164d4a258d595640799554ebfd99" [[package]] name = "unicode-width" diff --git a/Cargo.toml b/Cargo.toml index 3300063c91..e8aa9be54c 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -31,8 +31,8 @@ rls-vfs = "0.8" rls-ipc = { version = "0.1.0", path = "rls-ipc", optional = true } anyhow = "1.0.26" -cargo = { git = "https://github.com/rust-lang/cargo", rev = "1ef1e0a12723ce9548d7da2b63119de9002bead8" } -cargo-util = { git = "https://github.com/rust-lang/cargo", rev = "1ef1e0a12723ce9548d7da2b63119de9002bead8" } +cargo = { git = "https://github.com/rust-lang/cargo", rev = "3f052d8eed98c6a24f8b332fb2e6e6249d12d8c1" } +cargo-util = { git = "https://github.com/rust-lang/cargo", rev = "3f052d8eed98c6a24f8b332fb2e6e6249d12d8c1" } cargo_metadata = "0.14" clippy_lints = { git = "https://github.com/rust-lang/rust-clippy", version = "0.1.60", optional = true } env_logger = "0.9" @@ -43,11 +43,11 @@ lsp-types = { version = "0.60", features = ["proposed"] } lazy_static = "1" log = "0.4" num_cpus = "1" -racer = { version = "2.2", default-features = false } +racer = { path = "racer" } rand = "0.8" rayon = "1" rustc_tools_util = "0.2" -rustfmt-nightly = { git = "https://github.com/rust-lang/rustfmt", rev = "5056f4cfb311a084420f1828cd58af94d143f5e0" } +rustfmt-nightly = { git = "https://github.com/rust-lang/rustfmt", rev = "8a4c05865be17bac75b8d53eae5be18d749a0f5c" } serde = "1.0" serde_json = "1.0" serde_derive = "1.0" @@ -58,7 +58,7 @@ regex = "1" ordslice = "0.3" crossbeam-channel = "0.5" toml = "0.5" -toml_edit = { version = "0.13.4", features = ["easy"] } +toml_edit = { version = "0.14.3", features = ["easy"] } heck = "0.3" # A noop dependency that changes in the Rust repository, it's a bit of a hack. 
diff --git a/racer/.github/workflows/ci.yml b/racer/.github/workflows/ci.yml new file mode 100644 index 0000000000..237ca7054e --- /dev/null +++ b/racer/.github/workflows/ci.yml @@ -0,0 +1,49 @@ +name: CI + +on: + pull_request: + types: [opened, synchronize, reopened] + push: + branches: + - master + - '*' + schedule: + - cron: '0 0 * * *' # Nightly at 00:00 UTC + +jobs: + build_and_test: + strategy: + fail-fast: false + matrix: + toolchain: + - x86_64-unknown-linux-gnu + - x86_64-apple-darwin + - x86_64-pc-windows-msvc + - i686-pc-windows-msvc + include: + - toolchain: x86_64-unknown-linux-gnu + builder: ubuntu-latest + os: linux + - toolchain: x86_64-apple-darwin + builder: macos-latest + os: macos + - toolchain: x86_64-pc-windows-msvc + builder: windows-latest + os: windows + - toolchain: i686-pc-windows-msvc + builder: windows-latest + os: windows + + name: nightly - ${{ matrix.toolchain }} + runs-on: ${{ matrix.builder }} + + steps: + - uses: actions/checkout@v2 + - name: Use latest nightly on scheduled builds + if: github.event_name == 'schedule' + run: echo "nightly" > rust-toolchain + - run: rustup set default-host ${{ matrix.toolchain }} + - run: rustup component add rust-src + - run: rustc -vV + - run: cargo build --verbose --all + - run: cargo test --all diff --git a/racer/.gitignore b/racer/.gitignore new file mode 100644 index 0000000000..a60e7f64d9 --- /dev/null +++ b/racer/.gitignore @@ -0,0 +1,9 @@ +\#* +src/scopes +!.travis.yml +*tmpfile* +*.racertmp +target/ +*.py[cod] +.vscode/** +*.log \ No newline at end of file diff --git a/racer/.rustfmt.toml b/racer/.rustfmt.toml new file mode 100644 index 0000000000..e69de29bb2 diff --git a/racer/CHANGELOG.md b/racer/CHANGELOG.md new file mode 100644 index 0000000000..91d2f91555 --- /dev/null +++ b/racer/CHANGELOG.md @@ -0,0 +1,235 @@ +Change Log +========== + +All notable changes to this project will be documented in this file. This +project adheres to [Semantic Versioning](https://semver.org/). + +# 2.1.37 +- Bump rustc-ap-* version to 677.0 +- Account for new standard library source directory layout + +# 2.1.37 +- Bump rustc-ap-* version to 671.0 + +# 2.1.36 +- Bump rustc-ap-* version to 669.0 + +# 2.1.35 +- Bump rustc-ap-* version to 664.0 + +# 2.1.34 +- Bump rustc-ap-* version to 659.0 +- Fix submodule search (#1107) + +# 2.1.33 +- Bump rustc-ap-* version to 654.0 + +# 2.1.32 +- Bump rustc-ap-* version to 651.0 + +# 2.1.31 +- Bump rustc-ap-* version to 642.0 + +# 2.1.30 +- Support for union(#1086) + +# 2.1.29 +- Support async/await syntax(#1083, #1085) + +# 2.1.28 +- Update the version of rustc-ap-syntax + +# 2.1.27 +- Update the version of rustc-ap-syntax + +# 2.1.26 +- Update the version of rustc-ap-syntax + +# 2.1.25 +- Update the version of rustc-ap-syntax + +# 2.1.24 +- Rust 2018 (#1051) +- Update the version of rustc-ap-syntax + +# 2.1.22 +- Fix completion for `super::super::...`(#1053) + +# 2.1.20, 2.1.21 +- Fix completion in testdir for Rust 2018(#1022) +- Fix enum variant completion for pub(crate) enum(#1025) + +# 2.1.18, 2.1.19 +- Update rustc-ap-syntax + +# 2.1.17, 2.1.18 +- Fix doc comment parsing(#1010) + +# 2.1.15. 
2.1.16 +- Handle CRLF correctly(#1007) + +# 2.1.14 +- Completion for binary operation(#976) + +# 2.1.10, 2.1.11, 2.1.12, 2.1.13 +- Completion for impl trait(#985, #986) +- Completion for use as(#988) + +# 2.1.8, 2.1.9 +- Completion for trait objects(#972) +- Completion for simple closure return types(#973) + +# 2.1.7 +- Lots of refactoring(#961, #963, #965) +- Add `is_use_statement` for RLS(#965) + +# 2.1.6 +- Completion based on impl #948 +- Fix for argument completion #943 +- Trait bound in where clause #937 + +# 2.1.5 +- migrate to cargo metadata #930 + +# 2.1.3 +- Make cargo optional for RLS #910 + +## 2.1.2 +- Fix bug around getting `use` context #906 +- Update rustc-ap-syntax to fix build in current nightly #911 + +## 2.1.1 +- Fix coordinate bug +- Get doc string for macro #905 + +## 2.1.0 +- Support completions for stdlib macros #902 +- Support extern "~" block #895 +- Support `crate_in_paths` #891 +- Fix bug of getting completion context from `use` statement #886 +- Handle const unsafe fn #879 +- Limit recursion depth through glob imports #875 +- Enable completion based on trait bound for function args #871 +- Fix bug in search_closure_args #862 +- Replace cargo.rs with cargo crate #855 +- Migrate over to rustc_ap_syntax #854 +- Make RUST_SRC_PATH optional #808 +- Refactor based on clippy #860 + +## 2.0.14 +- Cache generic impls #839 +- Cache parsed TOML file and cargo crate roots #838 +- Skip `pub` keyword as a temporary fix for #624 #850 +- Remove complex generic type by impl trait #848 +- Fix bug for array expression #841 +- Support completion for enum variants without type annotation #825 +- Fix bug for raw string #822 + +## 2.0.13 +- Fix bug for finding the start of match statement #819 + +## 2.0.12 +- Fix bug that broke completions in previous release #807 + +## 2.0.11 + +- Use `rustup` to find libstd path even when used as library #799 + +## 2.0.10 + +- Support resolving `use as` aliases declared in multi-element `use` statements #753 +- Provide suggestions for global paths in more cases #765 +- Suggestions imported via `use as` statements now return their in-scope alias as the match string #767 +- Add new commands for converting between points and coordinates in files #776 +- Return fewer duplicate suggestions #778 +- Handle cases where mod names and trait methods collide, such as `fmt` #781 + +## 2.0.9 + +- Support completion after using try operator `?` #726 +- Find methods on cooked string literals #728 +- Fix bug caused by closure completions feature #734 +- Find static methods on enums #737 +- Find doc comments on named and indexed struct fields #739 +- Find `pub(restricted)` items #748 + +## 2.0.8 + +- Fix bug finding definitions where impl contains bang #717 +- Find definition for closures #697 +- Resolve types for tuple struct fields #722 +- Resolve types for let patterns #724 +- Fix completions for reference fields #723 + +## 2.0.7 + +- Fix panic with macros called `impl*` #701 +- Relax semver specs + +## 2.0.6 + +- resolve Self (e.g. 
in-impl function calls like Self::myfunction()) +- Fix stack overflow issue on unresolvable imports :tada: #698 + +## 2.0.5 + +- Chained completions on separate lines now work #686 + +## 2.0.4 + +- Fix for find-doc not always returning full doc string #675 + +## 2.0.3 + +- Fix for recursion in certain `use foo::{self, ..}` cases #669 + +## 2.0.2 + +- Internal fixes so we can publish on crates.io + +## 2.0.1 + +- Syntex 0.52 #643 + +- Fix `racer --help` bug from 2.0 refactor #662 + +- Support short revision identifiers for git checkout paths #664 + +- Handle self resolution when using `use mymod::{self, Thing}` #665 + +- Fix type alias resolution #666 + +## 2.0 + +- Rework public API to hide many implementation details and allow the project to + move forward without breaking changes. + +- Many fixes that didn't make it into the changelog, but we're going to work on + that in the future! + +## 1.2 + +- Added basic 'daemon' mode, racer process can be kept running between + invocations + +- now uses clap to parse command line options + +- Adds caching of file source and code indices + +- Adds an alternative 'tabbed' mode where inputs and outputs can be tab + separated for easier parsing + +- emacs and vim support split out into their own git projects [emacs-racer] and + [vim-racer], respectively. + +- Fix issue resolving some `std::*` modules in latest rust source: (rust std lib + implicitly imports core with `#![no_std]`) + +- Searches multirust overrides when locating cargo src directories + +## 1.0.0 2015-07-29 + +- First release + +[vim-racer]: https://github.com/racer-rust/vim-racer +[emacs-racer]: https://github.com/racer-rust/emacs-racer diff --git a/racer/Cargo.toml b/racer/Cargo.toml new file mode 100644 index 0000000000..d30c840d30 --- /dev/null +++ b/racer/Cargo.toml @@ -0,0 +1,43 @@ +[package] +name = "racer" +version = "2.2.2" +license = "MIT" +description = "Code completion for Rust" +authors = ["Phil Dawes ", "The Racer developers"] +homepage = "https://github.com/racer-rust/racer" +repository = "https://github.com/racer-rust/racer" +edition = "2018" + +[lib] +name = "racer" +path = "src/racer/lib.rs" + +[profile.release] +debug = false # because of #1005 + +[dependencies] +bitflags = "1.0" +log = "0.4" +env_logger = "0.7.1" +clap = "2.32" +lazy_static = "1.2" +humantime = "2.0.0" +derive_more = "0.99.2" +rls-span = "0.5.1" +lazycell = { version = "1.2", optional = true } + +[dependencies.racer-cargo-metadata] +version = "0.1" +optional = true +path = "metadata" + +[features] +default = ["metadata"] +metadata = ["lazycell", "racer-cargo-metadata"] + +[workspace] +members = ["interner", "metadata"] + +[package.metadata.rust-analyzer] +# This package uses #[feature(rustc_private)] +rustc_private = true diff --git a/racer/LICENSE-MIT b/racer/LICENSE-MIT new file mode 100644 index 0000000000..e65102f461 --- /dev/null +++ b/racer/LICENSE-MIT @@ -0,0 +1,25 @@ +Copyright (c) 2014 Phil Dawes + +Permission is hereby granted, free of charge, to any +person obtaining a copy of this software and associated +documentation files (the "Software"), to deal in the +Software without restriction, including without +limitation the rights to use, copy, modify, merge, +publish, distribute, sublicense, and/or sell copies of +the Software, and to permit persons to whom the Software +is furnished to do so, subject to the following +conditions: + +The above copyright notice and this permission notice +shall be included in all copies or substantial portions +of the Software. 
+ +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF +ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED +TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A +PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT +SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY +CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR +IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER +DEALINGS IN THE SOFTWARE. diff --git a/racer/README.md b/racer/README.md new file mode 100644 index 0000000000..6f29da2ee1 --- /dev/null +++ b/racer/README.md @@ -0,0 +1,137 @@ +# *Racer* - code completion for [Rust](https://www.rust-lang.org/) + +[![Build Status](https://github.com/racer-rust/racer/workflows/CI/badge.svg?branch=master)](https://github.com/racer-rust/racer/actions?query=workflow%3ACI+branch%3Amaster) + + +![racer completion screenshot](images/racer_completion.png) + +![racer eldoc screenshot](images/racer_eldoc.png) + +*RACER* = *R*ust *A*uto-*C*omplete-*er*. A utility intended to provide Rust code completion for editors and IDEs. Maybe one day the 'er' bit will be exploring + refactoring or something. + +## **DISCLAIMER** +Racer is **not** actively developped now. +Please consider using newer software such as +[rust-analyzer](https://rust-analyzer.github.io/). + +## Installation + +**NOTE** +From 2.1, racer needs **nightly rust** + +### Requirements + +#### Current nightly Rust + +If you're using rustup, run +``` +rustup toolchain install nightly +rustup component add rustc-dev --toolchain=nightly +``` + +_Note: The second command adds the `rustc-dev` component to the nightly +toolchain, which is necessary to compile Racer._ + +#### Cargo +Internally, racer calls cargo as a CLI tool, so please make sure cargo is installed + +### With `cargo install` + +Simply run: + +```cargo +nightly install racer``` + +As mentioned in the command output, don't forget to add the installation directory to your `PATH`. + +### From sources + +1. Clone the repository: ```git clone https://github.com/racer-rust/racer.git``` + +2. ```cd racer; cargo +nightly build --release```. The binary will now be in ```./target/release/racer``` + +3. Add the binary to your `PATH`. This can be done by moving it to a directory already in your `PATH` (i.e. `/usr/local/bin`) or by adding the `./target/release/` directory to your `PATH` + +## Configuration + +1. Fetch the Rust sourcecode + + 1. automatically via [rustup](https://www.rustup.rs/) and run `rustup component add rust-src` in order to install the source to `$(rustc --print sysroot)/lib/rustlib/src/rust/library` (or `$(rustc --print sysroot)/lib/rustlib/src/rust/src` in older toolchains). Rustup will keep the sources in sync with the toolchain if you run `rustup update`. + + 2. manually from git: https://github.com/rust-lang/rust + + **Note** + + If you want to use `racer` with multiple release channels (Rust has 3 release channels: `stable`, `beta` and `nightly`), you have to also download Rust source code for each release channel you install. + + e.g. (rustup case) Add a nightly toolchain build and install nightly sources too + + `rustup toolchain add nightly` + + `rustup component add rust-src` + +2. (Optional) Set `RUST_SRC_PATH` environment variable to point to the 'src' dir in the Rust source installation + e.g. 
`% export RUST_SRC_PATH=$(rustc --print sysroot)/lib/rustlib/src/rust/library` or `% export RUST_SRC_PATH="$(rustc --print sysroot)/lib/rustlib/src/rust/src"` (older) + + It's recommended to set `RUST_SRC_PATH` for speed up, but racer detects it automatically if you don't set it. + +3. Test on the command line: + + `racer complete std::io::B ` (should show some completions) + +**Note** + +To complete names in external crates, Racer needs `Cargo.lock`. +So, when you add a dependency in your `Cargo.toml`, you have to run a build command +such as `cargo build` or `cargo test`, to get completions. + +## Editors/IDEs Supported + +### RLS + +Racer is used as a static library in [RLS](https://github.com/rust-lang-nursery/rls) + +### Eclipse integration + +Racer can be used with Eclipse through the use of [RustDT](https://github.com/RustDT/RustDT). (User guide is [linked](https://rustdt.github.io/) in repo description) + +### Emacs integration + +Emacs integration has been moved to a separate project: [emacs-racer](https://github.com/racer-rust/emacs-racer). + +### Gedit integration + +Gedit integration can be found [here](https://github.com/isamert/gracer). + +### Builder integration + +Gnome Builder integration can be found [here](https://github.com/deikatsuo/bracer) + +### Kate integration + +The Kate community maintains a [plugin](https://cgit.kde.org/kate.git/tree/addons/rustcompletion). It is bundled with recent releases of Kate (tested with 16.08 - read more [here](https://blogs.kde.org/2015/05/22/updates-kates-rust-plugin-syntax-highlighting-and-rust-source-mime-type)). + +1. Enable 'Rust code completion' in the plugin list in the Kate config dialog; + +2. On the new 'Rust code completion' dialog page, make sure 'Racer command' and 'Rust source tree location' are set correctly. + +### Sublime Text integration + +The Sublime Text community maintains some packages that integrates Racer +* [RustAutoComplete](https://github.com/defuz/RustAutoComplete) that offers auto completion and goto definition. +* [AnacondaRUST](https://github.com/DamnWidget/anaconda_rust) from the [anaconda](https://github.com/DamnWidget/anaconda) plugins family that offers auto completion, goto definition and show documentation + +### Vim integration + +Vim integration has been moved to a separate project: [vim-racer](https://github.com/racer-rust/vim-racer). + +### Visual Studio Code extension + +Racer recommends the official [`Rust (rls)` extension](https://github.com/rust-lang-nursery/rls-vscode) based on RLS, which uses Racer for completion. + +### Atom integration + +You can find the racer package for Atom [here](https://atom.io/packages/autocomplete-racer) + +### Kakoune integration + +[Kakoune](https://github.com/mawww/kakoune) comes with a builtin integration for racer auto completion. 
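Editor's note (not part of the patch): the installation and configuration steps in the README above reduce to a short command sequence. The sketch below is illustrative only and assumes a rustup-managed nightly toolchain; the `RUST_SRC_PATH` suffix differs between newer (`.../rust/library`) and older (`.../rust/src`) source layouts, as the README notes.

```sh
# Minimal end-to-end check for a racer install (assumes rustup + nightly).
rustup toolchain install nightly
rustup component add rust-src rustc-dev --toolchain nightly
cargo +nightly install racer

# Optional: point racer at the stdlib sources explicitly
# (newer layout shown; older toolchains use .../rust/src).
export RUST_SRC_PATH="$(rustc --print sysroot)/lib/rustlib/src/rust/library"

# Should print completions such as BufReader and BufWriter.
racer complete std::io::B
```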
diff --git a/racer/fixtures/.cargo/config b/racer/fixtures/.cargo/config new file mode 100644 index 0000000000..af8e2f86b2 --- /dev/null +++ b/racer/fixtures/.cargo/config @@ -0,0 +1 @@ +paths = ["./arst"] diff --git a/racer/fixtures/arst/Cargo.toml b/racer/fixtures/arst/Cargo.toml new file mode 100644 index 0000000000..8ec24288fd --- /dev/null +++ b/racer/fixtures/arst/Cargo.toml @@ -0,0 +1,6 @@ +[package] +name = "arst" +version = "0.1.0" +authors = ["Joe Wilm "] + +[dependencies] diff --git a/racer/fixtures/arst/src/lib.rs b/racer/fixtures/arst/src/lib.rs new file mode 100644 index 0000000000..cdfbe1aa56 --- /dev/null +++ b/racer/fixtures/arst/src/lib.rs @@ -0,0 +1,6 @@ +#[cfg(test)] +mod tests { + #[test] + fn it_works() { + } +} diff --git a/racer/fixtures/arst/src/submodule/mod.rs b/racer/fixtures/arst/src/submodule/mod.rs new file mode 100644 index 0000000000..dd08d4e4cc --- /dev/null +++ b/racer/fixtures/arst/src/submodule/mod.rs @@ -0,0 +1,3 @@ +pub fn hello_submodule() { + println!("Hello from submodule."); +} \ No newline at end of file diff --git a/racer/interner/Cargo.toml b/racer/interner/Cargo.toml new file mode 100644 index 0000000000..92a830bc7e --- /dev/null +++ b/racer/interner/Cargo.toml @@ -0,0 +1,13 @@ +[package] +name = "racer-interner" +version = "0.1.0" +authors = ["Yuji Kanagawa "] +license = "MIT" +description = "thread-local string interner for racer-rust" +homepage = "https://github.com/racer-rust/racer" +repository = "https://github.com/racer-rust/racer" +edition = "2018" +workspace = ".." + +[dependencies] +serde = "1.0" diff --git a/racer/interner/src/lib.rs b/racer/interner/src/lib.rs new file mode 100644 index 0000000000..82994526e1 --- /dev/null +++ b/racer/interner/src/lib.rs @@ -0,0 +1,100 @@ +//! string interner +//! same as cargo::core::interning.rs, but thread local and Deserializable + +use serde::{de::Visitor, Deserialize, Deserializer, Serialize, Serializer}; + +use std::cell::RefCell; +use std::collections::HashSet; +use std::error::Error; +use std::fmt; +use std::ops::Deref; +use std::ptr; +use std::str; + +fn leak(s: String) -> &'static str { + Box::leak(s.into_boxed_str()) +} + +thread_local! 
{ + static STRING_CACHE: RefCell> = Default::default(); +} + +#[derive(Clone, Copy, PartialOrd, Ord, Eq, Hash)] +pub struct InternedString { + inner: &'static str, +} + +impl PartialEq for InternedString { + fn eq(&self, other: &InternedString) -> bool { + ptr::eq(self.as_str(), other.as_str()) + } +} + +impl InternedString { + pub fn new(st: &str) -> InternedString { + STRING_CACHE.with(|cache| { + let mut cache = cache.borrow_mut(); + let s = cache.get(st).map(|&s| s).unwrap_or_else(|| { + let s = leak(st.to_string()); + cache.insert(s); + s + }); + InternedString { inner: s } + }) + } + + pub fn new_if_exists(st: &str) -> Option { + STRING_CACHE.with(|cache| cache.borrow().get(st).map(|&s| InternedString { inner: s })) + } + + pub fn as_str(&self) -> &'static str { + self.inner + } +} + +impl Deref for InternedString { + type Target = str; + fn deref(&self) -> &'static str { + self.as_str() + } +} + +impl fmt::Debug for InternedString { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + fmt::Debug::fmt(self.as_str(), f) + } +} + +impl fmt::Display for InternedString { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + fmt::Display::fmt(self.as_str(), f) + } +} + +impl Serialize for InternedString { + fn serialize(&self, serializer: S) -> Result + where + S: Serializer, + { + serializer.serialize_str(self.inner) + } +} + +impl<'de> Deserialize<'de> for InternedString { + fn deserialize(deserializer: D) -> Result + where + D: Deserializer<'de>, + { + struct VisStr; + impl<'de> Visitor<'de> for VisStr { + type Value = InternedString; + fn expecting(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + write!(f, "expecting string") + } + fn visit_borrowed_str(self, v: &'de str) -> Result { + Ok(InternedString::new(v)) + } + } + deserializer.deserialize_str(VisStr {}) + } +} diff --git a/racer/metadata/Cargo.toml b/racer/metadata/Cargo.toml new file mode 100644 index 0000000000..55a5dce365 --- /dev/null +++ b/racer/metadata/Cargo.toml @@ -0,0 +1,22 @@ +[package] +name = "racer-cargo-metadata" +version = "0.1.2" +authors = ["Yuji Kanagawa "] +license = "MIT" +description = "light-weight cargo metadata parser for racer" +homepage = "https://github.com/racer-rust/racer" +repository = "https://github.com/racer-rust/racer" +edition = "2018" +workspace = ".." 
+ +[dependencies] +serde_json = "1.0" + +[dependencies.serde] +version = "1.0" +features = ["derive"] + +[dependencies.racer-interner] +version = "0.1" +path = "../interner" + diff --git a/racer/metadata/src/lib.rs b/racer/metadata/src/lib.rs new file mode 100644 index 0000000000..18ef85094d --- /dev/null +++ b/racer/metadata/src/lib.rs @@ -0,0 +1,93 @@ +extern crate racer_interner; +#[macro_use] +extern crate serde; +extern crate serde_json; + +pub mod mapping; +pub mod metadata; + +use crate::metadata::Metadata; +use std::env; +use std::error::Error; +use std::fmt; +use std::io; +use std::path::{Path, PathBuf}; +use std::process::Command; +use std::str::Utf8Error; + +#[derive(Debug)] +pub enum ErrorKind { + Encode(Utf8Error), + Json(serde_json::Error), + Io(io::Error), + Subprocess(String), +} + +impl fmt::Display for ErrorKind { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + match self { + ErrorKind::Encode(e) => fmt::Display::fmt(e, f), + ErrorKind::Json(e) => fmt::Display::fmt(e, f), + ErrorKind::Io(e) => fmt::Display::fmt(e, f), + ErrorKind::Subprocess(s) => write!(f, "stderr: {}", s), + } + } +} + +impl Error for ErrorKind {} + +impl From for ErrorKind { + fn from(e: Utf8Error) -> ErrorKind { + ErrorKind::Encode(e) + } +} + +impl From for ErrorKind { + fn from(e: serde_json::Error) -> ErrorKind { + ErrorKind::Json(e) + } +} + +impl From for ErrorKind { + fn from(e: io::Error) -> ErrorKind { + ErrorKind::Io(e) + } +} + +pub fn find_manifest(mut current: &Path) -> Option { + let file = "Cargo.toml"; + if current.is_dir() { + let manifest = current.join(file); + if manifest.exists() { + return Some(manifest); + } + } + while let Some(parent) = current.parent() { + let manifest = parent.join(file); + if manifest.exists() { + return Some(manifest); + } + current = parent; + } + None +} + +pub fn run(manifest_path: &Path, frozen: bool) -> Result { + let cargo = env::var("CARGO").unwrap_or_else(|_| "cargo".to_owned()); + let mut cmd = Command::new(cargo); + cmd.arg("metadata"); + cmd.arg("--all-features"); + cmd.args(&["--format-version", "1"]); + cmd.args(&["--color", "never"]); + cmd.arg("--manifest-path"); + cmd.arg(manifest_path.as_os_str()); + if frozen { + cmd.arg("--frozen"); + } + let op = cmd.output()?; + if !op.status.success() { + let stderr = String::from_utf8(op.stderr).map_err(|e| e.utf8_error())?; + return Err(ErrorKind::Subprocess(stderr)); + } + serde_json::from_slice(&op.stdout).map_err(From::from) +} diff --git a/racer/metadata/src/mapping.rs b/racer/metadata/src/mapping.rs new file mode 100644 index 0000000000..7bc9f504a9 --- /dev/null +++ b/racer/metadata/src/mapping.rs @@ -0,0 +1,139 @@ +use crate::metadata::{Metadata, Package, PackageId, Resolve, ResolveNode, Target}; +use racer_interner::InternedString; +use std::collections::HashMap; +use std::path::{Path, PathBuf}; + +/// Cached dependencies for racer +#[derive(Clone, Debug)] +pub struct PackageMap { + manifest_to_idx: HashMap, + id_to_idx: HashMap, + packages: Vec, +} + +#[derive(Clone, Copy, Debug, Eq, PartialEq, Ord, PartialOrd)] +pub enum Edition { + Ed2015, + Ed2018, + Ed2021, +} + +impl Edition { + pub fn from_str(s: &str) -> Self { + match s { + "2015" => Edition::Ed2015, + "2018" => Edition::Ed2018, + "2021" => Edition::Ed2021, + _ => unreachable!("got unexpected edition {}", s), + } + } +} + +#[derive(Clone, Debug)] +struct PackageInner { + edition: Edition, + deps: Vec<(InternedString, PathBuf)>, + lib: Option, + id: PackageId, +} + +impl PackageInner { + fn new(ed: InternedString, id: 
PackageId, lib: Option) -> Self { + PackageInner { + edition: Edition::from_str(ed.as_str()), + deps: Vec::new(), + id, + lib, + } + } +} + +#[derive(Clone, Copy, Debug, Eq, PartialEq)] +pub struct PackageIdx(usize); + +impl PackageMap { + pub fn from_metadata(meta: Metadata) -> Self { + let Metadata { + packages, resolve, .. + } = meta; + PackageMap::new(packages, resolve) + } + pub fn new(packages: Vec, resolve: Option) -> Self { + let mut manifest_to_idx = HashMap::new(); + let mut id_to_idx = HashMap::new(); + let mut inner = Vec::new(); + for (i, package) in packages.into_iter().enumerate() { + let Package { + id, + targets, + manifest_path, + edition, + .. + } = package; + id_to_idx.insert(id, PackageIdx(i)); + manifest_to_idx.insert(manifest_path, PackageIdx(i)); + let lib = targets.into_iter().find(|t| t.is_lib()).to_owned(); + inner.push(PackageInner::new(edition, id, lib)); + } + if let Some(res) = resolve { + construct_deps(res.nodes, &id_to_idx, &mut inner); + } + PackageMap { + manifest_to_idx, + id_to_idx, + packages: inner, + } + } + pub fn ids<'a>(&'a self) -> impl 'a + Iterator { + self.packages.iter().map(|p| p.id) + } + pub fn id_to_idx(&self, id: PackageId) -> Option { + self.id_to_idx.get(&id).map(|&x| x) + } + pub fn get_idx(&self, path: &Path) -> Option { + self.manifest_to_idx.get(path).map(|&id| id) + } + pub fn get_id(&self, idx: PackageIdx) -> PackageId { + self.packages[idx.0].id + } + pub fn get_edition(&self, idx: PackageIdx) -> Edition { + self.packages[idx.0].edition + } + pub fn get_lib(&self, idx: PackageIdx) -> Option<&Target> { + self.packages[idx.0].lib.as_ref() + } + pub fn get_lib_src_path(&self, idx: PackageIdx) -> Option<&Path> { + self.get_lib(idx).map(|t| t.src_path.as_ref()) + } + pub fn get_dependencies(&self, idx: PackageIdx) -> &[(InternedString, PathBuf)] { + self.packages[idx.0].deps.as_ref() + } + pub fn get_src_path_from_libname(&self, id: PackageIdx, s: &str) -> Option<&Path> { + let deps = self.get_dependencies(id); + let query_str = InternedString::new_if_exists(s)?; + deps.iter().find(|t| t.0 == query_str).map(|t| t.1.as_ref()) + } +} + +fn construct_deps( + nodes: Vec, + id_to_idx: &HashMap, + res: &mut [PackageInner], +) -> Option<()> { + for node in nodes { + let idx = id_to_idx.get(&node.id)?; + let deps: Vec<_> = node + .dependencies + .into_iter() + .filter_map(|id| { + let idx = id_to_idx.get(&id)?; + res[idx.0] + .lib + .as_ref() + .map(|l| (l.name, l.src_path.clone())) + }) + .collect(); + res[idx.0].deps.extend(deps); + } + Some(()) +} diff --git a/racer/metadata/src/metadata.rs b/racer/metadata/src/metadata.rs new file mode 100644 index 0000000000..4c2d73788c --- /dev/null +++ b/racer/metadata/src/metadata.rs @@ -0,0 +1,77 @@ +//! 
Data structures for metadata +use racer_interner::InternedString; +use std::path::PathBuf; + +#[derive(Clone, Debug, Serialize, Deserialize)] +pub struct Metadata { + pub packages: Vec, + pub workspace_members: Vec, + pub resolve: Option, + #[serde(default)] + pub workspace_root: PathBuf, + pub target_directory: PathBuf, + version: usize, + #[serde(skip)] + __guard: (), +} + +#[derive(Clone, Debug, Serialize, Deserialize)] +pub struct Package { + pub id: PackageId, + pub targets: Vec, + pub manifest_path: PathBuf, + #[serde(default = "edition_default")] + pub edition: InternedString, + #[serde(skip)] + __guard: (), +} + +#[derive(Clone, Debug, Serialize, Deserialize)] +pub struct Resolve { + pub nodes: Vec, + #[serde(skip)] + __guard: (), +} + +#[derive(Clone, Debug, Serialize, Deserialize)] +pub struct ResolveNode { + pub id: PackageId, + pub dependencies: Vec, +} + +#[derive(Clone, Debug, Serialize, Deserialize)] +pub struct Target { + pub name: InternedString, + pub kind: Vec, + pub src_path: PathBuf, + #[serde(default = "edition_default")] + pub edition: InternedString, + #[serde(skip)] + __guard: (), +} + +const LIB_KINDS: [&'static str; 4] = ["lib", "rlib", "dylib", "proc-macro"]; + +impl Target { + pub fn is_lib(&self) -> bool { + self.kind.iter().any(|k| LIB_KINDS.contains(&k.as_str())) + } + pub fn is_2015(&self) -> bool { + self.edition.as_str() == "2015" + } +} + +#[derive(Clone, Copy, Debug, Eq, PartialEq, Hash, Serialize, Deserialize)] +pub struct PackageId(InternedString); + +impl PackageId { + pub fn name(&self) -> &str { + let idx = self.0.find(' ').expect("Whitespace not found"); + &self.0[..idx] + } +} + +#[inline(always)] +fn edition_default() -> InternedString { + InternedString::new("2015") +} diff --git a/racer/rust-toolchain.toml b/racer/rust-toolchain.toml new file mode 100644 index 0000000000..ffcacf048f --- /dev/null +++ b/racer/rust-toolchain.toml @@ -0,0 +1,3 @@ +[toolchain] +channel = "nightly-2022-04-06" +components = ["rust-src", "rustc-dev"] diff --git a/racer/src/racer/ast.rs b/racer/src/racer/ast.rs new file mode 100644 index 0000000000..e5060d66ff --- /dev/null +++ b/racer/src/racer/ast.rs @@ -0,0 +1,1372 @@ +use crate::ast_types::Path as RacerPath; +use crate::ast_types::{ + self, GenericsArgs, ImplHeader, Pat, PathAlias, PathAliasKind, TraitBounds, Ty, +}; +use crate::core::{self, BytePos, ByteRange, Match, MatchType, Scope, Session, SessionExt}; +use crate::nameres; +use crate::typeinf; + +use std::path::Path; + +use rustc_ast::ast::{self, ExprKind, FnRetTy, ItemKind, PatKind, UseTree, UseTreeKind}; +use rustc_ast::{self, visit}; +use rustc_data_structures::sync::Lrc; +use rustc_errors::emitter::Emitter; +use rustc_errors::{Diagnostic, Handler}; +use rustc_parse::new_parser_from_source_str; +use rustc_parse::parser::{ForceCollect, Parser}; +use rustc_session::parse::ParseSess; +use rustc_span::edition::Edition; +use rustc_span::source_map::{self, FileName, SourceMap}; +use rustc_span::Span; + +struct DummyEmitter; + +impl Emitter for DummyEmitter { + fn emit_diagnostic(&mut self, _db: &Diagnostic) {} + fn source_map(&self) -> Option<&Lrc> { + None + } + fn should_show_explain(&self) -> bool { + false + } + fn fluent_bundle(&self) -> Option<&Lrc> { + None + } + fn fallback_fluent_bundle(&self) -> &rustc_errors::FluentBundle { + unimplemented!("diagnostic translations are unimplemented in racer"); + } +} + +/// construct parser from string +// From syntax/util/parser_testing.rs +pub fn string_to_parser(ps: &ParseSess, source_str: String) -> Parser<'_> { 
+ new_parser_from_source_str(ps, FileName::Custom("racer-file".to_owned()), source_str) +} + +/// Get parser from string s and then apply closure f to it +// TODO: use Result insated of Option +pub fn with_error_checking_parse(s: String, f: F) -> Option +where + F: FnOnce(&mut Parser<'_>) -> Option, +{ + // FIXME: Set correct edition based on the edition of the target crate. + rustc_span::create_session_if_not_set_then(Edition::Edition2018, |_| { + let codemap = Lrc::new(SourceMap::new(source_map::FilePathMapping::empty())); + // We use DummyEmitter here not to print error messages to stderr + let handler = Handler::with_emitter(false, None, Box::new(DummyEmitter {})); + let parse_sess = ParseSess::with_span_handler(handler, codemap); + + let mut p = string_to_parser(&parse_sess, s); + f(&mut p) + }) +} + +/// parse string source_str as statement and then apply f to it +/// return false if we can't parse s as statement +// TODO: make F FnOnce(&ast::Stmt) -> Result +pub fn with_stmt(source_str: String, f: F) -> bool +where + F: FnOnce(&ast::Stmt), +{ + with_error_checking_parse(source_str, |p| { + let stmt = match p.parse_stmt(ForceCollect::No) { + Ok(Some(stmt)) => stmt, + _ => return None, + }; + f(&stmt); + Some(()) + }) + .is_some() +} + +pub(crate) fn destruct_span(span: Span) -> (u32, u32) { + let source_map::BytePos(lo) = span.lo(); + let source_map::BytePos(hi) = span.hi(); + (lo, hi) +} + +pub(crate) fn get_span_start(span: Span) -> u32 { + let source_map::BytePos(lo) = span.lo(); + lo +} + +/// collect paths from syntax::ast::UseTree +#[derive(Debug)] +pub struct UseVisitor { + pub path_list: Vec, + pub contains_glob: bool, +} + +impl<'ast> visit::Visitor<'ast> for UseVisitor { + fn visit_item(&mut self, i: &ast::Item) { + // collect items from use tree recursively + // returns (Paths, contains_glab) + fn collect_nested_items( + use_tree: &UseTree, + parent_path: Option<&ast_types::Path>, + ) -> (Vec, bool) { + let mut res = vec![]; + let mut path = if let Some(parent) = parent_path { + let relative_path = RacerPath::from_ast_nogen(&use_tree.prefix); + let mut path = parent.clone(); + path.extend(relative_path); + path + } else { + RacerPath::from_ast_nogen(&use_tree.prefix) + }; + let mut contains_glob = false; + match use_tree.kind { + UseTreeKind::Simple(rename, _, _) => { + let ident = use_tree.ident().name.to_string(); + let rename_pos: Option = + rename.map(|id| destruct_span(id.span).0.into()); + let kind = if let Some(last_seg) = path.segments.last() { + //` self` is treated normaly in libsyntax, + // but we distinguish it here to make completion easy + if last_seg.name == "self" { + PathAliasKind::Self_(ident, rename_pos) + } else { + PathAliasKind::Ident(ident, rename_pos) + } + } else { + PathAliasKind::Ident(ident, rename_pos) + }; + if let PathAliasKind::Self_(..) 
= kind { + path.segments.pop(); + } + res.push(PathAlias { + kind, + path, + range: ByteRange::from(use_tree.span), + }); + } + UseTreeKind::Nested(ref nested) => { + nested.iter().for_each(|(ref tree, _)| { + let (items, has_glob) = collect_nested_items(tree, Some(&path)); + res.extend(items); + contains_glob |= has_glob; + }); + } + UseTreeKind::Glob => { + res.push(PathAlias { + kind: PathAliasKind::Glob, + path, + range: ByteRange::from(use_tree.span), + }); + contains_glob = true; + } + } + (res, contains_glob) + } + if let ItemKind::Use(ref use_tree) = i.kind { + let (path_list, contains_glob) = collect_nested_items(use_tree, None); + self.path_list = path_list; + self.contains_glob = contains_glob; + } + } +} + +pub struct PatBindVisitor { + ident_points: Vec, +} + +impl<'ast> visit::Visitor<'ast> for PatBindVisitor { + fn visit_local(&mut self, local: &ast::Local) { + // don't visit the RHS (init) side of the let stmt + self.visit_pat(&local.pat); + } + + fn visit_expr(&mut self, ex: &ast::Expr) { + // don't visit the RHS or block of an 'if let' or 'for' stmt + match &ex.kind { + ExprKind::If(let_stmt, ..) | ExprKind::While(let_stmt, ..) => { + if let ExprKind::Let(pat, ..) = &let_stmt.kind { + self.visit_pat(pat); + } + } + ExprKind::ForLoop(pat, ..) => self.visit_pat(pat), + _ => visit::walk_expr(self, ex), + } + } + + fn visit_pat(&mut self, p: &ast::Pat) { + match p.kind { + PatKind::Ident(_, ref spannedident, _) => { + self.ident_points.push(spannedident.span.into()); + } + _ => { + visit::walk_pat(self, p); + } + } + } +} + +pub struct PatVisitor { + ident_points: Vec, +} + +impl<'ast> visit::Visitor<'ast> for PatVisitor { + fn visit_pat(&mut self, p: &ast::Pat) { + match p.kind { + PatKind::Ident(_, ref spannedident, _) => { + self.ident_points.push(spannedident.span.into()); + } + _ => { + visit::walk_pat(self, p); + } + } + } +} + +pub struct FnArgVisitor { + idents: Vec<(Pat, Option, ByteRange)>, + generics: GenericsArgs, + scope: Scope, + offset: i32, +} + +impl<'ast> visit::Visitor<'ast> for FnArgVisitor { + fn visit_fn(&mut self, fk: visit::FnKind<'_>, _: source_map::Span, _: ast::NodeId) { + let fd = match fk { + visit::FnKind::Fn(_, _, ref fn_sig, _, _, _) => &*fn_sig.decl, + visit::FnKind::Closure(ref fn_decl, _) => fn_decl, + }; + debug!("[FnArgVisitor::visit_fn] inputs: {:?}", fd.inputs); + self.idents = fd + .inputs + .iter() + .map(|arg| { + debug!("[FnArgTypeVisitor::visit_fn] type {:?} was found", arg.ty); + let pat = Pat::from_ast(&arg.pat.kind, &self.scope); + let ty = Ty::from_ast(&arg.ty, &self.scope); + let source_map::BytePos(lo) = arg.pat.span.lo(); + let source_map::BytePos(hi) = arg.ty.span.hi(); + (pat, ty, ByteRange::new(lo, hi)) + }) + .collect(); + } + fn visit_generics(&mut self, g: &'ast ast::Generics) { + let generics = GenericsArgs::from_generics(g, &self.scope.filepath, self.offset); + self.generics.extend(generics); + } +} + +fn point_is_in_span(point: BytePos, span: &Span) -> bool { + let point: u32 = point.0 as u32; + let (lo, hi) = destruct_span(*span); + point >= lo && point < hi +} + +// The point must point to an ident within the pattern. 
+fn destructure_pattern_to_ty( + pat: &ast::Pat, + point: BytePos, + ty: &Ty, + scope: &Scope, + session: &Session<'_>, +) -> Option { + debug!( + "destructure_pattern_to_ty point {:?} ty {:?} pat: {:?}", + point, ty, pat.kind + ); + match pat.kind { + PatKind::Ident(_, ref spannedident, _) => { + if point_is_in_span(point, &spannedident.span) { + debug!("destructure_pattern_to_ty matched an ident!"); + Some(ty.clone()) + } else { + panic!( + "Expecting the point to be in the patident span. pt: {:?}", + point + ); + } + } + PatKind::Tuple(ref tuple_elements) => match *ty { + Ty::Tuple(ref typeelems) => { + for (i, p) in tuple_elements.iter().enumerate() { + if !point_is_in_span(point, &p.span) { + continue; + } + if let Some(ref ty) = typeelems[i] { + return destructure_pattern_to_ty(p, point, ty, scope, session); + } + } + None + } + _ => panic!("Expecting TyTuple"), + }, + PatKind::TupleStruct(_, ref path, ref children) => { + let m = resolve_ast_path(path, &scope.filepath, scope.point, session)?; + let contextty = path_to_match(ty.clone(), session); + for (i, p) in children.iter().enumerate() { + if point_is_in_span(point, &p.span) { + return typeinf::get_tuplestruct_field_type(i, &m, session) + .and_then(|ty| { + // if context ty is a match, use its generics + if let Some(Ty::Match(ref contextm)) = contextty { + path_to_match_including_generics( + ty, + contextm.to_generics(), + session, + ) + } else { + path_to_match(ty, session) + } + }) + .and_then(|ty| destructure_pattern_to_ty(p, point, &ty, scope, session)); + } + } + None + } + PatKind::Struct(_, ref path, ref children, _) => { + let m = resolve_ast_path(path, &scope.filepath, scope.point, session)?; + let contextty = path_to_match(ty.clone(), session); + for child in children { + if point_is_in_span(point, &child.span) { + return typeinf::get_struct_field_type(&child.ident.name.as_str(), &m, session) + .and_then(|ty| { + if let Some(Ty::Match(ref contextm)) = contextty { + path_to_match_including_generics( + ty, + contextm.to_generics(), + session, + ) + } else { + path_to_match(ty, session) + } + }) + .and_then(|ty| { + destructure_pattern_to_ty(&child.pat, point, &ty, scope, session) + }); + } + } + None + } + _ => { + debug!("Could not destructure pattern {:?}", pat); + None + } + } +} + +struct LetTypeVisitor<'c, 's> { + scope: Scope, + session: &'s Session<'c>, + pos: BytePos, // pos is relative to the srctxt, scope is global + result: Option, +} + +impl<'c, 's, 'ast> visit::Visitor<'ast> for LetTypeVisitor<'c, 's> { + fn visit_local(&mut self, local: &ast::Local) { + let ty = match &local.ty { + Some(annon) => Ty::from_ast(&*annon, &self.scope), + None => local.kind.init().as_ref().and_then(|initexpr| { + debug!("[LetTypeVisitor] initexpr is {:?}", initexpr.kind); + let mut v = ExprTypeVisitor::new(self.scope.clone(), self.session); + v.visit_expr(initexpr); + v.result + }), + }; + debug!("[LetTypeVisitor] ty is {:?}. 
pos is {:?}", ty, self.pos); + self.result = ty + .and_then(|ty| { + destructure_pattern_to_ty(&local.pat, self.pos, &ty, &self.scope, self.session) + }) + .and_then(|ty| path_to_match(ty, self.session)); + } +} + +struct MatchTypeVisitor<'c, 's> { + scope: Scope, + session: &'s Session<'c>, + pos: BytePos, // pos is relative to the srctxt, scope is global + result: Option, +} + +impl<'c, 's, 'ast> visit::Visitor<'ast> for MatchTypeVisitor<'c, 's> { + fn visit_expr(&mut self, ex: &ast::Expr) { + if let ExprKind::Match(ref subexpression, ref arms) = ex.kind { + debug!("PHIL sub expr is {:?}", subexpression); + + let mut v = ExprTypeVisitor::new(self.scope.clone(), self.session); + v.visit_expr(subexpression); + + debug!("PHIL sub type is {:?}", v.result); + + for arm in arms { + if !point_is_in_span(self.pos, &arm.pat.span) { + continue; + } + debug!("PHIL point is in pattern |{:?}|", arm.pat); + self.result = v + .result + .as_ref() + .and_then(|ty| { + destructure_pattern_to_ty(&arm.pat, self.pos, ty, &self.scope, self.session) + }) + .and_then(|ty| path_to_match(ty, self.session)); + } + } + } +} + +fn resolve_ast_path( + path: &ast::Path, + filepath: &Path, + pos: BytePos, + session: &Session<'_>, +) -> Option { + let scope = Scope::new(filepath.to_owned(), pos); + let path = RacerPath::from_ast(path, &scope); + nameres::resolve_path_with_primitive( + &path, + filepath, + pos, + core::SearchType::ExactMatch, + core::Namespace::Path, + session, + ) + .into_iter() + .nth(0) +} + +fn path_to_match(ty: Ty, session: &Session<'_>) -> Option { + match ty { + Ty::PathSearch(paths) => { + find_type_match(&paths.path, &paths.filepath, paths.point, session).map(Ty::Match) + } + Ty::RefPtr(ty, _) => path_to_match(*ty, session), + _ => Some(ty), + } +} + +pub(crate) fn find_type_match( + path: &RacerPath, + fpath: &Path, + pos: BytePos, + session: &Session<'_>, +) -> Option { + debug!("find_type_match {:?}, {:?}", path, fpath); + let mut res = nameres::resolve_path_with_primitive( + path, + fpath, + pos, + core::SearchType::ExactMatch, + core::Namespace::Type, + session, + ) + .into_iter() + .nth(0) + .and_then(|m| match m.mtype { + MatchType::Type => typeinf::get_type_of_typedef(&m, session), + _ => Some(m), + })?; + // TODO: 'Type' support + // if res is Enum/Struct and has a generic type paramter, let's resolve it. 
+ for (param, typ) in res.generics_mut().zip(path.generic_types()) { + param.resolve(typ.to_owned()); + } + Some(res) +} + +struct ExprTypeVisitor<'c, 's> { + scope: Scope, + session: &'s Session<'c>, + // what we have before calling typeinf::get_type_of_match + path_match: Option, + result: Option, +} + +impl<'c: 's, 's> ExprTypeVisitor<'c, 's> { + fn new(scope: Scope, session: &'s Session<'c>) -> Self { + ExprTypeVisitor { + scope, + session, + path_match: None, + result: None, + } + } + fn same_scope(&self) -> Self { + Self { + scope: self.scope.clone(), + session: self.session, + path_match: None, + result: None, + } + } +} + +impl<'c, 's, 'ast> visit::Visitor<'ast> for ExprTypeVisitor<'c, 's> { + fn visit_expr(&mut self, expr: &ast::Expr) { + debug!( + "ExprTypeVisitor::visit_expr {:?}(kind: {:?})", + expr, expr.kind + ); + //walk_expr(self, ex, e) + match expr.kind { + ExprKind::Unary(_, ref expr) | ExprKind::AddrOf(_, _, ref expr) => { + self.visit_expr(expr); + } + ExprKind::Path(_, ref path) => { + let source_map::BytePos(lo) = path.span.lo(); + self.result = resolve_ast_path( + path, + &self.scope.filepath, + self.scope.point + lo.into(), + self.session, + ) + .and_then(|m| { + let msrc = self.session.load_source_file(&m.filepath); + self.path_match = Some(m.clone()); + typeinf::get_type_of_match(m, msrc.as_src(), self.session) + }); + } + ExprKind::Call(ref callee_expression, ref caller_expr) => { + self.visit_expr(callee_expression); + self.result = self.result.take().and_then(|m| { + if let Ty::Match(mut m) = m { + match m.mtype { + MatchType::Function => { + typeinf::get_return_type_of_function(&m, &m, self.session) + .and_then(|ty| path_to_match(ty, self.session)) + } + MatchType::Method(ref gen) => { + let mut return_ty = + typeinf::get_return_type_of_function(&m, &m, self.session); + // Account for already resolved generics if the return type is Self + // (in which case we return bare type as found in the `impl` header) + if let (Some(Ty::Match(ref mut m)), Some(gen)) = + (&mut return_ty, gen) + { + for (type_param, arg) in m.generics_mut().zip(gen.args()) { + if let Some(resolved) = arg.resolved() { + type_param.resolve(resolved.clone()); + } + } + } + return_ty.and_then(|ty| { + path_to_match_including_generics( + ty, + gen.as_ref().map(AsRef::as_ref), + self.session, + ) + }) + } + // if we find tuple struct / enum variant, try to resolve its generics name + MatchType::Struct(ref mut gen) + | MatchType::Enum(ref mut gen) + | MatchType::Union(ref mut gen) => { + if gen.is_empty() { + return Some(Ty::Match(m)); + } + let tuple_fields = match self.path_match { + Some(ref m) => typeinf::get_tuplestruct_fields(m, self.session), + None => return Some(Ty::Match(m)), + }; + // search what is in callee e.g. 
Some(String::new()<-) for generics + for ((_, _, ty), expr) in tuple_fields.into_iter().zip(caller_expr) + { + let ty = try_continue!(ty).dereference(); + if let Ty::PathSearch(paths) = ty { + let (id, _) = + try_continue!(gen.search_param_by_path(&paths.path)); + let mut visitor = self.same_scope(); + visitor.visit_expr(expr); + if let Some(ty) = visitor.result { + gen.0[id].resolve(ty.dereference()); + } + } + } + Some(Ty::Match(m)) + } + MatchType::TypeParameter(ref traitbounds) + if traitbounds.has_closure() => + { + let mut output = None; + if let Some(path_search) = traitbounds.get_closure() { + for seg in path_search.path.segments.iter() { + if seg.output.is_some() { + output = seg.output.clone(); + break; + } + } + } + output + } + _ => { + debug!( + "ExprTypeVisitor: Cannot handle ExprCall of {:?} type", + m.mtype + ); + None + } + } + } else { + None + } + }); + } + ExprKind::Struct(ref struct_expr) => { + let ast::StructExpr { ref path, .. } = **struct_expr; + let pathvec = RacerPath::from_ast(path, &self.scope); + self.result = find_type_match( + &pathvec, + &self.scope.filepath, + self.scope.point, + self.session, + ) + .map(Ty::Match); + } + ExprKind::MethodCall(ref method_def, ref arguments, _) => { + let methodname = method_def.ident.name.as_str(); + debug!("method call ast name {}", methodname); + + // arguments[0] is receiver(e.g. self) + let objexpr = &arguments[0]; + self.visit_expr(objexpr); + let result = self.result.take(); + let get_method_output_ty = |contextm: Match| { + let matching_methods = nameres::search_for_fields_and_methods( + contextm.clone(), + &methodname, + core::SearchType::ExactMatch, + true, + self.session, + ); + matching_methods + .into_iter() + .filter_map(|method| { + let ty = typeinf::get_return_type_of_function( + &method, + &contextm, + self.session, + )?; + path_to_match_including_generics( + ty, + contextm.to_generics(), + self.session, + ) + }) + .nth(0) + }; + self.result = result.and_then(|ty| { + ty.resolve_as_field_match(self.session) + .and_then(get_method_output_ty) + }); + } + ExprKind::Field(ref subexpression, spannedident) => { + let fieldname = spannedident.name.to_string(); + debug!("exprfield {}", fieldname); + self.visit_expr(subexpression); + let result = self.result.take(); + let match_to_field_ty = |structm: Match| { + typeinf::get_struct_field_type(&fieldname, &structm, self.session).and_then( + |fieldtypepath| { + find_type_match_including_generics( + fieldtypepath, + &structm.filepath, + structm.point, + &structm, + self.session, + ) + }, + ) + }; + self.result = result.and_then(|ty| { + ty.resolve_as_field_match(self.session) + .and_then(match_to_field_ty) + }); + } + ExprKind::Tup(ref exprs) => { + let mut v = Vec::new(); + for expr in exprs { + self.visit_expr(expr); + v.push(self.result.take()); + } + self.result = Some(Ty::Tuple(v)); + } + ExprKind::Lit(ref lit) => self.result = Ty::from_lit(lit), + ExprKind::Try(ref expr) => { + self.visit_expr(&expr); + debug!("ExprKind::Try result: {:?} expr: {:?}", self.result, expr); + self.result = if let Some(&Ty::Match(ref m)) = self.result.as_ref() { + // HACK for speed up (kngwyu) + // Yeah there're many corner cases but it'll work well in most cases + if m.matchstr == "Result" || m.matchstr == "Option" { + debug!("Option or Result: {:?}", m); + m.resolved_generics().next().map(|x| x.to_owned()) + } else { + debug!("Unable to desugar Try expression; type was {:?}", m); + None + } + } else { + None + }; + } + ExprKind::Match(_, ref arms) => { + debug!("match expr"); + + for 
arm in arms { + self.visit_expr(&arm.body); + + // All match arms need to return the same result, so if we found a result + // we can end the search. + if self.result.is_some() { + break; + } + } + } + ExprKind::If(_, ref block, ref else_block) => { + debug!("if/iflet expr"); + if let Some(stmt) = block.stmts.last() { + visit::walk_stmt(self, stmt); + } + if self.result.is_some() { + return; + } + // if the block does not resolve to a type, try the else block + if let Some(expr) = else_block { + self.visit_expr(expr); + } + } + ExprKind::Block(ref block, ref _label) => { + debug!("block expr"); + if let Some(stmt) = block.stmts.last() { + visit::walk_stmt(self, stmt); + } + } + ExprKind::Index(ref body, ref _index) => { + self.visit_expr(body); + // TODO(kngwyu) now we don't have support for literal so don't parse index + // but in the future, we should handle index's type + self.result = self + .result + .take() + .and_then(|ty| typeinf::get_type_of_indexed_value(ty, self.session)); + } + ExprKind::Array(ref exprs) => { + for expr in exprs { + self.visit_expr(expr); + if self.result.is_some() { + self.result = self + .result + .take() + .map(|ty| Ty::Array(Box::new(ty), format!("{}", exprs.len()))); + break; + } + } + if self.result.is_none() { + self.result = Some(Ty::Array(Box::new(Ty::Unsupported), String::new())); + } + } + ExprKind::MacCall(ref m) => { + if let Some(name) = m.path.segments.last().map(|seg| seg.ident) { + // use some ad-hoc rules + if name.as_str() == "vec" { + let path = RacerPath::from_iter( + true, + ["std", "vec", "Vec"].iter().map(|s| s.to_string()), + ); + self.result = find_type_match( + &path, + &self.scope.filepath, + self.scope.point, + self.session, + ) + .map(Ty::Match); + } + } + } + ExprKind::Binary(bin, ref left, ref right) => { + self.visit_expr(left); + let type_match = match self.result.take() { + Some(Ty::Match(m)) => m, + Some(Ty::PathSearch(ps)) => match ps.resolve_as_match(self.session) { + Some(m) => m, + _ => { + return; + } + }, + _ => { + return; + } + }; + + self.visit_expr(right); + let right_expr_type = match self.result.take() { + Some(Ty::Match(m)) => Some(m.matchstr), + Some(Ty::PathSearch(ps)) => { + ps.resolve_as_match(self.session).map(|m| m.matchstr) + } + _ => None, + }; + self.result = nameres::resolve_binary_expr_type( + &type_match, + bin.node, + right_expr_type.as_ref().map(|s| s.as_str()), + self.session, + ); + } + _ => { + debug!("- Could not match expr node type: {:?}", expr.kind); + } + }; + } + /// Just do nothing if we see a macro, but also prevent the panic! in the default impl. 
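+    /// (The default implementation assumes macros were expanded before the
+    /// visit and panics; racer walks unexpanded source, so a statement such
+    /// as `println!("{}", x);` inside the block being searched would
+    /// otherwise abort the whole type lookup.)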
+ fn visit_mac_call(&mut self, _mac: &ast::MacCall) {} +} + +// gets generics info from the context match +fn path_to_match_including_generics( + mut ty: Ty, + generics: Option<&GenericsArgs>, + session: &Session<'_>, +) -> Option { + if let Some(gen) = generics { + ty = ty.replace_by_generics(gen); + } + match ty { + Ty::PathSearch(paths) => { + let fieldtypepath = &paths.path; + find_type_match(&fieldtypepath, &paths.filepath, paths.point, session).map(Ty::Match) + } + _ => Some(ty), + } +} + +fn find_type_match_including_generics( + fieldtype: Ty, + filepath: &Path, + pos: BytePos, + structm: &Match, + session: &Session<'_>, +) -> Option { + assert_eq!(&structm.filepath, filepath); + let fieldtypepath = match fieldtype { + Ty::PathSearch(paths) => paths.path, + Ty::RefPtr(ty, _) => match ty.dereference() { + Ty::PathSearch(paths) => paths.path, + Ty::Match(m) => return Some(Ty::Match(m)), + _ => return None, + }, + // already resolved + Ty::Match(m) => return Some(Ty::Match(m)), + _ => { + return None; + } + }; + let generics = match &structm.mtype { + MatchType::Struct(gen) => gen, + _ => return None, + }; + if fieldtypepath.segments.len() == 1 { + // could be a generic arg! - try and resolve it + if let Some((_, param)) = generics.search_param_by_path(&fieldtypepath) { + if let Some(res) = param.resolved() { + return Some(res.to_owned()); + } + let mut m = param.to_owned().into_match(); + m.local = structm.local; + return Some(Ty::Match(m)); + } + } + + find_type_match(&fieldtypepath, filepath, pos, session).map(Ty::Match) +} + +struct StructVisitor { + pub scope: Scope, + pub fields: Vec<(String, ByteRange, Option)>, +} + +impl<'ast> visit::Visitor<'ast> for StructVisitor { + fn visit_variant_data(&mut self, struct_definition: &ast::VariantData) { + for field in struct_definition.fields() { + let ty = Ty::from_ast(&field.ty, &self.scope); + let name = match field.ident { + Some(ref ident) => ident.to_string(), + // name unnamed field by its ordinal, since self.0 works + None => format!("{}", self.fields.len()), + }; + self.fields.push((name, field.span.into(), ty)); + } + } +} + +#[derive(Debug)] +pub struct TypeVisitor<'s> { + pub name: Option, + pub type_: Option, + scope: &'s Scope, +} + +impl<'ast, 's> visit::Visitor<'ast> for TypeVisitor<'s> { + fn visit_item(&mut self, item: &ast::Item) { + if let ItemKind::TyAlias(ref ty_kind) = item.kind { + if let Some(ref ty) = ty_kind.ty { + self.name = Some(item.ident.name.to_string()); + self.type_ = Ty::from_ast(&ty, self.scope); + debug!("typevisitor type is {:?}", self.type_); + } + } + } +} + +pub struct TraitVisitor { + pub name: Option, +} + +impl<'ast> visit::Visitor<'ast> for TraitVisitor { + fn visit_item(&mut self, item: &ast::Item) { + if let ItemKind::Trait(..) = item.kind { + self.name = Some(item.ident.name.to_string()); + } + } +} + +#[derive(Debug)] +pub struct ImplVisitor<'p> { + pub result: Option, + filepath: &'p Path, + offset: BytePos, + block_start: BytePos, // the point { appears + local: bool, +} + +impl<'p> ImplVisitor<'p> { + fn new(filepath: &'p Path, offset: BytePos, local: bool, block_start: BytePos) -> Self { + ImplVisitor { + result: None, + filepath, + offset, + block_start, + local, + } + } +} + +impl<'ast, 'p> visit::Visitor<'ast> for ImplVisitor<'p> { + fn visit_item(&mut self, item: &ast::Item) { + if let ItemKind::Impl(ref impl_kind) = item.kind { + let ast::Impl { + ref generics, + ref of_trait, + ref self_ty, + .. 
+ } = **impl_kind; + let impl_start = self.offset + get_span_start(item.span).into(); + self.result = ImplHeader::new( + generics, + self.filepath, + of_trait, + self_ty, + self.offset, + self.local, + impl_start, + self.block_start, + ); + } + } +} + +pub struct ExternCrateVisitor { + pub name: Option, + pub realname: Option, +} + +impl<'ast> visit::Visitor<'ast> for ExternCrateVisitor { + fn visit_item(&mut self, item: &ast::Item) { + if let ItemKind::ExternCrate(ref optional_s) = item.kind { + self.name = Some(item.ident.name.to_string()); + if let Some(ref istr) = *optional_s { + self.realname = Some(istr.to_string()); + } + } + } + fn visit_mac_call(&mut self, _mac: &ast::MacCall) {} +} + +#[derive(Debug)] +struct GenericsVisitor
<P>
{ + result: GenericsArgs, + filepath: P, +} + +impl<'ast, P: AsRef> visit::Visitor<'ast> for GenericsVisitor
<P>
{ + fn visit_generics(&mut self, g: &ast::Generics) { + let path = &self.filepath; + if !self.result.0.is_empty() { + warn!("[visit_generics] called for multiple generics!"); + } + self.result.extend(GenericsArgs::from_generics(g, path, 0)); + } +} + +pub struct EnumVisitor { + pub name: String, + pub values: Vec<(String, BytePos)>, +} + +impl<'ast> visit::Visitor<'ast> for EnumVisitor { + fn visit_item(&mut self, i: &ast::Item) { + if let ItemKind::Enum(ref enum_definition, _) = i.kind { + self.name = i.ident.name.to_string(); + let (point1, point2) = destruct_span(i.span); + debug!("name point is {} {}", point1, point2); + + for variant in &enum_definition.variants { + let source_map::BytePos(point) = variant.span.lo(); + self.values.push((variant.ident.to_string(), point.into())); + } + } + } +} + +#[derive(Clone, Debug)] +pub struct StaticVisitor { + pub ty: Option, + pub is_mutable: bool, + scope: Scope, +} + +impl StaticVisitor { + fn new(scope: Scope) -> Self { + StaticVisitor { + ty: None, + is_mutable: false, + scope, + } + } +} + +impl<'ast> visit::Visitor<'ast> for StaticVisitor { + fn visit_item(&mut self, i: &ast::Item) { + match i.kind { + ItemKind::Const(_, ref ty, ref _expr) => self.ty = Ty::from_ast(ty, &self.scope), + ItemKind::Static(ref ty, m, ref _expr) => { + self.is_mutable = m == ast::Mutability::Mut; + self.ty = Ty::from_ast(ty, &self.scope); + } + _ => {} + } + } +} + +pub fn parse_use(s: String) -> UseVisitor { + let mut v = UseVisitor { + path_list: Vec::new(), + contains_glob: false, + }; + + // visit::walk_crate can be panic so we don't use it here + with_stmt(s, |stmt| visit::walk_stmt(&mut v, stmt)); + v +} + +pub fn parse_pat_bind_stmt(s: String) -> Vec { + let mut v = PatBindVisitor { + ident_points: Vec::new(), + }; + with_stmt(s, |stmt| visit::walk_stmt(&mut v, stmt)); + v.ident_points +} + +pub fn parse_struct_fields(s: String, scope: Scope) -> Vec<(String, ByteRange, Option)> { + let mut v = StructVisitor { + scope, + fields: Vec::new(), + }; + with_stmt(s, |stmt| visit::walk_stmt(&mut v, stmt)); + v.fields +} + +pub fn parse_impl( + s: String, + path: &Path, + offset: BytePos, + local: bool, + scope_start: BytePos, +) -> Option { + let mut v = ImplVisitor::new(path, offset, local, scope_start); + with_stmt(s, |stmt| visit::walk_stmt(&mut v, stmt)); + v.result +} + +pub fn parse_trait(s: String) -> TraitVisitor { + let mut v = TraitVisitor { name: None }; + with_stmt(s, |stmt| visit::walk_stmt(&mut v, stmt)); + v +} + +/// parse traits and collect inherited traits as TraitBounds +pub fn parse_inherited_traits>( + s: String, + filepath: P, + offset: i32, +) -> Option { + let mut v = InheritedTraitsVisitor { + result: None, + file_path: filepath, + offset: offset, + }; + with_stmt(s, |stmt| visit::walk_stmt(&mut v, stmt)); + v.result +} + +pub fn parse_generics(s: String, filepath: &Path) -> GenericsArgs { + let mut v = GenericsVisitor { + result: GenericsArgs::default(), + filepath: filepath, + }; + with_stmt(s, |stmt| visit::walk_stmt(&mut v, stmt)); + v.result +} + +pub fn parse_type<'s>(s: String, scope: &'s Scope) -> TypeVisitor<'s> { + let mut v = TypeVisitor { + name: None, + type_: None, + scope, + }; + with_stmt(s, |stmt| visit::walk_stmt(&mut v, stmt)); + v +} + +pub fn parse_fn_args_and_generics( + s: String, + scope: Scope, + offset: i32, +) -> (Vec<(Pat, Option, ByteRange)>, GenericsArgs) { + let mut v = FnArgVisitor { + idents: Vec::new(), + generics: GenericsArgs::default(), + scope, + offset, + }; + with_stmt(s, |stmt| 
visit::walk_stmt(&mut v, stmt)); + (v.idents, v.generics) +} + +pub fn parse_closure_args(s: String, scope: Scope) -> Vec<(Pat, Option, ByteRange)> { + let mut v = FnArgVisitor { + idents: Vec::new(), + generics: GenericsArgs::default(), + scope, + offset: 0, + }; + with_stmt(s, |stmt| visit::walk_stmt(&mut v, stmt)); + v.idents +} + +pub fn parse_pat_idents(s: String) -> Vec { + let mut v = PatVisitor { + ident_points: Vec::new(), + }; + with_stmt(s, |stmt| visit::walk_stmt(&mut v, stmt)); + debug!("ident points are {:?}", v.ident_points); + v.ident_points +} + +pub fn parse_fn_output(s: String, scope: Scope) -> (Option, bool) { + let mut v = FnOutputVisitor::new(scope); + with_stmt(s, |stmt| visit::walk_stmt(&mut v, stmt)); + let FnOutputVisitor { ty, is_async, .. } = v; + (ty, is_async) +} + +pub fn parse_extern_crate(s: String) -> ExternCrateVisitor { + let mut v = ExternCrateVisitor { + name: None, + realname: None, + }; + with_stmt(s, |stmt| visit::walk_stmt(&mut v, stmt)); + v +} + +pub fn parse_enum(s: String) -> EnumVisitor { + let mut v = EnumVisitor { + name: String::new(), + values: Vec::new(), + }; + with_stmt(s, |stmt| visit::walk_stmt(&mut v, stmt)); + v +} + +pub fn parse_static(s: String, scope: Scope) -> StaticVisitor { + let mut v = StaticVisitor::new(scope); + with_stmt(s, |stmt| visit::walk_stmt(&mut v, stmt)); + v +} + +pub fn get_type_of(s: String, fpath: &Path, pos: BytePos, session: &Session<'_>) -> Option { + let startscope = Scope { + filepath: fpath.to_path_buf(), + point: pos, + }; + + let mut v = ExprTypeVisitor::new(startscope, session); + + with_stmt(s, |stmt| visit::walk_stmt(&mut v, stmt)); + v.result +} + +// pos points to an ident in the lhs of the stmtstr +pub fn get_let_type(s: String, pos: BytePos, scope: Scope, session: &Session<'_>) -> Option { + let mut v = LetTypeVisitor { + scope, + session, + pos, + result: None, + }; + with_stmt(s, |stmt| visit::walk_stmt(&mut v, stmt)); + v.result +} + +pub fn get_match_arm_type( + s: String, + pos: BytePos, + scope: Scope, + session: &Session<'_>, +) -> Option { + let mut v = MatchTypeVisitor { + scope, + session, + pos, + result: None, + }; + with_stmt(s, |stmt| visit::walk_stmt(&mut v, stmt)); + v.result +} + +pub struct FnOutputVisitor { + scope: Scope, + pub ty: Option, + pub is_async: bool, +} + +impl FnOutputVisitor { + pub(crate) fn new(scope: Scope) -> Self { + FnOutputVisitor { + scope, + ty: None, + is_async: false, + } + } +} + +impl<'ast> visit::Visitor<'ast> for FnOutputVisitor { + fn visit_fn(&mut self, kind: visit::FnKind<'_>, _: source_map::Span, _: ast::NodeId) { + let fd = match kind { + visit::FnKind::Fn(_, _, ref fn_sig, _, _, _) => &*fn_sig.decl, + visit::FnKind::Closure(ref fn_decl, _) => fn_decl, + }; + self.is_async = kind + .header() + .map(|header| header.asyncness.is_async()) + .unwrap_or(false); + self.ty = match fd.output { + FnRetTy::Ty(ref ty) => Ty::from_ast(ty, &self.scope), + FnRetTy::Default(_) => Some(Ty::Default), + }; + } +} + +/// Visitor to collect Inherited Traits +pub struct InheritedTraitsVisitor
<P>
{ + /// search result(list of Inherited Traits) + result: Option, + /// the file trait appears + file_path: P, + /// thecode point 'trait' statement starts + offset: i32, +} + +impl<'ast, P> visit::Visitor<'ast> for InheritedTraitsVisitor
<P>
+where + P: AsRef, +{ + fn visit_item(&mut self, item: &ast::Item) { + if let ItemKind::Trait(ref trait_kind) = item.kind { + self.result = Some(TraitBounds::from_generic_bounds( + &trait_kind.bounds, + &self.file_path, + self.offset, + )); + } + } +} + +/// Visitor for for ~ in .. statement +pub(crate) struct ForStmtVisitor<'r, 's> { + pub(crate) for_pat: Option, + pub(crate) in_expr: Option, + scope: Scope, + session: &'r Session<'s>, +} + +impl<'ast, 'r, 's> visit::Visitor<'ast> for ForStmtVisitor<'r, 's> { + fn visit_expr(&mut self, ex: &'ast ast::Expr) { + if let ExprKind::ForLoop(ref pat, ref expr, _, _) = ex.kind { + let for_pat = Pat::from_ast(&pat.kind, &self.scope); + let mut expr_visitor = ExprTypeVisitor::new(self.scope.clone(), self.session); + expr_visitor.visit_expr(expr); + self.in_expr = expr_visitor.result; + self.for_pat = Some(for_pat); + } + } +} + +pub(crate) fn parse_for_stmt<'r, 's: 'r>( + s: String, + scope: Scope, + session: &'r Session<'s>, +) -> ForStmtVisitor<'r, 's> { + let mut v = ForStmtVisitor { + for_pat: None, + in_expr: None, + scope, + session, + }; + with_stmt(s, |stmt| visit::walk_stmt(&mut v, stmt)); + v +} + +/// Visitor for if let / while let statement +pub(crate) struct IfLetVisitor<'r, 's> { + pub(crate) let_pat: Option, + pub(crate) rh_expr: Option, + scope: Scope, + session: &'r Session<'s>, +} + +impl<'ast, 'r, 's> visit::Visitor<'ast> for IfLetVisitor<'r, 's> { + fn visit_expr(&mut self, ex: &'ast ast::Expr) { + match &ex.kind { + ExprKind::If(let_stmt, ..) | ExprKind::While(let_stmt, ..) => { + if let ExprKind::Let(pat, expr, _span) = &let_stmt.kind { + self.let_pat = Some(Pat::from_ast(&pat.kind, &self.scope)); + let mut expr_visitor = ExprTypeVisitor::new(self.scope.clone(), self.session); + expr_visitor.visit_expr(expr); + self.rh_expr = expr_visitor.result; + } + } + _ => {} + } + } +} + +pub(crate) fn parse_if_let<'r, 's: 'r>( + s: String, + scope: Scope, + session: &'r Session<'s>, +) -> IfLetVisitor<'r, 's> { + let mut v = IfLetVisitor { + let_pat: None, + rh_expr: None, + scope, + session, + }; + with_stmt(s, |stmt| visit::walk_stmt(&mut v, stmt)); + v +} diff --git a/racer/src/racer/ast_types.rs b/racer/src/racer/ast_types.rs new file mode 100644 index 0000000000..8bdeb51e11 --- /dev/null +++ b/racer/src/racer/ast_types.rs @@ -0,0 +1,1068 @@ +//! type conversion between racer types and libsyntax types +use super::ast::find_type_match; +use crate::core::{ + self, BytePos, ByteRange, Match, MatchType, Scope, SearchType, Session, SessionExt, +}; +use crate::matchers::ImportInfo; +use crate::nameres; +use crate::primitive; +use crate::primitive::PrimKind; +use crate::typeinf; +use crate::util; +use rustc_ast::ast::{ + self, GenericBound, GenericBounds, GenericParamKind, LitKind, PatKind, TraitRef, TyKind, + WherePredicate, +}; +use rustc_ast_pretty::pprust; +use rustc_span::source_map; +use std::fmt; +use std::path::{Path as FilePath, PathBuf}; +// we can only re-export types without thread-local interned string +pub use rustc_ast::ast::{BindingMode, Mutability}; + +/// The leaf of a `use` statement. +#[derive(Clone, Debug)] +pub struct PathAlias { + /// the leaf of Use Tree + /// it can be one of one of 3 types, e.g. + /// use std::collections::{self, hashmap::*, HashMap}; + pub kind: PathAliasKind, + /// The path. 
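+    /// e.g. `std::collections::HashMap` for the `HashMap` leaf of the
+    /// `use` statement shown above.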
+ pub path: Path, + /// range of item + pub range: ByteRange, +} + +#[derive(Clone, Debug, Eq, PartialEq)] +pub enum PathAliasKind { + Ident(String, Option), + Self_(String, Option), + Glob, +} + +impl AsRef for PathAlias { + fn as_ref(&self) -> &Path { + &self.path + } +} + +// Represents a type. Equivilent to rustc's ast::Ty but can be passed across threads +#[derive(Debug, Clone, PartialEq)] +pub enum Ty { + Match(Match), + PathSearch(PathSearch), // A path + the scope to be able to resolve it + Tuple(Vec>), + Array(Box, String), // ty, length expr as string + RefPtr(Box, Mutability), + Slice(Box), + Ptr(Box, Mutability), + TraitObject(TraitBounds), + Self_(Scope), + Future(Box, Scope), + Never, + Default, + Unsupported, +} + +impl Ty { + pub(crate) fn replace_by_resolved_generics(self, gen: &GenericsArgs) -> Self { + let (ty, deref_cnt) = self.deref_with_count(0); + if let Ty::PathSearch(ref paths) = ty { + if let Some((_, param)) = gen.search_param_by_path(&paths.path) { + if let Some(resolved) = param.resolved() { + return resolved.to_owned().wrap_by_ref(deref_cnt); + } + } + } + ty.wrap_by_ref(deref_cnt) + } + pub(crate) fn replace_by_generics(self, gen: &GenericsArgs) -> Self { + let (mut ty, deref_cnt) = self.deref_with_count(0); + if let Ty::PathSearch(ref mut paths) = ty { + if let Some((_, param)) = gen.search_param_by_path(&paths.path) { + if let Some(resolved) = param.resolved() { + return resolved.to_owned().wrap_by_ref(deref_cnt); + } else { + return Ty::Match(param.clone().into_match()); + } + } else { + paths.path.replace_by_bounds(gen); + } + } + ty.wrap_by_ref(deref_cnt) + } + pub(crate) fn dereference(self) -> Self { + if let Ty::RefPtr(ty, _) = self { + ty.dereference() + } else { + self + } + } + fn wrap_by_ref(self, count: usize) -> Self { + let mut ty = self; + // TODO: it's incorrect + for _ in 0..count { + ty = Ty::RefPtr(Box::new(ty), Mutability::Not); + } + ty + } + fn deref_with_count(self, count: usize) -> (Self, usize) { + if let Ty::RefPtr(ty, _) = self { + ty.deref_with_count(count + 1) + } else { + (self, count) + } + } + pub(crate) fn from_ast(ty: &ast::Ty, scope: &Scope) -> Option { + match ty.kind { + TyKind::Tup(ref items) => Some(Ty::Tuple( + items.into_iter().map(|t| Ty::from_ast(t, scope)).collect(), + )), + TyKind::Rptr(ref _lifetime, ref ty) => { + Ty::from_ast(&ty.ty, scope).map(|ref_ty| Ty::RefPtr(Box::new(ref_ty), ty.mutbl)) + } + TyKind::Path(_, ref path) => Some(Ty::PathSearch(PathSearch { + path: Path::from_ast(path, scope), + filepath: scope.filepath.clone(), + point: scope.point, + })), + TyKind::Array(ref ty, ref expr) => Ty::from_ast(ty, scope) + .map(|racer_ty| Ty::Array(Box::new(racer_ty), pprust::expr_to_string(&expr.value))), + TyKind::Slice(ref ty) => { + Ty::from_ast(ty, scope).map(|ref_ty| Ty::Slice(Box::new(ref_ty))) + } + TyKind::Ptr(ref ty) => { + Ty::from_ast(&*ty.ty, scope).map(|rty| Ty::Ptr(Box::new(rty), ty.mutbl)) + } + TyKind::Never => None, + TyKind::TraitObject(ref traits, _) | TyKind::ImplTrait(_, ref traits) => { + Some(Ty::TraitObject(TraitBounds::from_generic_bounds( + &traits, + scope.filepath.clone(), + scope.point.0 as i32, + ))) + } + TyKind::ImplicitSelf => Some(Ty::Self_(scope.clone())), + _ => { + trace!("unhandled Ty node: {:?}", ty.kind); + None + } + } + } + + pub(crate) fn from_lit(lit: &ast::Lit) -> Option { + let make_match = |kind: PrimKind| kind.to_module_match().map(Ty::Match); + match lit.kind { + LitKind::Str(_, _) => make_match(PrimKind::Str), + LitKind::ByteStr(ref bytes) => 
make_match(PrimKind::U8) + .map(|ty| Ty::Array(Box::new(ty), format!("{}", bytes.len()))), + LitKind::Byte(_) => make_match(PrimKind::U8), + LitKind::Char(_) => make_match(PrimKind::Char), + LitKind::Int(_, int_ty) => make_match(PrimKind::from_litint(int_ty)), + LitKind::Float(_, ast::LitFloatType::Unsuffixed) => make_match(PrimKind::F32), + LitKind::Float(_, ast::LitFloatType::Suffixed(float_ty)) => match float_ty { + ast::FloatTy::F32 => make_match(PrimKind::F32), + ast::FloatTy::F64 => make_match(PrimKind::F64), + }, + LitKind::Bool(_) => make_match(PrimKind::Bool), + LitKind::Err(_) => None, + } + } + fn resolve_common(self, session: &Session<'_>) -> Option { + match self { + Ty::Match(m) => Some(m), + Ty::PathSearch(paths) => { + find_type_match(&paths.path, &paths.filepath, paths.point, session) + } + Ty::Self_(scope) => { + let msrc = session.load_source_file(&scope.filepath); + let ty = typeinf::get_type_of_self( + scope.point, + &scope.filepath, + true, + msrc.as_src(), + session, + ); + match ty { + Some(Ty::Match(m)) => Some(m), + _ => None, + } + } + _ => None, + } + } + pub(crate) fn resolve_as_field_match(self, session: &Session<'_>) -> Option { + match self { + Ty::RefPtr(ty, _) => ty.resolve_as_field_match(session), + Ty::Array(_, _) | Ty::Slice(_) => primitive::PrimKind::Slice.to_module_match(), + _ => self.resolve_common(session), + } + } +} + +impl fmt::Display for Ty { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match *self { + Ty::Match(ref m) => write!(f, "{}", m.matchstr), + Ty::PathSearch(ref p) => write!(f, "{}", p.path), + Ty::Tuple(ref vec) => { + write!(f, "(")?; + for (i, field) in vec.iter().enumerate() { + if i != 0 { + write!(f, ", ")?; + } + if let Some(field) = field { + write!(f, "{}", field)?; + } else { + write!(f, "UNKNOWN")?; + } + } + write!(f, ")") + } + Ty::Array(ref ty, ref expr) => { + write!(f, "[")?; + write!(f, "{}", ty)?; + write!(f, "; ")?; + write!(f, "{}", expr)?; + write!(f, "]") + } + Ty::Slice(ref ty) => { + write!(f, "[")?; + write!(f, "{}", ty)?; + write!(f, "]") + } + Ty::RefPtr(ref ty, mutab) => match mutab { + Mutability::Not => write!(f, "&{}", ty), + Mutability::Mut => write!(f, "&mut {}", ty), + }, + Ty::Ptr(ref ty, mutab) => match mutab { + Mutability::Not => write!(f, "*const {}", ty), + Mutability::Mut => write!(f, "*mut {}", ty), + }, + Ty::TraitObject(ref bounds) => { + write!(f, "<")?; + let last = bounds.len() - 1; + for (i, ps) in bounds.iter().enumerate() { + if i == last { + write!(f, "{}", ps.path)?; + } else { + write!(f, "{},", ps.path)?; + } + } + write!(f, ">") + } + Ty::Self_(_) => write!(f, "Self"), + Ty::Future(ref ty, _) => write!(f, "impl Future", ty), + Ty::Never => write!(f, "!"), + Ty::Default => write!(f, "()"), + Ty::Unsupported => write!(f, "_"), + } + } +} + +/// Compatible type for syntax::ast::PatKind +/// but currently doesn't support all kinds +#[derive(Clone, Debug, PartialEq)] +pub enum Pat { + Wild, + Ident(BindingMode, String), + Struct(Path, Vec), + TupleStruct(Path, Vec), + Path(Path), + Tuple(Vec), + Box, + Ref(Box, Mutability), + Lit, + Range, + Slice, + Mac, + Rest, + Or, +} + +impl Pat { + pub(crate) fn search_by_name(&self, sname: &str, stype: SearchType) -> Option { + match self { + Pat::Wild => None, + Pat::Ident(_, name) => { + if util::symbol_matches(stype, sname, name) { + Some(name.clone()) + } else { + None + } + } + Pat::Struct(_, pats) => pats + .iter() + .filter_map(|pat| pat.pat.search_by_name(sname, stype)) + .next(), + Pat::TupleStruct(_, pats) | 
Pat::Tuple(pats) => pats + .iter() + .filter_map(|pat| pat.search_by_name(sname, stype)) + .next(), + Pat::Ref(pat, _) => pat.search_by_name(sname, stype), + _ => None, + } + } + pub(crate) fn from_ast(pat: &PatKind, scope: &Scope) -> Self { + match pat { + PatKind::Wild => Pat::Wild, + PatKind::Ident(bi, ident, _) => Pat::Ident(*bi, ident.to_string()), + PatKind::Struct(_, path, fields, _) => { + let path = Path::from_ast(path, scope); + let fields = fields + .iter() + .map(|fld| FieldPat::from_ast(&fld, scope)) + .collect(); + Pat::Struct(path, fields) + } + PatKind::TupleStruct(_, path, pats) => { + let path = Path::from_ast(path, scope); + let pats = pats + .iter() + .map(|pat| Pat::from_ast(&pat.kind, scope)) + .collect(); + Pat::TupleStruct(path, pats) + } + PatKind::Path(_, path) => Pat::Path(Path::from_ast(&path, scope)), + PatKind::Tuple(pats) => { + let pats = pats + .iter() + .map(|pat| Pat::from_ast(&pat.kind, scope)) + .collect(); + Pat::Tuple(pats) + } + PatKind::Box(_) => Pat::Box, + PatKind::Ref(pat, mut_) => Pat::Ref(Box::new(Pat::from_ast(&pat.kind, scope)), *mut_), + PatKind::Lit(_) => Pat::Lit, + PatKind::Range(..) => Pat::Range, + PatKind::Slice(..) => Pat::Slice, + // ignore paren + PatKind::Paren(pat) => Pat::from_ast(&pat.kind, scope), + PatKind::MacCall(_) => Pat::Mac, + PatKind::Rest => Pat::Rest, + PatKind::Or(_) => Pat::Or, + } + } +} + +#[derive(Clone, Debug, PartialEq)] +pub struct FieldPat { + pub field_name: String, + pub pat: Box, +} + +impl FieldPat { + pub fn from_ast(fpat: &ast::PatField, scope: &Scope) -> Self { + FieldPat { + field_name: fpat.ident.to_string(), + pat: Box::new(Pat::from_ast(&fpat.pat.kind, scope)), + } + } +} + +/// Prefix of path. +/// e.g. for path `::std` => Global +/// for path `self::abc` => Self_ +#[derive(Clone, Copy, Debug, Eq, PartialEq)] +pub enum PathPrefix { + Crate, + Super, + Self_, + Global, +} + +impl PathPrefix { + pub(crate) fn from_str(s: &str) -> Option { + match s { + "crate" => Some(PathPrefix::Crate), + "super" => Some(PathPrefix::Super), + "self" => Some(PathPrefix::Self_), + "{{root}}" => Some(PathPrefix::Global), + _ => None, + } + } +} + +// The racer implementation of an ast::Path. 
Difference is that it is Send-able +#[derive(Clone, PartialEq)] +pub struct Path { + pub prefix: Option, + pub segments: Vec, +} + +impl Path { + pub(crate) fn replace_by_bounds(&mut self, gen: &GenericsArgs) { + self.segments.iter_mut().for_each(|segment| { + segment + .generics + .iter_mut() + .for_each(|generics| match generics { + Ty::PathSearch(ref mut ps) => { + if let Some((_, param)) = gen.search_param_by_path(&ps.path) { + if let Some(resolved) = param.resolved() { + *generics = resolved.to_owned(); + } else { + *generics = Ty::Match(param.clone().into_match()); + } + } else { + ps.path.replace_by_bounds(gen); + } + } + _ => {} + }) + }) + } + + pub fn is_single(&self) -> bool { + self.segments.len() == 1 + } + + pub fn from_ast_nogen(path: &ast::Path) -> Path { + let mut segments = Vec::new(); + for seg in path.segments.iter() { + let name = seg.ident.name.to_string(); + // used right now in use tree + segments.push(PathSegment::new(name, vec![], None)); + } + Path { + prefix: None, + segments, + } + } + + pub fn from_ast(path: &ast::Path, scope: &Scope) -> Path { + let mut segments = Vec::new(); + for seg in path.segments.iter() { + let name = seg.ident.name.to_string(); + let mut types = Vec::new(); + let mut output = None; + + if let Some(ref params) = seg.args { + if let ast::GenericArgs::AngleBracketed(ref angle_args) = **params { + angle_args.args.iter().for_each(|arg| { + if let ast::AngleBracketedArg::Arg(ast::GenericArg::Type(ty)) = arg { + if let Some(ty) = Ty::from_ast(ty, scope) { + types.push(ty); + } + } + }) + } + // TODO: support inputs in GenericArgs::Parenthesized (A path like `Foo(A,B) -> C`) + if let ast::GenericArgs::Parenthesized(ref paren_args) = **params { + if let ast::FnRetTy::Ty(ref ty) = paren_args.output { + output = Ty::from_ast(&*ty, scope); + } + } + } + + segments.push(PathSegment::new(name, types, output)); + } + Path { + prefix: None, + segments, + } + } + + pub fn generic_types(&self) -> impl Iterator { + self.segments[self.segments.len() - 1].generics.iter() + } + + pub fn single(seg: PathSegment) -> Path { + Path { + prefix: None, + segments: vec![seg], + } + } + + pub fn set_prefix(&mut self) { + if self.prefix.is_some() { + return; + } + self.prefix = self + .segments + .first() + .and_then(|seg| PathPrefix::from_str(&seg.name)); + if self.prefix.is_some() { + self.segments.remove(0); + } + } + + pub fn from_vec(global: bool, v: Vec<&str>) -> Path { + Self::from_iter(global, v.into_iter().map(|s| s.to_owned())) + } + + pub fn from_svec(global: bool, v: Vec) -> Path { + Self::from_iter(global, v.into_iter()) + } + + pub fn from_iter(global: bool, iter: impl Iterator) -> Path { + let mut prefix = if global { + Some(PathPrefix::Global) + } else { + None + }; + let segments: Vec<_> = iter + .enumerate() + .filter_map(|(i, s)| { + if i == 0 && prefix.is_none() { + if let Some(pre) = PathPrefix::from_str(&s) { + prefix = Some(pre); + return None; + } + } + Some(PathSegment::from(s)) + }) + .collect(); + Path { prefix, segments } + } + + pub fn extend(&mut self, path: Path) -> &mut Self { + self.segments.extend(path.segments); + self + } + + pub fn len(&self) -> usize { + self.segments.len() + } + + pub fn name(&self) -> Option<&str> { + self.segments.last().map(|seg| &*seg.name) + } +} + +impl fmt::Debug for Path { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + write!(f, "P[")?; + let mut first = true; + for seg in &self.segments { + if first { + write!(f, "{}", seg.name)?; + first = false; + } else { + write!(f, "::{}", 
seg.name)?; + } + + if !seg.output.is_none() { + write!(f, "(")?; + } + + if !seg.generics.is_empty() { + if seg.output.is_none() { + write!(f, "<")?; + } + for (i, ty) in seg.generics.iter().enumerate() { + if i == 0 { + write!(f, "{:?}", ty)?; + } else { + write!(f, ",{:?}", ty)? + } + } + if seg.output.is_none() { + write!(f, ">")?; + } + } + if !seg.output.is_none() { + write!(f, ")->{:?}", seg.output.as_ref().unwrap())?; + } + } + write!(f, "]") + } +} + +impl fmt::Display for Path { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + let mut first = true; + for seg in &self.segments { + if first { + write!(f, "{}", seg.name)?; + first = false; + } else { + write!(f, "::{}", seg.name)?; + } + + if !seg.generics.is_empty() { + write!(f, "<")?; + for (i, ty) in seg.generics.iter().enumerate() { + if i == 0 { + write!(f, "{}", ty)?; + } else { + write!(f, ", {}", ty)? + } + } + write!(f, ">")?; + } + } + Ok(()) + } +} + +#[derive(Debug, Clone, PartialEq)] +pub struct PathSegment { + pub name: String, + pub generics: Vec, + /// If this path segment is a closure, it's return type + pub output: Option, +} + +impl PathSegment { + pub fn new(name: String, generics: Vec, output: Option) -> Self { + PathSegment { + name, + generics, + output, + } + } +} + +impl From for PathSegment { + fn from(name: String) -> Self { + PathSegment { + name, + generics: Vec::new(), + output: None, + } + } +} + +/// Information about generic types in a match +#[derive(Clone, PartialEq)] +pub struct PathSearch { + pub path: Path, + pub filepath: PathBuf, + pub point: BytePos, +} + +impl PathSearch { + pub fn new(path: Path, scope: Scope) -> Self { + let Scope { filepath, point } = scope; + PathSearch { + path, + filepath, + point, + } + } + pub(crate) fn resolve_as_match(&self, session: &Session<'_>) -> Option { + find_type_match(&self.path, &self.filepath, self.point, session) + } +} + +impl fmt::Debug for PathSearch { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + write!( + f, + "Search [{:?}, {:?}, {:?}]", + self.path, + self.filepath.display(), + self.point + ) + } +} + +/// Wrapper struct for representing trait bounds. 
+/// Its usages are +/// - for generic types like T: Debug + Clone +/// - for trait inheritance like trait A: Debug + Clone +/// - for impl_trait like fn f(a: impl Debug + Clone) +/// - for dynamic traits(dyn_trait) like Box or Box +#[derive(Clone, Debug, PartialEq)] +pub struct TraitBounds(Vec); + +impl TraitBounds { + /// checks if it contains a trait, whick its name is 'name' + pub fn find_by_name(&self, name: &str) -> Option<&PathSearch> { + self.find_by_names(&[name]) + } + pub fn find_by_name_mut(&mut self, name: &str) -> Option<&mut PathSearch> { + self.find_by_names_mut(&[name]) + } + /// checks if it contains a trait, whick its name is 'name' + pub fn find_by_names(&self, names: &[&str]) -> Option<&PathSearch> { + self.0.iter().find(|path_search| { + let seg = &path_search.path.segments; + seg.len() == 1 && names.contains(&&*seg[0].name) + }) + } + pub fn find_by_names_mut(&mut self, names: &[&str]) -> Option<&mut PathSearch> { + self.0.iter_mut().find(|path_search| { + let seg = &path_search.path.segments; + seg.len() == 1 && names.contains(&&*seg[0].name) + }) + } + + pub fn iter(&self) -> impl Iterator { + self.0.iter() + } + + pub fn into_iter(self) -> impl Iterator { + self.0.into_iter() + } + /// Search traits included in bounds and return Matches + pub fn get_traits(&self, session: &Session<'_>) -> Vec { + self.0 + .iter() + .filter_map(|ps| { + nameres::resolve_path( + &ps.path, + &ps.filepath, + ps.point, + core::SearchType::ExactMatch, + core::Namespace::Trait, + session, + &ImportInfo::default(), + ) + .into_iter() + .nth(0) + }) + .collect() + } + #[inline] + pub fn len(&self) -> usize { + self.0.len() + } + + pub fn has_closure(&self) -> bool { + self.find_by_names(&["Fn", "FnMut", "FnOnce"]).is_some() + } + + pub fn get_closure(&self) -> Option<&PathSearch> { + self.find_by_names(&["Fn", "FnMut", "FnOnce"]) + } + + pub fn get_closure_mut(&mut self) -> Option<&mut PathSearch> { + self.find_by_names_mut(&["Fn", "FnMut", "FnOnce"]) + } + + pub(crate) fn from_generic_bounds>( + bounds: &GenericBounds, + filepath: P, + offset: i32, + ) -> TraitBounds { + let vec = bounds + .iter() + .filter_map(|bound| { + if let GenericBound::Trait(ref ptrait_ref, _) = *bound { + let ast_path = &ptrait_ref.trait_ref.path; + let source_map::BytePos(point) = ast_path.span.lo(); + let scope = Scope::new( + filepath.as_ref().to_path_buf(), + BytePos::from((point as i32 + offset) as u32), + ); + let path = Path::from_ast(&ast_path, &scope); + let path_search = PathSearch::new(path, scope); + Some(path_search) + } else { + None + } + }) + .collect(); + TraitBounds(vec) + } + fn extend(&mut self, other: Self) { + self.0.extend(other.0) + } +} + +/// Argument of generics like T: From +/// It's intended to use this type only for declaration of type parameter. 
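+/// e.g. for `fn largest<T: PartialOrd + Copy>(list: &[T]) -> T` this records
+/// `name == "T"` with the `PartialOrd + Copy` bounds, and `resolved` stays
+/// `None` until a concrete type for `T` has been inferred.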
+// TODO: impl trait's name +// TODO: it has too many PathBuf +#[derive(Clone, Debug, PartialEq)] +pub struct TypeParameter { + /// the name of type parameter declared in generics, like 'T' + pub name: String, + /// The point 'T' appears + pub point: BytePos, + /// file path + pub filepath: PathBuf, + /// bounds + pub bounds: TraitBounds, + /// Resolved Type + pub resolved: Option, +} + +impl TypeParameter { + pub fn name(&self) -> &str { + &(*self.name) + } + pub(crate) fn into_match(self) -> Match { + // TODO: contextstr, local + Match { + matchstr: self.name, + filepath: self.filepath, + point: self.point, + coords: None, + local: false, + mtype: MatchType::TypeParameter(Box::new(self.bounds)), + contextstr: String::new(), + docs: String::new(), + } + } + pub(crate) fn resolve(&mut self, ty: Ty) { + self.resolved = Some(ty); + } + pub(crate) fn resolved(&self) -> Option<&Ty> { + self.resolved.as_ref() + } + pub(crate) fn add_bound(&mut self, bound: TraitBounds) { + let add_bounds: Vec<_> = bound + .0 + .into_iter() + .filter(|p| { + if let Some(name) = p.path.name() { + self.bounds.find_by_name(name).is_none() + } else { + true + } + }) + .collect(); + self.bounds.0.extend(add_bounds); + } +} + +/// List of Args in generics, e.g. +/// Now it's intended to use only for type parameters +// TODO: should we extend this type enable to handle both type parameters and true types? +#[derive(Clone, Debug, Default, PartialEq)] +pub struct GenericsArgs(pub Vec); + +impl GenericsArgs { + pub(crate) fn extend(&mut self, other: GenericsArgs) { + self.0.extend(other.0); + } + pub(crate) fn from_generics<'a, P: AsRef>( + generics: &'a ast::Generics, + filepath: P, + offset: i32, + ) -> Self { + let mut args = Vec::new(); + let mut closure_args = Vec::new(); + for param in generics.params.iter() { + match param.kind { + // TODO: lifetime support + GenericParamKind::Lifetime => {} + // TODO: should we handle default type here? + GenericParamKind::Type { default: _ } => { + let param_name = param.ident.name.to_string(); + let source_map::BytePos(point) = param.ident.span.lo(); + let bounds = TraitBounds::from_generic_bounds(¶m.bounds, &filepath, offset); + let type_param = TypeParameter { + name: param_name, + point: BytePos::from((point as i32 + offset) as u32), + filepath: filepath.as_ref().to_path_buf(), + bounds, + resolved: None, + }; + if type_param.bounds.has_closure() { + closure_args.push(type_param); + } else { + args.push(type_param); + } + } + // TODO: Support const + GenericParamKind::Const { ty: _, .. 
} => {} + } + } + for pred in generics.where_clause.predicates.iter() { + match pred { + WherePredicate::BoundPredicate(bound) => match bound.bounded_ty.kind { + TyKind::Path(ref _qself, ref path) => { + if let Some(seg) = path.segments.get(0) { + let name = pprust::path_segment_to_string(&seg); + let bound = + TraitBounds::from_generic_bounds(&bound.bounds, &filepath, offset); + if let Some(tp) = args.iter_mut().find(|tp| tp.name == name) { + tp.bounds.extend(bound); + continue; + } + if let Some(tp) = closure_args.iter_mut().find(|tp| tp.name == name) { + tp.bounds.extend(bound); + } + } + } + // TODO 'self' support + TyKind::ImplicitSelf => {} + _ => {} + }, + // TODO: lifetime support + WherePredicate::RegionPredicate(_) => {} + _ => {} + } + } + + // resolve the closure's return type into the containing function's type parameter + fn replace_closure_output_with_matching_type_params_from_fn( + tp: &mut TypeParameter, + args: &GenericsArgs, + ) { + if let Some(ps) = tp.bounds.get_closure_mut() { + for segment in ps.path.segments.iter_mut() { + segment.output = segment.output.take().map(|ty| match ty { + Ty::PathSearch(mut ps) => { + match args.get_tbound_match(&ps.path.segments[0].name) { + Some(m) => Ty::Match(m), + None => { + // if output is a PathSearch, it may have generics + // eg. Option or Box>, so recursively replace + // them with that of the enclosing function's type params + ps.path.replace_by_bounds(args); + Ty::PathSearch(ps) + } + } + } + ty => ty, + }); + } + } + } + + let mut args = GenericsArgs(args); + // closure's return types may be the generic args from the function's definition + // like K>, so handle closure types after processing + // other type parameters + for type_param in closure_args.iter_mut() { + replace_closure_output_with_matching_type_params_from_fn(type_param, &args); + } + args.extend(GenericsArgs(closure_args)); + args + } + + pub fn get_idents(&self) -> Vec { + self.0.iter().map(|g| g.name.clone()).collect() + } + pub fn args(&self) -> impl Iterator { + self.0.iter() + } + pub fn args_mut(&mut self) -> impl Iterator { + self.0.iter_mut() + } + pub fn search_param_by_path(&self, path: &Path) -> Option<(usize, &TypeParameter)> { + if !path.is_single() { + return None; + } + let query = &path.segments[0].name; + for (i, typ) in self.0.iter().enumerate() { + if typ.name() == query { + return Some((i, typ)); + } + } + None + } + pub fn search_param_by_name(&self, name: &str) -> Option<(usize, &TypeParameter)> { + for (i, typ) in self.0.iter().enumerate() { + if typ.name() == name { + return Some((i, typ)); + } + } + None + } + pub fn get_tbound_match(&self, name: &str) -> Option { + Some(self.search_param_by_name(name)?.1.clone().into_match()) + } + pub(crate) fn add_bound(&mut self, pos: usize, bound: TraitBounds) { + if let Some(param) = self.0.get_mut(pos) { + param.add_bound(bound); + } + } + pub(crate) fn is_empty(&self) -> bool { + self.0.is_empty() + } + pub(crate) fn apply_types(&mut self, other: &[Ty]) { + for (l, r) in self.0.iter_mut().zip(other.iter()) { + l.resolve(r.clone()); + } + } +} + +/// `Impl` information +#[derive(Clone, Debug, PartialEq)] +pub struct ImplHeader { + self_path: Path, + trait_path: Option, + pub(crate) generics: GenericsArgs, + filepath: PathBuf, + // TODO: should be removed + local: bool, + impl_start: BytePos, + block_start: BytePos, +} + +impl ImplHeader { + pub(crate) fn new( + generics: &ast::Generics, + path: &FilePath, + otrait: &Option, + self_type: &ast::Ty, + offset: BytePos, + local: bool, + impl_start: 
BytePos, + block_start: BytePos, + ) -> Option { + let generics = GenericsArgs::from_generics(generics, path, offset.0 as i32); + let scope = Scope::new(path.to_owned(), impl_start); + let self_path = get_self_path(&self_type.kind, &scope)?; + let trait_path = otrait + .as_ref() + .map(|tref| Path::from_ast(&tref.path, &scope)); + Some(ImplHeader { + self_path, + trait_path, + generics, + filepath: path.to_owned(), + local, + impl_start, + block_start, + }) + } + pub(crate) fn self_path(&self) -> &Path { + &self.self_path + } + pub(crate) fn trait_path(&self) -> Option<&Path> { + self.trait_path.as_ref() + } + pub(crate) fn file_path(&self) -> &FilePath { + self.filepath.as_ref() + } + pub(crate) fn generics(&self) -> &GenericsArgs { + &self.generics + } + pub(crate) fn impl_start(&self) -> BytePos { + self.impl_start + } + // TODO: should be removed + pub(crate) fn is_local(&self) -> bool { + self.local || self.trait_path.is_some() + } + pub(crate) fn is_trait(&self) -> bool { + self.trait_path.is_some() + } + pub(crate) fn resolve_trait( + &self, + session: &Session<'_>, + import_info: &ImportInfo<'_, '_>, + ) -> Option { + nameres::resolve_path( + self.trait_path()?, + self.file_path(), + self.impl_start, + core::SearchType::ExactMatch, + core::Namespace::Trait, + session, + import_info, + ) + .into_iter() + .nth(0) + } + pub(crate) fn scope_start(&self) -> BytePos { + self.block_start.increment() + } +} + +pub(crate) fn get_self_path(ty: &TyKind, scope: &Scope) -> Option { + match ty { + TyKind::Rptr(_, ref ty) => get_self_path(&ty.ty.kind, scope), + TyKind::Path(_, ref path) => Some(Path::from_ast(path, &scope)), + // HACK: treat slice as path + TyKind::Slice(_) => Some(Path::single("[T]".to_owned().into())), + _ => None, + } +} diff --git a/racer/src/racer/benches.rs b/racer/src/racer/benches.rs new file mode 100644 index 0000000000..96401c4afa --- /dev/null +++ b/racer/src/racer/benches.rs @@ -0,0 +1,63 @@ +extern crate test; + +use std::env::var; +use std::fs::File; +use std::io::Read; +use std::path::PathBuf; + +use codecleaner::code_chunks; +use codeiter::StmtIndicesIter; +use core::IndexedSource; +use scopes::{mask_comments, mask_sub_scopes}; + +use self::test::Bencher; + +fn get_rust_file_str(path: &[&str]) -> String { + let mut src_path = match var("RUST_SRC_PATH") { + Ok(env) => PathBuf::from(&env), + _ => panic!("Cannot find $RUST_SRC_PATH"), + }; + for &s in path.iter() { + src_path.push(s); + } + + let mut s = String::new(); + File::open(&src_path) + .unwrap() + .read_to_string(&mut s) + .unwrap(); + s +} + +#[bench] +fn bench_code_chunks(b: &mut Bencher) { + let src = &get_rust_file_str(&["liballoc", "vec.rs"]); + b.iter(|| { + test::black_box(code_chunks(src).collect::>()); + }); +} + +#[bench] +fn bench_iter_stmts(b: &mut Bencher) { + let src = &get_rust_file_str(&["liballoc", "vec.rs"]); + b.iter(|| { + test::black_box(StmtIndicesIter::from_parts(src, code_chunks(src)).collect::>()); + }); +} + +#[bench] +fn bench_mask_comments(b: &mut Bencher) { + let src_indexed = IndexedSource::new(get_rust_file_str(&["liballoc", "vec.rs"])); + let src = src_indexed.as_src(); + b.iter(|| { + test::black_box(mask_comments(src)); + }); +} + +#[bench] +fn bench_mask_sub_scopes(b: &mut Bencher) { + let src = &get_rust_file_str(&["liballoc", "vec.rs"]); + b.iter(|| { + test::black_box(mask_sub_scopes(src)); + }); +} diff --git a/racer/src/racer/codecleaner.rs b/racer/src/racer/codecleaner.rs new file mode 100644 index 0000000000..6d5917e91f --- /dev/null +++ 
b/racer/src/racer/codecleaner.rs @@ -0,0 +1,460 @@ +use crate::core::{BytePos, ByteRange}; + +/// Type of the string +#[derive(Clone, Copy, Debug)] +enum StrStyle { + /// normal string starts with " + Cooked, + /// Raw(n) => raw string started with n #s + Raw(usize), +} + +#[derive(Clone, Copy)] +enum State { + Code, + Comment, + CommentBlock, + String(StrStyle), + Char, + Finished, +} + +#[derive(Clone, Copy)] +pub struct CodeIndicesIter<'a> { + src: &'a str, + pos: BytePos, + state: State, +} + +impl<'a> Iterator for CodeIndicesIter<'a> { + type Item = ByteRange; + + fn next(&mut self) -> Option { + match self.state { + State::Code => Some(self.code()), + State::Comment => Some(self.comment()), + State::CommentBlock => Some(self.comment_block()), + State::String(style) => Some(self.string(style)), + State::Char => Some(self.char()), + State::Finished => None, + } + } +} + +impl<'a> CodeIndicesIter<'a> { + fn code(&mut self) -> ByteRange { + let mut pos = self.pos; + let start = match self.state { + State::String(_) | State::Char => pos.decrement(), // include quote + _ => pos, + }; + let src_bytes = self.src.as_bytes(); + for &b in &src_bytes[pos.0..] { + pos = pos.increment(); + match b { + b'/' if src_bytes.len() > pos.0 => match src_bytes[pos.0] { + b'/' => { + self.state = State::Comment; + self.pos = pos.increment(); + return ByteRange::new(start, pos.decrement()); + } + b'*' => { + self.state = State::CommentBlock; + self.pos = pos.increment(); + return ByteRange::new(start, pos.decrement()); + } + _ => {} + }, + b'"' => { + // " + let str_type = self.detect_str_type(pos); + self.state = State::String(str_type); + self.pos = pos; + return ByteRange::new(start, pos); // include dblquotes + } + b'\'' => { + // single quotes are also used for lifetimes, so we need to + // be confident that this is not a lifetime. + // Look for backslash starting the escape, or a closing quote: + if src_bytes.len() > pos.increment().0 + && (src_bytes[pos.0] == b'\\' || src_bytes[pos.increment().0] == b'\'') + { + self.state = State::Char; + self.pos = pos; + return ByteRange::new(start, pos); // include single quote + } + } + _ => {} + } + } + + self.state = State::Finished; + ByteRange::new(start, self.src.len().into()) + } + + fn comment(&mut self) -> ByteRange { + let mut pos = self.pos; + let src_bytes = self.src.as_bytes(); + for &b in &src_bytes[pos.0..] { + pos = pos.increment(); + if b == b'\n' { + if pos.0 + 2 <= src_bytes.len() && src_bytes[pos.0..pos.0 + 2] == [b'/', b'/'] { + continue; + } + break; + } + } + self.pos = pos; + self.code() + } + + fn comment_block(&mut self) -> ByteRange { + let mut nesting_level = 0usize; + let mut prev = b' '; + let mut pos = self.pos; + for &b in &self.src.as_bytes()[pos.0..] { + pos = pos.increment(); + match b { + b'/' if prev == b'*' => { + prev = b' '; + if nesting_level == 0 { + break; + } else { + nesting_level -= 1; + } + } + b'*' if prev == b'/' => { + prev = b' '; + nesting_level += 1; + } + _ => { + prev = b; + } + } + } + self.pos = pos; + self.code() + } + + fn string(&mut self, str_type: StrStyle) -> ByteRange { + let src_bytes = self.src.as_bytes(); + let mut pos = self.pos; + match str_type { + StrStyle::Raw(level) => { + // raw string (e.g. br#"\"#) + #[derive(Debug)] + enum SharpState { + Sharp { + // number of preceding #s + num_sharps: usize, + // Position of last " + quote_pos: BytePos, + }, + None, // No preceding "##... 
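+            //
+            // e.g. with level == 2 (an r##"..."## literal) the scan below
+            // skips an inner `"#` (only one '#' follows the quote) and only
+            // ends the string once it reaches `"##`.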
+ } + let mut cur_state = SharpState::None; + let mut end_was_found = false; + // detect corresponding end(if start is r##", "##) greedily + for (i, &b) in src_bytes[self.pos.0..].iter().enumerate() { + match cur_state { + SharpState::Sharp { + num_sharps, + quote_pos, + } => { + cur_state = match b { + b'#' => SharpState::Sharp { + num_sharps: num_sharps + 1, + quote_pos, + }, + b'"' => SharpState::Sharp { + num_sharps: 0, + quote_pos: BytePos(i), + }, + _ => SharpState::None, + } + } + SharpState::None => { + if b == b'"' { + cur_state = SharpState::Sharp { + num_sharps: 0, + quote_pos: BytePos(i), + }; + } + } + } + if let SharpState::Sharp { + num_sharps, + quote_pos, + } = cur_state + { + if num_sharps == level { + end_was_found = true; + pos += quote_pos.increment(); + break; + } + } + } + if !end_was_found { + pos = src_bytes.len().into(); + } + } + StrStyle::Cooked => { + let mut is_not_escaped = true; + for &b in &src_bytes[pos.0..] { + pos = pos.increment(); + match b { + b'"' if is_not_escaped => { + break; + } // " + b'\\' => { + is_not_escaped = !is_not_escaped; + } + _ => { + is_not_escaped = true; + } + } + } + } + }; + self.pos = pos; + self.code() + } + + fn char(&mut self) -> ByteRange { + let mut is_not_escaped = true; + let mut pos = self.pos; + for &b in &self.src.as_bytes()[pos.0..] { + pos = pos.increment(); + match b { + b'\'' if is_not_escaped => { + break; + } + b'\\' => { + is_not_escaped = !is_not_escaped; + } + _ => { + is_not_escaped = true; + } + } + } + self.pos = pos; + self.code() + } + + fn detect_str_type(&self, pos: BytePos) -> StrStyle { + let src_bytes = self.src.as_bytes(); + let mut sharp = 0; + if pos == BytePos::ZERO { + return StrStyle::Cooked; + } + // now pos is at one byte after ", so we have to start at pos - 2 + for &b in src_bytes[..pos.decrement().0].iter().rev() { + match b { + b'#' => sharp += 1, + b'r' => return StrStyle::Raw(sharp), + _ => return StrStyle::Cooked, + } + } + StrStyle::Cooked + } +} + +/// Returns indices of chunks of code (minus comments and string contents) +pub fn code_chunks(src: &str) -> CodeIndicesIter<'_> { + CodeIndicesIter { + src, + state: State::Code, + pos: BytePos::ZERO, + } +} + +#[cfg(test)] +mod code_indices_iter_test { + use super::*; + use crate::testutils::{rejustify, slice}; + + #[test] + fn removes_a_comment() { + let src = &rejustify( + " + this is some code // this is a comment + some more code + ", + ); + let mut it = code_chunks(src); + assert_eq!("this is some code ", slice(src, it.next().unwrap())); + assert_eq!("some more code", slice(src, it.next().unwrap())); + } + + #[test] + fn removes_consecutive_comments() { + let src = &rejustify( + " + this is some code // this is a comment + // this is more comment + // another comment + some more code + ", + ); + let mut it = code_chunks(src); + assert_eq!("this is some code ", slice(src, it.next().unwrap())); + assert_eq!("some more code", slice(src, it.next().unwrap())); + } + + #[test] + fn removes_string_contents() { + let src = &rejustify( + " + this is some code \"this is a string\" more code + ", + ); + let mut it = code_chunks(src); + assert_eq!("this is some code \"", slice(src, it.next().unwrap())); + assert_eq!("\" more code", slice(src, it.next().unwrap())); + } + + #[test] + fn removes_char_contents() { + let src = &rejustify( + " + this is some code \'\"\' more code \'\\x00\' and \'\\\'\' that\'s it + ", + ); + let mut it = code_chunks(src); + assert_eq!("this is some code \'", slice(src, it.next().unwrap())); + assert_eq!("\' more code 
\'", slice(src, it.next().unwrap())); + assert_eq!("\' and \'", slice(src, it.next().unwrap())); + assert_eq!("\' that\'s it", slice(src, it.next().unwrap())); + } + + #[test] + fn removes_string_contents_with_a_comment_in_it() { + let src = &rejustify( + " + this is some code \"string with a // fake comment \" more code + ", + ); + let mut it = code_chunks(src); + assert_eq!("this is some code \"", slice(src, it.next().unwrap())); + assert_eq!("\" more code", slice(src, it.next().unwrap())); + } + + #[test] + fn removes_a_comment_with_a_dbl_quote_in_it() { + let src = &rejustify( + " + this is some code // comment with \" double quote + some more code + ", + ); + let mut it = code_chunks(src); + assert_eq!("this is some code ", slice(src, it.next().unwrap())); + assert_eq!("some more code", slice(src, it.next().unwrap())); + } + + #[test] + fn removes_multiline_comment() { + let src = &rejustify( + " + this is some code /* this is a + \"multiline\" comment */some more code + ", + ); + let mut it = code_chunks(src); + assert_eq!("this is some code ", slice(src, it.next().unwrap())); + assert_eq!("some more code", slice(src, it.next().unwrap())); + } + + #[test] + fn handles_nesting_of_block_comments() { + let src = &rejustify( + " + this is some code /* nested /* block */ comment */ some more code + ", + ); + let mut it = code_chunks(src); + assert_eq!("this is some code ", slice(src, it.next().unwrap())); + assert_eq!(" some more code", slice(src, it.next().unwrap())); + } + + #[test] + fn handles_documentation_block_comments_nested_into_block_comments() { + let src = &rejustify( + " + this is some code /* nested /** documentation block */ comment */ some more code + ", + ); + let mut it = code_chunks(src); + assert_eq!("this is some code ", slice(src, it.next().unwrap())); + assert_eq!(" some more code", slice(src, it.next().unwrap())); + } + + #[test] + fn removes_string_with_escaped_dblquote_in_it() { + let src = &rejustify( + " + this is some code \"string with a \\\" escaped dblquote fake comment \" more code + ", + ); + + let mut it = code_chunks(src); + assert_eq!("this is some code \"", slice(src, it.next().unwrap())); + assert_eq!("\" more code", slice(src, it.next().unwrap())); + } + + #[test] + fn removes_raw_string_with_dangling_escape_in_it() { + let src = &rejustify( + " + this is some code br\" escaped dblquote raw string \\\" more code + ", + ); + + let mut it = code_chunks(src); + assert_eq!("this is some code br\"", slice(src, it.next().unwrap())); + assert_eq!("\" more code", slice(src, it.next().unwrap())); + } + + #[test] + fn removes_string_with_escaped_slash_before_dblquote_in_it() { + let src = &rejustify(" + this is some code \"string with an escaped slash, so dbl quote does end the string after all \\\\\" more code + "); + + let mut it = code_chunks(src); + assert_eq!("this is some code \"", slice(src, it.next().unwrap())); + assert_eq!("\" more code", slice(src, it.next().unwrap())); + } + + #[test] + fn handles_tricky_bit_from_str_rs() { + let src = &rejustify( + " + before(\"\\\\\'\\\\\\\"\\\\\\\\\"); + more_code(\" skip me \") + ", + ); + + for range in code_chunks(src) { + let range = || range.to_range(); + println!("BLOB |{}|", &src[range()]); + if src[range()].contains("skip me") { + panic!("{}", &src[range()]); + } + } + } + + #[test] + fn removes_nested_rawstr() { + let src = &rejustify( + r####" + this is some code br###""" r##""##"### more code + "####, + ); + + let mut it = code_chunks(src); + assert_eq!("this is some code br###\"", slice(src, 
it.next().unwrap())); + assert_eq!("\"### more code", slice(src, it.next().unwrap())); + } + +} diff --git a/racer/src/racer/codeiter.rs b/racer/src/racer/codeiter.rs new file mode 100644 index 0000000000..1a171dc08e --- /dev/null +++ b/racer/src/racer/codeiter.rs @@ -0,0 +1,367 @@ +use std::iter::{Fuse, Iterator}; + +use crate::core::{BytePos, ByteRange}; +use crate::scopes; +use crate::util::is_whitespace_byte; + +/// An iterator which iterates statements. +/// e.g. for "let a = 5; let b = 4;" it returns "let a = 5;" and then "let b = 4;" +/// This iterator only works for comment-masked source codes. +pub struct StmtIndicesIter<'a> { + src: &'a str, + pos: BytePos, + end: BytePos, +} + +impl<'a> Iterator for StmtIndicesIter<'a> { + type Item = ByteRange; + + #[inline] + fn next(&mut self) -> Option { + let src_bytes = self.src.as_bytes(); + let mut enddelim = b';'; + let mut bracelevel = 0isize; + let mut parenlevel = 0isize; + let mut bracketlevel = 0isize; + let mut pos = self.pos; + for &b in &src_bytes[pos.0..self.end.0] { + match b { + b' ' | b'\r' | b'\n' | b'\t' => { + pos += BytePos(1); + } + _ => { + break; + } + } + } + let start = pos; + // test attribute #[foo = bar] + if pos < self.end && src_bytes[pos.0] == b'#' { + enddelim = b']' + }; + // iterate through the chunk, looking for stmt end + for &b in &src_bytes[pos.0..self.end.0] { + pos += BytePos(1); + match b { + b'(' => { + parenlevel += 1; + } + b')' => { + parenlevel -= 1; + } + b'[' => { + bracketlevel += 1; + } + b']' => { + bracketlevel -= 1; + } + b'{' => { + // if we are top level and stmt is not a 'use' or 'let' then + // closebrace finishes the stmt + if bracelevel == 0 + && parenlevel == 0 + && !(is_a_use_stmt(src_bytes, start, pos) + || is_a_let_stmt(src_bytes, start, pos)) + { + enddelim = b'}'; + } + bracelevel += 1; + } + b'}' => { + // have we reached the end of the scope? + if bracelevel == 0 { + self.pos = pos; + return None; + } + bracelevel -= 1; + } + b'!' 
=> { + // macro if followed by at least one space or ( + // FIXME: test with boolean 'not' expression + if parenlevel == 0 && bracelevel == 0 && pos < self.end && (pos - start).0 > 1 { + match src_bytes[pos.0] { + b' ' | b'\r' | b'\n' | b'\t' | b'(' => { + enddelim = b')'; + } + _ => {} + } + } + } + _ => {} + } + if parenlevel < 0 + || bracelevel < 0 + || bracketlevel < 0 + || (enddelim == b && bracelevel == 0 && parenlevel == 0 && bracketlevel == 0) + { + self.pos = pos; + return Some(ByteRange::new(start, pos)); + } + } + if start < self.end { + self.pos = pos; + return Some(ByteRange::new(start, self.end)); + } + None + } +} + +fn is_a_use_stmt(src_bytes: &[u8], start: BytePos, pos: BytePos) -> bool { + let src = unsafe { ::std::str::from_utf8_unchecked(&src_bytes[start.0..pos.0]) }; + scopes::use_stmt_start(&src).is_some() +} + +fn is_a_let_stmt(src_bytes: &[u8], start: BytePos, pos: BytePos) -> bool { + pos.0 > 3 + && &src_bytes[start.0..start.0 + 3] == b"let" + && is_whitespace_byte(src_bytes[start.0 + 3]) +} + +impl<'a> StmtIndicesIter<'a> { + pub fn from_parts(src: &str) -> Fuse> { + StmtIndicesIter { + src, + pos: BytePos::ZERO, + end: BytePos(src.len()), + } + .fuse() + } +} + +#[cfg(test)] +mod test { + use std::iter::Fuse; + + use crate::codecleaner; + use crate::testutils::{rejustify, slice}; + + use super::*; + + fn iter_stmts(src: &str) -> Fuse> { + let idx: Vec<_> = codecleaner::code_chunks(&src).collect(); + let code = scopes::mask_comments(src, &idx); + let code: &'static str = Box::leak(code.into_boxed_str()); + StmtIndicesIter::from_parts(code) + } + + #[test] + fn iterates_single_use_stmts() { + let src = rejustify( + " + use std::Foo; // a comment + use std::Bar; + ", + ); + + let mut it = iter_stmts(src.as_ref()); + assert_eq!("use std::Foo;", slice(&src, it.next().unwrap())); + assert_eq!("use std::Bar;", slice(&src, it.next().unwrap())); + } + + #[test] + fn iterates_array_stmts() { + let src = rejustify( + " + let a: [i32; 2] = [1, 2]; + let b = [[0], [1], [2]]; + let c = ([1, 2, 3])[1]; + ", + ); + + let mut it = iter_stmts(src.as_ref()); + assert_eq!("let a: [i32; 2] = [1, 2];", slice(&src, it.next().unwrap())); + assert_eq!("let b = [[0], [1], [2]];", slice(&src, it.next().unwrap())); + assert_eq!("let c = ([1, 2, 3])[1];", slice(&src, it.next().unwrap())); + } + + #[test] + fn iterates_use_stmt_over_two_lines() { + let src = rejustify( + " + use std::{Foo, + Bar}; // a comment + ", + ); + let mut it = iter_stmts(src.as_ref()); + assert_eq!( + "use std::{Foo, + Bar};", + slice(&src, it.next().unwrap()) + ); + } + + #[test] + fn iterates_use_stmt_without_the_prefix() { + let src = rejustify( + " + pub use {Foo, + Bar}; // this is also legit apparently + ", + ); + let mut it = iter_stmts(src.as_ref()); + assert_eq!( + "pub use {Foo, + Bar};", + slice(&src, it.next().unwrap()) + ); + } + + #[test] + fn iterates_while_stmt() { + let src = rejustify( + " + while self.pos < 3 { } + ", + ); + let mut it = iter_stmts(src.as_ref()); + assert_eq!("while self.pos < 3 { }", slice(&src, it.next().unwrap())); + } + + #[test] + fn iterates_lambda_arg() { + let src = rejustify( + " + myfn(|n|{}); + ", + ); + let mut it = iter_stmts(src.as_ref()); + assert_eq!("myfn(|n|{});", slice(&src, it.next().unwrap())); + } + + #[test] + fn iterates_macro() { + let src = " + mod foo; + macro_rules! 
otry( + ($e:expr) => (match $e { Some(e) => e, None => return }) + ) + mod bar; + "; + let mut it = iter_stmts(src.as_ref()); + assert_eq!("mod foo;", slice(&src, it.next().unwrap())); + assert_eq!( + "macro_rules! otry( + ($e:expr) => (match $e { Some(e) => e, None => return }) + )", + slice(&src, it.next().unwrap()) + ); + assert_eq!("mod bar;", slice(&src, it.next().unwrap())); + } + + #[test] + fn iterates_macro_invocation() { + let src = " + mod foo; + local_data_key!(local_stdout: Box) // no ';' + mod bar; + "; + let mut it = iter_stmts(src.as_ref()); + assert_eq!("mod foo;", slice(&src, it.next().unwrap())); + assert_eq!( + "local_data_key!(local_stdout: Box)", + slice(&src, it.next().unwrap()) + ); + assert_eq!("mod bar;", slice(&src, it.next().unwrap())); + } + + #[test] + fn iterates_if_else_stmt() { + let src = " + if self.pos < 3 { } else { } + "; + let mut it = iter_stmts(src.as_ref()); + assert_eq!("if self.pos < 3 { }", slice(&src, it.next().unwrap())); + assert_eq!("else { }", slice(&src, it.next().unwrap())); + } + + #[test] + fn iterates_inner_scope() { + let src = &" + while(self.pos < 3 { + let a = 35; + return a + 35; // should iterate this + } + { + b = foo; // but not this + } + "[29..]; + + let mut it = iter_stmts(src.as_ref()); + + assert_eq!("let a = 35;", slice(&src, it.next().unwrap())); + assert_eq!("return a + 35;", slice(&src, it.next().unwrap())); + assert_eq!(None, it.next()); + } + + #[test] + fn iterates_module_attribute() { + let src = rejustify( + " + #![license = \"BSD\"] + #[test] + ", + ); + let mut it = iter_stmts(src.as_ref()); + assert_eq!("#![license = \"BSD\"]", slice(&src, it.next().unwrap())); + assert_eq!("#[test]", slice(&src, it.next().unwrap())); + } + + #[test] + fn iterates_half_open_subscope_if_is_the_last_thing() { + let src = " + let something = 35; + while self.pos < 3 { + let a = 35; + return a + 35; // should iterate this + "; + + let mut it = iter_stmts(src.as_ref()); + assert_eq!("let something = 35;", slice(&src, it.next().unwrap())); + assert_eq!( + "while self.pos < 3 { + let a = 35; + return a + 35; // should iterate this + ", + slice(&src, it.next().unwrap()) + ); + } + + #[test] + fn iterates_ndarray() { + let src = " + let a = [[f64; 5]; 5]; + pub struct Matrix44f(pub [[f64; 4]; 4]); + "; + let mut it = iter_stmts(src.as_ref()); + assert_eq!("let a = [[f64; 5]; 5];", slice(&src, it.next().unwrap())); + assert_eq!( + "pub struct Matrix44f(pub [[f64; 4]; 4]);", + slice(&src, it.next().unwrap()) + ); + } + + #[test] + #[ignore] + fn iterates_for_struct() { + let src = " + let a = 5; + for St { a, b } in iter() { + let b = a; + } + while let St { a, b } = iter().next() { + + } + if let St(a) = hoge() { + + } + "; + let mut it = iter_stmts(src.as_ref()); + assert_eq!("let a = 5;", slice(&src, it.next().unwrap())); + assert_eq!( + r"for St { a, b } in iter() { + let b = a; + }", + slice(&src, it.next().unwrap()) + ); + } +} diff --git a/racer/src/racer/core.rs b/racer/src/racer/core.rs new file mode 100644 index 0000000000..842231b5f9 --- /dev/null +++ b/racer/src/racer/core.rs @@ -0,0 +1,1414 @@ +use crate::ast_types::{GenericsArgs, ImplHeader, Pat, TraitBounds, Ty, TypeParameter}; +use crate::codecleaner; +use crate::codeiter::StmtIndicesIter; +use crate::matchers::ImportInfo; +use crate::project_model::ProjectModelProvider; +use rls_span; +use std::cell::RefCell; +use std::cmp::Ordering; +use std::collections::HashMap; +use std::fs::File; +use std::io; +use std::io::Read; +use std::iter::{Fuse, Iterator}; +use 
std::ops::{Deref, Range}; +use std::rc::Rc; +use std::{fmt, vec}; +use std::{path, str}; +use rustc_span::source_map; + +use crate::ast; +use crate::fileres; +use crate::nameres; +use crate::primitive::PrimKind; +use crate::scopes; +use crate::util; + +/// Within a [`Match`], specifies what was matched +/// +/// [`Match`]: struct.Match.html +#[derive(Clone, Debug, PartialEq)] +pub enum MatchType { + Struct(Box), + Module, + MatchArm, + Function, + Method(Option>), + Crate, + Let(BytePos), + IfLet(BytePos), + WhileLet(BytePos), + For(BytePos), + StructField, + Enum(Box), + Union(Box), + /// EnumVariant needs to have Enum type to complete methods + EnumVariant(Option>), + UseAlias(Box), + AssocType, + Type, + FnArg(Box<(Pat, Option)>), + Trait, + Const, + Static, + Macro, + Builtin(PrimKind), + /// fn f or fn f(a: impl Clone) with its trait bounds + TypeParameter(Box), +} + +impl MatchType { + pub fn is_function(&self) -> bool { + match self { + MatchType::Function | MatchType::Method(_) => true, + _ => false, + } + } + pub fn is_enum(&self) -> bool { + match self { + MatchType::Enum(_) => true, + _ => false, + } + } + pub fn is_struct(&self) -> bool { + match self { + MatchType::Struct(_) => true, + _ => false, + } + } +} + +impl fmt::Display for MatchType { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match self { + MatchType::Struct(_) => write!(f, "Struct"), + MatchType::Union(_) => write!(f, "Union"), + MatchType::Method(_) => write!(f, "Method"), + MatchType::IfLet(_) => write!(f, "IfLet"), + MatchType::Let(_) => write!(f, "Let"), + MatchType::WhileLet(_) => write!(f, "WhileLet"), + MatchType::For(_) => write!(f, "For"), + MatchType::Enum(_) => write!(f, "Enum"), + MatchType::EnumVariant(_) => write!(f, "EnumVariant"), + MatchType::TypeParameter(_) => write!(f, "TypeParameter"), + MatchType::FnArg(_) => write!(f, "FnArg"), + MatchType::Type => write!(f, "Type"), + MatchType::UseAlias(_) => write!(f, "UseAlias"), + _ => fmt::Debug::fmt(self, f), + } + } +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq)] +pub enum SearchType { + ExactMatch, + StartsWith, +} + +mod declare_namespace { + // (kngwyu) I reserved Crate, Mod or other names for future usage(like for #830) + // but, currently they're not used and... I'm not sure they're useful:) + #![allow(non_upper_case_globals, unused)] + bitflags! { + /// Type context + pub struct Namespace: u32 { + const Crate = 0b0000000000001; + const Mod = 0b0000000000010; + const Space = 0b0000000000011; + const Enum = 0b0000000000100; + const Struct = 0b0000000001000; + const Union = 0b0000000010000; + const Trait = 0b0000000100000; + const TypeDef = 0b0000001000000; + const HasField = 0b0000001011100; + const Type = 0b0000001111100; + const PathParen = 0b0000001111111; + const Const = 0b0000010000000; + const Static = 0b0000100000000; + const Func = 0b0001000000000; + // for use_extern_macros + const Macro = 0b0010000000000; + const Impl = 0b0001110000000; + const PathChild = 0b0011110000000; + const Path = 0b0011111111111; + const Primitive = 0b0100000000000; + const StdMacro = 0b1000000000000; + const Global = 0b1100000000000; + } + } +} +pub use self::declare_namespace::Namespace; + +#[derive(Debug, Clone, Copy)] +pub enum CompletionType { + Field, + Path, +} + +/// 0-based byte offset in a file. 
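+///
+/// The example below is only an illustrative sketch of how these offsets
+/// behave; it relies solely on the `BytePos` items defined in this file and
+/// the re-export from `lib.rs`.
+///
+/// ```
+/// use racer::BytePos;
+///
+/// // A thin newtype over `usize`; the inner field is public.
+/// let pos = BytePos(10);
+/// assert_eq!(pos.increment(), BytePos(11));
+/// assert_eq!(pos.decrement(), BytePos(9));
+/// // `checked_sub` avoids underflow panics near the start of a file.
+/// assert_eq!(BytePos::ZERO.checked_sub(1usize), None);
+/// ```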
+#[derive( + Clone, + Copy, + Debug, + Default, + Eq, + PartialEq, + Ord, + PartialOrd, + Hash, + Index, + From, + Add, + Sub, + AddAssign, + SubAssign, +)] +pub struct BytePos(pub usize); + +impl From for BytePos { + fn from(u: u32) -> Self { + BytePos(u as usize) + } +} + +impl BytePos { + pub const ZERO: BytePos = BytePos(0); + /// returns self - 1 + pub fn decrement(&self) -> Self { + BytePos(self.0 - 1) + } + pub fn checked_sub(&self, sub: impl Into) -> Option { + self.0.checked_sub(sub.into().0).map(BytePos) + } + pub fn try_decrement(&self) -> Option { + self.0.checked_sub(1).map(BytePos) + } + /// returns self + 1 + pub fn increment(&self) -> Self { + BytePos(self.0 + 1) + } +} + +impl fmt::Display for BytePos { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + write!(f, "{}", self.0) + } +} + +/// 0-based byte range in a file. +#[derive(Clone, Copy, Default, Eq, PartialEq, Hash)] +pub struct ByteRange { + /// start of byte position in codes(inclusive) + pub start: BytePos, + /// end of byte position in codes(exclusive) + pub end: BytePos, +} + +impl ByteRange { + /// returns new ByteRange from start and end + pub fn new>(start: P, end: P) -> Self { + ByteRange { + start: start.into(), + end: end.into(), + } + } + + /// returns the length of the range + #[inline] + pub fn len(&self) -> usize { + (self.end - self.start).0 + } + + /// returns if the range contains `point` or not + #[inline] + pub fn contains(&self, point: BytePos) -> bool { + self.start <= point && point < self.end + } + + /// returns if the range contains `point` (except its start point) + #[inline] + pub fn contains_exclusive(&self, point: BytePos) -> bool { + self.start < point && point < self.end + } + + /// returns the new range with which its start is `self.start + shift`, + /// its end is `self.end + shift` + #[inline] + pub fn shift>(&self, shift: P) -> Self { + let shift = shift.into(); + ByteRange { + start: self.start + shift, + end: self.end + shift, + } + } + + /// convert the range to `std::ops::Range` + #[inline] + pub fn to_range(&self) -> Range { + self.start.0..self.end.0 + } +} + +impl PartialEq for ByteRange { + fn eq(&self, other: &BytePos) -> bool { + self.contains(*other) + } +} + +impl PartialOrd for ByteRange { + fn partial_cmp(&self, other: &BytePos) -> Option { + if *other < self.start { + Some(Ordering::Greater) + } else if *other >= self.end { + Some(Ordering::Less) + } else { + Some(Ordering::Equal) + } + } +} + +impl From for ByteRange { + fn from(span: source_map::Span) -> Self { + let (lo, hi) = ast::destruct_span(span); + ByteRange::new(lo, hi) + } +} + +impl fmt::Debug for ByteRange { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + write!(f, "ByteRange({}..{})", self.start.0, self.end.0) + } +} + +/// Row and Column position in a file +// for backward compatibility, we use 1-index row and 0-indexed column here +#[derive(Clone, Copy, Debug, Hash, PartialEq, Eq, PartialOrd, Ord)] +pub struct Coordinate { + pub row: rls_span::Row, + pub col: rls_span::Column, +} + +impl Coordinate { + /// construct new Coordinate + pub fn new(row: u32, col: u32) -> Self { + Coordinate { + row: rls_span::Row::::new_one_indexed(row), + col: rls_span::Column::::new_zero_indexed(col), + } + } + /// start point of the file + pub fn start() -> Self { + Coordinate::new(1, 0) + } +} + +/// Context, source, and etc. 
for detected completion or definition +#[derive(Clone, PartialEq)] +pub struct Match { + pub matchstr: String, + pub filepath: path::PathBuf, + pub point: BytePos, + pub coords: Option, + pub local: bool, + pub mtype: MatchType, + pub contextstr: String, + pub docs: String, +} + +impl Match { + /// Checks if two matches can be considered the same for deduplication purposes. + /// + /// This could be the basis for a `PartialEq` implementation in the future, + /// but in the interest of minimizing the crate's public API surface it's exposed + /// as a private method for now. + fn is_same_as(&self, other: &Match) -> bool { + self.point == other.point + && self.matchstr == other.matchstr + && self.filepath == other.filepath + } + pub(crate) fn to_generics(&self) -> Option<&GenericsArgs> { + match &self.mtype { + MatchType::Struct(gen_arg) | MatchType::Enum(gen_arg) => Some(gen_arg.as_ref()), + MatchType::Method(gen_arg) => gen_arg.as_ref().map(AsRef::as_ref), + _ => None, + } + } + pub(crate) fn into_generics(self) -> Option { + match self.mtype { + MatchType::Struct(gen_arg) | MatchType::Enum(gen_arg) => Some(*gen_arg), + MatchType::Method(gen_arg) => gen_arg.map(|x| *x), + _ => None, + } + } + pub(crate) fn generics(&self) -> impl Iterator { + let opt = match self.mtype { + MatchType::Struct(ref gen_arg) | MatchType::Enum(ref gen_arg) => Some(gen_arg), + MatchType::Method(ref gen_arg) => gen_arg.as_ref(), + _ => None, + }; + opt.into_iter().flat_map(|gen_arg| gen_arg.args()) + } + pub(crate) fn resolved_generics(&self) -> impl Iterator { + let opt = match self.mtype { + MatchType::Struct(ref gen_arg) | MatchType::Enum(ref gen_arg) => Some(gen_arg), + MatchType::Method(ref gen_arg) => gen_arg.as_ref(), + _ => None, + }; + opt.into_iter() + .flat_map(|gen_arg| gen_arg.args()) + .filter_map(|ty_param| ty_param.resolved.as_ref()) + } + pub(crate) fn resolve_generics(&mut self, types: &[Ty]) { + match self.mtype { + MatchType::Struct(ref mut gen_arg) | MatchType::Enum(ref mut gen_arg) => { + gen_arg.apply_types(types); + } + _ => {} + }; + } + // currently we can't resolve method's type parameter + pub(crate) fn generics_mut(&mut self) -> impl Iterator { + let opt = match &mut self.mtype { + MatchType::Struct(gen_arg) | MatchType::Enum(gen_arg) => Some(&mut **gen_arg), + _ => None, + }; + opt.into_iter().flat_map(|gen_arg| gen_arg.args_mut()) + } +} + +/// The cursor position used by public search methods +#[derive(Debug, Clone, Copy)] +pub enum Location { + /// A byte offset in the file + Point(BytePos), + /// 1-based line and column indices. 
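+    /// More precisely, `Coordinate` carries a one-indexed row and a
+    /// zero-indexed column.
+    ///
+    /// The snippet below is just a sketch of the two ways to build a
+    /// `Location`, using the `From` impls defined right after this enum:
+    ///
+    /// ```
+    /// use racer::{Coordinate, Location};
+    ///
+    /// let _from_offset = Location::from(42usize);
+    /// let _from_coords = Location::from(Coordinate::new(1, 0));
+    /// ```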
+ Coords(Coordinate), +} + +impl From for Location { + fn from(val: BytePos) -> Location { + Location::Point(val) + } +} + +impl From for Location { + fn from(val: usize) -> Location { + Location::Point(BytePos(val)) + } +} + +impl From for Location { + fn from(val: Coordinate) -> Location { + Location::Coords(val) + } +} + +/// Internal cursor methods +pub trait LocationExt { + fn to_point(&self, src: &RawSource) -> Option; + fn to_coords(&self, src: &RawSource) -> Option; +} + +impl LocationExt for Location { + fn to_point(&self, src: &RawSource) -> Option { + match *self { + Location::Point(val) => Some(val), + Location::Coords(ref coords) => src.coords_to_point(coords), + } + } + + fn to_coords(&self, src: &RawSource) -> Option { + match *self { + Location::Coords(val) => Some(val), + Location::Point(point) => src.point_to_coords(point), + } + } +} + +impl fmt::Debug for Match { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + write!( + f, + "Match [{:?}, {:?}, {:?}, {:?}, {:?}, |{}|]", + self.matchstr, + self.filepath.display(), + self.point, + self.local, + self.mtype, + self.contextstr + ) + } +} + +#[derive(Clone, PartialEq)] +pub struct Scope { + pub filepath: path::PathBuf, + pub point: BytePos, +} + +impl Scope { + pub fn new(path: path::PathBuf, pos: BytePos) -> Self { + Scope { + filepath: path, + point: pos, + } + } + + pub fn from_match(m: &Match) -> Scope { + Scope { + filepath: m.filepath.clone(), + point: m.point, + } + } +} + +impl fmt::Debug for Scope { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + write!(f, "Scope [{:?}, {:?}]", self.filepath.display(), self.point) + } +} + +#[derive(Clone, Debug)] +pub struct RawSource { + pub code: String, + pub lines: RefCell>, +} + +impl RawSource { + pub fn new(s: String) -> Self { + RawSource { + code: s, + lines: Default::default(), + } + } + + fn cache_lineoffsets(&self) { + if self.lines.borrow().len() != 0 { + return; + } + let mut before = 0; + *self.lines.borrow_mut() = self + .code + .split('\n') + .map(|line| { + let len = line.len() + 1; + let res = ByteRange::new(before, before + len); + before += len; + res + }) + .collect(); + } + + pub fn coords_to_point(&self, coords: &Coordinate) -> Option { + self.cache_lineoffsets(); + self.lines + .borrow() + .get(coords.row.zero_indexed().0 as usize) + .and_then(|&range| { + let col = coords.col.0 as usize; + if col < range.len() { + Some(range.start + col.into()) + } else { + None + } + }) + } + + pub fn point_to_coords(&self, point: BytePos) -> Option { + self.cache_lineoffsets(); + let lines = self.lines.borrow(); + lines + .binary_search_by(|range| range.partial_cmp(&point).unwrap()) + .ok() + .map(|idx| Coordinate::new(idx as u32 + 1, (point - lines[idx].start).0 as u32)) + } +} + +#[derive(Clone, Debug)] +pub struct MaskedSource { + pub code: String, +} + +#[derive(Clone, Copy, Debug)] +pub struct Src<'c> { + pub src: &'c MaskedSource, + pub range: ByteRange, +} + +impl MaskedSource { + pub fn new(src: &str) -> MaskedSource { + let idx: Vec<_> = codecleaner::code_chunks(&src).collect(); + let code = scopes::mask_comments(src, &idx); + MaskedSource { code } + } + + pub fn as_src(&self) -> Src<'_> { + self.get_src_from_start(BytePos::ZERO) + } + + pub fn get_src_from_start(&self, new_start: BytePos) -> Src<'_> { + Src { + src: self, + range: ByteRange::new(new_start, self.len().into()), + } + } +} + +pub struct MatchIter<'c> { + session: &'c Session<'c>, + matches: vec::IntoIter, +} + +impl<'c> Iterator for MatchIter<'c> { + type Item = Match; 
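+    // Note: `next` resolves line/column coordinates lazily, so a file is only
+    // loaded (or pulled from the session cache) when a match is actually
+    // consumed from the iterator.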
+ + fn next(&mut self) -> Option { + self.matches.next().map(|mut m| { + if m.coords.is_none() { + let point = m.point; + let src = self.session.load_raw_file(m.filepath.as_path()); + m.coords = src.point_to_coords(point); + } + m + }) + } +} + +#[test] +fn coords_to_point_works() { + let src = " +fn myfn() { + let a = 3; + print(a); +}"; + let src = RawSource::new(src.into()); + assert_eq!( + src.coords_to_point(&Coordinate::new(3, 5)), + Some(BytePos(18)) + ); +} + +#[test] +fn coords_to_point_lf_newline() { + let src = "\n\ + fn myfn() {\n\ + let a = 3;\n\ + print(a);\n\ + }\n"; + let src = RawSource::new(src.into()); + assert_eq!( + src.coords_to_point(&Coordinate::new(3, 5)), + Some(BytePos(18)) + ); +} + +#[test] +fn coords_to_point_crlf_newline() { + let src = "\r\n\ + fn myfn() {\r\n\ + let a = 3;\r\n\ + print(a);\r\n\ + }\r\n"; + let src = RawSource::new(src.into()); + assert_eq!( + src.coords_to_point(&Coordinate::new(3, 5)), + Some(BytePos(20)) + ); +} + +#[test] +fn test_point_to_coords() { + let src = " +fn myfn(b:usize) { + let a = 3; + if b == 12 { + let a = 24; + do_something_with(a); + } + do_something_with(a); +} +"; + fn round_trip_point_and_coords(src: &str, lineno: usize, charno: usize) { + let raw_src = RawSource::new(src.to_owned()); + let point = raw_src + .coords_to_point(&Coordinate::new(lineno as u32, charno as u32)) + .unwrap(); + let coords = raw_src.point_to_coords(point).unwrap(); + assert_eq!(coords, Coordinate::new(lineno as u32, charno as u32)); + } + round_trip_point_and_coords(src, 4, 5); +} + +impl<'c> Src<'c> { + pub fn iter_stmts(&self) -> Fuse> { + StmtIndicesIter::from_parts(self) + } + + pub fn shift_start(&self, shift: BytePos) -> Src<'c> { + Src { + src: self.src, + range: ByteRange::new(self.range.start + shift, self.range.end), + } + } + + pub fn change_length(&self, new_length: BytePos) -> Src<'c> { + Src { + src: self.src, + range: ByteRange::new(self.range.start, self.range.start + new_length), + } + } + + pub fn shift_range(&self, new_range: ByteRange) -> Src<'c> { + Src { + src: self.src, + range: new_range.shift(self.range.start), + } + } +} + +pub struct RangedRawSrc { + inner: Rc, + range: ByteRange, +} + +impl Deref for RangedRawSrc { + type Target = str; + fn deref(&self) -> &str { + &self.inner.code[self.range.to_range()] + } +} + +impl Deref for RawSource { + type Target = str; + fn deref(&self) -> &str { + &self.code + } +} + +impl Deref for MaskedSource { + type Target = str; + fn deref(&self) -> &str { + &self.code + } +} + +impl<'c> Deref for Src<'c> { + type Target = str; + fn deref(&self) -> &str { + &self.src.code[self.range.to_range()] + } +} + +/// Caches file contents for re-use between sessions. +/// +/// The file cache is an opaque blob outside of racer which contains maps of loaded and masked +/// files. +pub struct FileCache { + /// raw source for cached files + raw_map: RefCell>>, + + /// masked source for cached files + /// + /// a version with comments and strings replaced by spaces, so that they + /// aren't found when scanning the source for signatures. + masked_map: RefCell>>, + + /// The file loader + pub(crate) loader: Box, +} + +/// Used by the FileCache for loading files +/// +/// Implement one of these and pass it to `FileCache::new()` to override Racer's +/// file loading behavior. 
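+///
+/// The example below is a minimal sketch of a custom loader backed by an
+/// in-memory map; the `MapLoader` type is purely hypothetical and not part
+/// of racer's API.
+///
+/// ```
+/// use std::collections::HashMap;
+/// use std::io;
+/// use std::path::{Path, PathBuf};
+///
+/// // Hypothetical loader that only knows about files handed to it up front.
+/// struct MapLoader(HashMap<PathBuf, String>);
+///
+/// impl racer::FileLoader for MapLoader {
+///     fn load_file(&self, path: &Path) -> io::Result<String> {
+///         self.0
+///             .get(path)
+///             .cloned()
+///             .ok_or_else(|| io::Error::new(io::ErrorKind::NotFound, "file not cached"))
+///     }
+/// }
+///
+/// let _cache = racer::FileCache::new(MapLoader(HashMap::new()));
+/// ```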
+pub trait FileLoader { + /// Load a single file + fn load_file(&self, path: &path::Path) -> io::Result; +} + +/// Provide a blanket impl for Arc since Rls uses that +impl FileLoader for ::std::sync::Arc { + fn load_file(&self, path: &path::Path) -> io::Result { + (&self as &T).load_file(path) + } +} + +/// The default file loader +/// +/// Private since this shouldn't be needed outside of racer +struct DefaultFileLoader; + +impl FileLoader for DefaultFileLoader { + fn load_file(&self, path: &path::Path) -> io::Result { + let mut rawbytes = Vec::new(); + let mut f = File::open(path)?; + f.read_to_end(&mut rawbytes)?; + + // skip BOM bytes, if present + if rawbytes.len() > 2 && rawbytes[0..3] == [0xEF, 0xBB, 0xBF] { + str::from_utf8(&rawbytes[3..]) + .map(|s| s.to_owned()) + .map_err(|err| io::Error::new(io::ErrorKind::Other, err)) + } else { + String::from_utf8(rawbytes).map_err(|err| io::Error::new(io::ErrorKind::Other, err)) + } + } +} + +impl Default for FileCache { + fn default() -> FileCache { + FileCache::new(DefaultFileLoader) + } +} + +impl FileCache { + /// Create a new file cache + /// + /// In order to load files into the cache, please see + /// [`Session::cache_file_contents()`] + /// + /// [`Session::cache_file_contents()`]: struct.Session.html#method.cache_file_contents + pub fn new(loader: L) -> FileCache { + FileCache { + raw_map: RefCell::new(HashMap::new()), + masked_map: RefCell::new(HashMap::new()), + loader: Box::new(loader), + } + } + + /// Remove specific files from the cache + /// + /// Returns true if a file was removed + pub fn remove_file>(&self, path: &P) -> bool { + let path = path.as_ref(); + let mut raw = self.raw_map.borrow_mut(); + let mut masked = self.masked_map.borrow_mut(); + raw.remove(path).is_some() || masked.remove(path).is_some() + } + + /// Add/Replace a file in both versions. 
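+    /// Both the raw source and the comment-masked source are refreshed.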
+ fn cache_file_contents(&self, filepath: P, buf: T) + where + T: Into, + P: Into, + { + let pathbuf = filepath.into(); + let src = buf.into(); + let masked_src = MaskedSource::new(&src); + self.raw_map + .borrow_mut() + .insert(pathbuf.clone(), Rc::new(RawSource::new(src))); + self.masked_map + .borrow_mut() + .insert(pathbuf, Rc::new(masked_src)); + } + + fn load_file(&self, filepath: &path::Path) -> Rc { + if let Some(src) = self.raw_map.borrow().get(filepath) { + return src.clone(); + } + + // nothing found, insert into cache + // Ugh, really need handle results on all these methods :( + let source = self + .loader + .load_file(filepath) + .expect(&format!("Failed load file {:?}", filepath)); + let source = Rc::new(RawSource::new(source)); + self.raw_map + .borrow_mut() + .insert(filepath.to_path_buf(), Rc::clone(&source)); + source + } + + fn load_file_and_mask_comments(&self, filepath: &path::Path) -> Rc { + if let Some(src) = self.masked_map.borrow().get(filepath) { + return src.clone(); + } + // nothing found, insert into cache + let src = self.load_file(filepath); + let msrc = Rc::new(MaskedSource::new(&src.code)); + self.masked_map + .borrow_mut() + .insert(filepath.to_path_buf(), msrc.clone()); + msrc + } +} + +/// Private methods for the Session type +pub trait SessionExt { + /// Request that a file is loaded into the cache + /// + /// This API is unstable and should not be used outside of Racer + fn load_raw_file(&self, _: &path::Path) -> Rc; + + /// ranged version of load_raw_file + fn load_raw_src_ranged(&self, src: &Src<'_>, _: &path::Path) -> RangedRawSrc; + + /// Request that a file is loaded into the cache with comments masked + /// + /// This API is unstable and should not be used outside of Racer + fn load_source_file(&self, _: &path::Path) -> Rc; +} + +/// Context for a Racer operation +pub struct Session<'c> { + /// Cache for files + /// + /// The file cache is used within a session to prevent multiple reads. It is + /// borrowed here in order to support reuse across Racer operations. + cache: &'c FileCache, + /// Cache for generic impls + pub generic_impls: RefCell>>>, + pub project_model: Box, +} + +impl<'c> fmt::Debug for Session<'c> { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + write!(f, "Session {{ .. }}") + } +} + +impl<'c> Session<'c> { + /// Create a Session for use in Racer operations + /// + /// * `cache` is a reference to a `FileCache`. It's take by reference for + /// use across racer operations. + /// + /// # Examples + /// + /// ``` + /// extern crate racer; + /// + /// let cache = racer::FileCache::default(); + /// let session = racer::Session::new(&cache, None); + /// ``` + /// + /// [`FileCache`]: struct.FileCache.html + #[cfg(feature = "metadata")] + pub fn new(cache: &'c FileCache, project_path: Option<&path::Path>) -> Session<'c> { + let project_model = crate::metadata::project_model(project_path); + Session::with_project_model(cache, project_model) + } + + pub fn with_project_model( + cache: &'c FileCache, + project_model: Box, + ) -> Session<'c> { + Session { + cache, + generic_impls: Default::default(), + project_model, + } + } + /// Specify the contents of a file to be used in completion operations + /// + /// The path to the file and the file's contents must both be specified. 
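+    ///
+    /// Contents cached this way take precedence over the file loader, so the
+    /// path does not need to exist on disk (the example below relies on this).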
+ /// + /// # Examples + /// + /// ``` + /// extern crate racer; + /// + /// let cache = racer::FileCache::default(); + /// let session = racer::Session::new(&cache, None); + /// + /// session.cache_file_contents("foo.rs", "pub struct Foo;\\n"); + /// ``` + pub fn cache_file_contents(&self, filepath: P, buf: T) + where + T: Into, + P: Into, + { + self.cache.cache_file_contents(filepath, buf); + } + + pub fn contains_file>(&self, path: P) -> bool { + let path = path.as_ref(); + let raw = self.cache.raw_map.borrow(); + let masked = self.cache.masked_map.borrow(); + raw.contains_key(path) && masked.contains_key(path) + } +} + +impl<'c> SessionExt for Session<'c> { + fn load_raw_file(&self, filepath: &path::Path) -> Rc { + self.cache.load_file(filepath) + } + + fn load_raw_src_ranged(&self, src: &Src<'_>, filepath: &path::Path) -> RangedRawSrc { + let inner = self.cache.load_file(filepath); + RangedRawSrc { + inner, + range: src.range, + } + } + + fn load_source_file(&self, filepath: &path::Path) -> Rc { + self.cache.load_file_and_mask_comments(filepath) + } +} + +/// Get the racer point of a line/character number pair for a file. +pub fn to_point
<P>(coords: Coordinate, path: P, session: &Session<'_>) -> Option<BytePos>
+where
+    P: AsRef<path::Path>,
+{
+    Location::from(coords).to_point(&session.load_raw_file(path.as_ref()))
+}
+
+/// Get the line/character coordinates of a racer byte offset in a file.
+pub fn to_coords<P>
(point: BytePos, path: P, session: &Session<'_>) -> Option +where + P: AsRef, +{ + Location::from(point).to_coords(&session.load_raw_file(path.as_ref())) +} + +/// Find completions for a fully qualified name like `std::io::` +/// +/// Searchs are started relative to `path`. +/// +/// * `query` - is the fqn to search for +/// * `path` - the directory to start searching in +/// * `session` - reference to a racer::Session +/// +/// ```no_run +/// extern crate racer; +/// +/// let path = std::path::Path::new("."); +/// let cache = racer::FileCache::default(); +/// let session = racer::Session::new(&cache, Some(path)); +/// +/// let m = racer::complete_fully_qualified_name( +/// "std::fs::canon", +/// &path, +/// &session +/// ).next().unwrap(); +/// +/// assert_eq!(&m.matchstr[..], "canonicalize"); +/// assert_eq!(m.mtype, racer::MatchType::Function); +/// ``` +#[inline] +pub fn complete_fully_qualified_name<'c, S, P>( + query: S, + path: P, + session: &'c Session<'_>, +) -> MatchIter<'c> +where + S: AsRef, + P: AsRef, +{ + let mut matches = complete_fully_qualified_name_(query.as_ref(), path.as_ref(), session); + matches.dedup_by(|a, b| a.is_same_as(b)); + + MatchIter { + matches: matches.into_iter(), + session, + } +} + +/// Actual implementation without generic bounds +fn complete_fully_qualified_name_( + query: &str, + path: &path::Path, + session: &Session<'_>, +) -> Vec { + let p: Vec<&str> = query.split("::").collect(); + + let mut matches = Vec::new(); + + for m in nameres::do_file_search(p[0], path, session) { + if p.len() == 1 { + matches.push(m); + } else { + let external_search_matches = nameres::do_external_search( + &p[1..], + &m.filepath, + m.point, + SearchType::StartsWith, + Namespace::Path, + &session, + ); + + for m in external_search_matches { + matches.push(m); + } + } + } + + matches +} + +/// Search for completion at position in a file +/// +/// * `src` - the file contents to search in +/// * `filepath` - path to file containing `src` +/// * `pos` - byte offset in file with path/expr to complete +/// * `session` - a racer::Session +/// +/// # Examples +/// +/// ``` +/// extern crate racer; +/// +/// # fn main() { +/// let src = " +/// fn apple() { +/// } +/// +/// fn main() { +/// let b = ap +/// }"; +/// +/// println!("{:?}", src); +/// +/// let cache = racer::FileCache::default(); +/// let session = racer::Session::new(&cache, None); +/// +/// session.cache_file_contents("lib.rs", src); +/// +/// let got = racer::complete_from_file("lib.rs", racer::Location::from(43), &session) +/// .nth(0).unwrap(); +/// assert_eq!("apple", got.matchstr); +/// assert_eq!(got.mtype, racer::MatchType::Function); +/// +/// # } +/// ``` +pub fn complete_from_file<'c, P, C>( + filepath: P, + cursor: C, + session: &'c Session<'_>, +) -> MatchIter<'c> +where + P: AsRef, + C: Into, +{ + let mut matches = complete_from_file_(filepath.as_ref(), cursor.into(), session); + matches.sort_by(|a, b| a.matchstr.cmp(&b.matchstr).then(a.point.cmp(&b.point))); + matches.dedup_by(|a, b| a.is_same_as(b)); + + MatchIter { + matches: matches.into_iter(), + session, + } +} + +fn complete_from_file_( + filepath: &path::Path, + cursor: Location, + session: &Session<'_>, +) -> Vec { + let src = session.load_source_file(filepath); + let raw_src = session.load_raw_file(filepath); + let src_text = &src.as_src()[..]; + // TODO return result + let pos = match cursor.to_point(&raw_src) { + Some(pos) => pos, + None => { + debug!("Failed to convert cursor to point"); + return Vec::new(); + } + }; + let start = 
scopes::get_start_of_search_expr(src_text, pos); + let expr = &src_text[start.0..pos.0]; + let (contextstr, searchstr, completetype) = scopes::split_into_context_and_completion(expr); + + debug!( + "{:?}: contextstr is |{}|, searchstr is |{}|", + completetype, contextstr, searchstr + ); + + let mut out = Vec::new(); + + match completetype { + CompletionType::Path => { + let (stmtstart, stmt) = &scopes::get_current_stmt(src.as_src(), pos); + debug!("Complete path with stmt: {:?}", stmt); + // when in the function ident position, only look for methods + // from a trait to complete. + if util::in_fn_name(stmt) { + trace!("Path is in fn declaration: `{}`", expr); + return nameres::resolve_method( + pos, + src.as_src(), + expr, + filepath, + SearchType::StartsWith, + session, + &ImportInfo::default(), + ); + } + let (path, namespace) = if let Some(use_start) = scopes::use_stmt_start(stmt) { + let path = scopes::construct_path_from_use_tree(&stmt[use_start.0..]); + (path, Namespace::Path) + } else if scopes::is_extern_crate(stmt) { + return fileres::search_crate_names( + searchstr, + SearchType::StartsWith, + filepath, + false, + session, + ); + } else if let Some(str_path) = scopes::is_in_struct_ctor(src.as_src(), *stmtstart, pos) + { + let path = scopes::expr_to_path(&src[str_path.to_range()]).0; + return nameres::get_struct_fields( + &path, + searchstr, + filepath, + pos, + SearchType::StartsWith, + session, + ); + } else { + scopes::expr_to_path(expr) + }; + debug!("path: {:?}, prefix: {:?}", path, path.prefix); + out.extend(nameres::resolve_path( + &path, + filepath, + pos, + SearchType::StartsWith, + namespace, + session, + &ImportInfo::default(), + )); + } + CompletionType::Field => { + let context = ast::get_type_of(contextstr.to_owned(), filepath, pos, session); + debug!("complete_from_file context is {:?}", context); + if let Some(ty) = context { + out.extend(nameres::get_field_matches_from_ty( + ty, + searchstr, + SearchType::StartsWith, + session, + )); + } + } + } + + out +} + +/// Finds if the statement where cursor lies is a `use` statement. +/// +/// # Examples +/// +/// ``` +/// extern crate racer; +/// extern crate env_logger; +/// +/// +/// # fn main() { +/// let _ = env_logger::init(); +/// let cache = racer::FileCache::default(); +/// let session = racer::Session::new(&cache, None); +/// +/// // This is the file where we request completion from +/// let src = stringify! 
{ +/// use sub::foo; +/// use sub::{ +/// bar +/// }; +/// pub(crate) use sub::baz; +/// }; +/// +/// // Load files into cache to prevent trying to read from disk +/// session.cache_file_contents("lib.rs", src); +/// +/// assert_eq!(racer::is_use_stmt("lib.rs", racer::Location::from(9), &session), true); +/// assert_eq!(racer::is_use_stmt("lib.rs", racer::Location::from(28), &session), true); +/// assert_eq!(racer::is_use_stmt("lib.rs", racer::Location::from(5000), &session), false); +/// # } +/// ``` +pub fn is_use_stmt(file_path: P, cursor: C, session: &Session<'_>) -> bool +where + P: AsRef, + C: Into, +{ + let file_path = file_path.as_ref(); + let src = session.load_source_file(file_path); + let raw_src = session.load_raw_file(file_path); + let pos = match cursor.into().to_point(&raw_src) { + Some(pos) => pos, + None => return false, + }; + + if src.bytes().len() <= pos.0 { + return false; + } + + let line = &scopes::get_current_stmt(src.as_src(), pos).1; + scopes::use_stmt_start(line).is_some() +} + +/// Find the definition for item at given a file, source, and cursor index +/// +/// # Examples +/// +/// ``` +/// extern crate racer; +/// extern crate env_logger; +/// +/// use std::path::Path; +/// +/// # fn main() { +/// let _ = env_logger::init(); +/// let cache = racer::FileCache::default(); +/// let session = racer::Session::new(&cache, None); +/// +/// // This is the file where we request completion from +/// let src = r" +/// mod sub; +/// use sub::foo; +/// fn main() { +/// foo(); +/// }; +/// "; +/// +/// // This is the submodule where the definition is found +/// let sub = r"pub fn foo() {}"; +/// +/// // Load files into cache to prevent trying to read from disk +/// session.cache_file_contents("sub.rs", sub); +/// session.cache_file_contents("lib.rs", src); +/// +/// // Search for the definition. 52 is the byte offset in `src`. +/// // Specifically, this asks for the definition of `foo()`. 
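+/// // (Offsets are 0-based byte positions in `src`; if a line/column pair is
+/// // handier, it can be converted first with `racer::to_point`.)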
+/// let m = racer::find_definition("lib.rs", racer::Location::from(52), &session) +/// .expect("find definition returns a match"); +/// +/// // Should have found definition in the "sub.rs" file +/// assert_eq!(m.filepath, Path::new("sub.rs")); +/// // The definition should be for foo +/// assert_eq!(&m.matchstr[..], "foo"); +/// // The definition should be a function +/// assert_eq!(m.mtype, racer::MatchType::Function); +/// # } +/// ``` +pub fn find_definition(filepath: P, cursor: C, session: &Session<'_>) -> Option +where + P: AsRef, + C: Into, +{ + find_definition_(filepath.as_ref(), cursor.into(), session).map(|mut m| { + if m.coords.is_none() { + let point = m.point; + let src = session.load_raw_file(m.filepath.as_path()); + m.coords = src.point_to_coords(point); + } + m + }) +} + +pub fn find_definition_( + filepath: &path::Path, + cursor: Location, + session: &Session<'_>, +) -> Option { + let src = session.load_source_file(filepath); + let src_txt = &src[..]; + // TODO return result + let pos = match cursor.to_point(&session.load_raw_file(filepath)) { + Some(pos) => pos, + None => { + debug!("Failed to convert cursor to point"); + return None; + } + }; + + // Make sure `src` is in the cache + let range = scopes::expand_search_expr(src_txt, pos); + let expr = &src[range.to_range()]; + let (contextstr, searchstr, completetype) = scopes::split_into_context_and_completion(expr); + debug!( + "find_definition_ for |{:?}| |{:?}| {:?}", + contextstr, searchstr, completetype + ); + + match completetype { + CompletionType::Path => { + let (stmtstart, stmt) = &scopes::get_current_stmt(src.as_src(), range.end); + let (path, namespace) = if let Some(use_start) = scopes::use_stmt_start(stmt) { + let path = scopes::construct_path_from_use_tree(&stmt[use_start.0..]); + (path, Namespace::Path) + } else if let Some(str_path) = scopes::is_in_struct_ctor(src.as_src(), *stmtstart, pos) + { + let path = scopes::expr_to_path(&src[str_path.to_range()]).0; + return nameres::get_struct_fields( + &path, + searchstr, + filepath, + pos, + SearchType::StartsWith, + session, + ) + .into_iter() + .next(); + } else { + scopes::expr_to_path(expr) + }; + debug!("[find_definition_] Path: {:?}", path); + nameres::resolve_path( + &path, + filepath, + pos, + SearchType::ExactMatch, + namespace, + session, + &ImportInfo::default(), + ) + .into_iter() + .nth(0) + } + CompletionType::Field => { + let context = ast::get_type_of(contextstr.to_owned(), filepath, pos, session); + debug!("context is {:?}", context); + let only_method = src[range.end.0..].starts_with('('); + context.and_then(|ty| { + nameres::get_field_matches_from_ty(ty, searchstr, SearchType::ExactMatch, session) + .into_iter() + .filter(|m| !only_method || m.mtype.is_function()) + .next() + }) + } + } +} + +#[cfg(test)] +mod tests { + use super::FileCache; + use super::{Session, SessionExt}; + use std::path::Path; + + #[test] + fn overwriting_cached_files() { + let src1 = "src1"; + let src2 = "src2"; + let src3 = "src3"; + let src4 = "src4"; + + // Need session and path to cache files + let path = Path::new("not_on_disk"); + let cache = FileCache::default(); + + // Cache contents for a file and assert that load_file and load_file_and_mask_comments return + // the newly cached contents. + macro_rules! 
cache_and_assert { + ($src: ident) => {{ + let session = Session::new(&cache, Some(path)); + session.cache_file_contents(path, $src); + assert_eq!($src, &session.load_raw_file(path)[..]); + assert_eq!($src, &session.load_source_file(path).code[..]); + }}; + } + + // Check for all srcN + cache_and_assert!(src1); + cache_and_assert!(src2); + cache_and_assert!(src3); + cache_and_assert!(src4); + } +} diff --git a/racer/src/racer/fileres.rs b/racer/src/racer/fileres.rs new file mode 100644 index 0000000000..bc7a5f7678 --- /dev/null +++ b/racer/src/racer/fileres.rs @@ -0,0 +1,106 @@ +use crate::core::{BytePos, Coordinate, Match, MatchType, SearchType, Session, SessionExt}; +use crate::matchers; +use crate::nameres::RUST_SRC_PATH; +use crate::project_model::Edition; +use std::path::{Path, PathBuf}; + +/// get crate file from current path & crate name +pub fn get_crate_file(name: &str, from_path: &Path, session: &Session<'_>) -> Option { + debug!("get_crate_file {}, {:?}", name, from_path); + get_std_file(name, session).or_else(|| get_outer_crates(name, from_path, session)) +} + +pub fn get_std_file(name: &str, session: &Session<'_>) -> Option { + if let Some(ref std_path) = *RUST_SRC_PATH { + // try lib/lib.rs, like in the rust source dir + let cratelibname = format!("lib{}", name); + let filepath = std_path.join(cratelibname).join("lib.rs"); + if filepath.exists() || session.contains_file(&filepath) { + return Some(filepath); + } + // If not found, try using the new standard library directory layout + let filepath = std_path.join(name).join("src").join("lib.rs"); + if filepath.exists() || session.contains_file(&filepath) { + return Some(filepath); + } + } + return None; +} + +/// 2018 style crate name resolution +pub fn search_crate_names( + searchstr: &str, + search_type: SearchType, + file_path: &Path, + only_2018: bool, + session: &Session<'_>, +) -> Vec { + let manifest_path = try_vec!(session.project_model.discover_project_manifest(file_path)); + if only_2018 { + let edition = session + .project_model + .edition(&manifest_path) + .unwrap_or(Edition::Ed2015); + if edition < Edition::Ed2018 { + return Vec::new(); + } + } + let hyphenated = searchstr.replace('_', "-"); + let searchstr = searchstr.to_owned(); + session + .project_model + .search_dependencies( + &manifest_path, + Box::new(move |libname| match search_type { + SearchType::ExactMatch => libname == hyphenated || libname == searchstr, + SearchType::StartsWith => { + libname.starts_with(&hyphenated) || libname.starts_with(&searchstr) + } + }), + ) + .into_iter() + .map(|(name, path)| { + let name = name.replace('-', "_"); + let raw_src = session.load_raw_file(&path); + Match { + matchstr: name, + filepath: path, + point: BytePos::ZERO, + coords: Some(Coordinate::start()), + local: false, + mtype: MatchType::Crate, + contextstr: String::new(), + docs: matchers::find_mod_doc(&raw_src, BytePos::ZERO), + } + }) + .collect() +} + +/// get module file from current path & crate name +pub fn get_module_file(name: &str, parentdir: &Path, session: &Session<'_>) -> Option { + // try just .rs + let filepath = parentdir.join(format!("{}.rs", name)); + if filepath.exists() || session.contains_file(&filepath) { + return Some(filepath); + } + // try /mod.rs + let filepath = parentdir.join(name).join("mod.rs"); + if filepath.exists() || session.contains_file(&filepath) { + return Some(filepath); + } + None +} + +/// try to get outer crates +/// if we have dependencies in cache, use it. 
+/// else, call cargo-metadata(default) or fall back to rls +fn get_outer_crates(libname: &str, from_path: &Path, session: &Session<'_>) -> Option { + debug!( + "[get_outer_crates] lib name: {:?}, from_path: {:?}", + libname, from_path + ); + + let manifest = session.project_model.discover_project_manifest(from_path)?; + let res = session.project_model.resolve_dependency(&manifest, libname); + res +} diff --git a/racer/src/racer/lib.rs b/racer/src/racer/lib.rs new file mode 100755 index 0000000000..485dae1df7 --- /dev/null +++ b/racer/src/racer/lib.rs @@ -0,0 +1,60 @@ +#![cfg_attr(feature = "nightly", feature(test))] +#![feature(control_flow_enum)] +#![feature(try_trait_v2)] +#![feature(rustc_private)] + +#[macro_use] +extern crate log; +#[macro_use] +extern crate lazy_static; +#[macro_use] +extern crate bitflags; + +#[macro_use] +extern crate derive_more; + +extern crate rustc_ast; +extern crate rustc_ast_pretty; +extern crate rustc_data_structures; +extern crate rustc_errors; +extern crate rustc_parse; +extern crate rustc_session; +extern crate rustc_span; + +#[macro_use] +mod testutils; +#[macro_use] +mod util; +mod ast; +mod ast_types; +mod codecleaner; +mod codeiter; +mod core; +mod fileres; +mod matchers; +#[cfg(feature = "metadata")] +mod metadata; +mod nameres; +mod primitive; +mod project_model; +mod scopes; +mod snippets; +mod typeinf; + +pub use crate::ast_types::PathSearch; +pub use crate::core::{ + complete_from_file, complete_fully_qualified_name, find_definition, is_use_stmt, to_coords, + to_point, +}; +pub use crate::core::{ + BytePos, ByteRange, Coordinate, FileCache, FileLoader, Location, Match, MatchType, Session, +}; +pub use crate::primitive::PrimKind; +pub use crate::project_model::{Edition, ProjectModelProvider}; +pub use crate::snippets::snippet_for_match; +pub use crate::util::expand_ident; + +pub use crate::util::{get_rust_src_path, RustSrcPathError}; + +#[cfg(all(feature = "nightly", test))] +mod benches; diff --git a/racer/src/racer/matchers.rs b/racer/src/racer/matchers.rs new file mode 100644 index 0000000000..cf79d9fda0 --- /dev/null +++ b/racer/src/racer/matchers.rs @@ -0,0 +1,914 @@ +use crate::ast_types::{ImplHeader, PathAlias, PathAliasKind, PathSegment}; +use crate::core::MatchType::{ + self, Const, Enum, EnumVariant, For, Function, IfLet, Let, Macro, Module, Static, Struct, + Trait, Type, WhileLet, +}; +use crate::core::Namespace; +use crate::core::SearchType::{self, ExactMatch, StartsWith}; +use crate::core::{BytePos, ByteRange, Coordinate, Match, Session, SessionExt, Src}; +use crate::fileres::{get_crate_file, get_module_file}; +use crate::nameres::resolve_path; +use crate::util::*; +use crate::{ast, scopes, typeinf}; +use std::path::Path; +use std::{str, vec}; + +/// The location of an import (`use` item) currently being resolved. +#[derive(PartialEq, Eq)] +struct PendingImport<'fp> { + filepath: &'fp Path, + range: ByteRange, +} + +/// A stack of imports (`use` items) currently being resolved. +type PendingImports<'stack, 'fp> = StackLinkedListNode<'stack, PendingImport<'fp>>; + +const GLOB_LIMIT: usize = 2; +/// Import information(pending imports, glob, and etc.) 
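+///
+/// Concretely, this tracks the `use` items currently being resolved (so that
+/// cyclic or repeated imports are not followed endlessly) together with an
+/// optional countdown limiting how many consecutive glob imports may be
+/// traversed (see `GLOB_LIMIT`).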
+pub struct ImportInfo<'stack, 'fp> { + /// A stack of imports currently being resolved + imports: PendingImports<'stack, 'fp>, + /// the max number of times where we can go through glob continuously + /// if current search path isn't constructed via glob, it's none + glob_limit: Option, +} + +impl<'stack, 'fp: 'stack> Default for ImportInfo<'stack, 'fp> { + fn default() -> Self { + ImportInfo { + imports: PendingImports::empty(), + glob_limit: None, + } + } +} + +#[derive(Clone, Debug, Eq, PartialEq)] +pub struct MatchCxt<'s, 'p> { + pub filepath: &'p Path, + pub search_str: &'s str, + pub range: ByteRange, + pub search_type: SearchType, + pub is_local: bool, +} + +impl<'s, 'p> MatchCxt<'s, 'p> { + fn get_key_ident( + &self, + blob: &str, + keyword: &str, + ignore: &[&str], + ) -> Option<(BytePos, String)> { + find_keyword(blob, keyword, ignore, self).map(|start| { + let s = match self.search_type { + ExactMatch => self.search_str.to_owned(), + StartsWith => { + let end = find_ident_end(blob, start + BytePos(self.search_str.len())); + blob[start.0..end.0].to_owned() + } + }; + (start, s) + }) + } +} + +pub(crate) fn find_keyword( + src: &str, + pattern: &str, + ignore: &[&str], + context: &MatchCxt<'_, '_>, +) -> Option { + find_keyword_impl( + src, + pattern, + context.search_str, + ignore, + context.search_type, + context.is_local, + ) +} + +fn find_keyword_impl( + src: &str, + pattern: &str, + search_str: &str, + ignore: &[&str], + search_type: SearchType, + is_local: bool, +) -> Option { + let mut start = BytePos::ZERO; + + if let Some(offset) = strip_visibility(&src[..]) { + start += offset; + } else if !is_local { + // TODO: too about + return None; + } + + if ignore.len() > 0 { + start += strip_words(&src[start.0..], ignore); + } + // mandatory pattern\s+ + if !src[start.0..].starts_with(pattern) { + return None; + } + // remove whitespaces ... 
must have one at least + start += pattern.len().into(); + let oldstart = start; + for &b in src[start.0..].as_bytes() { + match b { + b if is_whitespace_byte(b) => start = start.increment(), + _ => break, + } + } + if start == oldstart { + return None; + } + + let search_str_len = search_str.len(); + if src[start.0..].starts_with(search_str) { + match search_type { + StartsWith => Some(start), + ExactMatch => { + if src.len() > start.0 + search_str_len + && !is_ident_char(char_at(src, start.0 + search_str_len)) + { + Some(start) + } else { + None + } + } + } + } else { + None + } +} + +fn is_const_fn(src: &str, blob_range: ByteRange) -> bool { + if let Some(b) = strip_word(&src[blob_range.to_range()], "const") { + let s = src[(blob_range.start + b).0..].trim_start(); + s.starts_with("fn") || s.starts_with("unsafe") + } else { + false + } +} + +fn match_pattern_start( + src: &str, + context: &MatchCxt<'_, '_>, + pattern: &str, + ignore: &[&str], + mtype: MatchType, +) -> Option { + // ast currently doesn't contain the ident coords, so match them with a hacky + // string search + + let blob = &src[context.range.to_range()]; + if let Some(start) = find_keyword(blob, pattern, ignore, context) { + if let Some(end) = blob[start.0..].find(|c: char| c == ':' || c.is_whitespace()) { + if blob[start.0 + end..].trim_start().chars().next() == Some(':') { + let s = &blob[start.0..start.0 + end]; + return Some(Match { + matchstr: s.to_owned(), + filepath: context.filepath.to_path_buf(), + point: context.range.start + start, + coords: None, + local: context.is_local, + mtype: mtype, + contextstr: first_line(blob), + docs: String::new(), + }); + } + } + } + None +} + +pub fn match_const(msrc: &str, context: &MatchCxt<'_, '_>) -> Option { + if is_const_fn(msrc, context.range) { + return None; + } + // Here we don't have to ignore "unsafe" + match_pattern_start(msrc, context, "const", &[], Const) +} + +pub fn match_static(msrc: &str, context: &MatchCxt<'_, '_>) -> Option { + // Here we don't have to ignore "unsafe" + match_pattern_start(msrc, context, "static", &[], Static) +} + +fn match_let_impl(msrc: &str, context: &MatchCxt<'_, '_>, mtype: MatchType) -> Vec { + let mut out = Vec::new(); + let coords = ast::parse_pat_bind_stmt(msrc.to_owned()); + for pat_range in coords { + let s = &msrc[pat_range.to_range()]; + if symbol_matches(context.search_type, context.search_str, s) { + let start = context.range.start + pat_range.start; + debug!("match_pattern_let point is {:?}", start); + out.push(Match { + matchstr: s.to_owned(), + filepath: context.filepath.to_path_buf(), + point: start, + coords: None, + local: context.is_local, + mtype: mtype.clone(), + contextstr: msrc.to_owned(), + docs: String::new(), + }); + if context.search_type == ExactMatch { + break; + } + } + } + out +} + +pub fn match_if_let(msrc: &str, start: BytePos, context: &MatchCxt<'_, '_>) -> Vec { + match_let_impl(msrc, context, IfLet(start)) +} + +pub fn match_while_let(msrc: &str, start: BytePos, context: &MatchCxt<'_, '_>) -> Vec { + match_let_impl(msrc, context, WhileLet(start)) +} + +pub fn match_let(msrc: &str, start: BytePos, context: &MatchCxt<'_, '_>) -> Vec { + let blob = &msrc[context.range.to_range()]; + if blob.starts_with("let ") && txt_matches(context.search_type, context.search_str, blob) { + match_let_impl(blob, context, Let(start)) + } else { + Vec::new() + } +} + +pub fn match_for(msrc: &str, for_start: BytePos, context: &MatchCxt<'_, '_>) -> Vec { + let mut out = Vec::new(); + let blob = &msrc[context.range.to_range()]; 
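+    // `parse_pat_bind_stmt` is expected to yield the byte ranges of each
+    // identifier bound by the pattern, e.g. both `a` and `b` in
+    // `for (a, b) in pairs { .. }`; every matching ident becomes a Match.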
+ let coords = ast::parse_pat_bind_stmt(blob.to_owned()); + for pat_range in coords { + let s = &blob[pat_range.to_range()]; + if symbol_matches(context.search_type, context.search_str, s) { + let start = pat_range.start + context.range.start; + debug!("match_for point is {:?}, found ident {}", start, s); + out.push(Match { + matchstr: s.to_owned(), + filepath: context.filepath.to_path_buf(), + point: start, // it's 'for ~' start + coords: None, + local: context.is_local, + mtype: For(for_start), + contextstr: blob.to_owned(), + docs: String::new(), + }); + } + } + out +} + +pub fn first_line(blob: &str) -> String { + blob[..blob.find('\n').unwrap_or(blob.len())].to_owned() +} + +/// Get the match's cleaned up context string +/// +/// Strip all whitespace, including newlines in order to have a single line +/// context string. +pub fn get_context(blob: &str, context_end: &str) -> String { + blob[..blob.find(context_end).unwrap_or(blob.len())] + .split_whitespace() + .collect::>() + .join(" ") +} + +pub fn match_extern_crate( + msrc: Src<'_>, + context: &MatchCxt<'_, '_>, + session: &Session<'_>, +) -> Option { + let mut res = None; + let mut blob = &msrc[context.range.to_range()]; + + // Temporary fix to parse reexported crates by skipping pub + // keyword until racer understands crate visibility. + if let Some(offset) = strip_visibility(blob) { + blob = &blob[offset.0..]; + } + + if txt_matches( + context.search_type, + &format!("extern crate {}", context.search_str), + blob, + ) && !(txt_matches( + context.search_type, + &format!("extern crate {} as", context.search_str), + blob, + )) || (blob.starts_with("extern crate") + && txt_matches( + context.search_type, + &format!("as {}", context.search_str), + blob, + )) + { + debug!("found an extern crate: |{}|", blob); + + let extern_crate = ast::parse_extern_crate(blob.to_owned()); + + if let Some(ref name) = extern_crate.name { + let realname = extern_crate.realname.as_ref().unwrap_or(name); + if let Some(cratepath) = get_crate_file(realname, context.filepath, session) { + let raw_src = session.load_raw_file(&cratepath); + res = Some(Match { + matchstr: name.clone(), + filepath: cratepath.to_path_buf(), + point: BytePos::ZERO, + coords: Some(Coordinate::start()), + local: false, + mtype: Module, + contextstr: cratepath.to_str().unwrap().to_owned(), + docs: find_mod_doc(&raw_src, BytePos::ZERO), + }); + } + } + } + res +} + +pub fn match_mod( + msrc: Src<'_>, + context: &MatchCxt<'_, '_>, + session: &Session<'_>, +) -> Option { + let blob = &msrc[context.range.to_range()]; + let (start, s) = context.get_key_ident(blob, "mod", &[])?; + if blob.find('{').is_some() { + debug!("found a module inline: |{}|", blob); + return Some(Match { + matchstr: s, + filepath: context.filepath.to_path_buf(), + point: context.range.start + start, + coords: None, + local: false, + mtype: Module, + contextstr: context.filepath.to_str().unwrap().to_owned(), + docs: String::new(), + }); + } else { + debug!("found a module declaration: |{}|", blob); + // the name of the file where we found the module declaration (foo.rs) + // without its extension! + let filename = context.filepath.file_stem()?; + let parent_path = context.filepath.parent()?; + // if we found the declaration in `src/foo.rs`, then let's look for the + // submodule in `src/foo/` as well! + let filename_subdir = parent_path.join(filename); + // if we are looking for "foo::bar", we have two cases: + // 1. we found `pub mod bar;` in either `src/foo/mod.rs` + // (or `src/lib.rs`). 
As such we are going to search for `bar.rs` in + // the same directory (`src/foo/`, or `src/` respectively). + // 2. we found `pub mod bar;` in `src/foo.rs`. This means that we also + // need to seach in `src/foo/` if it exists! + let search_path = if filename_subdir.exists() { + filename_subdir.as_path() + } else { + parent_path + }; + match_mod_inner(msrc, context, session, search_path, s) + } +} + +fn match_mod_inner( + msrc: Src<'_>, + context: &MatchCxt<'_, '_>, + session: &Session<'_>, + search_path: &Path, + s: String, +) -> Option { + let ranged_raw = session.load_raw_src_ranged(&msrc, context.filepath); + // get module from path attribute + if let Some(modpath) = + scopes::get_module_file_from_path(msrc, context.range.start, search_path, ranged_raw) + { + let doc_src = session.load_raw_file(&modpath); + return Some(Match { + matchstr: s, + filepath: modpath.to_path_buf(), + point: BytePos::ZERO, + coords: Some(Coordinate::start()), + local: false, + mtype: Module, + contextstr: modpath.to_str().unwrap().to_owned(), + docs: find_mod_doc(&doc_src, BytePos::ZERO), + }); + } + // get internal module nesting + // e.g. is this in an inline submodule? mod foo{ mod bar; } + // because if it is then we need to search further down the + // directory hierarchy - e.g. /foo/bar.rs + let internalpath = scopes::get_local_module_path(msrc, context.range.start); + let mut searchdir = (*search_path).to_owned(); + for s in internalpath { + searchdir.push(&s); + } + if let Some(modpath) = get_module_file(&s, &searchdir, session) { + let doc_src = session.load_raw_file(&modpath); + let context = modpath.to_str().unwrap().to_owned(); + return Some(Match { + matchstr: s, + filepath: modpath, + point: BytePos::ZERO, + coords: Some(Coordinate::start()), + local: false, + mtype: Module, + contextstr: context, + docs: find_mod_doc(&doc_src, BytePos::ZERO), + }); + } + None +} + +fn find_generics_end(blob: &str) -> Option { + // Naive version that attempts to skip over attributes + let mut in_attr = false; + let mut attr_level = 0; + + let mut level = 0; + for (i, b) in blob.as_bytes().into_iter().enumerate() { + // Naively skip attributes `#[...]` + if in_attr { + match b { + b'[' => attr_level += 1, + b']' => { + attr_level -=1; + if attr_level == 0 { + in_attr = false; + continue; + } + }, + _ => continue, + } + } + // ...otherwise just try to find the last `>` + match b { + b'{' | b'(' | b';' => return None, + b'<' => level += 1, + b'>' => { + level -= 1; + if level == 0 { + return Some(i.into()); + } + } + b'#' if blob.bytes().nth(i + 1) == Some(b'[') => in_attr = true, + _ => {} + } + } + None +} + +pub fn match_struct( + msrc: Src<'_>, + context: &MatchCxt<'_, '_>, + session: &Session<'_>, +) -> Option { + let blob = &msrc[context.range.to_range()]; + let (start, s) = context.get_key_ident(blob, "struct", &[])?; + + debug!("found a struct |{}|", s); + let generics = + find_generics_end(&blob[start.0..]).map_or_else(Default::default, |generics_end| { + let header = format!("struct {}();", &blob[start.0..=(start + generics_end).0]); + ast::parse_generics(header, context.filepath) + }); + let start = context.range.start + start; + let doc_src = session.load_raw_src_ranged(&msrc, context.filepath); + Some(Match { + matchstr: s, + filepath: context.filepath.to_path_buf(), + point: start, + coords: None, + local: context.is_local, + mtype: Struct(Box::new(generics)), + contextstr: get_context(blob, "{"), + docs: find_doc(&doc_src, start), + }) +} + +pub fn match_union( + msrc: Src<'_>, + context: 
&MatchCxt<'_, '_>, + session: &Session<'_>, +) -> Option { + let blob = &msrc[context.range.to_range()]; + let (start, s) = context.get_key_ident(blob, "union", &[])?; + + debug!("found a union |{}|", s); + let generics = + find_generics_end(&blob[start.0..]).map_or_else(Default::default, |generics_end| { + let header = format!("union {}();", &blob[start.0..=(start + generics_end).0]); + ast::parse_generics(header, context.filepath) + }); + let start = context.range.start + start; + let doc_src = session.load_raw_src_ranged(&msrc, context.filepath); + Some(Match { + matchstr: s, + filepath: context.filepath.to_path_buf(), + point: start, + coords: None, + local: context.is_local, + mtype: MatchType::Union(Box::new(generics)), + contextstr: get_context(blob, "{"), + docs: find_doc(&doc_src, start), + }) +} + +pub fn match_type( + msrc: Src<'_>, + context: &MatchCxt<'_, '_>, + session: &Session<'_>, +) -> Option { + let blob = &msrc[context.range.to_range()]; + let (start, s) = context.get_key_ident(blob, "type", &[])?; + debug!("found!! a type {}", s); + // parse type here + let start = context.range.start + start; + let doc_src = session.load_raw_src_ranged(&msrc, context.filepath); + Some(Match { + matchstr: s, + filepath: context.filepath.to_path_buf(), + point: start, + coords: None, + local: context.is_local, + mtype: Type, + contextstr: first_line(blob), + docs: find_doc(&doc_src, start), + }) +} + +pub fn match_trait( + msrc: Src<'_>, + context: &MatchCxt<'_, '_>, + session: &Session<'_>, +) -> Option { + let blob = &msrc[context.range.to_range()]; + let (start, s) = context.get_key_ident(blob, "trait", &["unsafe"])?; + debug!("found!! a trait {}", s); + let start = context.range.start + start; + let doc_src = session.load_raw_src_ranged(&msrc, context.filepath); + Some(Match { + matchstr: s, + filepath: context.filepath.to_path_buf(), + point: start, + coords: None, + local: context.is_local, + mtype: Trait, + contextstr: get_context(blob, "{"), + docs: find_doc(&doc_src, start), + }) +} + +pub fn match_enum_variants(msrc: &str, context: &MatchCxt<'_, '_>) -> Vec { + let blob = &msrc[context.range.to_range()]; + let mut out = Vec::new(); + let parsed_enum = ast::parse_enum(blob.to_owned()); + for (name, offset) in parsed_enum.values { + if name.starts_with(context.search_str) { + let start = context.range.start + offset; + let m = Match { + matchstr: name, + filepath: context.filepath.to_path_buf(), + point: start, + coords: None, + local: context.is_local, + mtype: EnumVariant(None), + contextstr: first_line(&blob[offset.0..]), + docs: find_doc(msrc, start), + }; + out.push(m); + } + } + out +} + +pub fn match_enum( + msrc: Src<'_>, + context: &MatchCxt<'_, '_>, + session: &Session<'_>, +) -> Option { + let blob = &msrc[context.range.to_range()]; + let (start, s) = context.get_key_ident(blob, "enum", &[])?; + + debug!("found!! 
an enum |{}|", s); + + let generics = + find_generics_end(&blob[start.0..]).map_or_else(Default::default, |generics_end| { + let header = format!("enum {}{{}}", &blob[start.0..=(start + generics_end).0]); + ast::parse_generics(header, context.filepath) + }); + let start = context.range.start + start; + let doc_src = session.load_raw_src_ranged(&msrc, context.filepath); + Some(Match { + matchstr: s, + filepath: context.filepath.to_path_buf(), + point: start, + coords: None, + local: context.is_local, + mtype: Enum(Box::new(generics)), + contextstr: first_line(blob), + docs: find_doc(&doc_src, start), + }) +} + +pub fn match_use( + msrc: Src<'_>, + context: &MatchCxt<'_, '_>, + session: &Session<'_>, + import_info: &ImportInfo<'_, '_>, +) -> Vec { + let import = PendingImport { + filepath: context.filepath, + range: context.range, + }; + + let blob = &msrc[context.range.to_range()]; + + // If we're trying to resolve the same import recursively, + // do not return any matches this time. + if import_info.imports.contains(&import) { + debug!("import {} involved in a cycle; ignoring", blob); + return Vec::new(); + } + + // Push this import on the stack of pending imports. + let pending_imports = import_info.imports.push(import); + + let mut out = Vec::new(); + + if find_keyword_impl(blob, "use", "", &[], StartsWith, context.is_local).is_none() { + return out; + } + + let use_item = ast::parse_use(blob.to_owned()); + debug!( + "[match_use] found item: {:?}, searchstr: {}", + use_item, context.search_str + ); + // for speed up! + if !use_item.contains_glob && !txt_matches(context.search_type, context.search_str, blob) { + return out; + } + let mut import_info = ImportInfo { + imports: pending_imports, + glob_limit: import_info.glob_limit, + }; + let alias_match = |ident, start, inner, cstr| Match { + matchstr: ident, + filepath: context.filepath.to_owned(), + point: context.range.start + start, + coords: None, + local: context.is_local, + mtype: MatchType::UseAlias(Box::new(inner)), + contextstr: cstr, + docs: String::new(), + }; + // common utilities + macro_rules! 
with_match { + ($path:expr, $ns: expr, $f:expr) => { + let path_iter = resolve_path( + $path, + context.filepath, + context.range.start, + ExactMatch, + $ns, + session, + &import_info, + ); + for m in path_iter { + out.push($f(m)); + if context.search_type == ExactMatch { + return out; + } + } + }; + } + // let's find searchstr using path_aliases + for path_alias in use_item.path_list { + let PathAlias { + path: mut alias_path, + kind: alias_kind, + range: alias_range, + } = path_alias; + alias_path.set_prefix(); + match alias_kind { + PathAliasKind::Ident(ref ident, rename_start) => { + if !symbol_matches(context.search_type, context.search_str, &ident) { + continue; + } + with_match!(&alias_path, Namespace::Path, |m: Match| { + debug!("[match_use] PathAliasKind::Ident {:?} was found", ident); + let rename_start = match rename_start { + Some(r) => r, + None => return m, + }; + // if use A as B found, we treat this type as type alias + let context_str = &msrc[alias_range.shift(context.range.start).to_range()]; + alias_match(ident.clone(), rename_start, m, context_str.to_owned()) + }); + } + PathAliasKind::Self_(ref ident, rename_start) => { + if let Some(last_seg) = alias_path.segments.last() { + let search_name = if rename_start.is_some() { + ident + } else { + &last_seg.name + }; + if !symbol_matches(context.search_type, context.search_str, search_name) { + continue; + } + with_match!(&alias_path, Namespace::PathParen, |m: Match| { + debug!("[match_use] PathAliasKind::Self_ {:?} was found", ident); + let rename_start = match rename_start { + Some(r) => r, + None => return m, + }; + // if use A as B found, we treat this type as type alias + let context_str = &msrc[alias_range.shift(context.range.start).to_range()]; + alias_match(ident.clone(), rename_start, m, context_str.to_owned()) + }); + } + } + PathAliasKind::Glob => { + let glob_depth_reserved = if let Some(ref mut d) = import_info.glob_limit { + if *d == 0 { + continue; + } + *d -= 1; + Some(*d + 1) + } else { + // heuristics for issue #844 + import_info.glob_limit = Some(GLOB_LIMIT - 1); + None + }; + let mut search_path = alias_path; + search_path.segments.push(PathSegment::new( + context.search_str.to_owned(), + vec![], + None, + )); + let path_iter = resolve_path( + &search_path, + context.filepath, + context.range.start, + context.search_type, + Namespace::Path, + session, + &import_info, + ); + import_info.glob_limit = glob_depth_reserved; + debug!("[match_use] resolve_path returned {:?} for Glob", path_iter,); + out.extend(path_iter); + } + } + } + out +} + +/// TODO: Handle `extern` functions +pub fn match_fn(msrc: Src<'_>, context: &MatchCxt<'_, '_>, session: &Session<'_>) -> Option { + let blob = &msrc[context.range.to_range()]; + if typeinf::first_param_is_self(blob) { + return None; + } + match_fn_common(blob, msrc, context, session) +} + +pub fn match_method( + msrc: Src<'_>, + context: &MatchCxt<'_, '_>, + include_assoc_fn: bool, + session: &Session<'_>, +) -> Option { + let blob = &msrc[context.range.to_range()]; + if !include_assoc_fn && !typeinf::first_param_is_self(blob) { + return None; + } + match_fn_common(blob, msrc, context, session) +} + +fn match_fn_common( + blob: &str, + msrc: Src<'_>, + context: &MatchCxt<'_, '_>, + session: &Session<'_>, +) -> Option { + let (start, s) = context.get_key_ident(blob, "fn", &["const", "unsafe", "async"])?; + let start = context.range.start + start; + let doc_src = session.load_raw_src_ranged(&msrc, context.filepath); + Some(Match { + matchstr: s, + filepath: 
context.filepath.to_path_buf(), + point: start, + coords: None, + local: context.is_local, + mtype: Function, + contextstr: get_context(blob, "{"), + docs: find_doc(&doc_src, start), + }) +} + +pub fn match_macro( + msrc: Src<'_>, + context: &MatchCxt<'_, '_>, + session: &Session<'_>, +) -> Option<Match> { + let trimed = context.search_str.trim_end_matches('!'); + let mut context = context.clone(); + context.search_str = trimed; + let blob = &msrc[context.range.to_range()]; + let (start, mut s) = context.get_key_ident(blob, "macro_rules!", &[])?; + s.push('!'); + debug!("found a macro {}", s); + let doc_src = session.load_raw_src_ranged(&msrc, context.filepath); + Some(Match { + matchstr: s, + filepath: context.filepath.to_owned(), + point: context.range.start + start, + coords: None, + local: context.is_local, + mtype: Macro, + contextstr: first_line(blob), + docs: find_doc(&doc_src, context.range.start), + }) +} + +pub fn find_doc(msrc: &str, match_point: BytePos) -> String { + let blob = &msrc[0..match_point.0]; + blob.lines() + .rev() + .skip(1) // skip the line that the match is on + .map(|line| line.trim()) + .take_while(|line| line.starts_with("///") || line.starts_with("#[") || line.is_empty()) + .filter(|line| !(line.trim().starts_with("#[") || line.is_empty())) // remove the #[flags] + .collect::<Vec<_>>() // These are needed because + .iter() // you cannot `rev` an `iter` that + .rev() // has already been `rev`ed. + .map(|line| if line.len() >= 4 { &line[4..] } else { "" }) // Remove "/// " + .collect::<Vec<_>>() + .join("\n") +} + +pub(crate) fn find_mod_doc(msrc: &str, blobstart: BytePos) -> String { + let blob = &msrc[blobstart.0..]; + let mut doc = String::new(); + + let mut iter = blob + .lines() + .map(|line| line.trim()) + .take_while(|line| line.starts_with("//") || line.is_empty()) + // Skip over the copyright notice and empty lines until you find + // the module's documentation (it will go until the end of the + // file if the module doesn't have any docs). + .filter(|line| line.starts_with("//!")) + .peekable(); + + // Use a loop to avoid unnecessary collect and String allocation + while let Some(line) = iter.next() { + // Remove "//! " and push to doc string to be returned + doc.push_str(if line.len() >= 4 { &line[4..] } else { "" }); + if iter.peek() != None { + doc.push_str("\n"); + } + } + doc +} + +// DON'T USE MatchCxt's range +pub fn match_impl(decl: String, context: &MatchCxt<'_, '_>, offset: BytePos) -> Option<Vec<Match>> { + let ImplHeader { generics, ..
} = + ast::parse_impl(decl, context.filepath, offset, true, offset)?; + let mut out = Vec::new(); + for type_param in generics.0 { + if !symbol_matches(context.search_type, context.search_str, &type_param.name) { + continue; + } + out.push(type_param.into_match()); + } + Some(out) +} + +#[cfg(test)] +mod tests { + use super::*; + #[test] + fn find_generics_end() { + use super::find_generics_end; + assert_eq!( + find_generics_end("Vec"), + Some(BytePos(64)) + ); + assert_eq!( + find_generics_end("Vec"), + Some(BytePos(27)) + ); + assert_eq!( + find_generics_end("Result, Option<&str>>"), + Some(BytePos(32)) + ); + } +} diff --git a/racer/src/racer/metadata.rs b/racer/src/racer/metadata.rs new file mode 100644 index 0000000000..d1b078c62a --- /dev/null +++ b/racer/src/racer/metadata.rs @@ -0,0 +1,125 @@ +use lazycell; +extern crate racer_cargo_metadata as metadata; +use self::lazycell::LazyCell; +use self::metadata::mapping::{Edition as Ed, PackageIdx, PackageMap}; +use crate::project_model::{Edition, ProjectModelProvider}; +use std::cell::Cell; +use std::path::{Path, PathBuf}; + +struct MetadataCache { + pkg_map: LazyCell, + manifest_path: Option, + failed_to_fill: Cell, +} + +impl MetadataCache { + fn new(manifest_path: Option) -> Self { + MetadataCache { + pkg_map: LazyCell::new(), + manifest_path, + failed_to_fill: Cell::new(false), + } + } + fn fill_impl(&self, manifest: &Path) -> Result<(), ()> { + let meta = metadata::run(manifest, true) + .or_else(|e| { + if let metadata::ErrorKind::Subprocess(ref s) = e { + // HACK: if --frozen failed, try again without --frozen + // see https://github.com/rust-lang/cargo/blob/master/src/cargo/ops/registry.rs#L344 + if s.contains("--frozen") { + info!("MetadataCache: try again without --frozen"); + return metadata::run(manifest, false); + } + } + Err(e) + }) + .map_err(|e| { + warn!("Error in cargo metadata: {}", e); + })?; + let pkg_map = PackageMap::from_metadata(meta); + self.pkg_map.fill(pkg_map).map_err(|_| { + warn!("Error in initialize lazy cell"); + }) + } + fn fill(&self, manifest: &Path) { + if !self.pkg_map.filled() && !self.failed_to_fill.get() { + self.failed_to_fill.set(self.fill_impl(manifest).is_err()); + } + } + fn setup(&self, manifest: &Path) -> Option<(&PackageMap, PackageIdx)> { + self.fill(manifest); + let pkg_map: &PackageMap = self.pkg_map.borrow().unwrap(); + let idx = if manifest.is_relative() { + let path = manifest.canonicalize().ok()?; + pkg_map.get_idx(&path)? + } else { + pkg_map.get_idx(manifest)? 
+ }; + Some((pkg_map, idx)) + } +} + +impl ProjectModelProvider for MetadataCache { + fn edition(&self, manifest: &Path) -> Option { + let (pkg_map, idx) = self.setup(manifest)?; + let edition = pkg_map.get_edition(idx); + Some(match edition { + Ed::Ed2015 => Edition::Ed2015, + Ed::Ed2018 => Edition::Ed2018, + Ed::Ed2021 => Edition::Ed2021, + }) + } + fn discover_project_manifest(&self, path: &Path) -> Option { + let cur_manifest = metadata::find_manifest(path)?; + let manifest = self.manifest_path.as_ref()?; + self.fill(manifest); + Some(cur_manifest) + } + fn search_dependencies( + &self, + manifest: &Path, + search_fn: Box bool>, + ) -> Vec<(String, PathBuf)> { + let (pkg_map, idx) = match self.setup(manifest) { + Some(x) => x, + None => return vec![], + }; + let deps = pkg_map + .get_dependencies(idx) + .iter() + .filter(|(s, _)| search_fn(s)) + .map(|(s, p)| (s.to_string(), p.to_path_buf())); + let lib = pkg_map + .get_lib(idx) + .filter(|t| search_fn(&t.name)) + .map(|t| (t.name.to_string(), t.src_path.to_path_buf())); + deps.chain(lib).collect() + } + fn resolve_dependency(&self, manifest: &Path, libname: &str) -> Option { + debug!( + "MetadataCache::resolve_dependency manifest: {:?} libname: {}", + manifest, libname + ); + let (pkg_map, idx) = self.setup(manifest)?; + pkg_map + .get_src_path_from_libname(idx, libname) + .or_else(|| { + let hyphnated = libname.replace('_', "-"); + pkg_map.get_src_path_from_libname(idx, &hyphnated) + }) + .or_else(|| { + let target = pkg_map.get_lib(idx)?; + if target.name.replace('-', "_") == libname { + Some(&target.src_path) + } else { + None + } + }) + .map(|p| p.to_owned()) + } +} + +pub fn project_model(project_path: Option<&Path>) -> Box { + let manifest = project_path.and_then(|p| metadata::find_manifest(p)); + Box::new(MetadataCache::new(manifest)) +} diff --git a/racer/src/racer/nameres.rs b/racer/src/racer/nameres.rs new file mode 100644 index 0000000000..6a6e615e99 --- /dev/null +++ b/racer/src/racer/nameres.rs @@ -0,0 +1,2749 @@ +//! Name resolving +use std::collections::HashSet; +use std::path::{Path, PathBuf}; +use std::rc::Rc; +use std::{self, vec}; + +use crate::primitive::PrimKind; +use rustc_ast::ast::BinOpKind; + +use crate::ast_types::{ImplHeader, Path as RacerPath, PathPrefix, PathSegment, Ty}; +use crate::core::Namespace; +use crate::core::SearchType::{self, ExactMatch, StartsWith}; +use crate::core::{ + BytePos, ByteRange, Coordinate, Match, MatchType, Scope, Session, SessionExt, Src, +}; +use crate::fileres::{get_crate_file, get_module_file, get_std_file, search_crate_names}; +use crate::matchers::{find_doc, ImportInfo, MatchCxt}; +use crate::primitive; +use crate::util::{ + self, calculate_str_hash, find_ident_end, get_rust_src_path, strip_words, symbol_matches, + trim_visibility, txt_matches, txt_matches_with_pos, +}; +use crate::{ast, core, matchers, scopes, typeinf}; + +lazy_static! 
{ + pub static ref RUST_SRC_PATH: Option = get_rust_src_path().ok(); +} + +pub(crate) fn search_struct_fields( + searchstr: &str, + structmatch: &Match, + search_type: SearchType, + session: &Session<'_>, +) -> Vec { + match structmatch.mtype { + MatchType::Struct(_) | MatchType::EnumVariant(_) | MatchType::Union(_) => {} + _ => return Vec::new(), + } + let src = session.load_source_file(&structmatch.filepath); + let mut out = Vec::new(); + + let struct_start = scopes::expect_stmt_start(src.as_src(), structmatch.point); + let struct_range = if let Some(end) = scopes::end_of_next_scope(&src[struct_start.0..]) { + struct_start.0..=(struct_start + end).0 + } else { + return out; + }; + let structsrc = match structmatch.mtype { + MatchType::EnumVariant(_) => "struct ".to_string() + &src[struct_range.clone()], + _ => src[struct_range.clone()].to_string(), + }; + let fields = ast::parse_struct_fields(structsrc, core::Scope::from_match(structmatch)); + for (field, field_range, _) in fields { + if symbol_matches(search_type, searchstr, &field) { + let raw_src = session.load_raw_file(&structmatch.filepath); + let contextstr = src[field_range.shift(struct_start).to_range()].to_owned(); + out.push(Match { + matchstr: field, + filepath: structmatch.filepath.clone(), + point: field_range.start + struct_start, + coords: None, + local: structmatch.local, + mtype: MatchType::StructField, + contextstr, + docs: find_doc(&raw_src[struct_range.clone()], field_range.start), + }); + } + } + out +} + +pub fn search_for_impl_methods( + match_request: &Match, + fieldsearchstr: &str, + point: BytePos, + fpath: &Path, + local: bool, + search_type: SearchType, + session: &Session<'_>, +) -> Vec { + let implsearchstr: &str = &match_request.matchstr; + + debug!( + "searching for impl methods |{:?}| |{}| {:?}", + match_request, + fieldsearchstr, + fpath.display() + ); + + let mut out = Vec::new(); + + for header in search_for_impls(point, implsearchstr, fpath, local, session) { + debug!("found impl!! |{:?}| looking for methods", header); + let mut found_methods = HashSet::new(); + let src = session.load_source_file(header.file_path()); + for m in search_scope_for_methods( + header.scope_start(), + src.as_src(), + fieldsearchstr, + header.file_path(), + false, + false, + search_type, + session, + ) { + found_methods.insert(calculate_str_hash(&m.matchstr)); + out.push(m); + } + let trait_path = try_continue!(header.trait_path()); + // search methods coerced by deref + if trait_path.name() == Some("Deref") { + let target = search_scope_for_impled_assoc_types( + &header, + "Target", + SearchType::ExactMatch, + session, + ); + if let Some((_, target_ty)) = target.into_iter().next() { + out.extend(search_for_deref_matches( + target_ty, + match_request, + &header, + fieldsearchstr, + session, + )); + } + continue; + } + let trait_match = try_continue!(header.resolve_trait(session, &ImportInfo::default())); + for m in search_for_trait_methods(trait_match.clone(), fieldsearchstr, search_type, session) + { + if !found_methods.contains(&calculate_str_hash(&m.matchstr)) { + out.push(m); + } + } + for gen_impl_header in search_for_generic_impls( + trait_match.point, + &trait_match.matchstr, + &trait_match.filepath, + session, + ) { + debug!("found generic impl!! 
{:?}", gen_impl_header); + let src = session.load_source_file(gen_impl_header.file_path()); + for gen_method in search_generic_impl_scope_for_methods( + gen_impl_header.scope_start(), + src.as_src(), + fieldsearchstr, + &gen_impl_header, + search_type, + ) { + out.push(gen_method); + } + } + } + out +} + +fn search_scope_for_methods( + point: BytePos, + src: Src<'_>, + searchstr: &str, + filepath: &Path, + includes_assoc_fn: bool, + includes_assoc_ty_and_const: bool, + search_type: SearchType, + session: &Session<'_>, +) -> Vec { + debug!( + "searching scope for methods {:?} |{}| {:?}", + point, + searchstr, + filepath.display() + ); + let scopesrc = src.shift_start(point); + let mut out = Vec::new(); + macro_rules! ret_or_continue { + () => { + if search_type == ExactMatch { + return out; + } else { + continue; + } + }; + } + for blob_range in scopesrc.iter_stmts() { + let matchcxt = MatchCxt { + filepath, + range: blob_range.shift(point), + search_str: searchstr, + search_type, + is_local: true, + }; + let method = matchers::match_method(src, &matchcxt, includes_assoc_fn, session); + if let Some(mut m) = method { + // for backward compatibility + if m.contextstr.ends_with(";") { + m.contextstr.pop(); + } + out.push(m); + ret_or_continue!(); + } + if !includes_assoc_ty_and_const { + continue; + } + let type_ = matchers::match_type(src, &matchcxt, session); + if let Some(mut m) = type_ { + m.mtype = MatchType::AssocType; + out.push(m); + ret_or_continue!(); + } + let const_ = matchers::match_const(&src, &matchcxt); + if let Some(m) = const_ { + out.push(m); + ret_or_continue!(); + } + } + out +} + +fn search_generic_impl_scope_for_methods( + point: BytePos, + src: Src<'_>, + searchstr: &str, + impl_header: &Rc, + search_type: SearchType, +) -> Vec { + debug!( + "searching generic impl scope for methods {:?} |{}|", + point, searchstr, + ); + + let scopesrc = src.shift_start(point); + let mut out = Vec::new(); + for blob_range in scopesrc.iter_stmts() { + let blob = &scopesrc[blob_range.to_range()]; + if let Some(n) = blob.find(|c| c == '{' || c == ';') { + let signature = blob[..n].trim_end(); + + if txt_matches(search_type, &format!("fn {}", searchstr), signature) + && typeinf::first_param_is_self(blob) + { + debug!("found a method starting |{}| |{}|", searchstr, blob); + // TODO: parse this properly, or, txt_matches should return match pos? + let start = BytePos::from(blob.find(&format!("fn {}", searchstr)).unwrap() + 3); + let end = find_ident_end(blob, start); + let l = &blob[start.0..end.0]; + // TODO: make a better context string for functions + let m = Match { + matchstr: l.to_owned(), + filepath: impl_header.file_path().to_owned(), + point: point + blob_range.start + start, + coords: None, + local: true, + mtype: MatchType::Method(Some(Box::new(impl_header.generics.clone()))), + contextstr: signature.to_owned(), + docs: find_doc(&scopesrc, blob_range.start + start), + }; + out.push(m); + } + } + } + out +} + +fn search_scope_for_impled_assoc_types( + header: &ImplHeader, + searchstr: &str, + search_type: SearchType, + session: &Session<'_>, +) -> Vec<(String, Ty)> { + let src = session.load_source_file(header.file_path()); + let scope_src = src.as_src().shift_start(header.scope_start()); + let mut out = vec![]; + let scope = Scope::new(header.file_path().to_owned(), header.scope_start()); + for blob_range in scope_src.iter_stmts() { + let blob = &scope_src[blob_range.to_range()]; + if blob.starts_with("type") { + let ast::TypeVisitor { name, type_, .. 
} = ast::parse_type(blob.to_owned(), &scope); + let name = try_continue!(name); + let type_ = try_continue!(type_); + match search_type { + SearchType::ExactMatch => { + if &name == searchstr { + out.push((name, type_)); + break; + } + } + SearchType::StartsWith => { + if name.starts_with(searchstr) { + out.push((name, type_)); + } + } + } + } + } + out +} + +// helper function for search_for_impls and etc +fn impl_scope_start(blob: &str) -> Option { + if blob.starts_with("impl") { + if let Some(&b) = blob.as_bytes().get(4) { + if b == b' ' || b == b'<' { + return blob.find('{'); + } + } + } + None +} + +// get impl headers from scope +fn search_for_impls( + pos: BytePos, + searchstr: &str, + filepath: &Path, + local: bool, + session: &Session<'_>, +) -> Vec { + debug!( + "search_for_impls {:?}, {}, {:?}", + pos, + searchstr, + filepath.display() + ); + let s = session.load_source_file(filepath); + let scope_start = scopes::scope_start(s.as_src(), pos); + let src = s.get_src_from_start(scope_start); + + let mut out = Vec::new(); + for blob_range in src.iter_stmts() { + let blob = &src[blob_range.to_range()]; + if let Some(n) = impl_scope_start(blob) { + if !txt_matches(ExactMatch, searchstr, &blob[..n + 1]) { + continue; + } + let decl = blob[..n + 1].to_owned() + "}"; + let start = blob_range.start + scope_start; + let impl_header = try_continue!(ast::parse_impl( + decl, + filepath, + blob_range.start + scope_start, + local, + start + n.into(), + )); + let matched = impl_header + .self_path() + .name() + .map_or(false, |name| symbol_matches(ExactMatch, searchstr, name)); + if matched { + out.push(impl_header); + } + } + } + out +} + +// trait_only version of search_for_impls +// needs both `Self` type name and trait name +pub(crate) fn search_trait_impls( + pos: BytePos, + self_search: &str, + trait_search: &[&str], + once: bool, + filepath: &Path, + local: bool, + session: &Session<'_>, +) -> Vec { + debug!( + "search_trait_impls {:?}, {}, {:?}, {:?}", + pos, + self_search, + trait_search, + filepath.display() + ); + let s = session.load_source_file(filepath); + let scope_start = scopes::scope_start(s.as_src(), pos); + let src = s.get_src_from_start(scope_start); + + let mut out = Vec::new(); + for blob_range in src.iter_stmts() { + let blob = &src[blob_range.to_range()]; + if let Some(n) = impl_scope_start(blob) { + if !txt_matches(ExactMatch, self_search, &blob[..n + 1]) { + continue; + } + let decl = blob[..n + 1].to_owned() + "}"; + let start = blob_range.start + scope_start; + let impl_header = try_continue!(ast::parse_impl( + decl, + filepath, + blob_range.start + scope_start, + local, + start + n.into(), + )); + let self_matched = impl_header + .self_path() + .name() + .map_or(false, |name| symbol_matches(ExactMatch, self_search, name)); + if !self_matched { + continue; + } + let trait_matched = { + let trait_name = + try_continue!(impl_header.trait_path().and_then(|tpath| tpath.name())); + trait_search + .into_iter() + .any(|ts| symbol_matches(ExactMatch, ts, trait_name)) + }; + if trait_matched { + out.push(impl_header); + if once { + break; + } + } + } + } + out +} + +fn cached_generic_impls( + filepath: &Path, + session: &Session<'_>, + scope_start: BytePos, +) -> Vec> { + // the cache is keyed by path and the scope we search in + session + .generic_impls + .borrow_mut() + .entry((filepath.into(), scope_start)) + .or_insert_with(|| { + let s = session.load_source_file(&filepath); + let src = s.get_src_from_start(scope_start); + src.iter_stmts() + .filter_map(|blob_range| { + 
let blob = &src[blob_range.to_range()]; + let n = impl_scope_start(blob)?; + let decl = blob[..n + 1].to_owned() + "}"; + let start = blob_range.start + scope_start; + ast::parse_impl(decl, filepath, start, true, start + n.into()).map(Rc::new) + }) + .collect() + }) + .clone() +} + +// Find trait impls +fn search_for_generic_impls( + pos: BytePos, + searchstr: &str, + filepath: &Path, + session: &Session<'_>, +) -> Vec> { + debug!( + "search_for_generic_impls {:?}, {}, {:?}", + pos, + searchstr, + filepath.display() + ); + let s = session.load_source_file(filepath); + let scope_start = scopes::scope_start(s.as_src(), pos); + + let mut out = Vec::new(); + + for header in cached_generic_impls(filepath, session, scope_start).iter() { + let name_path = header.self_path(); + if !header.is_trait() { + continue; + } + if let Some(name) = name_path.segments.last() { + for type_param in header.generics().args() { + if symbol_matches(ExactMatch, type_param.name(), &name.name) + && type_param.bounds.find_by_name(searchstr).is_some() + { + out.push(header.to_owned()); + } + } + } + } + out +} + +// scope headers include fn decls, if let, while let etc.. +fn search_scope_headers( + point: BytePos, + scopestart: BytePos, + msrc: Src<'_>, + search_str: &str, + filepath: &Path, + search_type: SearchType, +) -> Vec { + debug!( + "search_scope_headers for |{}| pt: {:?}", + search_str, scopestart + ); + + let get_cxt = |len| MatchCxt { + filepath, + search_type, + search_str, + range: ByteRange::new(0, len), + is_local: true, + }; + let stmtstart = match scopes::find_stmt_start(msrc, scopestart) { + Some(s) => s, + None => return Vec::new(), + }; + let preblock = &msrc[stmtstart.0..scopestart.0]; + debug!("search_scope_headers preblock is |{}|", preblock); + if preblock_is_fn(preblock) { + return search_fn_args_and_generics( + stmtstart, + scopestart, + &msrc, + search_str, + filepath, + search_type, + true, + ); + // 'if let' can be an expression, so might not be at the start of the stmt + } else if let Some(n) = preblock.find("if let") { + let ifletstart = stmtstart + n.into(); + let trimed = msrc[ifletstart.0..scopestart.0].trim(); + if txt_matches(search_type, search_str, trimed) { + let src = trimed.to_owned() + "{}"; + let match_cxt = get_cxt(src.len()); + let mut out = matchers::match_if_let(&src, ifletstart, &match_cxt); + for m in &mut out { + m.point += ifletstart; + } + return out; + } + } else if preblock.starts_with("while let") { + let trimed = msrc[stmtstart.0..scopestart.0].trim(); + if txt_matches(search_type, search_str, trimed) { + let src = trimed.to_owned() + "{}"; + let match_cxt = get_cxt(src.len()); + let mut out = matchers::match_while_let(&src, stmtstart, &match_cxt); + for m in &mut out { + m.point += stmtstart; + } + return out; + } + } else if preblock.starts_with("for ") { + let trimed = msrc[stmtstart.0..scopestart.0].trim(); + if txt_matches(search_type, search_str, trimed) { + let src = trimed.to_owned() + "{}"; + let match_cxt = get_cxt(src.len()); + let mut out = matchers::match_for(&src, stmtstart, &match_cxt); + for m in &mut out { + m.point += stmtstart; + } + return out; + } + } else if preblock.starts_with("impl") { + let trimed = msrc[stmtstart.0..scopestart.0].trim(); + if txt_matches(search_type, search_str, trimed) { + let src = trimed.to_owned() + "{}"; + let match_cxt = get_cxt(0); + let mut out = match matchers::match_impl(src, &match_cxt, stmtstart) { + Some(v) => v, + None => return Vec::new(), + }; + for m in &mut out { + m.local = true; + m.contextstr = 
trimed.to_owned(); + } + return out; + } + } else if let Some(n) = preblock.rfind("match ") { + // TODO: this code is crufty. refactor me! + let matchstart = stmtstart + n.into(); + let matchstmt = typeinf::get_first_stmt(msrc.shift_start(matchstart)); + if !matchstmt.range.contains(point) { + return Vec::new(); + } + // The definition could be in the match LHS arms. Try to find this + let masked_matchstmt = mask_matchstmt(&matchstmt, scopestart.increment() - matchstart); + debug!( + "found match stmt, masked is len {} |{}|", + masked_matchstmt.len(), + masked_matchstmt + ); + + // Locate the match arm LHS by finding the => just before point and then backtracking + // be sure to be on the right side of the ... => ... arm + let arm = match masked_matchstmt[..(point - matchstart).0].rfind("=>") { + None => + // we are in the first arm enum + { + return Vec::new() + } + Some(arm) => { + // be sure not to be in the next arm enum + if let Some(next_arm) = masked_matchstmt[arm + 2..].find("=>") { + let enum_start = scopes::get_start_of_pattern( + &masked_matchstmt, + BytePos(arm + next_arm + 1), + ); + if point > matchstart + enum_start { + return Vec::new(); + } + } + BytePos(arm) + } + }; + + debug!("PHIL matched arm rhs is |{}|", &masked_matchstmt[arm.0..]); + + let lhs_start = scopes::get_start_of_pattern(&msrc, matchstart + arm); + let lhs = &msrc[lhs_start.0..(matchstart + arm).0]; + + // Now create a pretend match expression with just the one match arm in it + let faux_prefix_size = scopestart.increment() - matchstart; + let fauxmatchstmt = format!("{}{{{} => () }};", &msrc[matchstart.0..scopestart.0], lhs); + + debug!("PHIL arm lhs is |{}|", lhs); + debug!( + "PHIL arm fauxmatchstmt is |{}|, {:?}", + fauxmatchstmt, faux_prefix_size + ); + let mut out = Vec::new(); + for pat_range in ast::parse_pat_idents(fauxmatchstmt) { + let (start, end) = ( + lhs_start + pat_range.start - faux_prefix_size, + lhs_start + pat_range.end - faux_prefix_size, + ); + let s = &msrc[start.0..end.0]; + + if symbol_matches(search_type, search_str, s) { + out.push(Match { + matchstr: s.to_owned(), + filepath: filepath.to_path_buf(), + point: start, + coords: None, + local: true, + mtype: MatchType::MatchArm, + contextstr: lhs.trim().to_owned(), + docs: String::new(), + }); + if let SearchType::ExactMatch = search_type { + break; + } + } + } + return out; + } else if let Some(vec) = search_closure_args( + search_str, + preblock, + stmtstart, + point - stmtstart, + filepath, + search_type, + ) { + return vec; + } + Vec::new() +} + +/// Checks if a scope preblock is a function declaration. +// TODO: handle extern ".." 
fn +fn preblock_is_fn(preblock: &str) -> bool { + let s = trim_visibility(preblock); + let p = strip_words(s, &["const", "unsafe", "async"]); + if p.0 < s.len() { + s[p.0..].starts_with("fn") + } else { + false + } +} + +#[test] +fn is_fn() { + assert!(preblock_is_fn("pub fn bar()")); + assert!(preblock_is_fn("fn foo()")); + assert!(preblock_is_fn("async fn foo()")); + assert!(preblock_is_fn("const fn baz()")); + assert!(preblock_is_fn("pub(crate) fn bar()")); + assert!(preblock_is_fn("pub(in foo::bar) fn bar()")); + assert!(preblock_is_fn("crate fn bar()")); + assert!(preblock_is_fn("crate const unsafe fn bar()")); +} + +fn mask_matchstmt(matchstmt_src: &str, innerscope_start: BytePos) -> String { + let s = scopes::mask_sub_scopes(&matchstmt_src[innerscope_start.0..]); + matchstmt_src[..innerscope_start.0].to_owned() + &s +} + +#[test] +fn test_mask_match_stmt() { + let src = " + match foo { + Some(a) => { something } + }"; + let res = mask_matchstmt(src, BytePos(src.find('{').unwrap() + 1)); + debug!("PHIL res is |{}|", res); +} + +fn search_fn_args_and_generics( + fnstart: BytePos, + open_brace_pos: BytePos, + msrc: &str, + searchstr: &str, + filepath: &Path, + search_type: SearchType, + local: bool, +) -> Vec { + let mut out = Vec::new(); + // wrap in 'impl blah {}' so that methods get parsed correctly too + let mut fndecl = "impl blah {".to_owned(); + let offset = fnstart.0 as i32 - fndecl.len() as i32; + let impl_header_len = fndecl.len(); + fndecl += &msrc[fnstart.0..open_brace_pos.increment().0]; + fndecl += "}}"; + debug!( + "search_fn_args: found start of fn!! {:?} |{}| {}", + fnstart, fndecl, searchstr + ); + if !txt_matches(search_type, searchstr, &fndecl) { + return Vec::new(); + } + let (args, generics) = ast::parse_fn_args_and_generics( + fndecl.clone(), + Scope::new(filepath.to_owned(), fnstart), + offset, + ); + for (pat, ty, range) in args { + debug!("search_fn_args: arg pat is {:?}", pat); + if let Some(matchstr) = pat.search_by_name(searchstr, search_type) { + let context_str = &fndecl[range.to_range()]; + if let Some(p) = context_str.find(searchstr) { + let ty = ty.map(|t| t.replace_by_generics(&generics)); + let m = Match { + matchstr: matchstr, + filepath: filepath.to_path_buf(), + point: fnstart + range.start + p.into() - impl_header_len.into(), + coords: None, + local: local, + mtype: MatchType::FnArg(Box::new((pat, ty))), + contextstr: context_str.to_owned(), + docs: String::new(), + }; + out.push(m); + if search_type == SearchType::ExactMatch { + break; + } + } + } + } + for type_param in generics.0 { + if symbol_matches(search_type, searchstr, type_param.name()) { + out.push(type_param.into_match()); + if search_type == SearchType::ExactMatch { + break; + } + } + } + out +} + +#[test] +fn test_do_file_search_std() { + let cache = core::FileCache::default(); + let path = Path::new("."); + let session = Session::new(&cache, Some(path)); + let matches = do_file_search("std", path, &session); + assert!(matches + .into_iter() + .any(|m| m.filepath.ends_with("std/src/lib.rs"))); +} + +#[test] +fn test_do_file_search_local() { + let cache = core::FileCache::default(); + let path = Path::new("fixtures/arst/src"); + let session = Session::new(&cache, Some(path)); + let matches = do_file_search("submodule", path, &session); + assert!(matches + .into_iter() + .any(|m| m.filepath.ends_with("fixtures/arst/src/submodule/mod.rs"))); +} + +pub fn do_file_search(searchstr: &str, currentdir: &Path, session: &Session<'_>) -> Vec { + debug!("do_file_search with search string 
\"{}\"", searchstr); + let mut out = Vec::new(); + + let std_path = RUST_SRC_PATH.as_ref(); + debug!("do_file_search std_path: {:?}", std_path); + + let (v_1, v_2); + let v = if let Some(std_path) = std_path { + v_2 = [std_path, currentdir]; + &v_2[..] + } else { + v_1 = [currentdir]; + &v_1[..] + }; + + debug!("do_file_search v: {:?}", v); + for srcpath in v { + if let Ok(iter) = std::fs::read_dir(srcpath) { + for fpath_buf in iter.filter_map(|res| res.ok().map(|entry| entry.path())) { + // skip filenames that can't be decoded + let fname = match fpath_buf.file_name().and_then(|n| n.to_str()) { + Some(fname) => fname, + None => continue, + }; + // Firstly, try the original layout, e.g. libstd/lib.rs + if fname.starts_with(&format!("lib{}", searchstr)) { + let filepath = fpath_buf.join("lib.rs"); + if filepath.exists() || session.contains_file(&filepath) { + let m = Match { + matchstr: fname[3..].to_owned(), + filepath: filepath.to_path_buf(), + point: BytePos::ZERO, + coords: Some(Coordinate::start()), + local: false, + mtype: MatchType::Module, + contextstr: fname[3..].to_owned(), + docs: String::new(), + }; + out.push(m); + } + } + // Secondly, try the new standard library layout, e.g. std/src/lib.rs + if fname.starts_with(searchstr) { + let filepath = fpath_buf.join("src").join("lib.rs"); + if filepath.exists() || session.contains_file(&filepath) { + let m = Match { + matchstr: fname.to_owned(), + filepath: filepath.to_path_buf(), + point: BytePos::ZERO, + coords: Some(Coordinate::start()), + local: false, + mtype: MatchType::Module, + contextstr: fname.to_owned(), + docs: String::new(), + }; + out.push(m); + } + } + + if fname.starts_with(searchstr) { + for name in &[&format!("{}.rs", fname)[..], "mod.rs", "lib.rs"] { + let filepath = fpath_buf.join(name); + if filepath.exists() || session.contains_file(&filepath) { + let m = Match { + matchstr: fname.to_owned(), + filepath: filepath.to_path_buf(), + point: BytePos::ZERO, + coords: Some(Coordinate::start()), + local: false, + mtype: MatchType::Module, + contextstr: filepath.to_str().unwrap().to_owned(), + docs: String::new(), + }; + out.push(m); + } + } + // try just .rs + if fname.ends_with(".rs") + && (fpath_buf.exists() || session.contains_file(&fpath_buf)) + { + let m = Match { + matchstr: fname[..(fname.len() - 3)].to_owned(), + filepath: fpath_buf.clone(), + point: BytePos::ZERO, + coords: Some(Coordinate::start()), + local: false, + mtype: MatchType::Module, + contextstr: fpath_buf.to_str().unwrap().to_owned(), + docs: String::new(), + }; + out.push(m); + } + } + } + } + } + out +} + +pub fn search_crate_root( + pathseg: &PathSegment, + modfpath: &Path, + searchtype: SearchType, + namespace: Namespace, + session: &Session<'_>, + import_info: &ImportInfo<'_, '_>, + // Skip current file or not + // If we aren't searching paths with global prefix, should do so + skip_modfpath: bool, +) -> Vec { + debug!("search_crate_root |{:?}| {:?}", pathseg, modfpath.display()); + + let mut crateroots = find_possible_crate_root_modules(modfpath.parent().unwrap(), session); + // for cases when file is not part of a project + if crateroots.is_empty() { + crateroots.push(modfpath.to_path_buf()); + } + + let mut out = Vec::new(); + for crateroot in crateroots + .into_iter() + .filter(|c| !skip_modfpath || modfpath != c) + { + debug!( + "going to search for {:?} in crateroot {:?}", + pathseg, + crateroot.display() + ); + for m in resolve_name( + pathseg, + &crateroot, + BytePos::ZERO, + searchtype, + namespace, + session, + import_info, + ) { + 
out.push(m); + if let ExactMatch = searchtype { + break; + } + } + } + out +} + +pub fn find_possible_crate_root_modules(currentdir: &Path, session: &Session<'_>) -> Vec { + let mut res = Vec::new(); + + for root in &["lib.rs", "main.rs"] { + let filepath = currentdir.join(root); + if filepath.exists() || session.contains_file(&filepath) { + res.push(filepath); + return res; // for now stop at the first match + } + } + // recurse up the directory structure + if let Some(parentdir) = currentdir.parent() { + if parentdir != currentdir { + res.append(&mut find_possible_crate_root_modules(parentdir, session)); + return res; // for now stop at the first match + } + } + res +} + +pub fn search_next_scope( + mut startpoint: BytePos, + pathseg: &PathSegment, + filepath: &Path, + search_type: SearchType, + local: bool, + namespace: Namespace, + session: &Session<'_>, + import_info: &ImportInfo<'_, '_>, +) -> Vec { + let filesrc = session.load_source_file(filepath); + if startpoint != BytePos::ZERO { + // is a scope inside the file. Point should point to the definition + // (e.g. mod blah {...}), so the actual scope is past the first open brace. + let src = &filesrc[startpoint.0..]; + // find the opening brace and skip to it. + if let Some(n) = src.find('{') { + startpoint += BytePos(n + 1); + } + } + search_scope( + startpoint, + None, + filesrc.as_src(), + pathseg, + filepath, + search_type, + local, + namespace, + session, + import_info, + ) +} + +pub fn search_scope( + start: BytePos, + complete_point: Option, + src: Src<'_>, + pathseg: &PathSegment, + filepath: &Path, + search_type: SearchType, + is_local: bool, + namespace: Namespace, + session: &Session<'_>, + import_info: &ImportInfo<'_, '_>, +) -> Vec { + let search_str = &pathseg.name; + let mut out = Vec::new(); + + debug!( + "searching scope {:?} start: {:?} point: {:?} '{}' {:?} {:?} local: {}", + namespace, + start, + complete_point, + search_str, + filepath.display(), + search_type, + is_local, + ); + + let scopesrc = src.shift_start(start); + let mut delayed_single_imports = Vec::new(); + let mut delayed_glob_imports = Vec::new(); + let mut codeit = scopesrc.iter_stmts(); + let mut v = Vec::new(); + + let get_match_cxt = |range| MatchCxt { + filepath, + search_str, + search_type, + is_local, + range, + }; + if let Some(point) = complete_point { + // collect up to point so we can search backwards for let bindings + // (these take precidence over local fn declarations etc.. + for blob_range in &mut codeit { + v.push(blob_range); + if blob_range.start > point { + break; + } + } + // search backwards from point for let bindings + for &blob_range in v.iter().rev() { + if (start + blob_range.end) >= point { + continue; + } + let range = blob_range.shift(start); + let match_cxt = get_match_cxt(range); + for m in matchers::match_let(&src, range.start, &match_cxt) { + out.push(m); + if let ExactMatch = search_type { + return out; + } + } + } + } + // since we didn't find a `let` binding, now search from top of scope for items etc.. + let mut codeit = v.into_iter().chain(codeit); + for blob_range in &mut codeit { + let blob = &scopesrc[blob_range.to_range()]; + if util::trim_visibility(blob).starts_with("use") { + // A `use` item can import a value + // with the same name as a "type" (type/module/etc.) in the same scope. + // However, that type might appear after the `use`, + // so we need to process the type first and the `use` later (if necessary). 
+ // If we didn't delay imports, + // we'd try to resolve such a `use` item by recursing onto itself. + + // Optimisation: if the search string is not in the blob and it is not + // a glob import, this cannot match so fail fast! + let is_glob_import = blob.contains("::*"); + if !is_glob_import && !blob.contains(search_str.trim_end_matches('!')) { + continue; + } + + if is_glob_import { + delayed_glob_imports.push(blob_range); + } else { + delayed_single_imports.push(blob_range); + } + continue; + } + + if search_str == "core" && blob.starts_with("#![no_std]") { + debug!("Looking for core and found #![no_std], which implicitly imports it"); + if let Some(cratepath) = get_crate_file("core", filepath, session) { + let context = cratepath.to_str().unwrap().to_owned(); + out.push(Match { + matchstr: "core".into(), + filepath: cratepath, + point: BytePos::ZERO, + coords: Some(Coordinate::start()), + local: false, + mtype: MatchType::Module, + contextstr: context, + docs: String::new(), + }); + } + } + + // Optimisation: if the search string is not in the blob, + // this cannot match so fail fast! + if !blob.contains(search_str.trim_end_matches('!')) { + continue; + } + + // if we find extern block, let's look up inner scope + if blob.starts_with("extern") { + if let Some(block_start) = blob[7..].find('{') { + debug!("[search_scope] found extern block!"); + // move to the point next to { + let start = blob_range.start + BytePos(block_start + 8); + out.extend(search_scope( + start, + None, + src, + pathseg, + filepath, + search_type, + is_local, + namespace, + session, + import_info, + )); + continue; + } + } + // There's a good chance of a match. Run the matchers + let match_cxt = get_match_cxt(blob_range.shift(start)); + out.extend(run_matchers_on_blob( + src, + &match_cxt, + namespace, + session, + import_info, + )); + if let ExactMatch = search_type { + if !out.is_empty() { + return out; + } + } + } + + let delayed_import_len = delayed_single_imports.len() + delayed_glob_imports.len(); + + if delayed_import_len > 0 { + trace!( + "Searching {} delayed imports for `{}`", + delayed_import_len, + search_str + ); + } + + // Finally, process the imports that we skipped before. + // Process single imports first, because they shadow glob imports. + for blob_range in delayed_single_imports + .into_iter() + .chain(delayed_glob_imports) + { + // There's a good chance of a match. 
Run the matchers + let match_cxt = get_match_cxt(blob_range.shift(start)); + for m in run_matchers_on_blob(src, &match_cxt, namespace, session, import_info) { + out.push(m); + if let ExactMatch = search_type { + return out; + } + } + } + + if let Some(point) = complete_point { + if let Some(vec) = search_closure_args( + search_str, + &scopesrc[0..], + start, + point - start, + filepath, + search_type, + ) { + for mat in vec { + out.push(mat); + if let ExactMatch = search_type { + return out; + } + } + } + } + debug!("search_scope found matches {:?} {:?}", search_type, out); + out +} + +fn search_closure_args( + search_str: &str, + scope_src: &str, + scope_src_pos: BytePos, + point: BytePos, + filepath: &Path, + search_type: SearchType, +) -> Option> { + if search_str.is_empty() { + return None; + } + + trace!( + "Closure definition match is looking for `{}` in {} characters", + search_str, + scope_src.len() + ); + + if let Some((pipe_range, body_range)) = util::find_closure(scope_src) { + let pipe_str = &scope_src[pipe_range.to_range()]; + if point < pipe_range.start || point > body_range.end { + return None; + } + + debug!( + "search_closure_args found valid closure arg scope: {}", + pipe_str + ); + if !txt_matches(search_type, search_str, pipe_str) { + return None; + } + // Add a fake body for parsing + let closure_def = String::from(pipe_str) + "{}"; + let scope = Scope::new(filepath.to_owned(), scope_src_pos); + let args = ast::parse_closure_args(closure_def.clone(), scope); + let mut out: Vec = Vec::new(); + for (pat, ty, arg_range) in args { + if let Some(matchstr) = pat.search_by_name(search_str, search_type) { + let context_str = &closure_def[arg_range.to_range()]; + if let Some(p) = context_str.find(search_str) { + let m = Match { + matchstr: matchstr, + filepath: filepath.to_path_buf(), + point: scope_src_pos + pipe_range.start + arg_range.start + p.into(), + coords: None, + local: true, + mtype: MatchType::FnArg(Box::new((pat, ty))), + // TODO: context_str(without pipe) is better? + contextstr: pipe_str.to_owned(), + docs: String::new(), + }; + debug!("search_closure_args matched: {:?}", m); + out.push(m); + } + } + } + return Some(out); + } + None +} + +fn run_matchers_on_blob( + src: Src<'_>, + context: &MatchCxt<'_, '_>, + namespace: Namespace, + session: &Session<'_>, + import_info: &ImportInfo<'_, '_>, +) -> Vec { + debug!( + "[run_matchers_on_blob] cxt: {:?}, namespace: {:?}", + context, namespace + ); + macro_rules! run_matcher_common { + ($ns: expr, $matcher: expr) => { + if namespace.contains($ns) { + if let Some(m) = $matcher { + return vec![m]; + } + } + }; + } + macro_rules! run_matcher { + ($ns: expr, $matcher: path) => { + run_matcher_common!($ns, $matcher(src, context, session)) + }; + } + macro_rules! 
run_const_matcher { + ($ns: expr, $matcher: path) => { + run_matcher_common!($ns, $matcher(&src, context)) + }; + } + run_matcher!(Namespace::Crate, matchers::match_extern_crate); + run_matcher!(Namespace::Mod, matchers::match_mod); + run_matcher!(Namespace::Enum, matchers::match_enum); + run_matcher!(Namespace::Struct, matchers::match_struct); + run_matcher!(Namespace::Union, matchers::match_union); + run_matcher!(Namespace::Trait, matchers::match_trait); + run_matcher!(Namespace::TypeDef, matchers::match_type); + run_matcher!(Namespace::Func, matchers::match_fn); + run_const_matcher!(Namespace::Const, matchers::match_const); + run_const_matcher!(Namespace::Static, matchers::match_static); + // TODO(kngwyu): support use_extern_macros + run_matcher!(Namespace::Global, matchers::match_macro); + let mut out = Vec::new(); + if namespace.intersects(Namespace::PathParen) { + for m in matchers::match_use(src, context, session, import_info) { + out.push(m); + if context.search_type == ExactMatch { + return out; + } + } + } + out +} + +fn search_local_scopes( + pathseg: &PathSegment, + filepath: &Path, + msrc: Src<'_>, + point: BytePos, + search_type: SearchType, + namespace: Namespace, + session: &Session<'_>, + import_info: &ImportInfo<'_, '_>, +) -> Vec { + debug!( + "search_local_scopes {:?} {:?} {:?} {:?} {:?}", + pathseg, + filepath.display(), + point, + search_type, + namespace + ); + + if point == BytePos::ZERO { + // search the whole file + search_scope( + BytePos::ZERO, + None, + msrc, + pathseg, + filepath, + search_type, + true, + namespace, + session, + import_info, + ) + } else { + let mut out = Vec::new(); + let mut start = point; + // search each parent scope in turn + while start > BytePos::ZERO { + start = scopes::scope_start(msrc, start); + for m in search_scope( + start, + Some(point), + msrc, + pathseg, + filepath, + search_type, + true, + namespace, + session, + import_info, + ) { + out.push(m); + if search_type == ExactMatch { + return out; + } + } + if start == BytePos::ZERO { + break; + } + start = start.decrement(); + let searchstr = &pathseg.name; + + // scope headers = fn decls, if let, match, etc.. 
+ for m in search_scope_headers(point, start, msrc, searchstr, filepath, search_type) { + out.push(m); + if let ExactMatch = search_type { + return out; + } + } + } + out + } +} + +pub fn search_prelude_file( + pathseg: &PathSegment, + search_type: SearchType, + namespace: Namespace, + session: &Session<'_>, + import_info: &ImportInfo<'_, '_>, +) -> Vec { + debug!( + "search_prelude file {:?} {:?} {:?}", + pathseg, search_type, namespace + ); + let mut out: Vec = Vec::new(); + + // find the prelude file from the search path and scan it + if let Some(ref std_path) = *RUST_SRC_PATH { + let filepath = std_path.join("std").join("src").join("prelude").join("v1.rs"); + if filepath.exists() || session.contains_file(&filepath) { + let msrc = session.load_source_file(&filepath); + let is_local = true; + for m in search_scope( + BytePos::ZERO, + None, + msrc.as_src(), + pathseg, + &filepath, + search_type, + is_local, + namespace, + session, + import_info, + ) { + out.push(m); + } + } + } + out +} + +pub fn resolve_path_with_primitive( + path: &RacerPath, + filepath: &Path, + pos: BytePos, + search_type: SearchType, + namespace: Namespace, + session: &Session<'_>, +) -> Vec { + debug!("resolve_path_with_primitive {:?}", path); + + let mut out = Vec::new(); + if path.segments.len() == 1 { + primitive::get_primitive_mods(&path.segments[0].name, search_type, &mut out); + if search_type == ExactMatch && !out.is_empty() { + return out; + } + } + let generics = match path.segments.last() { + Some(seg) => &seg.generics, + None => return out, + }; + for mut m in resolve_path( + path, + filepath, + pos, + search_type, + namespace, + session, + &ImportInfo::default(), + ) { + m.resolve_generics(generics); + out.push(m); + if search_type == ExactMatch { + break; + } + } + out +} + +#[derive(PartialEq, Debug)] +pub struct Search { + path: Vec, + filepath: String, + pos: BytePos, +} + +/// Attempt to resolve a name which occurs in a given file. +pub fn resolve_name( + pathseg: &PathSegment, + filepath: &Path, + pos: BytePos, + search_type: SearchType, + namespace: Namespace, + session: &Session<'_>, + import_info: &ImportInfo<'_, '_>, +) -> Vec { + let mut out = Vec::new(); + let searchstr = &pathseg.name; + + let msrc = session.load_source_file(filepath); + let is_exact_match = search_type == ExactMatch; + + if is_exact_match && &searchstr[..] == "Self" { + if let Some(Ty::Match(m)) = + typeinf::get_type_of_self(pos, filepath, true, msrc.as_src(), session) + { + out.push(m.clone()); + } + } + + if (is_exact_match && &searchstr[..] 
== "std") + || (!is_exact_match && "std".starts_with(searchstr)) + { + if let Some(cratepath) = get_std_file("std", session) { + let context = cratepath.to_str().unwrap().to_owned(); + out.push(Match { + matchstr: "std".into(), + filepath: cratepath, + point: BytePos::ZERO, + coords: Some(Coordinate::start()), + local: false, + mtype: MatchType::Module, + contextstr: context, + docs: String::new(), + }); + } + + if is_exact_match && !out.is_empty() { + return out; + } + } + + for m in search_local_scopes( + pathseg, + filepath, + msrc.as_src(), + pos, + search_type, + namespace, + session, + import_info, + ) { + out.push(m); + if is_exact_match { + return out; + } + } + + for m in search_crate_root( + pathseg, + filepath, + search_type, + namespace, + session, + import_info, + true, + ) { + out.push(m); + if is_exact_match { + return out; + } + } + + if namespace.contains(Namespace::Crate) { + out.extend(search_crate_names( + searchstr, + search_type, + filepath, + true, + session, + )); + if is_exact_match && !out.is_empty() { + return out; + } + } + + if namespace.contains(Namespace::Primitive) { + primitive::get_primitive_docs(searchstr, search_type, session, &mut out); + if is_exact_match && !out.is_empty() { + return out; + } + } + if namespace.contains(Namespace::StdMacro) { + get_std_macros(searchstr, search_type, session, &mut out); + if is_exact_match && !out.is_empty() { + return out; + } + } + + for m in search_prelude_file(pathseg, search_type, namespace, session, import_info) { + out.push(m); + if is_exact_match { + return out; + } + } + // filesearch. Used to complete e.g. mod foo + if let StartsWith = search_type { + for m in do_file_search(searchstr, filepath.parent().unwrap(), session) { + out.push(m); + } + } + out +} + +// Get the scope corresponding to super:: +pub fn get_super_scope( + filepath: &Path, + pos: BytePos, + session: &Session<'_>, + import_info: &ImportInfo<'_, '_>, +) -> Option { + let msrc = session.load_source_file(filepath); + let mut path = scopes::get_local_module_path(msrc.as_src(), pos); + debug!( + "get_super_scope: path: {:?} filepath: {:?} {:?} {:?}", + path, filepath, pos, session + ); + if path.is_empty() { + let moduledir = if filepath.ends_with("mod.rs") || filepath.ends_with("lib.rs") { + // Need to go up to directory above + filepath.parent()?.parent()? + } else { + // module is in current directory + filepath.parent()? 
+ }; + + for filename in &["mod.rs", "lib.rs"] { + let f_path = moduledir.join(&filename); + if f_path.exists() || session.contains_file(&f_path) { + return Some(core::Scope { + filepath: f_path, + point: BytePos::ZERO, + }); + } + } + None + } else if path.len() == 1 { + Some(core::Scope { + filepath: filepath.to_path_buf(), + point: BytePos::ZERO, + }) + } else { + path.pop(); + let path = RacerPath::from_svec(false, path); + debug!("get_super_scope looking for local scope {:?}", path); + resolve_path( + &path, + filepath, + BytePos::ZERO, + SearchType::ExactMatch, + Namespace::PathParen, + session, + import_info, + ) + .into_iter() + .nth(0) + .and_then(|m| { + msrc[m.point.0..].find('{').map(|p| core::Scope { + filepath: filepath.to_path_buf(), + point: m.point + BytePos(p + 1), + }) + }) + } +} + +fn get_enum_variants( + search_path: &PathSegment, + search_type: SearchType, + context: &Match, + session: &Session<'_>, +) -> Vec { + let mut out = Vec::new(); + debug!("context: {:?}", context); + match context.mtype { + // TODO(kngwyu): use generics + MatchType::Enum(ref _generics) => { + let filesrc = session.load_source_file(&context.filepath); + let scopestart = scopes::find_stmt_start(filesrc.as_src(), context.point) + .expect("[resolve_path] statement start was not found"); + let scopesrc = filesrc.get_src_from_start(scopestart); + if let Some(blob_range) = scopesrc.iter_stmts().nth(0) { + let match_cxt = MatchCxt { + filepath: &context.filepath, + search_str: &search_path.name, + search_type, + range: blob_range.shift(scopestart), + is_local: true, + }; + for mut enum_var in matchers::match_enum_variants(&filesrc, &match_cxt) { + debug!( + "Found enum variant {} with enum type {}", + enum_var.matchstr, context.matchstr + ); + // return Match which has enum simultaneously, for method completion + enum_var.mtype = MatchType::EnumVariant(Some(Box::new(context.clone()))); + out.push(enum_var); + } + } + } + _ => {} + } + out +} + +fn search_impl_scope( + path: &PathSegment, + search_type: SearchType, + header: &ImplHeader, + session: &Session<'_>, + import_info: &ImportInfo<'_, '_>, +) -> Vec { + let src = session.load_source_file(header.file_path()); + let search_str = &path.name; + let scope_src = src.as_src().shift_start(header.scope_start()); + let mut out = Vec::new(); + for blob_range in scope_src.iter_stmts() { + let match_cxt = MatchCxt { + filepath: header.file_path(), + search_str, + search_type, + is_local: header.is_local(), + range: blob_range.shift(header.scope_start()), + }; + out.extend(run_matchers_on_blob( + src.as_src(), + &match_cxt, + Namespace::Impl, + session, + import_info, + )); + } + out +} + +fn get_impled_items( + search_path: &PathSegment, + search_type: SearchType, + context: &Match, + session: &Session<'_>, + import_info: &ImportInfo<'_, '_>, +) -> Vec { + let mut out = get_enum_variants(search_path, search_type, context, session); + for header in search_for_impls( + context.point, + &context.matchstr, + &context.filepath, + context.local, + session, + ) { + out.extend(search_impl_scope( + &search_path, + search_type, + &header, + session, + import_info, + )); + let trait_match = try_continue!(header.resolve_trait(session, import_info)); + for timpl_header in search_for_generic_impls( + trait_match.point, + &trait_match.matchstr, + &trait_match.filepath, + session, + ) { + debug!("found generic impl!! 
{:?}", timpl_header); + out.extend(search_impl_scope( + &search_path, + search_type, + &timpl_header, + session, + import_info, + )); + } + } + if search_type != ExactMatch { + return out; + } + // for return type inference + if let Some(gen) = context.to_generics() { + for m in &mut out { + if m.mtype == MatchType::Function { + m.mtype = MatchType::Method(Some(Box::new(gen.to_owned()))); + } + } + } + out +} + +pub fn resolve_path( + path: &RacerPath, + filepath: &Path, + pos: BytePos, + search_type: SearchType, + namespace: Namespace, + session: &Session<'_>, + import_info: &ImportInfo<'_, '_>, +) -> Vec { + debug!( + "resolve_path {:?} {:?} {:?} {:?}", + path, + filepath.display(), + pos, + search_type + ); + let len = path.len(); + if let Some(ref prefix) = path.prefix { + match prefix { + // TODO: Crate, Self,.. + PathPrefix::Super => { + if let Some(scope) = get_super_scope(filepath, pos, session, import_info) { + debug!("PHIL super scope is {:?}", scope); + let mut newpath = path.clone(); + newpath.prefix = None; + newpath.set_prefix(); + return resolve_path( + &newpath, + &scope.filepath, + scope.point, + search_type, + namespace, + session, + import_info, + ); + } else { + // can't find super scope. Return no matches + debug!("can't resolve path {:?}, returning no matches", path); + return Vec::new(); + } + } + PathPrefix::Global => { + return resolve_global_path( + path, + filepath, + search_type, + namespace, + session, + import_info, + ) + .unwrap_or_else(Vec::new); + } + _ => {} + } + } + if len == 1 { + let pathseg = &path.segments[0]; + resolve_name( + pathseg, + filepath, + pos, + search_type, + namespace, + session, + import_info, + ) + } else if len != 0 { + let mut parent_path = path.clone(); + let last_seg = parent_path.segments.pop().unwrap(); + let context = resolve_path( + &parent_path, + filepath, + pos, + ExactMatch, + Namespace::PathParen, + session, + import_info, + ) + .into_iter() + .nth(0); + debug!( + "[resolve_path] context: {:?}, last_seg: {:?}", + context, last_seg + ); + if let Some(followed_match) = context { + resolve_following_path( + followed_match, + &last_seg, + namespace, + search_type, + import_info, + session, + ) + } else { + Vec::new() + } + } else { + // TODO: Should this better be an assertion ? Why do we have a core::Path + // with empty segments in the first place ? 
+ Vec::new() + } +} + +/// resolve paths like ::path::to::file +fn resolve_global_path( + path: &RacerPath, + filepath: &Path, + search_type: SearchType, + namespace: Namespace, + session: &Session<'_>, + import_info: &ImportInfo<'_, '_>, +) -> Option> { + let mut segs = path.segments.iter().enumerate(); + let first_stype = if path.segments.len() == 1 { + search_type + } else { + SearchType::ExactMatch + }; + let mut context = search_crate_root( + segs.next()?.1, + filepath, + first_stype, + namespace, + session, + import_info, + false, + ); + for (i, seg) in segs { + let cxt = context.into_iter().next()?; + let is_last = i + 1 == path.segments.len(); + let stype = if is_last { + search_type + } else { + SearchType::ExactMatch + }; + context = resolve_following_path(cxt, seg, namespace, stype, import_info, session); + } + Some(context) +} + +fn resolve_following_path( + followed_match: Match, + following_seg: &PathSegment, + namespace: Namespace, + search_type: SearchType, + import_info: &ImportInfo<'_, '_>, + session: &Session<'_>, +) -> Vec { + match followed_match.mtype { + MatchType::Module | MatchType::Crate => { + let mut searchstr: &str = &following_seg.name; + if let Some(i) = searchstr.rfind(',') { + searchstr = searchstr[i + 1..].trim(); + } + if searchstr.starts_with('{') { + searchstr = &searchstr[1..]; + } + let pathseg = PathSegment::new(searchstr.to_owned(), vec![], None); + debug!( + "searching a module '{}' for {}", + followed_match.matchstr, pathseg.name, + ); + search_next_scope( + followed_match.point, + &pathseg, + &followed_match.filepath, + search_type, + false, + namespace, + session, + import_info, + ) + } + MatchType::Enum(_) | MatchType::Struct(_) | MatchType::Union(_) => get_impled_items( + following_seg, + search_type, + &followed_match, + session, + import_info, + ), + MatchType::Trait => search_for_trait_items( + followed_match, + &following_seg.name, + search_type, + true, + true, + session, + ) + .collect(), + MatchType::TypeParameter(bounds) => bounds + .get_traits(session) + .into_iter() + .map(|m| { + search_for_trait_items(m, &following_seg.name, search_type, true, true, session) + }) + .flatten() + .collect(), + MatchType::Type => { + if let Some(match_) = typeinf::get_type_of_typedef(&followed_match, session) { + get_impled_items(following_seg, search_type, &match_, session, import_info) + } else { + // TODO: Should use STUB here + Vec::new() + } + } + MatchType::UseAlias(inner) => resolve_following_path( + *inner, + following_seg, + namespace, + search_type, + import_info, + session, + ), + _ => Vec::new(), + } +} + +pub fn resolve_method( + point: BytePos, + msrc: Src<'_>, + searchstr: &str, + filepath: &Path, + search_type: SearchType, + session: &Session<'_>, + import_info: &ImportInfo<'_, '_>, +) -> Vec { + let scopestart = scopes::scope_start(msrc, point); + debug!( + "resolve_method for |{}| pt: {:?}; scopestart: {:?}", + searchstr, point, scopestart, + ); + + let parent_scope = match scopestart.try_decrement() { + Some(x) => x, + None => return vec![], + }; + + if let Some(stmtstart) = scopes::find_stmt_start(msrc, parent_scope) { + let preblock = &msrc[stmtstart.0..scopestart.0]; + debug!("search_scope_headers preblock is |{}|", preblock); + + if preblock.starts_with("impl") { + if let Some(n) = preblock.find(" for ") { + let start = scopes::get_start_of_search_expr(preblock, n.into()); + let expr = &preblock[start.0..n]; + + debug!("found impl of trait : expr is |{}|", expr); + let path = RacerPath::from_vec(false, 
expr.split("::").collect::>()); + let m = resolve_path( + &path, + filepath, + stmtstart + BytePos(n - 1), + SearchType::ExactMatch, + Namespace::Trait, + session, + import_info, + ) + .into_iter() + .filter(|m| m.mtype == MatchType::Trait) + .nth(0); + if let Some(m) = m { + debug!("found trait : match is |{:?}|", m); + let mut out = Vec::new(); + let src = session.load_source_file(&m.filepath); + if let Some(n) = src[m.point.0..].find('{') { + let point = m.point + BytePos(n + 1); + for m in search_scope_for_methods( + point, + src.as_src(), + searchstr, + &m.filepath, + true, + false, + search_type, + session, + ) { + out.push(m); + } + } + + trace!( + "Found {} methods matching `{}` for trait `{}`", + out.len(), + searchstr, + m.matchstr + ); + + return out; + } + } + } + } + + Vec::new() +} + +pub fn do_external_search( + path: &[&str], + filepath: &Path, + pos: BytePos, + search_type: SearchType, + namespace: Namespace, + session: &Session<'_>, +) -> Vec { + debug!( + "do_external_search path {:?} {:?}", + path, + filepath.display() + ); + let mut out = Vec::new(); + if path.len() == 1 { + let searchstr = path[0]; + // hack for now + let pathseg = PathSegment::new(path[0].to_owned(), vec![], None); + out.extend(search_next_scope( + pos, + &pathseg, + filepath, + search_type, + false, + namespace, + session, + &ImportInfo::default(), + )); + + if let Some(path) = get_module_file(searchstr, filepath.parent().unwrap(), session) { + let context = path.to_str().unwrap().to_owned(); + out.push(Match { + matchstr: searchstr.to_owned(), + filepath: path, + point: BytePos::ZERO, + coords: Some(Coordinate::start()), + local: false, + mtype: MatchType::Module, + contextstr: context, + docs: String::new(), + }); + } + } else { + let parent_path = &path[..(path.len() - 1)]; + let context = do_external_search( + parent_path, + filepath, + pos, + ExactMatch, + Namespace::PathParen, + session, + ) + .into_iter() + .nth(0); + context.map(|m| { + let import_info = &ImportInfo::default(); + match m.mtype { + MatchType::Module => { + debug!("found an external module {}", m.matchstr); + // deal with started with "{", so that "foo::{bar" will be same as "foo::bar" + let searchstr = match path[path.len() - 1].chars().next() { + Some('{') => &path[path.len() - 1][1..], + _ => path[path.len() - 1], + }; + let pathseg = PathSegment::new(searchstr.to_owned(), vec![], None); + for m in search_next_scope( + m.point, + &pathseg, + &m.filepath, + search_type, + false, + namespace, + session, + import_info, + ) { + out.push(m); + } + } + + MatchType::Struct(_) => { + debug!("found a pub struct. 
Now need to look for impl"); + for impl_header in + search_for_impls(m.point, &m.matchstr, &m.filepath, m.local, session) + { + // deal with started with "{", so that "foo::{bar" will be same as "foo::bar" + let searchstr = match path[path.len() - 1].chars().next() { + Some('{') => &path[path.len() - 1][1..], + _ => path[path.len() - 1], + }; + let pathseg = PathSegment::new(searchstr.to_owned(), vec![], None); + debug!("about to search impl scope..."); + for m in search_next_scope( + impl_header.impl_start(), + &pathseg, + impl_header.file_path(), + search_type, + impl_header.is_local(), + namespace, + session, + import_info, + ) { + out.push(m); + } + } + } + _ => (), + } + }); + } + out +} + +/// collect inherited traits by Depth First Search +fn collect_inherited_traits(trait_match: Match, s: &Session<'_>) -> Vec { + // search node + struct Node { + target_str: String, + offset: i32, + filepath: PathBuf, + } + impl Node { + fn from_match(m: &Match) -> Self { + let target_str = m.contextstr.to_owned() + "{}"; + let offset = m.point.0 as i32 - "trait ".len() as i32; + Node { + target_str: target_str, + offset: offset, + filepath: m.filepath.clone(), + } + } + } + // DFS stack + let mut stack = vec![Node::from_match(&trait_match)]; + // we have to store hashes of trait names to prevent infinite loop! + let mut trait_names = HashSet::new(); + trait_names.insert(calculate_str_hash(&trait_match.matchstr)); + let mut res = vec![trait_match]; + // DFS + while let Some(t) = stack.pop() { + if let Some(bounds) = ast::parse_inherited_traits(t.target_str, t.filepath, t.offset) { + let traits = bounds.get_traits(s); + let filtered = traits.into_iter().filter(|tr| { + let hash = calculate_str_hash(&tr.matchstr); + if trait_names.contains(&hash) { + return false; + } + trait_names.insert(hash); + let tr_info = Node::from_match(&tr); + stack.push(tr_info); + true + }); + res.extend(filtered); + } + } + res +} + +pub fn search_for_fields_and_methods( + context: Match, + searchstr: &str, + search_type: SearchType, + only_methods: bool, + session: &Session<'_>, +) -> Vec { + let m = context; + let mut out = Vec::new(); + match m.mtype { + MatchType::Struct(_) | MatchType::Union(_) => { + debug!( + "got a struct or union, looking for fields and impl methods!! {}", + m.matchstr, + ); + if !only_methods { + for m in search_struct_fields(searchstr, &m, search_type, session) { + out.push(m); + } + } + for m in search_for_impl_methods( + &m, + searchstr, + m.point, + &m.filepath, + m.local, + search_type, + session, + ) { + out.push(m); + } + } + MatchType::Builtin(kind) => { + if let Some(files) = kind.get_impl_files() { + for file in files { + for m in search_for_impl_methods( + &m, + searchstr, + BytePos::ZERO, + &file, + false, + search_type, + session, + ) { + out.push(m); + } + } + } + } + MatchType::Enum(_) => { + debug!("got an enum, looking for impl methods {}", m.matchstr); + for m in search_for_impl_methods( + &m, + searchstr, + m.point, + &m.filepath, + m.local, + search_type, + session, + ) { + out.push(m); + } + } + MatchType::Trait => { + debug!("got a trait, looking for methods {}", m.matchstr); + out.extend(search_for_trait_methods(m, searchstr, search_type, session)) + } + MatchType::TypeParameter(bounds) => { + debug!("got a trait bound, looking for methods {}", m.matchstr); + let traits = bounds.get_traits(session); + traits.into_iter().for_each(|m| { + out.extend(search_for_trait_methods(m, searchstr, search_type, session)) + }); + } + _ => { + debug!( + "WARN!! 
context wasn't a Struct, Enum, Builtin or Trait {:?}", + m + ); + } + }; + out +} + +#[inline(always)] +fn search_for_trait_methods<'s, 'sess: 's>( + traitm: Match, + search_str: &'s str, + search_type: SearchType, + session: &'sess Session<'sess>, +) -> impl 's + Iterator { + search_for_trait_items(traitm, search_str, search_type, false, false, session) +} + +// search trait items by search_str +fn search_for_trait_items<'s, 'sess: 's>( + traitm: Match, + search_str: &'s str, + search_type: SearchType, + includes_assoc_fn: bool, + includes_assoc_ty_and_const: bool, + session: &'sess Session<'sess>, +) -> impl 's + Iterator { + let traits = collect_inherited_traits(traitm, session); + traits + .into_iter() + .filter_map(move |tr| { + let src = session.load_source_file(&tr.filepath); + src[tr.point.0..].find('{').map(|start| { + search_scope_for_methods( + tr.point + BytePos(start + 1), + src.as_src(), + search_str, + &tr.filepath, + includes_assoc_fn, + includes_assoc_ty_and_const, + search_type, + session, + ) + }) + }) + .flatten() +} + +fn search_for_deref_matches( + target_ty: Ty, // target = ~ + type_match: &Match, // the type which implements Deref + impl_header: &ImplHeader, + fieldsearchstr: &str, + session: &Session<'_>, +) -> Vec { + match target_ty { + Ty::PathSearch(ref paths) => { + let ty = match get_assoc_type_from_header(&paths.path, type_match, impl_header, session) + { + Some(t) => t, + None => return vec![], + }; + get_field_matches_from_ty(ty, fieldsearchstr, SearchType::StartsWith, session) + } + _ => get_field_matches_from_ty(target_ty, fieldsearchstr, SearchType::StartsWith, session), + } +} + +pub(crate) fn get_field_matches_from_ty( + ty: Ty, + searchstr: &str, + stype: SearchType, + session: &Session<'_>, +) -> Vec { + match ty { + Ty::Match(m) => search_for_fields_and_methods(m, searchstr, stype, false, session), + Ty::PathSearch(paths) => paths.resolve_as_match(session).map_or_else(Vec::new, |m| { + search_for_fields_and_methods(m, searchstr, stype, false, session) + }), + Ty::Self_(scope) => { + let msrc = session.load_source_file(&scope.filepath); + let ty = typeinf::get_type_of_self( + scope.point, + &scope.filepath, + true, + msrc.as_src(), + session, + ); + match ty { + Some(Ty::Match(m)) => { + search_for_fields_and_methods(m, searchstr, stype, false, session) + } + _ => Vec::new(), + } + } + Ty::Tuple(v) => get_tuple_field_matches(v.len(), searchstr, stype, session).collect(), + Ty::RefPtr(ty, _) => { + // TODO(kngwyu): support impl &Type {..} + get_field_matches_from_ty(*ty, searchstr, stype, session) + } + Ty::Array(_, _) | Ty::Slice(_) => { + let mut m = primitive::PrimKind::Slice.to_module_match().unwrap(); + m.matchstr = "[T]".to_owned(); + search_for_fields_and_methods(m, searchstr, stype, false, session) + } + Ty::TraitObject(traitbounds) => traitbounds + .into_iter() + .flat_map(|ps| get_field_matches_from_ty(Ty::PathSearch(ps), searchstr, stype, session)) + .collect(), + Ty::Future(_, scope) => get_future(scope, session) + .into_iter() + .flat_map(|f| search_for_trait_methods(f, searchstr, stype, session)) + .chain( + txt_matches_with_pos(stype, searchstr, "await") + .and_then(|_| PrimKind::Await.to_doc_match(session)) + .into_iter(), + ) + .collect(), + _ => vec![], + } +} + +fn get_future(scope: Scope, session: &Session<'_>) -> Option { + let path = RacerPath::from_iter( + false, + ["std", "future", "Future"].iter().map(|s| s.to_string()), + ); + + ast::find_type_match(&path, &scope.filepath, scope.point, session) +} + +fn 
get_assoc_type_from_header( + target_path: &RacerPath, // type target = ~ + type_match: &Match, // the type which implements trait + impl_header: &ImplHeader, + session: &Session<'_>, +) -> Option { + debug!( + "[search_for_deref_matches] target: {:?} impl: {:?}", + target_path, impl_header + ); + if let Some((pos, _)) = impl_header.generics().search_param_by_path(target_path) { + type_match + .resolved_generics() + .nth(pos) + .map(|x| x.to_owned()) + } else { + resolve_path_with_primitive( + &target_path, + impl_header.file_path(), + BytePos::ZERO, + SearchType::ExactMatch, + Namespace::Type, + session, + ) + .into_iter() + .next() + .map(Ty::Match) + } +} + +fn get_std_macros( + searchstr: &str, + search_type: SearchType, + session: &Session<'_>, + out: &mut Vec, +) { + let std_path = if let Some(ref p) = *RUST_SRC_PATH { + p + } else { + return; + }; + let searchstr = if searchstr.ends_with("!") { + let len = searchstr.len(); + &searchstr[..len - 1] + } else { + searchstr + }; + for macro_file in &[ + "std/src/macros.rs", + "core/src/macros.rs", + "core/src/macros/mod.rs", + "alloc/src/macros.rs", + ] { + let macro_path = std_path.join(macro_file); + if !macro_path.exists() { + continue; + } + get_std_macros_( + ¯o_path, + searchstr, + macro_file == &"core/src/macros.rs", + search_type, + session, + out, + ); + } +} + +fn get_std_macros_( + macro_path: &Path, + searchstr: &str, + is_core: bool, + search_type: SearchType, + session: &Session<'_>, + out: &mut Vec, +) { + let raw_src = session.load_raw_file(¯o_path); + let src = session.load_source_file(¯o_path); + let mut export = false; + let mut get_macro_def = |blob: &str| -> Option<(BytePos, String)> { + if blob.starts_with("#[macro_export]") | blob.starts_with("#[rustc_doc_only_macro]") { + export = true; + return None; + } + if !export { + return None; + } + if !blob.starts_with("macro_rules!") { + return None; + } + export = false; + let mut start = BytePos(12); + for &b in blob[start.0..].as_bytes() { + match b { + b if util::is_whitespace_byte(b) => start = start.increment(), + _ => break, + } + } + if !blob[start.0..].starts_with(searchstr) { + return None; + } + let end = find_ident_end(blob, start + BytePos(searchstr.len())); + let mut matchstr = blob[start.0..end.0].to_owned(); + if search_type == SearchType::ExactMatch && searchstr != matchstr { + return None; + } + matchstr.push_str("!"); + Some((start, matchstr)) + }; + let mut builtin_start = None; + out.extend(src.as_src().iter_stmts().filter_map(|range| { + let blob = &src[range.to_range()]; + // for builtin macros in libcore/macros.rs + if is_core && blob.starts_with("mod builtin") { + builtin_start = blob.find("#").map(|u| range.start + u.into()); + } + let (offset, matchstr) = get_macro_def(blob)?; + let start = range.start + offset; + Some(Match { + matchstr, + filepath: macro_path.to_owned(), + point: start, + coords: raw_src.point_to_coords(start), + local: false, + mtype: MatchType::Macro, + contextstr: matchers::first_line(blob), + docs: matchers::find_doc(&raw_src, range.start), + }) + })); + if let Some(builtin_start) = builtin_start { + let mod_src = src.get_src_from_start(builtin_start); + out.extend(mod_src.iter_stmts().filter_map(|range| { + let blob = &mod_src[range.to_range()]; + let (offset, matchstr) = get_macro_def(blob)?; + let start = builtin_start + range.start + offset; + Some(Match { + matchstr, + filepath: macro_path.to_owned(), + point: start, + coords: raw_src.point_to_coords(start), + local: false, + mtype: MatchType::Macro, + contextstr: 
matchers::first_line(blob), + docs: matchers::find_doc(&raw_src, range.start), + }) + })); + } +} + +pub(crate) fn get_iter_item(selfm: &Match, session: &Session<'_>) -> Option { + let iter_header = search_trait_impls( + selfm.point, + &selfm.matchstr, + &["IntoIterator", "Iterator"], + true, + &selfm.filepath, + selfm.local, + session, + ) + .into_iter() + .next()?; + let item = search_scope_for_impled_assoc_types( + &iter_header, + "Item", + core::SearchType::ExactMatch, + session, + ); + item.into_iter() + .next() + .and_then(|(_, item_ty)| match item_ty { + Ty::PathSearch(paths) => { + get_assoc_type_from_header(&paths.path, selfm, &iter_header, session) + } + _ => Some(item_ty), + }) +} + +pub(crate) fn get_tuple_field_matches<'a, 'b: 'a>( + fields: usize, + search_str: &'a str, + search_type: SearchType, + session: &'b Session<'_>, +) -> impl 'a + Iterator { + util::gen_tuple_fields(fields).filter_map(move |field| { + if txt_matches(search_type, search_str, field) { + primitive::PrimKind::Tuple + .to_doc_match(session) + .map(|mut m| { + m.matchstr = field.to_owned(); + m.mtype = MatchType::StructField; + m + }) + } else { + None + } + }) +} + +pub(crate) fn get_index_output(selfm: &Match, session: &Session<'_>) -> Option { + // short cut + if selfm.matchstr == "Vec" { + return selfm.resolved_generics().next().map(|ty| ty.to_owned()); + } + let index_header = search_trait_impls( + selfm.point, + &selfm.matchstr, + &["Index"], + true, + &selfm.filepath, + selfm.local, + session, + ) + .into_iter() + .next()?; + get_associated_type_match(&index_header, "Output", selfm, session) +} + +pub(crate) fn get_associated_type_match( + impl_header: &ImplHeader, + type_name: &str, + context: &Match, + session: &Session<'_>, +) -> Option { + let output = search_scope_for_impled_assoc_types( + impl_header, + type_name, + core::SearchType::ExactMatch, + session, + ); + output + .into_iter() + .next() + .and_then(|(_, item_ty)| match item_ty { + Ty::PathSearch(paths) => { + get_assoc_type_from_header(&paths.path, context, impl_header, session) + } + _ => Some(item_ty), + }) +} + +pub(crate) fn get_struct_fields( + path: &RacerPath, + search_str: &str, + filepath: &Path, + complete_pos: BytePos, + stype: SearchType, + session: &Session<'_>, +) -> Vec { + resolve_path( + &path, + filepath, + complete_pos, + SearchType::ExactMatch, + Namespace::HasField, + session, + &ImportInfo::default(), + ) + .into_iter() + .next() + .map_or_else( + || Vec::new(), + |m| match m.mtype { + MatchType::Struct(_) | MatchType::EnumVariant(_) => { + search_struct_fields(search_str, &m, stype, session) + } + MatchType::Type => { + let m = try_vec!(typeinf::get_type_of_typedef(&m, session)); + search_struct_fields(search_str, &m, stype, session) + } + MatchType::UseAlias(m) => search_struct_fields(search_str, &*m, stype, session), + _ => Vec::new(), + }, + ) +} + +/// Checks if trait_impl is the impl TraitName +fn has_impl_for_other_type( + trait_impl: &ImplHeader, + trait_name: &str, + other_type: Option<&str>, +) -> bool { + if let Some(ref path) = trait_impl.trait_path() { + if path.name() == Some(trait_name) { + if other_type.is_none() && path.segments[0].generics.len() == 0 { + return true; + } + if let Some(ty) = path.segments[0].generics.get(0) { + return match ty.to_owned().dereference() { + // TODO: Handle generics arguments + Ty::PathSearch(ref g) => other_type == g.path.name(), + _ => false, + }; + } + // default is self + return trait_impl.self_path().name() == other_type; + } + } + false +} + +/// Resolves the 
type of a binary expression +/// # Arguments +/// * base_type: the type on the left hand side +/// * node: the operator +/// * other_type: the type on the right hand side +pub(crate) fn resolve_binary_expr_type( + base_type: &Match, + node: BinOpKind, + other_type: Option<&str>, + session: &Session<'_>, +) -> Option { + let trait_name = typeinf::get_operator_trait(node); + if trait_name == "bool" { + return PrimKind::Bool.to_module_match().map(Ty::Match); + } + + let matching_impl = search_trait_impls( + base_type.point, + &base_type.matchstr, + &[trait_name], + false, + &base_type.filepath, + base_type.local, + session, + ) + .into_iter() + .filter(|trait_impl| has_impl_for_other_type(trait_impl, trait_name, other_type)) + .next(); + if let Some(matching_impl) = matching_impl { + get_associated_type_match(&matching_impl, "Output", &base_type, session) + .or_else(|| Some(Ty::Match(base_type.clone()))) + } else { + // default to base type if an impl can't be found + Some(Ty::Match(base_type.clone())) + } +} diff --git a/racer/src/racer/primitive.rs b/racer/src/racer/primitive.rs new file mode 100644 index 0000000000..e9e759226a --- /dev/null +++ b/racer/src/racer/primitive.rs @@ -0,0 +1,231 @@ +use crate::core::{BytePos, Match, MatchType, Namespace, SearchType, Session}; +use crate::matchers::ImportInfo; +use crate::nameres::{self, RUST_SRC_PATH}; +use rustc_ast::ast::{IntTy, LitIntType, UintTy}; +use std::path::PathBuf; + +const PRIM_DOC: &str = "std/src/primitive_docs.rs"; +const KEY_DOC: &str = "std/src/keyword_docs.rs"; + +#[derive(Clone, Copy, Debug, Eq, PartialEq)] +pub enum PrimKind { + Bool, + Never, + Char, + Unit, + Pointer, + Array, + Slice, + Str, + Tuple, + F32, + F64, + I8, + I16, + I32, + I64, + I128, + U8, + U16, + U32, + U64, + U128, + Isize, + Usize, + Ref, + Fn, + Await, +} + +const PRIM_MATCHES: [PrimKind; 17] = [ + PrimKind::Bool, + PrimKind::Char, + PrimKind::Str, + PrimKind::F32, + PrimKind::F64, + PrimKind::I8, + PrimKind::I16, + PrimKind::I32, + PrimKind::I64, + PrimKind::I128, + PrimKind::U8, + PrimKind::U16, + PrimKind::U32, + PrimKind::U64, + PrimKind::U128, + PrimKind::Isize, + PrimKind::Usize, +]; + +impl PrimKind { + pub(crate) fn from_litint(lit: LitIntType) -> Self { + match lit { + LitIntType::Signed(i) => match i { + IntTy::I8 => PrimKind::I8, + IntTy::I16 => PrimKind::I16, + IntTy::I32 => PrimKind::I32, + IntTy::I64 => PrimKind::I64, + IntTy::I128 => PrimKind::I128, + IntTy::Isize => PrimKind::Isize, + }, + LitIntType::Unsigned(u) => match u { + UintTy::U8 => PrimKind::U8, + UintTy::U16 => PrimKind::U16, + UintTy::U32 => PrimKind::U32, + UintTy::U64 => PrimKind::U64, + UintTy::U128 => PrimKind::U128, + UintTy::Usize => PrimKind::Usize, + }, + LitIntType::Unsuffixed => PrimKind::U32, + } + } + fn impl_files(self) -> Option<&'static [&'static str]> { + match self { + PrimKind::Bool => None, + PrimKind::Never => None, + PrimKind::Char => Some(&["core/src/char/methods.rs"]), + PrimKind::Unit => None, + PrimKind::Pointer => Some(&["core/src/ptr.rs"]), + PrimKind::Array => None, + PrimKind::Slice => Some(&["core/src/slice/mod.rs", "alloc/src/slice.rs"]), + PrimKind::Str => Some(&["core/src/str/mod.rs", "alloc/src/str.rs"]), + PrimKind::Tuple => None, + PrimKind::F32 => Some(&["std/src/f32.rs", "core/src/num/f32.rs"]), + PrimKind::F64 => Some(&["std/src/f64.rs", "core/src/num/f64.rs"]), + PrimKind::I8 => Some(&["core/src/num/mod.rs"]), + PrimKind::I16 => Some(&["core/src/num/mod.rs"]), + PrimKind::I32 => Some(&["core/src/num/mod.rs"]), + PrimKind::I64 => 
Some(&["core/src/num/mod.rs"]), + PrimKind::I128 => Some(&["core/src/num/mod.rs"]), + PrimKind::U8 => Some(&["core/src/num/mod.rs"]), + PrimKind::U16 => Some(&["core/src/num/mod.rs"]), + PrimKind::U32 => Some(&["core/src/num/mod.rs"]), + PrimKind::U64 => Some(&["core/src/num/mod.rs"]), + PrimKind::U128 => Some(&["core/src/num/mod.rs"]), + PrimKind::Isize => Some(&["core/src/num/mod.rs"]), + PrimKind::Usize => Some(&["core/src/num/mod.rs"]), + PrimKind::Ref => None, + PrimKind::Fn => None, + PrimKind::Await => None, + } + } + fn is_keyword(self) -> bool { + match self { + PrimKind::Await => true, + _ => false, + } + } + fn match_name(self) -> &'static str { + match self { + PrimKind::Bool => "bool", + PrimKind::Never => "never", + PrimKind::Char => "char", + PrimKind::Unit => "unit", + PrimKind::Pointer => "pointer", + PrimKind::Array => "array", + PrimKind::Slice => "slice", + PrimKind::Str => "str", + PrimKind::Tuple => "tuple", + PrimKind::F32 => "f32", + PrimKind::F64 => "f64", + PrimKind::I8 => "i8", + PrimKind::I16 => "i16", + PrimKind::I32 => "i32", + PrimKind::I64 => "i64", + PrimKind::I128 => "i128", + PrimKind::U8 => "u8", + PrimKind::U16 => "u16", + PrimKind::U32 => "u32", + PrimKind::U64 => "u64", + PrimKind::U128 => "u128", + PrimKind::Isize => "isize", + PrimKind::Usize => "usize", + PrimKind::Ref => "ref", + PrimKind::Fn => "fn", + PrimKind::Await => "await", + } + } + pub(crate) fn get_impl_files(&self) -> Option> { + let src_path = RUST_SRC_PATH.as_ref()?; + let impls = self.impl_files()?; + Some(impls.iter().map(|file| src_path.join(file)).collect()) + } + pub fn to_module_match(self) -> Option { + let _impl_files = self.impl_files()?; + Some(Match { + matchstr: self.match_name().to_owned(), + filepath: PathBuf::new(), + point: BytePos::ZERO, + coords: None, + local: false, + mtype: MatchType::Builtin(self), + contextstr: String::new(), + docs: String::new(), + }) + } + pub fn to_doc_match(self, session: &Session<'_>) -> Option { + let src_path = RUST_SRC_PATH.as_ref()?; + let (path, seg) = if self.is_keyword() { + ( + src_path.join(KEY_DOC), + format!("{}_keyword", self.match_name()), + ) + } else { + ( + src_path.join(PRIM_DOC), + format!("prim_{}", self.match_name()), + ) + }; + let mut m = nameres::resolve_name( + &seg.into(), + &path, + BytePos::ZERO, + SearchType::ExactMatch, + Namespace::Mod, + session, + &ImportInfo::default(), + ) + .into_iter() + .next()?; + m.mtype = MatchType::Builtin(self); + m.matchstr = self.match_name().to_owned(); + Some(m) + } +} + +pub fn get_primitive_docs( + searchstr: &str, + stype: SearchType, + session: &Session<'_>, + out: &mut Vec, +) { + for prim in PRIM_MATCHES.iter() { + let prim_str = prim.match_name(); + if (stype == SearchType::StartsWith && prim_str.starts_with(searchstr)) + || (stype == SearchType::ExactMatch && prim_str == searchstr) + { + if let Some(m) = prim.to_doc_match(session) { + out.push(m); + if stype == SearchType::ExactMatch { + return; + } + } + } + } +} + +pub fn get_primitive_mods(searchstr: &str, stype: SearchType, out: &mut Vec) { + for prim in PRIM_MATCHES.iter() { + let prim_str = prim.match_name(); + if (stype == SearchType::StartsWith && prim_str.starts_with(searchstr)) + || (stype == SearchType::ExactMatch && prim_str == searchstr) + { + if let Some(matches) = prim.to_module_match() { + out.push(matches); + if stype == SearchType::ExactMatch { + return; + } + } + } + } +} diff --git a/racer/src/racer/project_model.rs b/racer/src/racer/project_model.rs new file mode 100644 index 0000000000..67ace8680a 
--- /dev/null +++ b/racer/src/racer/project_model.rs @@ -0,0 +1,19 @@ +use std::path::{Path, PathBuf}; + +#[derive(Clone, Copy, Debug, Eq, PartialEq, Ord, PartialOrd)] +pub enum Edition { + Ed2015, + Ed2018, + Ed2021, +} + +pub trait ProjectModelProvider { + fn edition(&self, manifest: &Path) -> Option; + fn discover_project_manifest(&self, path: &Path) -> Option; + fn search_dependencies( + &self, + manifest: &Path, + search_fn: Box bool>, + ) -> Vec<(String, PathBuf)>; + fn resolve_dependency(&self, manifest: &Path, dep_name: &str) -> Option; +} diff --git a/racer/src/racer/scopes.rs b/racer/src/racer/scopes.rs new file mode 100644 index 0000000000..991c0564e4 --- /dev/null +++ b/racer/src/racer/scopes.rs @@ -0,0 +1,903 @@ +use crate::ast_types::Path as RacerPath; +#[cfg(test)] +use crate::core::{self, Coordinate}; +use crate::core::{BytePos, ByteRange, CompletionType, Namespace, RangedRawSrc, Src}; + +use crate::util::{self, char_at}; +use std::iter::Iterator; +use std::path::{Path, PathBuf}; +use std::str::from_utf8; + +fn find_close<'a, A>(iter: A, open: u8, close: u8, level_end: u32) -> Option +where + A: Iterator, +{ + let mut levels = 0u32; + for (count, &b) in iter.enumerate() { + if b == close { + if levels == level_end { + return Some(count.into()); + } + if levels == 0 { + return None; + } + levels -= 1; + } else if b == open { + levels += 1; + } + } + None +} + +// expected to use with +fn find_close_with_pos<'a>( + iter: impl Iterator, + open: u8, + close: u8, + level_end: u32, +) -> Option { + let mut levels = 0u32; + for (pos, &c) in iter { + if c == close { + if levels == level_end { + // +1 for compatibility with find_close + return Some(BytePos(pos).increment()); + } + if levels == 0 { + return None; + } + levels -= 1; + } else if c == open { + levels += 1; + } + } + None +} + +pub fn find_closing_paren(src: &str, pos: BytePos) -> BytePos { + find_close(src.as_bytes()[pos.0..].iter(), b'(', b')', 0) + .map_or(src.len().into(), |count| pos + count) +} + +pub fn find_closure_scope_start( + src: Src<'_>, + point: BytePos, + parentheses_open_pos: BytePos, +) -> Option { + let closing_paren_pos = find_closing_paren(&src[..], point - parentheses_open_pos); + let src_between_parent = &src[..closing_paren_pos.0]; + util::closure_valid_arg_scope(src_between_parent).map(|_| parentheses_open_pos) +} + +pub fn scope_start(src: Src<'_>, point: BytePos) -> BytePos { + let src = src.change_length(point); + let (mut clev, mut plev) = (0u32, 0u32); + let mut iter = src[..].as_bytes().into_iter().enumerate().rev(); + for (pos, b) in &mut iter { + match b { + b'{' => { + // !!! found { earlier than ( + if clev == 0 { + return BytePos(pos).increment(); + } + clev -= 1; + } + b'}' => clev += 1, + b'(' => { + // !!! found ( earlier than { + if plev == 0 { + if let Some(scope_pos) = + find_closure_scope_start(src, point, BytePos(pos).increment()) + { + return scope_pos; + } else { + break; + } + } + plev -= 1; + } + b')' => plev += 1, + _ => {} + } + } + // fallback: return curly_parent_open_pos + find_close_with_pos(iter, b'}', b'{', 0).unwrap_or(BytePos::ZERO) +} + +pub fn find_stmt_start(msrc: Src<'_>, point: BytePos) -> Option { + let scope_start = scope_start(msrc, point); + find_stmt_start_given_scope(msrc, point, scope_start) +} + +fn find_stmt_start_given_scope( + msrc: Src<'_>, + point: BytePos, + scope_start: BytePos, +) -> Option { + // Iterate the scope to find the start of the statement that surrounds the point. 
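    // Illustrative example (a sketch, assuming iter_stmts yields the byte ranges of
    // the statements in this scope): for the body below with `point` inside the call
    // to `print`, the range containing `point` is the second statement, so the byte
    // offset of `print(a);` is returned rather than that of `let a = 3;`:
    //
    //     let a = 3;
    //     print(a); // <- point here => start of this statement is returned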
+ debug!( + "[find_stmt_start] now we are in scope {:?} ~ {:?}", + scope_start, point, + ); + msrc.shift_start(scope_start) + .iter_stmts() + .map(|range| range.shift(scope_start)) + .find(|range| range.contains(point)) + .map(|range| range.start) +} + +/// Finds a statement start or panics. +pub fn expect_stmt_start(msrc: Src<'_>, point: BytePos) -> BytePos { + find_stmt_start(msrc, point).expect("Statement does not have a beginning") +} + +pub fn get_local_module_path(msrc: Src<'_>, point: BytePos) -> Vec { + let mut v = Vec::new(); + get_local_module_path_(msrc, point, &mut v); + v +} + +fn get_local_module_path_(msrc: Src<'_>, point: BytePos, out: &mut Vec) { + for range in msrc.iter_stmts() { + if range.contains_exclusive(point) { + let blob = msrc.shift_range(range); + let start = util::strip_visibility(&blob).unwrap_or(BytePos::ZERO); + if !blob[start.0..].starts_with("mod") { + continue; + } + if let Some(newstart) = blob[start.0 + 3..].find('{') { + let newstart = newstart + start.0 + 4; + out.push(blob[start.0 + 3..newstart - 1].trim().to_owned()); + get_local_module_path_( + blob.shift_start(newstart.into()), + point - range.start - newstart.into(), + out, + ); + } + } + } +} + +pub fn get_module_file_from_path( + msrc: Src<'_>, + point: BytePos, + parentdir: &Path, + raw_src: RangedRawSrc, +) -> Option { + let mut iter = msrc.iter_stmts(); + while let Some(range) = iter.next() { + let blob = &raw_src[range.to_range()]; + let start = range.start; + if blob.starts_with("#[path ") { + if let Some(ByteRange { + start: _, + end: modend, + }) = iter.next() + { + if start < point && modend > point { + let pathstart = blob.find('"')? + 1; + let pathend = blob[pathstart..].find('"').unwrap(); + let path = &blob[pathstart..pathstart + pathend]; + debug!("found a path attribute, path = |{}|", path); + let filepath = parentdir.join(path); + if filepath.exists() { + return Some(filepath); + } + } + } + } + } + None +} + +// TODO(kngwyu): this functions shouldn't be generic +pub fn find_impl_start(msrc: Src<'_>, point: BytePos, scopestart: BytePos) -> Option { + let len = point - scopestart; + msrc.shift_start(scopestart) + .iter_stmts() + .find(|range| range.end > len) + .and_then(|range| { + let blob = msrc.shift_start(scopestart + range.start); + if blob.starts_with("impl") || util::trim_visibility(&blob[..]).starts_with("trait") { + Some(scopestart + range.start) + } else { + let newstart = blob.find('{')? + 1; + find_impl_start(msrc, point, scopestart + range.start + newstart.into()) + } + }) +} + +#[test] +fn finds_subnested_module() { + use crate::core; + let src = " + pub mod foo { + pub mod bar { + here + } + }"; + let raw_src = core::RawSource::new(src.to_owned()); + let src = core::MaskedSource::new(src); + let point = raw_src.coords_to_point(&Coordinate::new(4, 12)).unwrap(); + let v = get_local_module_path(src.as_src(), point); + assert_eq!("foo", &v[0][..]); + assert_eq!("bar", &v[1][..]); + + let point = raw_src.coords_to_point(&Coordinate::new(3, 8)).unwrap(); + let v = get_local_module_path(src.as_src(), point); + assert_eq!("foo", &v[0][..]); +} + +// TODO: This function can't handle use_nested_groups +pub fn split_into_context_and_completion(s: &str) -> (&str, &str, CompletionType) { + match s + .char_indices() + .rev() + .find(|&(_, c)| !util::is_ident_char(c)) + { + Some((i, c)) => match c { + '.' 
=> (&s[..i], &s[(i + 1)..], CompletionType::Field), + ':' if s.len() > 1 => (&s[..(i - 1)], &s[(i + 1)..], CompletionType::Path), + _ => (&s[..(i + 1)], &s[(i + 1)..], CompletionType::Path), + }, + None => ("", s, CompletionType::Path), + } +} + +/// search in reverse for the start of the current expression +/// allow . and :: to be surrounded by white chars to enable multi line call chains +pub fn get_start_of_search_expr(src: &str, point: BytePos) -> BytePos { + #[derive(Debug)] + enum State { + /// In parentheses; the value inside identifies depth. + Paren(usize), + /// in bracket + Bracket(usize), + /// In a string + StringLiteral, + /// In char + CharLiteral, + StartsWithDot, + MustEndsWithDot(usize), + StartsWithCol(usize), + None, + Result(usize), + } + let mut ws_ok = State::None; + for (i, c) in src.as_bytes()[..point.0].iter().enumerate().rev() { + ws_ok = match (*c, ws_ok) { + (b'(', State::None) => State::Result(i + 1), + (b'(', State::Paren(1)) => State::None, + (b'(', State::Paren(lev)) => State::Paren(lev - 1), + (b')', State::Paren(lev)) => State::Paren(lev + 1), + (b')', State::None) | (b')', State::StartsWithDot) => State::Paren(1), + (b'[', State::None) => State::Result(i + 1), + (b'[', State::Bracket(1)) => State::None, + (b'[', State::Bracket(lev)) => State::Bracket(lev - 1), + (b']', State::Bracket(lev)) => State::Bracket(lev + 1), + (b']', State::StartsWithDot) => State::Bracket(1), + (b'.', State::None) => State::StartsWithDot, + (b'.', State::StartsWithDot) => State::Result(i + 2), + (b'.', State::MustEndsWithDot(_)) => State::None, + (b':', State::MustEndsWithDot(index)) => State::StartsWithCol(index), + (b':', State::StartsWithCol(_)) => State::None, + (b'"', State::None) | (b'"', State::StartsWithDot) => State::StringLiteral, + (b'"', State::StringLiteral) => State::None, + (b'?', State::StartsWithDot) => State::None, + (b'\'', State::None) | (b'\'', State::StartsWithDot) => State::CharLiteral, + (b'\'', State::StringLiteral) => State::StringLiteral, + (b'\'', State::CharLiteral) => State::None, + (_, State::CharLiteral) => State::CharLiteral, + (_, State::StringLiteral) => State::StringLiteral, + (_, State::StartsWithCol(index)) => State::Result(index), + (_, State::None) if char_at(src, i).is_whitespace() => State::MustEndsWithDot(i + 1), + (_, State::MustEndsWithDot(index)) if char_at(src, i).is_whitespace() => { + State::MustEndsWithDot(index) + } + (_, State::StartsWithDot) if char_at(src, i).is_whitespace() => State::StartsWithDot, + (_, State::MustEndsWithDot(index)) => State::Result(index), + (_, State::None) if !util::is_search_expr_char(char_at(src, i)) => State::Result(i + 1), + (_, State::None) => State::None, + (_, s @ State::Paren(_)) => s, + (_, s @ State::Bracket(_)) => s, + (_, State::StartsWithDot) if util::is_search_expr_char(char_at(src, i)) => State::None, + (_, State::StartsWithDot) => State::Result(i + 1), + (_, State::Result(_)) => unreachable!(), + }; + if let State::Result(index) = ws_ok { + return index.into(); + } + } + BytePos::ZERO +} + +pub fn get_start_of_pattern(src: &str, point: BytePos) -> BytePos { + let mut levels = 0u32; + for (i, &b) in src[..point.0].as_bytes().into_iter().enumerate().rev() { + match b { + b'(' => { + if levels == 0 { + return BytePos(i).increment(); + } + levels -= 1; + } + b')' => { + levels += 1; + } + _ => { + if levels == 0 && !util::is_pattern_char(b as char) { + return BytePos(i).increment(); + } + } + } + } + BytePos::ZERO +} + +#[cfg(test)] +mod test_get_start_of_pattern { + use 
super::{get_start_of_pattern, BytePos}; + fn get_start_of_pattern_(s: &str, u: usize) -> usize { + get_start_of_pattern(s, BytePos(u)).0 + } + #[test] + fn handles_variant() { + assert_eq!(4, get_start_of_pattern_("foo, Some(a) =>", 13)); + } + + #[test] + fn handles_variant2() { + assert_eq!( + 4, + get_start_of_pattern_("bla, ast::PatTup(ref tuple_elements) => {", 36) + ); + } +} + +pub fn expand_search_expr(msrc: &str, point: BytePos) -> ByteRange { + let start = get_start_of_search_expr(msrc, point); + ByteRange::new(start, util::find_ident_end(msrc, point)) +} + +#[cfg(test)] +mod test_expand_seacrh_expr { + use super::{expand_search_expr, BytePos}; + fn expand_search_expr_(s: &str, u: usize) -> (usize, usize) { + let res = expand_search_expr(s, BytePos(u)); + (res.start.0, res.end.0) + } + #[test] + fn finds_ident() { + assert_eq!((0, 7), expand_search_expr_("foo.bar", 5)) + } + + #[test] + fn ignores_bang_at_start() { + assert_eq!((1, 4), expand_search_expr_("!foo", 1)) + } + + #[test] + fn handles_chained_calls() { + assert_eq!((0, 20), expand_search_expr_("yeah::blah.foo().bar", 18)) + } + + #[test] + fn handles_inline_closures() { + assert_eq!( + (0, 29), + expand_search_expr_("yeah::blah.foo(|x:foo|{}).bar", 27) + ) + } + #[test] + fn handles_a_function_arg() { + assert_eq!( + (5, 25), + expand_search_expr_("myfn(foo::new().baz().com)", 23) + ) + } + + #[test] + fn handles_macros() { + assert_eq!((0, 9), expand_search_expr_("my_macro!()", 8)) + } + + #[test] + fn handles_pos_at_end_of_search_str() { + assert_eq!((0, 7), expand_search_expr_("foo.bar", 7)) + } + + #[test] + fn handles_type_definition() { + assert_eq!((4, 7), expand_search_expr_("x : foo", 7)) + } + + #[test] + fn handles_ws_before_dot() { + assert_eq!((0, 8), expand_search_expr_("foo .bar", 7)) + } + + #[test] + fn handles_ws_after_dot() { + assert_eq!((0, 8), expand_search_expr_("foo. bar", 7)) + } + + #[test] + fn handles_ws_dot() { + assert_eq!((0, 13), expand_search_expr_("foo. 
bar .foo", 12)) + } + + #[test] + fn handles_let() { + assert_eq!((8, 11), expand_search_expr_("let b = foo", 10)) + } + + #[test] + fn handles_double_dot() { + assert_eq!((2, 5), expand_search_expr_("..foo", 4)) + } +} + +fn fill_gaps(buffer: &str, result: &mut String, start: usize, prev: usize) { + for _ in 0..((start - prev) / buffer.len()) { + result.push_str(buffer); + } + result.push_str(&buffer[..((start - prev) % buffer.len())]); +} + +pub fn mask_comments(src: &str, chunks: &[ByteRange]) -> String { + let mut result = String::with_capacity(src.len()); + let buf_byte = &[b' '; 128]; + let buffer = from_utf8(buf_byte).unwrap(); + let mut prev = BytePos::ZERO; + for range in chunks { + fill_gaps(buffer, &mut result, range.start.0, prev.0); + result.push_str(&src[range.to_range()]); + prev = range.end; + } + + // Fill up if the comment was at the end + if src.len() > prev.0 { + fill_gaps(buffer, &mut result, src.len(), prev.0); + } + assert_eq!(src.len(), result.len()); + result +} + +pub fn mask_sub_scopes(src: &str) -> String { + let mut result = String::with_capacity(src.len()); + let buf_byte = [b' '; 128]; + let buffer = from_utf8(&buf_byte).unwrap(); + let mut levels = 0i32; + let mut start = 0usize; + let mut pos = 0usize; + + for &b in src.as_bytes() { + pos += 1; + match b { + b'{' => { + if levels == 0 { + result.push_str(&src[start..(pos)]); + start = pos + 1; + } + levels += 1; + } + b'}' => { + if levels == 1 { + fill_gaps(buffer, &mut result, pos, start); + result.push_str("}"); + start = pos; + } + levels -= 1; + } + b'\n' if levels > 0 => { + fill_gaps(buffer, &mut result, pos, start); + result.push('\n'); + start = pos + 1; + } + _ => {} + } + } + if start > pos { + start = pos; + } + if levels > 0 { + fill_gaps(buffer, &mut result, pos, start); + } else { + result.push_str(&src[start..pos]); + } + result +} + +pub fn end_of_next_scope(src: &str) -> Option { + find_close(src.as_bytes().iter(), b'{', b'}', 1) +} + +#[test] +fn test_scope_start() { + let src = String::from( + " +fn myfn() { + let a = 3; + print(a); +} +", + ); + let src = core::MaskedSource::new(&src); + let raw_src = core::RawSource::new(src.to_string()); + let point = raw_src.coords_to_point(&Coordinate::new(4, 10)).unwrap(); + let start = scope_start(src.as_src(), point); + assert_eq!(start, BytePos(12)); +} + +#[test] +fn test_scope_start_handles_sub_scopes() { + let src = String::from( + " +fn myfn() { + let a = 3; + { + let b = 4; + } + print(a); +} +", + ); + let src = core::MaskedSource::new(&src); + let raw_src = core::RawSource::new(src.to_string()); + let point = raw_src.coords_to_point(&Coordinate::new(7, 10)).unwrap(); + let start = scope_start(src.as_src(), point); + assert_eq!(start, BytePos(12)); +} + +#[test] +fn masks_out_comments() { + let src = String::from( + " +this is some code +this is a line // with a comment +some more +", + ); + let raw = core::RawSource::new(src.to_string()); + let src = core::MaskedSource::new(&src); + assert!(src.len() == raw.len()); + // characters at the start are the same + assert!(src.as_bytes()[5] == raw.as_bytes()[5]); + // characters in the comments are masked + let commentoffset = raw.coords_to_point(&Coordinate::new(3, 23)).unwrap(); + assert!(char_at(&src, commentoffset.0) == ' '); + assert!(src.as_bytes()[commentoffset.0] != raw.as_bytes()[commentoffset.0]); + // characters afterwards are the same + assert!(src.as_bytes()[src.len() - 3] == raw.as_bytes()[src.len() - 3]); +} + +#[test] +fn finds_end_of_struct_scope() { + let src = " +struct foo { 
+ a: usize, + blah: ~str +} +Some other junk"; + + let expected = " +struct foo { + a: usize, + blah: ~str +}"; + let end = end_of_next_scope(src).unwrap(); + assert_eq!(expected, &src[..=end.0]); +} + +/// get start of path from use statements +/// e.g. get Some(16) from "pub(crate) use a" +pub(crate) fn use_stmt_start(line_str: &str) -> Option { + let use_start = util::strip_visibility(line_str).unwrap_or(BytePos::ZERO); + util::strip_word(&line_str[use_start.0..], "use").map(|b| b + use_start) +} + +pub(crate) fn is_extern_crate(line_str: &str) -> bool { + let extern_start = util::strip_visibility(line_str).unwrap_or(BytePos::ZERO); + if let Some(crate_start) = util::strip_word(&line_str[extern_start.0..], "extern") { + let crate_str = &line_str[(extern_start + crate_start).0..]; + crate_str.starts_with("crate ") + } else { + false + } +} + +#[inline(always)] +fn next_use_item(expr: &str) -> Option { + let bytes = expr.as_bytes(); + let mut i = bytes.len(); + let mut before = b' '; + while i > 0 { + i -= 1; + let cur = bytes[i]; + if before == b':' && cur == b':' { + return Some(i); + } + if cur == b',' { + while i > 0 && bytes[i] != b'{' { + i -= 1; + } + } + before = cur; + } + None +} + +/// get path from use statement, supposing completion point is end of expr +/// e.g. "use std::collections::{hash_map, Hash" -> P["std", "collections", "Hash"] +pub(crate) fn construct_path_from_use_tree(expr: &str) -> RacerPath { + let mut segments = Vec::new(); + let bytes = expr.as_bytes(); + let mut i = bytes.len(); + let mut ident_end = Some(i - 1); + while i > 0 { + i -= 1; + if util::is_ident_char(bytes[i] as char) { + if ident_end.is_none() { + ident_end = Some(i) + } + } else { + if let Some(end) = ident_end { + segments.push(&expr[i + 1..=end]); + ident_end = None; + } + if let Some(point) = next_use_item(&expr[..=i]) { + i = point; + continue; + } + break; + } + } + if let Some(end) = ident_end { + segments.push(&expr[0..=end]); + } + segments.reverse(); + let is_global = expr.starts_with("::"); + RacerPath::from_vec(is_global, segments) +} + +/// get current statement for completion context +pub(crate) fn get_current_stmt<'c>(src: Src<'c>, pos: BytePos) -> (BytePos, String) { + let mut scopestart = scope_start(src, pos); + // for use statement + if scopestart > BytePos::ZERO && src[..scopestart.0].ends_with("::{") { + if let Some(pos) = src[..pos.0].rfind("use") { + scopestart = scope_start(src, pos.into()); + } + } + let linestart = find_stmt_start_given_scope(src, pos, scopestart).unwrap_or(scopestart); + ( + linestart, + (&src[linestart.0..pos.0]) + .trim() + .rsplit(';') + .next() + .unwrap() + .to_owned(), + ) +} + +pub(crate) fn expr_to_path(expr: &str) -> (RacerPath, Namespace) { + let is_global = expr.starts_with("::"); + let v: Vec<_> = (if is_global { &expr[2..] 
} else { expr }) + .split("::") + .collect(); + let path = RacerPath::from_vec(is_global, v); + let namespace = if path.len() == 1 { + Namespace::Global | Namespace::Path + } else { + Namespace::Path + }; + (path, namespace) +} + +pub(crate) fn is_in_struct_ctor( + src: Src<'_>, + stmt_start: BytePos, + pos: BytePos, +) -> Option { + const ALLOW_SYMBOL: [u8; 5] = [b'{', b'(', b'|', b';', b',']; + const ALLOW_KEYWORDS: [&'static str; 3] = ["let", "mut", "ref"]; + const INIHIBIT_KEYWORDS: [&'static str; 2] = ["unsafe", "async"]; + if stmt_start.0 <= 3 || src.as_bytes()[stmt_start.0 - 1] != b'{' || pos <= stmt_start { + return None; + } + { + for &b in src[stmt_start.0..pos.0].as_bytes().iter().rev() { + match b { + b',' => break, + b':' => return None, + _ => continue, + } + } + } + let src = &src[..stmt_start.0 - 1]; + #[derive(Clone, Copy, Debug)] + enum State { + Initial, + Name(usize), + End, + } + let mut state = State::Initial; + let mut result = None; + let bytes = src.as_bytes(); + for (i, b) in bytes.iter().enumerate().rev() { + match (state, *b) { + (State::Initial, b) if util::is_whitespace_byte(b) => continue, + (State::Initial, b) if util::is_ident_char(b.into()) => state = State::Name(i), + (State::Initial, _) => return None, + (State::Name(_), b) if b == b':' || util::is_ident_char(b.into()) => continue, + (State::Name(end), b) if util::is_whitespace_byte(b) => { + result = Some(ByteRange::new(i + 1, end + 1)); + if INIHIBIT_KEYWORDS.contains(&&src[i + 1..=end]) { + return None; + } + state = State::End; + } + (State::Name(end), b) if ALLOW_SYMBOL.contains(&b) => { + result = Some(ByteRange::new(i + 1, end + 1)); + break; + } + (State::End, b) if util::is_ident_char(b.into()) => { + let bytes = &bytes[..=i]; + if !ALLOW_KEYWORDS.iter().any(|s| bytes.ends_with(s.as_bytes())) { + return None; + } else { + break; + } + } + (State::End, b) if util::is_whitespace_byte(b) => continue, + (State::End, b) if ALLOW_SYMBOL.contains(&b) => break, + (_, _) => return None, + } + } + match state { + State::Initial => None, + State::Name(end) => { + if INIHIBIT_KEYWORDS.contains(&&src[0..=end]) { + None + } else { + Some(ByteRange::new(0, end + 1)) + } + } + State::End => result, + } +} + +#[cfg(test)] +mod use_tree_test { + use super::*; + #[test] + fn test_use_stmt_start() { + assert_eq!(use_stmt_start("pub(crate) use some::").unwrap().0, 19); + } + + #[test] + fn test_is_extern_crate() { + assert!(is_extern_crate("extern crate ")); + assert!(is_extern_crate("pub extern crate abc")); + assert!(!is_extern_crate("pub extern crat")); + } + #[test] + fn test_construct_path_from_use_tree() { + let get_path_idents = |s| { + let s = construct_path_from_use_tree(s); + s.segments + .into_iter() + .map(|seg| seg.name) + .collect::>() + }; + assert_eq!( + get_path_idents("std::collections::HashMa"), + vec!["std", "collections", "HashMa"], + ); + assert_eq!( + get_path_idents("std::{collections::{HashMap, hash_ma"), + vec!["std", "collections", "hash_ma"], + ); + assert_eq!( + get_path_idents("std::{collections::{HashMap, "), + vec!["std", "collections", ""], + ); + assert_eq!( + get_path_idents("std::collections::{"), + vec!["std", "collections", ""], + ); + assert_eq!( + get_path_idents("std::{collections::HashMap, sync::Arc"), + vec!["std", "sync", "Arc"], + ); + assert_eq!(get_path_idents("{Str1, module::Str2, Str3"), vec!["Str3"],); + } +} + +#[cfg(test)] +mod ctor_test { + use super::{is_in_struct_ctor, scope_start}; + use crate::core::{ByteRange, MaskedSource}; + fn check(src: &str) -> Option { 
+ let source = MaskedSource::new(src); + let point = src.find("~").unwrap(); + let scope_start = scope_start(source.as_src(), point.into()); + is_in_struct_ctor(source.as_src(), scope_start, point.into()) + } + #[test] + fn first_line() { + let src = " + struct UserData { + name: String, + id: usize, + } + fn main() { + UserData { + na~ + } + }"; + assert!(check(src).is_some()) + } + #[test] + fn second_line() { + let src = r#" + fn main() { + UserData { + name: "ahkj".to_owned(), + i~d: + } + }"#; + assert!(check(src).is_some()) + } + #[test] + fn tuple() { + let src = r#" + fn main() { + let (a, + UserData { + name: "ahkj".to_owned(), + i~d: + } + ) = f(); + }"#; + assert!(check(src).is_some()) + } + #[test] + fn expr_pos() { + let src = r#" + fn main() { + UserData { + name: ~ + } + }"#; + assert!(check(src).is_none()) + } + #[test] + fn fnarg() { + let src = r#" + func(UserData { + name~ + }) + "#; + assert!(check(src).is_some()) + } + #[test] + fn closure() { + let src = r#" + let f = || UserData { + name~ + }; + "#; + assert!(check(src).is_some()) + } + #[test] + fn unsafe_() { + let src = r#" + unsafe { + name~ + } + "#; + assert!(check(src).is_none()) + } +} diff --git a/racer/src/racer/snippets.rs b/racer/src/racer/snippets.rs new file mode 100644 index 0000000000..abf91d1693 --- /dev/null +++ b/racer/src/racer/snippets.rs @@ -0,0 +1,123 @@ +use crate::ast::with_error_checking_parse; +use crate::core::{Match, Session}; +use crate::typeinf::get_function_declaration; + +use rustc_ast::ast::AssocItemKind; +use rustc_parse::parser::ForceCollect; + +/// Returns completion snippets usable by some editors +/// +/// Generates a snippet string given a `Match`. The provided snippet contains +/// substrings like "${1:name}" which some editors can use to quickly fill in +/// arguments. 
+/// +/// # Examples +/// +/// ```no_run +/// extern crate racer; +/// +/// use std::path::Path; +/// +/// let path = Path::new("."); +/// let cache = racer::FileCache::default(); +/// let session = racer::Session::new(&cache, Some(path)); +/// +/// let m = racer::complete_fully_qualified_name( +/// "std::fs::canonicalize", +/// &path, +/// &session +/// ).next().unwrap(); +/// +/// let snip = racer::snippet_for_match(&m, &session); +/// assert_eq!(snip, "canonicalize(${1:path})"); +/// ``` +pub fn snippet_for_match(m: &Match, session: &Session<'_>) -> String { + if m.mtype.is_function() { + let method = get_function_declaration(m, session); + if let Some(m) = MethodInfo::from_source_str(&method) { + m.snippet() + } else { + "".into() + } + } else { + m.matchstr.clone() + } +} + +struct MethodInfo { + name: String, + args: Vec, +} + +impl MethodInfo { + ///Parses method declaration as string and returns relevant data + fn from_source_str(source: &str) -> Option { + let trim: &[_] = &['\n', '\r', '{', ' ']; + let decorated = format!("{} {{}}()", source.trim_end_matches(trim)); + + trace!("MethodInfo::from_source_str: {:?}", decorated); + with_error_checking_parse(decorated, |p| { + if let Ok(Some(Some(method))) = p.parse_impl_item(ForceCollect::No) { + if let AssocItemKind::Fn(ref fn_kind) = method.kind { + let decl = &fn_kind.sig.decl; + return Some(MethodInfo { + // ident.as_str calls Ident.name.as_str + name: method.ident.name.to_string(), + args: decl + .inputs + .iter() + .map(|arg| { + let source_map = &p.sess.source_map(); + let var_name = match source_map.span_to_snippet(arg.pat.span) { + Ok(name) => name, + _ => "".into(), + }; + match source_map.span_to_snippet(arg.ty.span) { + Ok(ref type_name) if !type_name.is_empty() => { + format!("{}: {}", var_name, type_name) + } + _ => var_name, + } + }) + .collect(), + }); + } + } + debug!("Unable to parse method declaration. 
|{}|", source); + None + }) + } + + /// Returns completion snippets usable by some editors + fn snippet(&self) -> String { + format!( + "{}({})", + self.name, + &self + .args + .iter() + .filter(|&s| !s.ends_with("self")) + .enumerate() + .fold(String::new(), |cur, (i, ref s)| { + let arg = format!("${{{}:{}}}", i + 1, s); + let delim = if i > 0 { ", " } else { "" }; + cur + delim + &arg + }) + ) + } +} + +#[test] +fn method_info_test() { + let info = MethodInfo::from_source_str("pub fn new() -> Vec<T>").unwrap(); + assert_eq!(info.name, "new"); + assert_eq!(info.args.len(), 0); + assert_eq!(info.snippet(), "new()"); + + let info = MethodInfo::from_source_str("pub fn reserve(&mut self, additional: usize)").unwrap(); + assert_eq!(info.name, "reserve"); + assert_eq!(info.args.len(), 2); + // it looks odd, but no problem because what our clients see is only the snippet + assert_eq!(info.args[0], "&mut self: &mut self"); + assert_eq!(info.snippet(), "reserve(${1:additional: usize})"); +} diff --git a/racer/src/racer/testutils.rs b/racer/src/racer/testutils.rs new file mode 100644 index 0000000000..4c8e7a4442 --- /dev/null +++ b/racer/src/racer/testutils.rs @@ -0,0 +1,21 @@ +#![cfg(test)] +use crate::core::ByteRange; + +pub fn rejustify(src: &str) -> String { + let s = &src[1..]; // remove the newline + let mut sb = String::new(); + for l in s.lines() { + let tabless = &l[4..]; + sb.push_str(tabless); + if !tabless.is_empty() { + sb.push_str("\n"); + } + } + let newlen = sb.len() - 1; // remove the trailing newline + sb.truncate(newlen); + sb +} + +pub fn slice(src: &str, range: ByteRange) -> &str { + &src[range.to_range()] +} diff --git a/racer/src/racer/typeinf.rs b/racer/src/racer/typeinf.rs new file mode 100644 index 0000000000..e812a81dd7 --- /dev/null +++ b/racer/src/racer/typeinf.rs @@ -0,0 +1,630 @@ +//! Type inference +//! THIS MODULE IS ENTIRELY TOO UGLY SO IT REALLY NEEDS REFACTORING(kngwyu) +use crate::ast; +use crate::ast_types::{Pat, Ty}; +use crate::core; +use crate::core::{ + BytePos, ByteRange, Match, MatchType, Namespace, Scope, SearchType, Session, SessionExt, Src, +}; +use crate::matchers; +use crate::nameres; +use crate::primitive::PrimKind; +use crate::scopes; +use crate::util::{self, txt_matches}; +use rustc_ast::ast::BinOpKind; +use std::path::Path; + +// Removes the body of the statement (anything in the braces {...}), leaving just +// the header +pub fn generate_skeleton_for_parsing(src: &str) -> Option<String> { + src.find('{').map(|n| src[..=n].to_owned() + "}") +} + +/// Get the name of the trait whose implementation overrides the operator `op`. +/// For comparison operators, it is `bool`. +pub(crate) fn get_operator_trait(op: BinOpKind) -> &'static str { + match op { + BinOpKind::Add => "Add", + BinOpKind::Sub => "Sub", + BinOpKind::Mul => "Mul", + BinOpKind::Div => "Div", + BinOpKind::Rem => "Rem", + BinOpKind::And => "And", + BinOpKind::Or => "Or", + BinOpKind::BitXor => "BitXor", + BinOpKind::BitAnd => "BitAnd", + BinOpKind::BitOr => "BitOr", + BinOpKind::Shl => "Shl", + BinOpKind::Shr => "Shr", + _ => "bool", + } +} + +// TODO(kngwyu): use libsyntax parser +pub fn first_param_is_self(blob: &str) -> bool { + // Restricted visibility introduces the possibility of `pub(in ...)` at the start + // of a method declaration. To counteract this, we restrict the search to only + // look at text _after_ the visibility declaration. + // + // Having found the end of the visibility declaration, we now start the search + // for method parameters.
+ let blob = util::trim_visibility(blob); + + // skip generic arg + // consider 'pub fn map U>(self, f: F)' + // we have to match the '>' + match blob.find('(') { + None => false, + Some(probable_param_start) => { + let skip_generic = match blob.find('<') { + None => 0, + Some(generic_start) if generic_start < probable_param_start => { + let mut level = 0; + let mut prev = ' '; + let mut skip_generic = 0; + for (i, c) in blob[generic_start..].char_indices() { + match c { + '<' => level += 1, + '>' if prev == '-' => (), + '>' => level -= 1, + _ => (), + } + prev = c; + if level == 0 { + skip_generic = i; + break; + } + } + skip_generic + } + Some(..) => 0, + }; + if let Some(start) = blob[skip_generic..].find('(') { + let start = BytePos::from(skip_generic + start).increment(); + let end = scopes::find_closing_paren(blob, start); + let is_self = txt_matches(SearchType::ExactMatch, "self", &blob[start.0..end.0]); + trace!( + "searching fn args for self: |{}| {}", + &blob[start.0..end.0], + is_self + ); + return is_self; + } + false + } + } +} + +#[test] +fn generates_skeleton_for_mod() { + let src = "mod foo { blah }"; + let out = generate_skeleton_for_parsing(src).unwrap(); + assert_eq!("mod foo {}", out); +} + +fn get_type_of_self_arg(m: &Match, msrc: Src<'_>, session: &Session<'_>) -> Option { + debug!("get_type_of_self_arg {:?}", m); + get_type_of_self(m.point, &m.filepath, m.local, msrc, session) +} + +// TODO(kngwyu): parse correctly +pub fn get_type_of_self( + point: BytePos, + filepath: &Path, + local: bool, + msrc: Src<'_>, + session: &Session<'_>, +) -> Option { + let start = scopes::find_impl_start(msrc, point, BytePos::ZERO)?; + let decl = generate_skeleton_for_parsing(&msrc.shift_start(start))?; + debug!("get_type_of_self_arg impl skeleton |{}|", decl); + if decl.starts_with("impl") { + // we have to do 2 operations around generics here + // 1. Checks if self's type is T + // 2. Checks if self's type contains T + let scope_start = start + decl.len().into(); + let implres = ast::parse_impl(decl, filepath, start, local, scope_start)?; + if let Some((_, param)) = implres.generics().search_param_by_path(implres.self_path()) { + if let Some(resolved) = param.resolved() { + return Some(resolved.to_owned()); + } + let mut m = param.to_owned().into_match(); + m.local = local; + return Some(Ty::Match(m)); + } + debug!("get_type_of_self_arg implres |{:?}|", implres); + nameres::resolve_path( + implres.self_path(), + filepath, + start, + SearchType::ExactMatch, + Namespace::Type, + session, + &matchers::ImportInfo::default(), + ) + .into_iter() + .nth(0) + .map(|mut m| { + match &mut m.mtype { + MatchType::Enum(gen) | MatchType::Struct(gen) => { + for (i, param) in implres.generics.0.into_iter().enumerate() { + gen.add_bound(i, param.bounds); + } + } + _ => {} + } + Ty::Match(m) + }) + } else { + // // must be a trait + ast::parse_trait(decl).name.and_then(|name| { + Some(Ty::Match(Match { + matchstr: name, + filepath: filepath.into(), + point: start, + coords: None, + local: local, + mtype: core::MatchType::Trait, + contextstr: matchers::first_line(&msrc[start.0..]), + docs: String::new(), + })) + }) + } +} + +fn get_type_of_fnarg(m: Match, session: &Session<'_>) -> Option { + let Match { + matchstr, + filepath, + point, + mtype, + .. 
+ } = m; + let (pat, ty) = *match mtype { + MatchType::FnArg(a) => a, + _ => return None, + }; + resolve_lvalue_ty(pat, ty, &matchstr, &filepath, point, session) +} + +fn get_type_of_let_expr(m: Match, session: &Session<'_>) -> Option { + let Match { + mtype, + contextstr, + filepath, + point, + .. + } = m; + let let_start = match mtype { + MatchType::Let(s) => s, + _ => return None, + }; + debug!("get_type_of_let_expr calling parse_let |{}|", contextstr); + let pos = point - let_start; + let scope = Scope { + filepath, + point: let_start, + }; + ast::get_let_type(contextstr, pos, scope, session) +} + +/// Decide l_value's type given r_value and ident query +pub(crate) fn resolve_lvalue_ty<'a>( + l_value: Pat, + r_value: Option, + query: &str, + fpath: &Path, + pos: BytePos, + session: &Session<'_>, +) -> Option { + match l_value { + Pat::Ident(_bi, name) => { + if name != query { + return None; + } + r_value + } + Pat::Tuple(pats) => { + if let Ty::Tuple(ty) = r_value? { + for (p, t) in pats.into_iter().zip(ty) { + let ret = try_continue!(resolve_lvalue_ty(p, t, query, fpath, pos, session,)); + return Some(ret); + } + } + None + } + Pat::Ref(pat, _) => { + if let Some(ty) = r_value { + if let Ty::RefPtr(ty, _) = ty { + resolve_lvalue_ty(*pat, Some(*ty), query, fpath, pos, session) + } else { + resolve_lvalue_ty(*pat, Some(ty), query, fpath, pos, session) + } + } else { + resolve_lvalue_ty(*pat, None, query, fpath, pos, session) + } + } + Pat::TupleStruct(path, pats) => { + let ma = ast::find_type_match(&path, fpath, pos, session)?; + match &ma.mtype { + MatchType::Struct(_generics) => { + for (pat, (_, _, t)) in + pats.into_iter().zip(get_tuplestruct_fields(&ma, session)) + { + let ret = + try_continue!(resolve_lvalue_ty(pat, t, query, fpath, pos, session)); + return Some(ret); + } + None + } + MatchType::EnumVariant(enum_) => { + let generics = if let Some(Ty::Match(match_)) = r_value.map(Ty::dereference) { + match_.into_generics() + } else { + enum_.to_owned().and_then(|ma| ma.into_generics()) + }; + for (pat, (_, _, mut t)) in + pats.into_iter().zip(get_tuplestruct_fields(&ma, session)) + { + debug!( + "Hi! I'm in enum and l: {:?}\n r: {:?}\n gen: {:?}", + pat, t, generics + ); + if let Some(ref gen) = generics { + t = t.map(|ty| ty.replace_by_resolved_generics(&gen)); + } + let ret = + try_continue!(resolve_lvalue_ty(pat, t, query, fpath, pos, session)); + return Some(ret); + } + None + } + _ => None, + } + } + // Let's implement after #946 solved + Pat::Struct(path, _) => { + let item = ast::find_type_match(&path, fpath, pos, session)?; + if !item.mtype.is_struct() { + return None; + } + None + } + _ => None, + } +} + +fn get_type_of_for_arg(m: &Match, session: &Session<'_>) -> Option { + let for_start = match &m.mtype { + MatchType::For(pos) => *pos, + _ => { + warn!("[get_type_of_for_expr] invalid match type: {:?}", m.mtype); + return None; + } + }; + // HACK: use outer scope when getting in ~ expr's type + let scope = Scope::new(m.filepath.clone(), for_start); + let ast::ForStmtVisitor { + for_pat, in_expr, .. 
+ } = ast::parse_for_stmt(m.contextstr.clone(), scope, session); + debug!( + "[get_type_of_for_expr] match: {:?}, for: {:?}, in: {:?},", + m, for_pat, in_expr + ); + fn get_item(ty: Ty, session: &Session<'_>) -> Option { + match ty { + Ty::Match(ma) => nameres::get_iter_item(&ma, session), + Ty::PathSearch(paths) => { + nameres::get_iter_item(&paths.resolve_as_match(session)?, session) + } + Ty::RefPtr(ty, _) => get_item(*ty, session), + _ => None, + } + } + resolve_lvalue_ty( + for_pat?, + in_expr.and_then(|ty| get_item(ty, session)), + &m.matchstr, + &m.filepath, + m.point, + session, + ) +} + +fn get_type_of_if_let(m: &Match, session: &Session<'_>, start: BytePos) -> Option { + // HACK: use outer scope when getting r-value's type + let scope = Scope::new(m.filepath.clone(), start); + let ast::IfLetVisitor { + let_pat, rh_expr, .. + } = ast::parse_if_let(m.contextstr.clone(), scope, session); + debug!( + "[get_type_of_if_let] match: {:?}\n let: {:?}\n rh: {:?},", + m, let_pat, rh_expr, + ); + resolve_lvalue_ty( + let_pat?, + rh_expr, + &m.matchstr, + &m.filepath, + m.point, + session, + ) +} + +pub fn get_struct_field_type( + fieldname: &str, + structmatch: &Match, + session: &Session<'_>, +) -> Option { + // temporary fix for https://github.com/rust-lang-nursery/rls/issues/783 + if !structmatch.mtype.is_struct() { + warn!( + "get_struct_filed_type is called for {:?}", + structmatch.mtype + ); + return None; + } + debug!("[get_struct_filed_type]{}, {:?}", fieldname, structmatch); + + let src = session.load_source_file(&structmatch.filepath); + + let opoint = scopes::expect_stmt_start(src.as_src(), structmatch.point); + // HACK: if scopes::end_of_next_scope returns empty struct, it's maybe tuple struct + let structsrc = if let Some(end) = scopes::end_of_next_scope(&src[opoint.0..]) { + src[opoint.0..=(opoint + end).0].to_owned() + } else { + (*get_first_stmt(src.as_src().shift_start(opoint))).to_owned() + }; + let fields = ast::parse_struct_fields(structsrc.to_owned(), Scope::from_match(structmatch)); + for (field, _, ty) in fields { + if fieldname != field { + continue; + } + return ty; + } + None +} + +pub(crate) fn get_tuplestruct_fields( + structmatch: &Match, + session: &Session<'_>, +) -> Vec<(String, ByteRange, Option)> { + let src = session.load_source_file(&structmatch.filepath); + let structsrc = if let core::MatchType::EnumVariant(_) = structmatch.mtype { + // decorate the enum variant src to make it look like a tuple struct + let to = src[structmatch.point.0..] 
+ .find('(') + .map(|n| { + scopes::find_closing_paren(&src, structmatch.point + BytePos::from(n).increment()) + }) + .expect("Tuple enum variant should have `(` in definition"); + "struct ".to_owned() + &src[structmatch.point.0..to.increment().0] + ";" + } else { + assert!(structmatch.mtype.is_struct()); + let opoint = scopes::expect_stmt_start(src.as_src(), structmatch.point); + (*get_first_stmt(src.as_src().shift_start(opoint))).to_owned() + }; + + debug!("[tuplestruct_fields] structsrc=|{}|", structsrc); + + ast::parse_struct_fields(structsrc, Scope::from_match(structmatch)) +} + +pub fn get_tuplestruct_field_type( + fieldnum: usize, + structmatch: &Match, + session: &Session<'_>, +) -> Option { + let fields = get_tuplestruct_fields(structmatch, session); + + for (i, (_, _, ty)) in fields.into_iter().enumerate() { + if i == fieldnum { + return ty; + } + } + None +} + +pub fn get_first_stmt(src: Src<'_>) -> Src<'_> { + match src.iter_stmts().next() { + Some(range) => src.shift_range(range), + None => src, + } +} + +pub fn get_type_of_match(m: Match, msrc: Src<'_>, session: &Session<'_>) -> Option { + debug!("get_type_of match {:?} ", m); + + match m.mtype { + core::MatchType::Let(_) => get_type_of_let_expr(m, session), + core::MatchType::IfLet(start) | core::MatchType::WhileLet(start) => { + get_type_of_if_let(&m, session, start) + } + core::MatchType::For(_) => get_type_of_for_arg(&m, session), + core::MatchType::FnArg(_) => get_type_of_fnarg(m, session), + core::MatchType::MatchArm => get_type_from_match_arm(&m, msrc, session), + core::MatchType::Struct(_) + | core::MatchType::Union(_) + | core::MatchType::Enum(_) + | core::MatchType::Function + | core::MatchType::Method(_) + | core::MatchType::Module => Some(Ty::Match(m)), + core::MatchType::Const | core::MatchType::Static => get_type_of_static(m), + core::MatchType::EnumVariant(Some(boxed_enum)) => { + if boxed_enum.mtype.is_enum() { + Some(Ty::Match(*boxed_enum)) + } else { + debug!("EnumVariant has not-enum type: {:?}", boxed_enum.mtype); + None + } + } + _ => { + debug!("!!! WARNING !!! Can't get type of {:?}", m.mtype); + None + } + } +} + +pub fn get_type_from_match_arm(m: &Match, msrc: Src<'_>, session: &Session<'_>) -> Option { + // We construct a faux match stmt and then parse it. This is because the + // match stmt may be incomplete (half written) in the real code + + // skip to end of match arm pattern so we can search backwards + let arm = BytePos(msrc[m.point.0..].find("=>")?) 
+ m.point; + let scopestart = scopes::scope_start(msrc, arm); + + let stmtstart = scopes::find_stmt_start(msrc, scopestart.decrement())?; + debug!("PHIL preblock is {:?} {:?}", stmtstart, scopestart); + let preblock = &msrc[stmtstart.0..scopestart.0]; + let matchstart = stmtstart + preblock.rfind("match ")?.into(); + + let lhs_start = scopes::get_start_of_pattern(&msrc, arm); + let lhs = &msrc[lhs_start.0..arm.0]; + // construct faux match statement and recreate point + let mut fauxmatchstmt = msrc[matchstart.0..scopestart.0].to_owned(); + let faux_prefix_size = BytePos::from(fauxmatchstmt.len()); + fauxmatchstmt = fauxmatchstmt + lhs + " => () };"; + let faux_point = faux_prefix_size + (m.point - lhs_start); + + debug!( + "fauxmatchstmt for parsing is pt:{:?} src:|{}|", + faux_point, fauxmatchstmt + ); + + ast::get_match_arm_type( + fauxmatchstmt, + faux_point, + // scope is used to locate expression, so send + // it the start of the match expr + Scope { + filepath: m.filepath.clone(), + point: matchstart, + }, + session, + ) +} + +pub fn get_function_declaration(fnmatch: &Match, session: &Session<'_>) -> String { + let src = session.load_source_file(&fnmatch.filepath); + let start = scopes::expect_stmt_start(src.as_src(), fnmatch.point); + let def_end: &[_] = &['{', ';']; + let end = src[start.0..] + .find(def_end) + .expect("Definition should have an end (`{` or `;`)"); + src[start.0..start.0 + end].to_owned() +} + +pub fn get_return_type_of_function( + fnmatch: &Match, + contextm: &Match, + session: &Session<'_>, +) -> Option { + let src = session.load_source_file(&fnmatch.filepath); + let point = scopes::expect_stmt_start(src.as_src(), fnmatch.point); + let block_start = src[point.0..].find('{')?; + let decl = "impl b{".to_string() + &src[point.0..point.0 + block_start + 1] + "}}"; + debug!("get_return_type_of_function: passing in |{}|", decl); + let mut scope = Scope::from_match(fnmatch); + // TODO(kngwyu): if point <= 5 scope is incorrect + scope.point = point.checked_sub("impl b{".len()).unwrap_or(BytePos::ZERO); + let (ty, is_async) = ast::parse_fn_output(decl, scope); + let resolve_ty = |ty| { + if let Some(Ty::PathSearch(ref paths)) = ty { + let path = &paths.path; + if let Some(ref path_seg) = path.segments.get(0) { + if "Self" == path_seg.name { + return get_type_of_self_arg(fnmatch, src.as_src(), session); + } + if path.segments.len() == 1 && path_seg.generics.is_empty() { + for type_param in fnmatch.generics() { + if type_param.name() == &path_seg.name { + return Some(Ty::Match(contextm.clone())); + } + } + } + } + } + ty + }; + resolve_ty(ty).map(|ty| { + if is_async { + Ty::Future(Box::new(ty), Scope::from_match(fnmatch)) + } else { + ty + } + }) +} + +pub(crate) fn get_type_of_indexed_value(body: Ty, session: &Session<'_>) -> Option { + match body.dereference() { + Ty::Match(m) => nameres::get_index_output(&m, session), + Ty::PathSearch(p) => p + .resolve_as_match(session) + .and_then(|m| nameres::get_index_output(&m, session)), + Ty::Array(ty, _) | Ty::Slice(ty) => Some(*ty), + _ => None, + } +} + +pub(crate) fn get_type_of_typedef(m: &Match, session: &Session<'_>) -> Option { + debug!("get_type_of_typedef match is {:?}", m); + let msrc = session.load_source_file(&m.filepath); + let blobstart = m.point - BytePos(5); // 5 == "type ".len() + let blob = msrc.get_src_from_start(blobstart); + let type_ = blob.iter_stmts().nth(0).and_then(|range| { + let range = range.shift(blobstart); + let blob = msrc[range.to_range()].to_owned(); + debug!("get_type_of_typedef blob string 
{}", blob); + let scope = Scope::new(m.filepath.clone(), range.start); + ast::parse_type(blob, &scope).type_ + })?; + match type_.dereference() { + Ty::Match(m) => Some(m), + Ty::Ptr(_, _) => PrimKind::Pointer.to_module_match(), + Ty::Array(_, _) => PrimKind::Array.to_module_match(), + Ty::Slice(_) => PrimKind::Slice.to_module_match(), + Ty::PathSearch(paths) => { + let src = session.load_source_file(&m.filepath); + let scope_start = scopes::scope_start(src.as_src(), m.point); + // Type of TypeDef cannot be inside the impl block so look outside + let outer_scope_start = scope_start + .0 + .checked_sub(1) + .map(|sub| scopes::scope_start(src.as_src(), sub.into())) + .and_then(|s| { + let blob = src.get_src_from_start(s); + let blob = blob.trim_start(); + if blob.starts_with("impl") || util::trim_visibility(blob).starts_with("trait") + { + Some(s) + } else { + None + } + }); + nameres::resolve_path_with_primitive( + &paths.path, + &paths.filepath, + outer_scope_start.unwrap_or(scope_start), + core::SearchType::StartsWith, + core::Namespace::Type, + session, + ) + .into_iter() + .filter(|m_| Some(m_.matchstr.as_ref()) == paths.path.name() && m_.point != m.point) + .next() + } + _ => None, + } +} + +fn get_type_of_static(m: Match) -> Option { + let Match { + filepath, + point, + contextstr, + .. + } = m; + let scope = Scope::new(filepath, point - "static".len().into()); + let res = ast::parse_static(contextstr, scope); + res.ty +} diff --git a/racer/src/racer/util.rs b/racer/src/racer/util.rs new file mode 100644 index 0000000000..a37a5447b8 --- /dev/null +++ b/racer/src/racer/util.rs @@ -0,0 +1,856 @@ +// Small functions of utility +use std::rc::Rc; +use std::{cmp, error, fmt, path}; +use std::{ + collections::hash_map::DefaultHasher, + hash::{Hash, Hasher}, +}; + +use crate::core::SearchType::{self, ExactMatch, StartsWith}; +use crate::core::{BytePos, ByteRange, Location, LocationExt, RawSource, Session, SessionExt}; + +#[cfg(unix)] +pub const PATH_SEP: char = ':'; +#[cfg(windows)] +pub const PATH_SEP: char = ';'; + +#[inline] +pub(crate) fn is_pattern_char(c: char) -> bool { + c.is_alphanumeric() || c.is_whitespace() || (c == '_') || (c == ':') || (c == '.') +} + +#[inline] +pub(crate) fn is_search_expr_char(c: char) -> bool { + c.is_alphanumeric() || (c == '_') || (c == ':') || (c == '.') +} + +#[inline] +pub(crate) fn is_ident_char(c: char) -> bool { + c.is_alphanumeric() || (c == '_') || (c == '!') +} + +#[inline(always)] +pub(crate) fn is_whitespace_byte(b: u8) -> bool { + b == b' ' || b == b'\r' || b == b'\n' || b == b'\t' +} + +/// Searches for `needle` as a standalone identifier in `haystack`. To be considered a match, +/// the `needle` must occur either at the beginning of `haystack` or after a non-identifier +/// character. 
+pub fn txt_matches(stype: SearchType, needle: &str, haystack: &str) -> bool { + txt_matches_with_pos(stype, needle, haystack).is_some() +} + +pub fn txt_matches_with_pos(stype: SearchType, needle: &str, haystack: &str) -> Option { + if needle.is_empty() { + return Some(0); + } + match stype { + ExactMatch => { + let n_len = needle.len(); + let h_len = haystack.len(); + for (n, _) in haystack.match_indices(needle) { + if (n == 0 || !is_ident_char(char_before(haystack, n))) + && (n + n_len == h_len || !is_ident_char(char_at(haystack, n + n_len))) + { + return Some(n); + } + } + } + StartsWith => { + for (n, _) in haystack.match_indices(needle) { + if n == 0 || !is_ident_char(char_before(haystack, n)) { + return Some(n); + } + } + } + } + None +} + +pub fn symbol_matches(stype: SearchType, searchstr: &str, candidate: &str) -> bool { + match stype { + ExactMatch => searchstr == candidate, + StartsWith => candidate.starts_with(searchstr), + } +} + +pub fn find_closure(src: &str) -> Option<(ByteRange, ByteRange)> { + let (pipe_range, _) = closure_valid_arg_scope(src)?; + let mut chars = src + .chars() + .enumerate() + .skip(pipe_range.end.0) + .skip_while(|(_, c)| c.is_whitespace()); + let (start, start_char) = chars + .next() + .map(|(i, c)| (if c == '{' { i + 1 } else { i }, c))?; + + let mut clevel = if start_char == '{' { 1 } else { 0 }; + let mut plevel = 0; + + let mut last = None; + for (i, current) in chars { + match current { + '{' => clevel += 1, + '(' => plevel += 1, + '}' => { + clevel -= 1; + if (clevel == 0 && start_char == '{') || (clevel == -1) { + last = Some(i); + break; + } + } + ';' => { + if start_char != '{' { + last = Some(i); + break; + } + } + ')' => { + plevel -= 1; + if plevel == 0 { + last = Some(i + 1); + } + if plevel == -1 { + last = Some(i + 1); + break; + } + } + _ => {} + } + } + if let Some(last) = last { + Some((pipe_range, ByteRange::new(BytePos(start), BytePos(last)))) + } else { + None + } +} + +#[test] +fn test_find_closure() { + let src = "|a, b, c| something()"; + let src2 = "|a, b, c| { something() }"; + let src3 = "let a = |a, b, c|something();"; + let src4 = "let a = |a, b, c| something().second().third();"; + let src5 = "| x: i32 | y.map(|z| z~)"; + let src6 = "| x: i32 | Struct { x };"; + let src7 = "y.map(| x: i32 | y.map(|z| z) )"; + let src8 = "|z| z)"; + let src9 = "let p = |z| something() + 5;"; + let get_range = |a, b| ByteRange::new(BytePos(a as usize), BytePos(b as usize)); + let find = |src: &str, a, off1: i32, b, off2: i32| { + get_range( + src.find(a).unwrap() as i32 + off1, + src.rfind(b).unwrap() as i32 + 1 + off2, + ) + }; + let get_pipe = |src| find(src, '|', 0, '|', 0); + + assert_eq!( + Some((get_pipe(src), find(src, 's', 0, ')', 0))), + find_closure(src) + ); + assert_eq!( + Some((get_pipe(src2), find(src2, '{', 1, '}', -1))), + find_closure(src2) + ); + assert_eq!( + Some((get_pipe(src3), find(src3, 's', 0, ')', 0))), + find_closure(src3) + ); + assert_eq!( + Some((get_pipe(src4), find(src4, 's', 0, ')', 0))), + find_closure(src4) + ); + assert_eq!( + Some((find(src5, '|', 0, 'y', -2), find(src5, 'y', 0, ')', 0))), + find_closure(src5) + ); + assert_eq!( + Some((get_pipe(src6), find(src6, 'S', 0, ';', -1))), + find_closure(src6) + ); + assert_eq!( + Some((find(src7, '|', 0, 'y', -2), find(src7, '2', 4, ')', 0))), + find_closure(src7) + ); + assert_eq!( + Some((get_pipe(src8), find(src8, ' ', 1, ')', 0))), + find_closure(src8) + ); + assert_eq!( + Some((get_pipe(src9), find(src9, 's', 0, '5', 0))), + find_closure(src9) + ); +} + 
+/// Try to valid if the given scope contains a valid closure arg scope. +pub fn closure_valid_arg_scope(scope_src: &str) -> Option<(ByteRange, &str)> { + // Try to find the left and right pipe, if one or both are not present, this is not a valid + // closure definition + let left_pipe = scope_src.find('|')?; + let candidate = &scope_src[left_pipe..]; + let mut brace_level = 0; + for (i, c) in candidate.chars().skip(1).enumerate() { + match c { + '{' => brace_level += 1, + '}' => brace_level -= 1, + '|' => { + let right_pipe = left_pipe + 1 + i; + // now we find right | + if brace_level == 0 { + let range = ByteRange::new(left_pipe, right_pipe + 1); + return Some((range, &scope_src[range.to_range()])); + } + break; + } + ';' => break, + _ => {} + } + if brace_level < 0 { + break; + } + } + None +} + +#[test] +fn test_closure_valid_arg_scope() { + let valid = r#" + let a = |int, int| int * int; +"#; + assert_eq!( + closure_valid_arg_scope(valid), + Some((ByteRange::new(BytePos(13), BytePos(23)), "|int, int|")) + ); + + let confusing = r#" + match a { + EnumA::A => match b { + EnumB::A(u) | EnumB::B(u) => println!("u: {}", u), + }, + EnumA::B => match b { + EnumB::A(u) | EnumB::B(u) => println!("u: {}", u), + }, + } +"#; + assert_eq!(closure_valid_arg_scope(confusing), None); +} + +#[test] +fn txt_matches_matches_stuff() { + assert_eq!(true, txt_matches(ExactMatch, "Vec", "Vec")); + assert_eq!(true, txt_matches(ExactMatch, "Vec", "use Vec")); + assert_eq!(false, txt_matches(ExactMatch, "Vec", "use Vecä")); + + assert_eq!(true, txt_matches(StartsWith, "Vec", "Vector")); + assert_eq!(true, txt_matches(StartsWith, "Vec", "use Vector")); + assert_eq!(true, txt_matches(StartsWith, "Vec", "use Vec")); + assert_eq!(false, txt_matches(StartsWith, "Vec", "use äVector")); +} + +#[test] +fn txt_matches_matches_methods() { + assert_eq!(true, txt_matches(StartsWith, "do_st", "fn do_stuff")); + assert_eq!(true, txt_matches(StartsWith, "do_st", "pub fn do_stuff")); + assert_eq!( + true, + txt_matches(StartsWith, "do_st", "pub(crate) fn do_stuff") + ); + assert_eq!( + true, + txt_matches(StartsWith, "do_st", "pub(in codegen) fn do_stuff") + ); +} + +/// Given a string and index, return span of identifier +/// +/// `pos` is coerced to be within `s`. Note that `expand_ident` only backtracks. +/// If the provided `pos` is in the middle of an identifier, the returned +/// `(start, end)` will have `end` = `pos`. +/// +/// # Examples +/// +/// ``` +/// extern crate racer; +/// +/// let src = "let x = this_is_an_identifier;"; +/// let pos = racer::Location::from(29); +/// let path = "lib.rs"; +/// +/// let cache = racer::FileCache::default(); +/// let session = racer::Session::new(&cache, None); +/// +/// session.cache_file_contents(path, src); +/// +/// let expanded = racer::expand_ident(path, pos, &session).unwrap(); +/// assert_eq!("this_is_an_identifier", expanded.ident()); +/// ``` +pub fn expand_ident(filepath: P, cursor: C, session: &Session<'_>) -> Option +where + P: AsRef, + C: Into, +{ + let cursor = cursor.into(); + let indexed_source = session.load_raw_file(filepath.as_ref()); + let (start, pos) = { + let s = &indexed_source.code[..]; + let pos = match cursor.to_point(&indexed_source) { + Some(pos) => pos, + None => { + debug!("Failed to convert cursor to point"); + return None; + } + }; + + // TODO: Would this better be an assertion ? Why are out-of-bound values getting here ? 
+ // They are coming from the command-line, question is, if they should be handled beforehand + // clamp pos into allowed range + let pos = cmp::min(s.len().into(), pos); + let sb = &s[..pos.0]; + let mut start = pos; + + // backtrack to find start of word + for (i, c) in sb.char_indices().rev() { + if !is_ident_char(c) { + break; + } + start = i.into(); + } + + (start, pos) + }; + + Some(ExpandedIdent { + src: indexed_source, + start, + pos, + }) +} + +pub struct ExpandedIdent { + src: Rc, + start: BytePos, + pos: BytePos, +} + +impl ExpandedIdent { + pub fn ident(&self) -> &str { + &self.src.code[self.start.0..self.pos.0] + } + + pub fn start(&self) -> BytePos { + self.start + } + + pub fn pos(&self) -> BytePos { + self.pos + } +} + +pub fn find_ident_end(s: &str, pos: BytePos) -> BytePos { + // find end of word + let sa = &s[pos.0..]; + for (i, c) in sa.char_indices() { + if !is_ident_char(c) { + return pos + i.into(); + } + } + s.len().into() +} + +#[cfg(test)] +mod test_find_ident_end { + use super::{find_ident_end, BytePos}; + fn find_ident_end_(s: &str, pos: usize) -> usize { + find_ident_end(s, BytePos(pos)).0 + } + #[test] + fn ascii() { + assert_eq!(5, find_ident_end_("ident", 0)); + assert_eq!(6, find_ident_end_("(ident)", 1)); + assert_eq!(17, find_ident_end_("let an_identifier = 100;", 4)); + } + #[test] + fn unicode() { + assert_eq!(7, find_ident_end_("num_µs", 0)); + assert_eq!(10, find_ident_end_("ends_in_µ", 0)); + } +} + +fn char_before(src: &str, i: usize) -> char { + let mut prev = '\0'; + for (ii, ch) in src.char_indices() { + if ii >= i { + return prev; + } + prev = ch; + } + prev +} + +#[test] +fn test_char_before() { + assert_eq!('ä', char_before("täst", 3)); + assert_eq!('ä', char_before("täst", 2)); + assert_eq!('s', char_before("täst", 4)); + assert_eq!('t', char_before("täst", 100)); +} + +pub fn char_at(src: &str, i: usize) -> char { + src[i..].chars().next().unwrap() +} + +/// Error type returned from validate_rust_src_path() +#[derive(Debug, PartialEq)] +pub enum RustSrcPathError { + Missing, + DoesNotExist(path::PathBuf), + NotRustSourceTree(path::PathBuf), +} + +impl error::Error for RustSrcPathError {} + +impl fmt::Display for RustSrcPathError { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match *self { + RustSrcPathError::Missing => write!( + f, + "RUST_SRC_PATH environment variable must be set to \ + point to the src directory of a rust checkout. \ + E.g. \"/home/foouser/src/rust/library\" (or \"/home/foouser/src/rust/src\" in older toolchains)" + ), + RustSrcPathError::DoesNotExist(ref path) => write!( + f, + "racer can't find the directory pointed to by the \ + RUST_SRC_PATH variable \"{:?}\". Try using an \ + absolute fully qualified path and make sure it \ + points to the src directory of a rust checkout - \ + e.g. \"/home/foouser/src/rust/library\" (or \"/home/foouser/src/rust/src\" in older toolchains).", + path + ), + RustSrcPathError::NotRustSourceTree(ref path) => write!( + f, + "Unable to find libstd under RUST_SRC_PATH. N.B. \ + RUST_SRC_PATH variable needs to point to the *src* \ + directory inside a rust checkout e.g. \ + \"/home/foouser/src/rust/library\" (or \"/home/foouser/src/rust/src\" in older toolchains). 
\ + Current value \"{:?}\"", + path + ), + } + } +} + +fn check_rust_sysroot() -> Option { + use std::process::Command; + let mut cmd = Command::new("rustc"); + cmd.arg("--print").arg("sysroot"); + + if let Ok(output) = cmd.output() { + if let Ok(s) = String::from_utf8(output.stdout) { + let sysroot = path::Path::new(s.trim()); + // See if the toolchain is sufficiently new, after the libstd + // has been internally reorganized + let srcpath = sysroot.join("lib/rustlib/src/rust/library"); + if srcpath.exists() { + return Some(srcpath); + } + let srcpath = sysroot.join("lib/rustlib/src/rust/src"); + if srcpath.exists() { + return Some(srcpath); + } + } + } + None +} + +/// Get the path for Rust standard library source code. +/// Checks first the paths in the `RUST_SRC_PATH` environment variable. +/// +/// If the environment variable is _not_ set, it checks the rust sys +/// root for the `rust-src` component. +/// +/// If that isn't available, checks `/usr/local/src/rust/src` and +/// `/usr/src/rust/src` as default values. +/// +/// If the Rust standard library source code cannot be found, returns +/// `Err(racer::RustSrcPathError::Missing)`. +/// +/// If the path in `RUST_SRC_PATH` or the path in rust sys root is invalid, +/// returns a corresponding error. If a valid path is found, returns that path. +/// +/// # Examples +/// +/// ``` +/// extern crate racer; +/// +/// match racer::get_rust_src_path() { +/// Ok(_path) => { +/// // RUST_SRC_PATH is valid +/// }, +/// Err(racer::RustSrcPathError::Missing) => { +/// // path is not set +/// }, +/// Err(racer::RustSrcPathError::DoesNotExist(_path)) => { +/// // provided path doesnt point to valid file +/// }, +/// Err(racer::RustSrcPathError::NotRustSourceTree(_path)) => { +/// // provided path doesn't have rustc src +/// } +/// } +/// ``` +pub fn get_rust_src_path() -> Result { + use std::env; + + debug!("Getting rust source path. Trying env var RUST_SRC_PATH."); + + if let Ok(ref srcpaths) = env::var("RUST_SRC_PATH") { + if !srcpaths.is_empty() { + if let Some(path) = srcpaths.split(PATH_SEP).next() { + return validate_rust_src_path(path::PathBuf::from(path)); + } + } + }; + + debug!("Nope. Trying rustc --print sysroot and appending lib/rustlib/src/rust/{{src, library}} to that."); + + if let Some(path) = check_rust_sysroot() { + return validate_rust_src_path(path); + }; + + debug!("Nope. Trying default paths: /usr/local/src/rust/src and /usr/src/rust/src"); + + let default_paths = ["/usr/local/src/rust/src", "/usr/src/rust/src"]; + + for path in &default_paths { + if let Ok(path) = validate_rust_src_path(path::PathBuf::from(path)) { + return Ok(path); + } + } + + warn!("Rust stdlib source path not found!"); + + Err(RustSrcPathError::Missing) +} + +fn validate_rust_src_path(path: path::PathBuf) -> Result { + if !path.exists() { + return Err(RustSrcPathError::DoesNotExist(path)); + } + // Historically, the Rust standard library was distributed under "libstd" + // but was later renamed to "std" when the library was moved under "library/" + // in https://github.com/rust-lang/rust/pull/73265. + if path.join("libstd").exists() || path.join("std").join("src").exists() { + Ok(path) + } else { + Err(RustSrcPathError::NotRustSourceTree(path.join("libstd"))) + } +} + +#[cfg(test)] +lazy_static! 
{ + static ref TEST_SEMAPHORE: ::std::sync::Mutex<()> = Default::default(); +} + +#[test] +fn test_get_rust_src_path_env_ok() { + use std::env; + + let _guard = TEST_SEMAPHORE.lock().unwrap(); + + let original = env::var_os("RUST_SRC_PATH"); + if env::var_os("RUST_SRC_PATH").is_none() { + env::set_var("RUST_SRC_PATH", check_rust_sysroot().unwrap()); + } + let result = get_rust_src_path(); + + match original { + Some(path) => env::set_var("RUST_SRC_PATH", path), + None => env::remove_var("RUST_SRC_PATH"), + } + assert!(result.is_ok()); +} + +#[test] +fn test_get_rust_src_path_does_not_exist() { + use std::env; + + let _guard = TEST_SEMAPHORE.lock().unwrap(); + + let original = env::var_os("RUST_SRC_PATH"); + env::set_var("RUST_SRC_PATH", "test_path"); + let result = get_rust_src_path(); + + match original { + Some(path) => env::set_var("RUST_SRC_PATH", path), + None => env::remove_var("RUST_SRC_PATH"), + } + + assert_eq!( + Err(RustSrcPathError::DoesNotExist(path::PathBuf::from( + "test_path" + ))), + result + ); +} + +#[test] +fn test_get_rust_src_path_not_rust_source_tree() { + use std::env; + + let _guard = TEST_SEMAPHORE.lock().unwrap(); + + let original = env::var_os("RUST_SRC_PATH"); + + env::set_var("RUST_SRC_PATH", "/"); + + let result = get_rust_src_path(); + + match original { + Some(path) => env::set_var("RUST_SRC_PATH", path), + None => env::remove_var("RUST_SRC_PATH"), + } + + assert_eq!( + Err(RustSrcPathError::NotRustSourceTree(path::PathBuf::from( + "/libstd" + ))), + result + ); +} + +#[test] +fn test_get_rust_src_path_missing() { + use std::env; + + let _guard = TEST_SEMAPHORE.lock().unwrap(); + + let path = env::var_os("PATH").unwrap(); + let original = env::var_os("RUST_SRC_PATH"); + + env::remove_var("RUST_SRC_PATH"); + env::remove_var("PATH"); + + let result = get_rust_src_path(); + + env::set_var("PATH", path); + match original { + Some(path) => env::set_var("RUST_SRC_PATH", path), + None => env::remove_var("RUST_SRC_PATH"), + } + + assert_eq!(Err(RustSrcPathError::Missing), result); +} + +#[test] +fn test_get_rust_src_path_rustup_ok() { + use std::env; + + let _guard = TEST_SEMAPHORE.lock().unwrap(); + let original = env::var_os("RUST_SRC_PATH"); + env::remove_var("RUST_SRC_PATH"); + + let result = get_rust_src_path(); + + match original { + Some(path) => env::set_var("RUST_SRC_PATH", path), + None => env::remove_var("RUST_SRC_PATH"), + } + + match result { + Ok(_) => (), + Err(_) => panic!( + "Couldn't get the path via rustup! \ + Rustup and the component rust-src needs to be installed for this test to pass!" + ), + } +} + +/// An immutable stack implemented as a linked list backed by a thread's stack. +// TODO: this implementation is fast, but if we want to run racer in multiple threads, +// we have to rewrite it using std::sync::Arc. +pub struct StackLinkedListNode<'stack, T>(Option>); + +struct StackLinkedListNodeData<'stack, T> { + item: T, + previous: &'stack StackLinkedListNode<'stack, T>, +} + +impl<'stack, T> StackLinkedListNode<'stack, T> { + /// Returns an empty node. + pub fn empty() -> Self { + StackLinkedListNode(None) + } + /// Pushes a new node on the stack. Returns the new node. + pub fn push(&'stack self, item: T) -> Self { + StackLinkedListNode(Some(StackLinkedListNodeData { + item, + previous: self, + })) + } +} + +impl<'stack, T: PartialEq> StackLinkedListNode<'stack, T> { + /// Check if the stack contains the specified item. + /// Returns `true` if the item is found, or `false` if it's not found. 
+ pub fn contains(&self, item: &T) -> bool { + let mut current = self; + while let StackLinkedListNode(Some(StackLinkedListNodeData { + item: ref current_item, + previous, + })) = *current + { + if current_item == item { + return true; + } + current = previous; + } + false + } +} + +// don't use other than strip_visibilities or strip_unsafe +fn strip_word_impl(src: &str, allow_paren: bool) -> Option { + let mut level = 0; + for (i, &b) in src.as_bytes().into_iter().enumerate() { + match b { + b'(' if allow_paren => level += 1, + b')' if allow_paren => level -= 1, + _ if level >= 1 => (), + // stop on the first thing that isn't whitespace + _ if !is_whitespace_byte(b) => { + if i == 0 { + break; + } + return Some(BytePos(i)); + } + _ => continue, + } + } + None +} + +/// remove pub(crate), crate +pub(crate) fn strip_visibility(src: &str) -> Option { + if src.starts_with("pub") { + Some(strip_word_impl(&src[3..], true)? + BytePos(3)) + } else if src.starts_with("crate") { + Some(strip_word_impl(&src[5..], false)? + BytePos(5)) + } else { + None + } +} + +/// remove `unsafe` or other keywords +pub(crate) fn strip_word(src: &str, word: &str) -> Option { + if src.starts_with(word) { + let len = word.len(); + Some(strip_word_impl(&src[len..], false)? + BytePos(len)) + } else { + None + } +} + +/// remove words +pub(crate) fn strip_words(src: &str, words: &[&str]) -> BytePos { + let mut start = BytePos::ZERO; + for word in words { + start += strip_word(&src[start.0..], word).unwrap_or(BytePos::ZERO); + } + start +} + +#[test] +fn test_strip_words() { + assert_eq!( + strip_words("const unsafe fn", &["const", "unsafe"]), + BytePos(15) + ); + assert_eq!(strip_words("unsafe fn", &["const", "unsafe"]), BytePos(8)); + assert_eq!(strip_words("const fn", &["const", "unsafe"]), BytePos(8)); + assert_eq!(strip_words("fn", &["const", "unsafe"]), BytePos(0)); +} + +/// Removes `pub(...)` from the start of a blob so that other code +/// can assess the struct/trait/fn without worrying about restricted +/// visibility. +pub(crate) fn trim_visibility(blob: &str) -> &str { + if let Some(start) = strip_visibility(blob) { + &blob[start.0..] + } else { + blob + } +} + +#[test] +fn test_trim_visibility() { + assert_eq!(trim_visibility("pub fn"), "fn"); + assert_eq!(trim_visibility("pub(crate) struct"), "struct"); + assert_eq!(trim_visibility("pub (in super) const fn"), "const fn"); +} + +/// Checks if the completion point is in a function declaration by looking +/// to see if the second-to-last word is `fn`. +pub fn in_fn_name(line_before_point: &str) -> bool { + // Determine if the cursor is sitting in the whitespace after typing `fn ` before + // typing a name. + let has_started_name = !line_before_point.ends_with(|c: char| c.is_whitespace()); + + let mut words = line_before_point.split_whitespace().rev(); + + // Make sure we haven't finished the name and started generics or arguments + if has_started_name { + if let Some(ident) = words.next() { + if ident.chars().any(|c| !is_ident_char(c)) { + return false; + } + } + } + + words.next().map(|word| word == "fn").unwrap_or_default() +} + +#[test] +fn test_in_fn_name() { + assert!(in_fn_name("fn foo")); + assert!(in_fn_name(" fn foo")); + assert!(in_fn_name("fn ")); + assert!(!in_fn_name("fn foo(b")); + assert!(!in_fn_name("fn")); +} + +/// calculate hash of string +pub fn calculate_str_hash(s: &str) -> u64 { + let mut hasher = DefaultHasher::new(); + s.hash(&mut hasher); + hasher.finish() +} + +#[macro_export] +macro_rules! 
try_continue { + ($res: expr) => { + match ::std::ops::Try::branch($res) { + ::std::ops::ControlFlow::Continue(o) => o, + ::std::ops::ControlFlow::Break(_) => continue, + } + }; +} + +#[macro_export] +macro_rules! try_vec { + ($res: expr) => { + match ::std::ops::Try::branch($res) { + ::std::ops::ControlFlow::Continue(o) => o, + ::std::ops::ControlFlow::Break(_) => return Vec::new(), + } + }; +} + +pub(crate) fn gen_tuple_fields(u: usize) -> impl Iterator { + const NUM: [&'static str; 16] = [ + "0", "1", "2", "3", "4", "5", "6", "7", "8", "9", "10", "11", "12", "13", "14", "15", + ]; + NUM.iter().take(::std::cmp::min(u, 16)).map(|x| *x) +} diff --git a/rls-ipc/src/rpc.rs b/rls-ipc/src/rpc.rs index f6aad9f99f..fee9a95e74 100644 --- a/rls-ipc/src/rpc.rs +++ b/rls-ipc/src/rpc.rs @@ -75,4 +75,6 @@ pub enum Edition { Edition2018, /// Rust 2021 Edition2021, + /// Rust 2024 + Edition2024, } diff --git a/rls-rustc/src/lib.rs b/rls-rustc/src/lib.rs index 6857ed7c4f..6ffbbfba55 100644 --- a/rls-rustc/src/lib.rs +++ b/rls-rustc/src/lib.rs @@ -137,6 +137,7 @@ impl Callbacks for ShimCalls { rustc_span::edition::Edition::Edition2015 => Edition::Edition2015, rustc_span::edition::Edition::Edition2018 => Edition::Edition2018, rustc_span::edition::Edition::Edition2021 => Edition::Edition2021, + rustc_span::edition::Edition::Edition2024 => Edition::Edition2024, }, }; diff --git a/rls/src/actions/requests.rs b/rls/src/actions/requests.rs index 80c04021e9..1391ee716a 100644 --- a/rls/src/actions/requests.rs +++ b/rls/src/actions/requests.rs @@ -697,6 +697,7 @@ fn reformat( Edition::Edition2015 => RustfmtEdition::Edition2015, Edition::Edition2018 => RustfmtEdition::Edition2018, Edition::Edition2021 => RustfmtEdition::Edition2021, + Edition::Edition2024 => RustfmtEdition::Edition2024, }; config.set().edition(edition); trace!("Detected edition {:?} for file `{}`", edition, path.display()); diff --git a/rls/src/build/cargo.rs b/rls/src/build/cargo.rs index 36efbe45df..e69e10191e 100644 --- a/rls/src/build/cargo.rs +++ b/rls/src/build/cargo.rs @@ -408,17 +408,18 @@ impl Executor for RlsExecutor { // Enforce JSON output so that we can parse the rustc output by // stripping --error-format if it was specified (e.g. Cargo pipelined // build) - let filtered_args = filter_arg(cargo_cmd.get_args(), "--error-format"); + let filtered_args = + filter_arg(&*cargo_cmd.get_args().collect::>(), "--error-format"); cargo_cmd.args_replace(&filtered_args); cargo_cmd.arg("--error-format=json"); // Delete any stale data. We try and remove any json files with // the same crate name as Cargo would emit. This includes files // with the same crate name but different hashes, e.g., those // made with a different compiler. - let cargo_args = cargo_cmd.get_args(); + let cargo_args = cargo_cmd.get_args().collect::>(); let crate_name = - parse_arg(cargo_args, "--crate-name").expect("no crate-name in rustc command line"); - let cfg_test = cargo_args.iter().any(|arg| arg == "--test"); + parse_arg(&cargo_args, "--crate-name").expect("no crate-name in rustc command line"); + let cfg_test = cargo_args.iter().any(|arg| *arg == "--test"); trace!("exec: {} {:?}", crate_name, cargo_cmd); // Send off a window/progress notification for this compile target. 
@@ -435,7 +436,8 @@ impl Executor for RlsExecutor { .expect("failed to send progress update"); } - let out_dir = parse_arg(cargo_args, "--out-dir").expect("no out-dir in rustc command line"); + let out_dir = + parse_arg(&cargo_args, "--out-dir").expect("no out-dir in rustc command line"); let analysis_dir = Path::new(&out_dir).join("save-analysis"); if let Ok(dir_contents) = read_dir(&analysis_dir) { let lib_crate_name = "lib".to_owned() + &crate_name; @@ -478,7 +480,7 @@ impl Executor for RlsExecutor { // Add args and envs to cmd. let mut args: Vec<_> = - cargo_args.iter().map(|a| a.clone().into_string().unwrap()).collect(); + cargo_args.iter().map(|a| (*a).to_owned().into_string().unwrap()).collect(); let envs = cargo_cmd.get_envs().clone(); let sysroot = super::rustc::current_sysroot() @@ -508,7 +510,7 @@ impl Executor for RlsExecutor { "rustc not intercepted - {}{} - args: {:?} envs: {:?}", id.name(), build_script_notice, - cmd.get_args(), + cmd.get_args().collect::>(), cmd.get_envs(), ); @@ -712,9 +714,9 @@ pub fn make_cargo_config( config } -fn parse_arg(args: &[OsString], arg: &str) -> Option { +fn parse_arg(args: &[&OsString], arg: &str) -> Option { for (i, a) in args.iter().enumerate() { - if a == arg { + if *a == arg { return Some(args[i + 1].clone().into_string().unwrap()); } } @@ -780,7 +782,7 @@ fn dedup_flags(flag_str: &str) -> String { } /// Removes a selected flag of a `--flag=VALUE` or `--flag VALUE` shape from `args` (command line args for Rust). -fn filter_arg(args: &[OsString], key: &str) -> Vec { +fn filter_arg(args: &[&OsString], key: &str) -> Vec { let key_as_prefix = key.to_owned() + "="; let mut ret = vec![]; @@ -934,12 +936,47 @@ mod test { input.split_whitespace().map(OsString::from).collect() } - assert!(filter_arg(&args("--error-format=json"), "--error-format").is_empty()); - assert!(filter_arg(&args("--error-format json"), "--error-format").is_empty()); - assert_eq!(filter_arg(&args("-a --error-format=json"), "--error-format"), ["-a"]); - assert_eq!(filter_arg(&args("-a --error-format json"), "--error-format"), ["-a"]); - assert_eq!(filter_arg(&args("-a --error-format=json -b"), "--error-format"), ["-a", "-b"]); - assert_eq!(filter_arg(&args("-a --error-format json -b"), "--error-format"), ["-a", "-b"]); - assert_eq!(filter_arg(&args("-a -b -x"), "--error-format"), ["-a", "-b", "-x"]); + assert!(filter_arg( + &args("--error-format=json").iter().collect::>(), + "--error-format" + ) + .is_empty()); + assert!(filter_arg( + &args("--error-format json").iter().collect::>(), + "--error-format" + ) + .is_empty()); + assert_eq!( + filter_arg( + &args("-a --error-format=json").iter().collect::>(), + "--error-format" + ), + ["-a"] + ); + assert_eq!( + filter_arg( + &args("-a --error-format json").iter().collect::>(), + "--error-format" + ), + ["-a"] + ); + assert_eq!( + filter_arg( + &args("-a --error-format=json -b").iter().collect::>(), + "--error-format" + ), + ["-a", "-b"] + ); + assert_eq!( + filter_arg( + &args("-a --error-format json -b").iter().collect::>(), + "--error-format" + ), + ["-a", "-b"] + ); + assert_eq!( + filter_arg(&args("-a -b -x").iter().collect::>(), "--error-format"), + ["-a", "-b", "-x"] + ); } } diff --git a/rls/src/build/external.rs b/rls/src/build/external.rs index de95e1d505..326c8a12ed 100644 --- a/rls/src/build/external.rs +++ b/rls/src/build/external.rs @@ -197,7 +197,7 @@ impl BuildKey for Invocation { let mut hash = DefaultHasher::new(); self.command.get_program().hash(&mut hash); - let /*mut*/ args = 
self.command.get_args().to_owned(); + let /*mut*/ args = self.command.get_args().map(|a| a.to_owned()).collect::>(); // args.sort(); // TODO: parse 2-part args (e.g., `["--extern", "a=b"]`) args.hash(&mut hash); let mut envs: Vec<_> = self.command.get_envs().iter().collect(); @@ -417,10 +417,8 @@ fn guess_rustc_src_path(build_dir: &Path, cmd: &ProcessBuilder) -> Option crate::build::plan::Edition::Edition2015, rls_ipc::rpc::Edition::Edition2018 => crate::build::plan::Edition::Edition2018, rls_ipc::rpc::Edition::Edition2021 => crate::build::plan::Edition::Edition2021, + rls_ipc::rpc::Edition::Edition2024 => crate::build::plan::Edition::Edition2024, }, disambiguator: krate.disambiguator, } diff --git a/rls/src/build/plan.rs b/rls/src/build/plan.rs index cf89c9e8ad..821b5a7e5d 100644 --- a/rls/src/build/plan.rs +++ b/rls/src/build/plan.rs @@ -84,7 +84,7 @@ pub(crate) struct JobQueue(Vec); /// For example, if `[.., "--crate-name", "rls", ...]` arguments are specified, /// then proc_arg(prc, "--crate-name") returns Some(&OsStr::new("rls")); fn proc_argument_value>(prc: &ProcessBuilder, key: T) -> Option<&std::ffi::OsStr> { - let args = prc.get_args(); + let args = prc.get_args().collect::>(); let (idx, _) = args.iter().enumerate().find(|(_, arg)| arg.as_os_str() == key.as_ref())?; Some(args.get(idx + 1)?.as_os_str()) @@ -125,7 +125,6 @@ impl JobQueue { trace!("Executing: {:#?}", job); let mut args: Vec<_> = job .get_args() - .iter() .cloned() .map(|x| x.into_string().expect("cannot stringify job args")) .collect(); @@ -155,7 +154,7 @@ impl JobQueue { let crate_name = proc_argument_value(&job, "--crate-name").and_then(OsStr::to_str); let update = match crate_name { Some(name) => { - let cfg_test = job.get_args().iter().any(|arg| arg == "--test"); + let cfg_test = job.get_args().any(|arg| *arg == "--test"); ProgressUpdate::Message(if cfg_test { format!("{} cfg(test)", name) } else { @@ -237,6 +236,7 @@ pub enum Edition { Edition2015, Edition2018, Edition2021, + Edition2024, } impl Default for Edition { @@ -253,6 +253,7 @@ impl std::convert::TryFrom<&str> for Edition { "2015" => Edition::Edition2015, "2018" => Edition::Edition2018, "2021" => Edition::Edition2021, + "2024" => Edition::Edition2024, _ => return Err("unknown"), }) } diff --git a/rls/src/build/rustc.rs b/rls/src/build/rustc.rs index 69c602d8d8..4ee888b864 100644 --- a/rls/src/build/rustc.rs +++ b/rls/src/build/rustc.rs @@ -255,6 +255,7 @@ impl rustc_driver::Callbacks for RlsRustcCalls { RustcEdition::Edition2015 => Edition::Edition2015, RustcEdition::Edition2018 => Edition::Edition2018, RustcEdition::Edition2021 => Edition::Edition2021, + RustcEdition::Edition2024 => Edition::Edition2024, }, }; diff --git a/rust-toolchain b/rust-toolchain index a07811d43e..d9d9c22a5c 100644 --- a/rust-toolchain +++ b/rust-toolchain @@ -1,3 +1,3 @@ [toolchain] -channel = "nightly-2022-01-13" +channel = "nightly-2022-05-16" components = ["rust-src", "rustc-dev", "llvm-tools-preview"]
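
A note on the recurring `get_args()` changes in `rls/src/build/*.rs` above: the bumped cargo revision changes `ProcessBuilder::get_args()` from returning a slice of arguments to returning an iterator, so call sites now collect the arguments once and pass borrowed slices to helpers such as `parse_arg` and `filter_arg`. The sketch below is a minimal, self-contained illustration of that collect-then-borrow pattern; the toy `get_args` function here stands in for the real cargo API and is not part of this patch.

```rust
use std::ffi::OsString;

// Hypothetical stand-in for the iterator now returned by ProcessBuilder::get_args().
fn get_args() -> impl Iterator<Item = OsString> {
    ["--crate-name", "rls", "--error-format=json", "--test"]
        .into_iter()
        .map(OsString::from)
}

// Mirrors the updated helper signature in cargo.rs: it takes a slice of borrowed args.
fn parse_arg(args: &[&OsString], key: &str) -> Option<String> {
    let idx = args.iter().position(|a| *a == key)?;
    args.get(idx + 1)
        .map(|a| (*a).clone().into_string().unwrap())
}

fn main() {
    // Collect the iterator once, then hand out borrowed views to the helpers.
    let owned: Vec<OsString> = get_args().collect();
    let args: Vec<&OsString> = owned.iter().collect();

    assert_eq!(parse_arg(&args, "--crate-name"), Some("rls".to_owned()));
    // The membership check mirrors `cargo_args.iter().any(|arg| *arg == "--test")`.
    assert!(args.iter().any(|arg| *arg == "--test"));
}
```

Collecting the iterator up front keeps a single owned `Vec` alive while several helpers borrow from it, which avoids re-consuming the iterator for each of the `parse_arg`, `filter_arg`, and `--test` checks.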