1 //! Module responsible for analyzing the code surrounding the cursor for completion.
2 use std::iter;
3
4 use hir::{Semantics, Type, TypeInfo, Variant};
5 use ide_db::{active_parameter::ActiveParameter, RootDatabase};
6 use syntax::{
7 algo::{find_node_at_offset, non_trivia_sibling},
8 ast::{self, AttrKind, HasArgList, HasLoopBody, HasName, NameOrNameRef},
9 match_ast, AstNode, AstToken, Direction, NodeOrToken, SyntaxElement, SyntaxKind, SyntaxNode,
10 SyntaxToken, TextRange, TextSize, T,
11 };
12
13 use crate::context::{
14 AttrCtx, CompletionAnalysis, DotAccess, DotAccessKind, ExprCtx, ItemListKind, LifetimeContext,
15 LifetimeKind, NameContext, NameKind, NameRefContext, NameRefKind, ParamContext, ParamKind,
16 PathCompletionCtx, PathKind, PatternContext, PatternRefutability, Qualified, QualifierCtx,
17 TypeAscriptionTarget, TypeLocation, COMPLETION_MARKER,
18 };
19
/// Intermediate state produced by [`expand`]: the (possibly macro-expanded) original and
/// speculative files together with the completion position inside them.
struct ExpansionResult {
    /// The file the completion is happening in, after descending into any macro
    /// expansions found at the offset.
    original_file: SyntaxNode,
    /// Copy of `original_file` with the fake identifier inserted, expanded in lockstep.
    speculative_file: SyntaxNode,
    /// Offset of the completion position inside the (expanded) files.
    offset: TextSize,
    /// The fake identifier token inside `speculative_file`.
    fake_ident_token: SyntaxToken,
    /// Set when the cursor sits inside a derive attribute's pseudo expansion:
    /// (expanded original, expanded speculative, offset in the expansion, the derive attr).
    derive_ctx: Option<(SyntaxNode, SyntaxNode, TextSize, ast::Attr)>,
}
27
/// Everything [`expand_and_analyze`] figured out about the completion location.
pub(super) struct AnalysisResult {
    /// The classified completion context.
    pub(super) analysis: CompletionAnalysis,
    /// Expected type and name at the cursor position, where they could be determined.
    pub(super) expected: (Option<Type>, Option<ast::NameOrNameRef>),
    pub(super) qualifier_ctx: QualifierCtx,
    /// The original token of the expanded file
    pub(super) token: SyntaxToken,
    /// Offset of the completion position inside the expanded file.
    pub(super) offset: TextSize,
}
36
expand_and_analyze( sema: &Semantics<'_, RootDatabase>, original_file: SyntaxNode, speculative_file: SyntaxNode, offset: TextSize, original_token: &SyntaxToken, ) -> Option<AnalysisResult>37 pub(super) fn expand_and_analyze(
38 sema: &Semantics<'_, RootDatabase>,
39 original_file: SyntaxNode,
40 speculative_file: SyntaxNode,
41 offset: TextSize,
42 original_token: &SyntaxToken,
43 ) -> Option<AnalysisResult> {
44 // as we insert after the offset, right biased will *always* pick the identifier no matter
45 // if there is an ident already typed or not
46 let fake_ident_token = speculative_file.token_at_offset(offset).right_biased()?;
47 // the relative offset between the cursor and the *identifier* token we are completing on
48 let relative_offset = offset - fake_ident_token.text_range().start();
49 // make the offset point to the start of the original token, as that is what the
50 // intermediate offsets calculated in expansion always points to
51 let offset = offset - relative_offset;
52 let expansion =
53 expand(sema, original_file, speculative_file, offset, fake_ident_token, relative_offset);
54
55 // add the relative offset back, so that left_biased finds the proper token
56 let offset = expansion.offset + relative_offset;
57 let token = expansion.original_file.token_at_offset(offset).left_biased()?;
58
59 analyze(sema, expansion, original_token, &token).map(|(analysis, expected, qualifier_ctx)| {
60 AnalysisResult { analysis, expected, qualifier_ctx, token, offset }
61 })
62 }
63
/// Expand attributes and macro calls at the current cursor position for both the original file
/// and fake file repeatedly. As soon as one of the two expansions fail we stop so the original
/// and speculative states stay in sync.
///
/// `relative_offset` is how far the cursor sits into the identifier token being completed; it
/// is used to detect when a remapped offset would fall outside the bounds of an expansion.
fn expand(
    sema: &Semantics<'_, RootDatabase>,
    mut original_file: SyntaxNode,
    mut speculative_file: SyntaxNode,
    mut offset: TextSize,
    mut fake_ident_token: SyntaxToken,
    relative_offset: TextSize,
) -> ExpansionResult {
    let _p = profile::span("CompletionContext::expand");
    let mut derive_ctx = None;

    'expansion: loop {
        let parent_item =
            |item: &ast::Item| item.syntax().ancestors().skip(1).find_map(ast::Item::cast);
        // Walk the item ancestor chains of both files in lockstep, innermost item first.
        let ancestor_items = iter::successors(
            Option::zip(
                find_node_at_offset::<ast::Item>(&original_file, offset),
                find_node_at_offset::<ast::Item>(&speculative_file, offset),
            ),
            |(a, b)| parent_item(a).zip(parent_item(b)),
        );

        // first try to expand attributes as these are always the outermost macro calls
        'ancestors: for (actual_item, item_with_fake_ident) in ancestor_items {
            match (
                sema.expand_attr_macro(&actual_item),
                sema.speculative_expand_attr_macro(
                    &actual_item,
                    &item_with_fake_ident,
                    fake_ident_token.clone(),
                ),
            ) {
                // maybe parent items have attributes, so continue walking the ancestors
                (None, None) => continue 'ancestors,
                // successful expansions
                (Some(actual_expansion), Some((fake_expansion, fake_mapped_token))) => {
                    let new_offset = fake_mapped_token.text_range().start();
                    if new_offset + relative_offset > actual_expansion.text_range().end() {
                        // offset outside of bounds from the original expansion,
                        // stop here to prevent problems from happening
                        break 'expansion;
                    }
                    original_file = actual_expansion;
                    speculative_file = fake_expansion;
                    fake_ident_token = fake_mapped_token;
                    offset = new_offset;
                    continue 'expansion;
                }
                // exactly one expansion failed, inconsistent state so stop expanding completely
                _ => break 'expansion,
            }
        }

        // No attributes have been expanded, so look for macro_call! token trees or derive token trees
        let orig_tt = match find_node_at_offset::<ast::TokenTree>(&original_file, offset) {
            Some(it) => it,
            None => break 'expansion,
        };
        let spec_tt = match find_node_at_offset::<ast::TokenTree>(&speculative_file, offset) {
            Some(it) => it,
            None => break 'expansion,
        };

        // Expand pseudo-derive expansion
        if let (Some(orig_attr), Some(spec_attr)) = (
            orig_tt.syntax().parent().and_then(ast::Meta::cast).and_then(|it| it.parent_attr()),
            spec_tt.syntax().parent().and_then(ast::Meta::cast).and_then(|it| it.parent_attr()),
        ) {
            if let (Some(actual_expansion), Some((fake_expansion, fake_mapped_token))) = (
                sema.expand_derive_as_pseudo_attr_macro(&orig_attr),
                sema.speculative_expand_derive_as_pseudo_attr_macro(
                    &orig_attr,
                    &spec_attr,
                    fake_ident_token.clone(),
                ),
            ) {
                derive_ctx = Some((
                    actual_expansion,
                    fake_expansion,
                    fake_mapped_token.text_range().start(),
                    orig_attr,
                ));
            }
            // at this point we won't have any more successful expansions, so stop
            break 'expansion;
        }

        // Expand fn-like macro calls
        if let (Some(actual_macro_call), Some(macro_call_with_fake_ident)) = (
            orig_tt.syntax().ancestors().find_map(ast::MacroCall::cast),
            spec_tt.syntax().ancestors().find_map(ast::MacroCall::cast),
        ) {
            let mac_call_path0 = actual_macro_call.path().as_ref().map(|s| s.syntax().text());
            let mac_call_path1 =
                macro_call_with_fake_ident.path().as_ref().map(|s| s.syntax().text());

            // inconsistent state, stop expanding
            if mac_call_path0 != mac_call_path1 {
                break 'expansion;
            }
            let speculative_args = match macro_call_with_fake_ident.token_tree() {
                Some(tt) => tt,
                None => break 'expansion,
            };

            match (
                sema.expand(&actual_macro_call),
                sema.speculative_expand(
                    &actual_macro_call,
                    &speculative_args,
                    fake_ident_token.clone(),
                ),
            ) {
                // successful expansions
                (Some(actual_expansion), Some((fake_expansion, fake_mapped_token))) => {
                    let new_offset = fake_mapped_token.text_range().start();
                    if new_offset + relative_offset > actual_expansion.text_range().end() {
                        // offset outside of bounds from the original expansion,
                        // stop here to prevent problems from happening
                        break 'expansion;
                    }
                    original_file = actual_expansion;
                    speculative_file = fake_expansion;
                    fake_ident_token = fake_mapped_token;
                    offset = new_offset;
                    continue 'expansion;
                }
                // at least one expansion failed, we won't have anything to expand from this
                // point onwards so break out
                _ => break 'expansion,
            }
        }

        // none of our states have changed so stop the loop
        break 'expansion;
    }
    ExpansionResult { original_file, speculative_file, offset, fake_ident_token, derive_ctx }
}
205
/// Fill the completion context, this is what does semantic reasoning about the surrounding context
/// of the completion location.
///
/// Returns the classified analysis, the expected type/name at the cursor position, and the
/// qualifier context collected while classifying a name reference.
fn analyze(
    sema: &Semantics<'_, RootDatabase>,
    expansion_result: ExpansionResult,
    original_token: &SyntaxToken,
    self_token: &SyntaxToken,
) -> Option<(CompletionAnalysis, (Option<Type>, Option<ast::NameOrNameRef>), QualifierCtx)> {
    let _p = profile::span("CompletionContext::analyze");
    let ExpansionResult { original_file, speculative_file, offset, fake_ident_token, derive_ctx } =
        expansion_result;

    // Overwrite the path kind for derives
    if let Some((original_file, file_with_fake_ident, offset, origin_attr)) = derive_ctx {
        if let Some(ast::NameLike::NameRef(name_ref)) =
            find_node_at_offset(&file_with_fake_ident, offset)
        {
            let parent = name_ref.syntax().parent()?;
            let (mut nameref_ctx, _) = classify_name_ref(sema, &original_file, name_ref, parent)?;
            if let NameRefKind::Path(path_ctx) = &mut nameref_ctx.kind {
                // Re-tag the path as a derive path, carrying the derives already present in
                // the attribute along so they can be filtered out of the suggestions.
                path_ctx.kind = PathKind::Derive {
                    existing_derives: sema
                        .resolve_derive_macro(&origin_attr)
                        .into_iter()
                        .flatten()
                        .flatten()
                        .collect(),
                };
            }
            return Some((
                CompletionAnalysis::NameRef(nameref_ctx),
                (None, None),
                QualifierCtx::default(),
            ));
        }
        return None;
    }

    // No name-like node at the cursor: we are either inside a string literal or inside an
    // attribute token tree that could not be expanded.
    let Some(name_like) = find_node_at_offset(&speculative_file, offset) else {
        let analysis = if let Some(original) = ast::String::cast(original_token.clone()) {
            CompletionAnalysis::String {
                original,
                expanded: ast::String::cast(self_token.clone()),
            }
        } else {
            // Fix up trailing whitespace problem
            // #[attr(foo = $0
            let token = syntax::algo::skip_trivia_token(self_token.clone(), Direction::Prev)?;
            let p = token.parent()?;
            if p.kind() == SyntaxKind::TOKEN_TREE
                && p.ancestors().any(|it| it.kind() == SyntaxKind::META)
            {
                let colon_prefix = previous_non_trivia_token(self_token.clone())
                    .map_or(false, |it| T![:] == it.kind());
                CompletionAnalysis::UnexpandedAttrTT {
                    fake_attribute_under_caret: fake_ident_token
                        .parent_ancestors()
                        .find_map(ast::Attr::cast),
                    colon_prefix,
                }
            } else {
                return None;
            }
        };
        return Some((analysis, (None, None), QualifierCtx::default()));
    };

    let expected = expected_type_and_name(sema, self_token, &name_like);
    let mut qual_ctx = QualifierCtx::default();
    // Classify the name-like node by its flavor: lifetime, name reference, or name.
    let analysis = match name_like {
        ast::NameLike::Lifetime(lifetime) => {
            CompletionAnalysis::Lifetime(classify_lifetime(sema, &original_file, lifetime)?)
        }
        ast::NameLike::NameRef(name_ref) => {
            let parent = name_ref.syntax().parent()?;
            let (nameref_ctx, qualifier_ctx) =
                classify_name_ref(sema, &original_file, name_ref, parent)?;

            if let NameRefContext {
                kind:
                    NameRefKind::Path(PathCompletionCtx { kind: PathKind::Expr { .. }, path, .. }, ..),
                ..
            } = &nameref_ctx
            {
                if is_in_token_of_for_loop(path) {
                    // for pat $0
                    // there is nothing to complete here except `in` keyword
                    // don't bother populating the context
                    // Ideally this special casing wouldn't be needed, but the parser recovers
                    return None;
                }
            }

            qual_ctx = qualifier_ctx;
            CompletionAnalysis::NameRef(nameref_ctx)
        }
        ast::NameLike::Name(name) => {
            let name_ctx = classify_name(sema, &original_file, name)?;
            CompletionAnalysis::Name(name_ctx)
        }
    };
    Some((analysis, expected, qual_ctx))
}
309
/// Calculate the expected type and name of the cursor position.
///
/// Climbs the syntax tree from `token` until it reaches a node (let statement, argument
/// list, record field, match arm, …) that determines what type is expected there.
fn expected_type_and_name(
    sema: &Semantics<'_, RootDatabase>,
    token: &SyntaxToken,
    name_like: &ast::NameLike,
) -> (Option<Type>, Option<NameOrNameRef>) {
    let mut node = match token.parent() {
        Some(it) => it,
        None => return (None, None),
    };

    // Peel one layer off the expected type for every `RefExpr` (`&`) that wraps the
    // expression the cursor is completing in.
    let strip_refs = |mut ty: Type| match name_like {
        ast::NameLike::NameRef(n) => {
            let p = match n.syntax().parent() {
                Some(it) => it,
                None => return ty,
            };
            // Find the topmost node the completed expression belongs to: the outermost
            // field-access chain, or the `PathExpr` wrapping the path.
            let top_syn = match_ast! {
                match p {
                    ast::FieldExpr(e) => e
                        .syntax()
                        .ancestors()
                        .take_while(|it| ast::FieldExpr::can_cast(it.kind()))
                        .last(),
                    ast::PathSegment(e) => e
                        .syntax()
                        .ancestors()
                        .skip(1)
                        .take_while(|it| ast::Path::can_cast(it.kind()) || ast::PathExpr::can_cast(it.kind()))
                        .find(|it| ast::PathExpr::can_cast(it.kind())),
                    _ => None
                }
            };
            let top_syn = match top_syn {
                Some(it) => it,
                None => return ty,
            };
            for _ in top_syn.ancestors().skip(1).map_while(ast::RefExpr::cast) {
                cov_mark::hit!(expected_type_fn_param_ref);
                ty = ty.strip_reference();
            }
            ty
        }
        _ => ty,
    };

    // Walk upwards until some ancestor yields an answer (or we run out of parents).
    let (ty, name) = loop {
        break match_ast! {
            match node {
                ast::LetStmt(it) => {
                    cov_mark::hit!(expected_type_let_with_leading_char);
                    cov_mark::hit!(expected_type_let_without_leading_char);
                    // Prefer the pattern's type; fall back to the initializer's type.
                    let ty = it.pat()
                        .and_then(|pat| sema.type_of_pat(&pat))
                        .or_else(|| it.initializer().and_then(|it| sema.type_of_expr(&it)))
                        .map(TypeInfo::original);
                    let name = match it.pat() {
                        Some(ast::Pat::IdentPat(ident)) => ident.name().map(NameOrNameRef::Name),
                        Some(_) | None => None,
                    };

                    (ty, name)
                },
                ast::LetExpr(it) => {
                    cov_mark::hit!(expected_type_if_let_without_leading_char);
                    let ty = it.pat()
                        .and_then(|pat| sema.type_of_pat(&pat))
                        .or_else(|| it.expr().and_then(|it| sema.type_of_expr(&it)))
                        .map(TypeInfo::original);
                    (ty, None)
                },
                ast::ArgList(_) => {
                    cov_mark::hit!(expected_type_fn_param);
                    // Use the signature of the called function: the active parameter's
                    // type and name are what's expected at the cursor.
                    ActiveParameter::at_token(
                        sema,
                        token.clone(),
                    ).map(|ap| {
                        let name = ap.ident().map(NameOrNameRef::Name);
                        (Some(ap.ty), name)
                    })
                    .unwrap_or((None, None))
                },
                ast::RecordExprFieldList(it) => {
                    // wouldn't try {} be nice...
                    (|| {
                        if token.kind() == T![..]
                            ||token.prev_token().map(|t| t.kind()) == Some(T![..])
                        {
                            cov_mark::hit!(expected_type_struct_func_update);
                            // `Struct { ..$0 }` — expect the struct type itself.
                            let record_expr = it.syntax().parent().and_then(ast::RecordExpr::cast)?;
                            let ty = sema.type_of_expr(&record_expr.into())?;
                            Some((
                                Some(ty.original),
                                None
                            ))
                        } else {
                            cov_mark::hit!(expected_type_struct_field_without_leading_char);
                            // Expect the type of the record field preceding the cursor.
                            let expr_field = token.prev_sibling_or_token()?
                                .into_node()
                                .and_then(ast::RecordExprField::cast)?;
                            let (_, _, ty) = sema.resolve_record_field(&expr_field)?;
                            Some((
                                Some(ty),
                                expr_field.field_name().map(NameOrNameRef::NameRef),
                            ))
                        }
                    })().unwrap_or((None, None))
                },
                ast::RecordExprField(it) => {
                    if let Some(expr) = it.expr() {
                        cov_mark::hit!(expected_type_struct_field_with_leading_char);
                        (
                            sema.type_of_expr(&expr).map(TypeInfo::original),
                            it.field_name().map(NameOrNameRef::NameRef),
                        )
                    } else {
                        cov_mark::hit!(expected_type_struct_field_followed_by_comma);
                        let ty = sema.resolve_record_field(&it)
                            .map(|(_, _, ty)| ty);
                        (
                            ty,
                            it.field_name().map(NameOrNameRef::NameRef),
                        )
                    }
                },
                // match foo { $0 }
                // match foo { ..., pat => $0 }
                ast::MatchExpr(it) => {
                    let on_arrow = previous_non_trivia_token(token.clone()).map_or(false, |it| T![=>] == it.kind());

                    let ty = if on_arrow {
                        // match foo { ..., pat => $0 }
                        cov_mark::hit!(expected_type_match_arm_body_without_leading_char);
                        cov_mark::hit!(expected_type_match_arm_body_with_leading_char);
                        sema.type_of_expr(&it.into())
                    } else {
                        // match foo { $0 }
                        cov_mark::hit!(expected_type_match_arm_without_leading_char);
                        it.expr().and_then(|e| sema.type_of_expr(&e))
                    }.map(TypeInfo::original);
                    (ty, None)
                },
                ast::IfExpr(it) => {
                    let ty = it.condition()
                        .and_then(|e| sema.type_of_expr(&e))
                        .map(TypeInfo::original);
                    (ty, None)
                },
                ast::IdentPat(it) => {
                    cov_mark::hit!(expected_type_if_let_with_leading_char);
                    cov_mark::hit!(expected_type_match_arm_with_leading_char);
                    let ty = sema.type_of_pat(&ast::Pat::from(it)).map(TypeInfo::original);
                    (ty, None)
                },
                ast::Fn(it) => {
                    cov_mark::hit!(expected_type_fn_ret_with_leading_char);
                    cov_mark::hit!(expected_type_fn_ret_without_leading_char);
                    let def = sema.to_def(&it);
                    (def.map(|def| def.ret_type(sema.db)), None)
                },
                ast::ClosureExpr(it) => {
                    let ty = sema.type_of_expr(&it.into());
                    ty.and_then(|ty| ty.original.as_callable(sema.db))
                        .map(|c| (Some(c.return_type()), None))
                        .unwrap_or((None, None))
                },
                ast::ParamList(_) => (None, None),
                ast::Stmt(_) => (None, None),
                ast::Item(_) => (None, None),
                _ => {
                    match node.parent() {
                        Some(n) => {
                            node = n;
                            continue;
                        },
                        None => (None, None),
                    }
                },
            }
        };
    };
    (ty.map(strip_refs), name)
}
493
classify_lifetime( _sema: &Semantics<'_, RootDatabase>, original_file: &SyntaxNode, lifetime: ast::Lifetime, ) -> Option<LifetimeContext>494 fn classify_lifetime(
495 _sema: &Semantics<'_, RootDatabase>,
496 original_file: &SyntaxNode,
497 lifetime: ast::Lifetime,
498 ) -> Option<LifetimeContext> {
499 let parent = lifetime.syntax().parent()?;
500 if parent.kind() == SyntaxKind::ERROR {
501 return None;
502 }
503
504 let kind = match_ast! {
505 match parent {
506 ast::LifetimeParam(param) => LifetimeKind::LifetimeParam {
507 is_decl: param.lifetime().as_ref() == Some(&lifetime),
508 param
509 },
510 ast::BreakExpr(_) => LifetimeKind::LabelRef,
511 ast::ContinueExpr(_) => LifetimeKind::LabelRef,
512 ast::Label(_) => LifetimeKind::LabelDef,
513 _ => LifetimeKind::Lifetime,
514 }
515 };
516 let lifetime = find_node_at_offset(original_file, lifetime.syntax().text_range().start());
517
518 Some(LifetimeContext { lifetime, kind })
519 }
520
classify_name( sema: &Semantics<'_, RootDatabase>, original_file: &SyntaxNode, name: ast::Name, ) -> Option<NameContext>521 fn classify_name(
522 sema: &Semantics<'_, RootDatabase>,
523 original_file: &SyntaxNode,
524 name: ast::Name,
525 ) -> Option<NameContext> {
526 let parent = name.syntax().parent()?;
527 let kind = match_ast! {
528 match parent {
529 ast::Const(_) => NameKind::Const,
530 ast::ConstParam(_) => NameKind::ConstParam,
531 ast::Enum(_) => NameKind::Enum,
532 ast::Fn(_) => NameKind::Function,
533 ast::IdentPat(bind_pat) => {
534 let mut pat_ctx = pattern_context_for(sema, original_file, bind_pat.into());
535 if let Some(record_field) = ast::RecordPatField::for_field_name(&name) {
536 pat_ctx.record_pat = find_node_in_file_compensated(sema, original_file, &record_field.parent_record_pat());
537 }
538
539 NameKind::IdentPat(pat_ctx)
540 },
541 ast::MacroDef(_) => NameKind::MacroDef,
542 ast::MacroRules(_) => NameKind::MacroRules,
543 ast::Module(module) => NameKind::Module(module),
544 ast::RecordField(_) => NameKind::RecordField,
545 ast::Rename(_) => NameKind::Rename,
546 ast::SelfParam(_) => NameKind::SelfParam,
547 ast::Static(_) => NameKind::Static,
548 ast::Struct(_) => NameKind::Struct,
549 ast::Trait(_) => NameKind::Trait,
550 ast::TypeAlias(_) => NameKind::TypeAlias,
551 ast::TypeParam(_) => NameKind::TypeParam,
552 ast::Union(_) => NameKind::Union,
553 ast::Variant(_) => NameKind::Variant,
554 _ => return None,
555 }
556 };
557 let name = find_node_at_offset(original_file, name.syntax().text_range().start());
558 Some(NameContext { name, kind })
559 }
560
classify_name_ref( sema: &Semantics<'_, RootDatabase>, original_file: &SyntaxNode, name_ref: ast::NameRef, parent: SyntaxNode, ) -> Option<(NameRefContext, QualifierCtx)>561 fn classify_name_ref(
562 sema: &Semantics<'_, RootDatabase>,
563 original_file: &SyntaxNode,
564 name_ref: ast::NameRef,
565 parent: SyntaxNode,
566 ) -> Option<(NameRefContext, QualifierCtx)> {
567 let nameref = find_node_at_offset(original_file, name_ref.syntax().text_range().start());
568
569 let make_res = |kind| (NameRefContext { nameref: nameref.clone(), kind }, Default::default());
570
571 if let Some(record_field) = ast::RecordExprField::for_field_name(&name_ref) {
572 let dot_prefix = previous_non_trivia_token(name_ref.syntax().clone())
573 .map_or(false, |it| T![.] == it.kind());
574
575 return find_node_in_file_compensated(
576 sema,
577 original_file,
578 &record_field.parent_record_lit(),
579 )
580 .map(|expr| NameRefKind::RecordExpr { expr, dot_prefix })
581 .map(make_res);
582 }
583 if let Some(record_field) = ast::RecordPatField::for_field_name_ref(&name_ref) {
584 let kind = NameRefKind::Pattern(PatternContext {
585 param_ctx: None,
586 has_type_ascription: false,
587 ref_token: None,
588 mut_token: None,
589 record_pat: find_node_in_file_compensated(
590 sema,
591 original_file,
592 &record_field.parent_record_pat(),
593 ),
594 ..pattern_context_for(sema, original_file, record_field.parent_record_pat().into())
595 });
596 return Some(make_res(kind));
597 }
598
599 let segment = match_ast! {
600 match parent {
601 ast::PathSegment(segment) => segment,
602 ast::FieldExpr(field) => {
603 let receiver = find_opt_node_in_file(original_file, field.expr());
604 let receiver_is_ambiguous_float_literal = match &receiver {
605 Some(ast::Expr::Literal(l)) => matches! {
606 l.kind(),
607 ast::LiteralKind::FloatNumber { .. } if l.syntax().last_token().map_or(false, |it| it.text().ends_with('.'))
608 },
609 _ => false,
610 };
611
612 let receiver_is_part_of_indivisible_expression = match &receiver {
613 Some(ast::Expr::IfExpr(_)) => {
614 let next_token_kind = next_non_trivia_token(name_ref.syntax().clone()).map(|t| t.kind());
615 next_token_kind == Some(SyntaxKind::ELSE_KW)
616 },
617 _ => false
618 };
619 if receiver_is_part_of_indivisible_expression {
620 return None;
621 }
622
623 let kind = NameRefKind::DotAccess(DotAccess {
624 receiver_ty: receiver.as_ref().and_then(|it| sema.type_of_expr(it)),
625 kind: DotAccessKind::Field { receiver_is_ambiguous_float_literal },
626 receiver
627 });
628 return Some(make_res(kind));
629 },
630 ast::MethodCallExpr(method) => {
631 let receiver = find_opt_node_in_file(original_file, method.receiver());
632 let kind = NameRefKind::DotAccess(DotAccess {
633 receiver_ty: receiver.as_ref().and_then(|it| sema.type_of_expr(it)),
634 kind: DotAccessKind::Method { has_parens: method.arg_list().map_or(false, |it| it.l_paren_token().is_some()) },
635 receiver
636 });
637 return Some(make_res(kind));
638 },
639 _ => return None,
640 }
641 };
642
643 let path = segment.parent_path();
644 let original_path = find_node_in_file_compensated(sema, original_file, &path);
645
646 let mut path_ctx = PathCompletionCtx {
647 has_call_parens: false,
648 has_macro_bang: false,
649 qualified: Qualified::No,
650 parent: None,
651 path: path.clone(),
652 original_path,
653 kind: PathKind::Item { kind: ItemListKind::SourceFile },
654 has_type_args: false,
655 use_tree_parent: false,
656 };
657
658 let is_in_block = |it: &SyntaxNode| {
659 it.parent()
660 .map(|node| {
661 ast::ExprStmt::can_cast(node.kind()) || ast::StmtList::can_cast(node.kind())
662 })
663 .unwrap_or(false)
664 };
665 let func_update_record = |syn: &SyntaxNode| {
666 if let Some(record_expr) = syn.ancestors().nth(2).and_then(ast::RecordExpr::cast) {
667 find_node_in_file_compensated(sema, original_file, &record_expr)
668 } else {
669 None
670 }
671 };
672 let after_if_expr = |node: SyntaxNode| {
673 let prev_expr = (|| {
674 let node = match node.parent().and_then(ast::ExprStmt::cast) {
675 Some(stmt) => stmt.syntax().clone(),
676 None => node,
677 };
678 let prev_sibling = non_trivia_sibling(node.into(), Direction::Prev)?.into_node()?;
679
680 ast::ExprStmt::cast(prev_sibling.clone())
681 .and_then(|it| it.expr())
682 .or_else(|| ast::Expr::cast(prev_sibling))
683 })();
684 matches!(prev_expr, Some(ast::Expr::IfExpr(_)))
685 };
686
687 // We do not want to generate path completions when we are sandwiched between an item decl signature and its body.
688 // ex. trait Foo $0 {}
689 // in these cases parser recovery usually kicks in for our inserted identifier, causing it
690 // to either be parsed as an ExprStmt or a MacroCall, depending on whether it is in a block
691 // expression or an item list.
692 // The following code checks if the body is missing, if it is we either cut off the body
693 // from the item or it was missing in the first place
694 let inbetween_body_and_decl_check = |node: SyntaxNode| {
695 if let Some(NodeOrToken::Node(n)) =
696 syntax::algo::non_trivia_sibling(node.into(), syntax::Direction::Prev)
697 {
698 if let Some(item) = ast::Item::cast(n) {
699 let is_inbetween = match &item {
700 ast::Item::Const(it) => it.body().is_none() && it.semicolon_token().is_none(),
701 ast::Item::Enum(it) => it.variant_list().is_none(),
702 ast::Item::ExternBlock(it) => it.extern_item_list().is_none(),
703 ast::Item::Fn(it) => it.body().is_none() && it.semicolon_token().is_none(),
704 ast::Item::Impl(it) => it.assoc_item_list().is_none(),
705 ast::Item::Module(it) => {
706 it.item_list().is_none() && it.semicolon_token().is_none()
707 }
708 ast::Item::Static(it) => it.body().is_none(),
709 ast::Item::Struct(it) => {
710 it.field_list().is_none() && it.semicolon_token().is_none()
711 }
712 ast::Item::Trait(it) => it.assoc_item_list().is_none(),
713 ast::Item::TypeAlias(it) => it.ty().is_none() && it.semicolon_token().is_none(),
714 ast::Item::Union(it) => it.record_field_list().is_none(),
715 _ => false,
716 };
717 if is_inbetween {
718 return Some(item);
719 }
720 }
721 }
722 None
723 };
724
725 let type_location = |node: &SyntaxNode| {
726 let parent = node.parent()?;
727 let res = match_ast! {
728 match parent {
729 ast::Const(it) => {
730 let name = find_opt_node_in_file(original_file, it.name())?;
731 let original = ast::Const::cast(name.syntax().parent()?)?;
732 TypeLocation::TypeAscription(TypeAscriptionTarget::Const(original.body()))
733 },
734 ast::RetType(it) => {
735 if it.thin_arrow_token().is_none() {
736 return None;
737 }
738 let parent = match ast::Fn::cast(parent.parent()?) {
739 Some(x) => x.param_list(),
740 None => ast::ClosureExpr::cast(parent.parent()?)?.param_list(),
741 };
742
743 let parent = find_opt_node_in_file(original_file, parent)?.syntax().parent()?;
744 TypeLocation::TypeAscription(TypeAscriptionTarget::RetType(match_ast! {
745 match parent {
746 ast::ClosureExpr(it) => {
747 it.body()
748 },
749 ast::Fn(it) => {
750 it.body().map(ast::Expr::BlockExpr)
751 },
752 _ => return None,
753 }
754 }))
755 },
756 ast::Param(it) => {
757 if it.colon_token().is_none() {
758 return None;
759 }
760 TypeLocation::TypeAscription(TypeAscriptionTarget::FnParam(find_opt_node_in_file(original_file, it.pat())))
761 },
762 ast::LetStmt(it) => {
763 if it.colon_token().is_none() {
764 return None;
765 }
766 TypeLocation::TypeAscription(TypeAscriptionTarget::Let(find_opt_node_in_file(original_file, it.pat())))
767 },
768 ast::Impl(it) => {
769 match it.trait_() {
770 Some(t) if t.syntax() == node => TypeLocation::ImplTrait,
771 _ => match it.self_ty() {
772 Some(t) if t.syntax() == node => TypeLocation::ImplTarget,
773 _ => return None,
774 },
775 }
776 },
777 ast::TypeBound(_) => TypeLocation::TypeBound,
778 // is this case needed?
779 ast::TypeBoundList(_) => TypeLocation::TypeBound,
780 ast::GenericArg(it) => TypeLocation::GenericArgList(find_opt_node_in_file_compensated(sema, original_file, it.syntax().parent().and_then(ast::GenericArgList::cast))),
781 // is this case needed?
782 ast::GenericArgList(it) => TypeLocation::GenericArgList(find_opt_node_in_file_compensated(sema, original_file, Some(it))),
783 ast::TupleField(_) => TypeLocation::TupleField,
784 _ => return None,
785 }
786 };
787 Some(res)
788 };
789
790 let is_in_condition = |it: &ast::Expr| {
791 (|| {
792 let parent = it.syntax().parent()?;
793 if let Some(expr) = ast::WhileExpr::cast(parent.clone()) {
794 Some(expr.condition()? == *it)
795 } else if let Some(expr) = ast::IfExpr::cast(parent) {
796 Some(expr.condition()? == *it)
797 } else {
798 None
799 }
800 })()
801 .unwrap_or(false)
802 };
803
804 let make_path_kind_expr = |expr: ast::Expr| {
805 let it = expr.syntax();
806 let in_block_expr = is_in_block(it);
807 let in_loop_body = is_in_loop_body(it);
808 let after_if_expr = after_if_expr(it.clone());
809 let ref_expr_parent =
810 path.as_single_name_ref().and_then(|_| it.parent()).and_then(ast::RefExpr::cast);
811 let (innermost_ret_ty, self_param) = {
812 let find_ret_ty = |it: SyntaxNode| {
813 if let Some(item) = ast::Item::cast(it.clone()) {
814 match item {
815 ast::Item::Fn(f) => Some(sema.to_def(&f).map(|it| it.ret_type(sema.db))),
816 ast::Item::MacroCall(_) => None,
817 _ => Some(None),
818 }
819 } else {
820 let expr = ast::Expr::cast(it)?;
821 let callable = match expr {
822 // FIXME
823 // ast::Expr::BlockExpr(b) if b.async_token().is_some() || b.try_token().is_some() => sema.type_of_expr(b),
824 ast::Expr::ClosureExpr(_) => sema.type_of_expr(&expr),
825 _ => return None,
826 };
827 Some(
828 callable
829 .and_then(|c| c.adjusted().as_callable(sema.db))
830 .map(|it| it.return_type()),
831 )
832 }
833 };
834 let find_fn_self_param = |it| match it {
835 ast::Item::Fn(fn_) => Some(sema.to_def(&fn_).and_then(|it| it.self_param(sema.db))),
836 ast::Item::MacroCall(_) => None,
837 _ => Some(None),
838 };
839
840 match find_node_in_file_compensated(sema, original_file, &expr) {
841 Some(it) => {
842 let innermost_ret_ty = sema
843 .ancestors_with_macros(it.syntax().clone())
844 .find_map(find_ret_ty)
845 .flatten();
846
847 let self_param = sema
848 .ancestors_with_macros(it.syntax().clone())
849 .filter_map(ast::Item::cast)
850 .find_map(find_fn_self_param)
851 .flatten();
852 (innermost_ret_ty, self_param)
853 }
854 None => (None, None),
855 }
856 };
857 let is_func_update = func_update_record(it);
858 let in_condition = is_in_condition(&expr);
859 let incomplete_let = it
860 .parent()
861 .and_then(ast::LetStmt::cast)
862 .map_or(false, |it| it.semicolon_token().is_none());
863 let impl_ = fetch_immediate_impl(sema, original_file, expr.syntax());
864
865 let in_match_guard = match it.parent().and_then(ast::MatchArm::cast) {
866 Some(arm) => arm
867 .fat_arrow_token()
868 .map_or(true, |arrow| it.text_range().start() < arrow.text_range().start()),
869 None => false,
870 };
871
872 PathKind::Expr {
873 expr_ctx: ExprCtx {
874 in_block_expr,
875 in_loop_body,
876 after_if_expr,
877 in_condition,
878 ref_expr_parent,
879 is_func_update,
880 innermost_ret_ty,
881 self_param,
882 incomplete_let,
883 impl_,
884 in_match_guard,
885 },
886 }
887 };
888 let make_path_kind_type = |ty: ast::Type| {
889 let location = type_location(ty.syntax());
890 PathKind::Type { location: location.unwrap_or(TypeLocation::Other) }
891 };
892
893 let mut kind_macro_call = |it: ast::MacroCall| {
894 path_ctx.has_macro_bang = it.excl_token().is_some();
895 let parent = it.syntax().parent()?;
896 // Any path in an item list will be treated as a macro call by the parser
897 let kind = match_ast! {
898 match parent {
899 ast::MacroExpr(expr) => make_path_kind_expr(expr.into()),
900 ast::MacroPat(it) => PathKind::Pat { pat_ctx: pattern_context_for(sema, original_file, it.into())},
901 ast::MacroType(ty) => make_path_kind_type(ty.into()),
902 ast::ItemList(_) => PathKind::Item { kind: ItemListKind::Module },
903 ast::AssocItemList(_) => PathKind::Item { kind: match parent.parent() {
904 Some(it) => match_ast! {
905 match it {
906 ast::Trait(_) => ItemListKind::Trait,
907 ast::Impl(it) => if it.trait_().is_some() {
908 ItemListKind::TraitImpl(find_node_in_file_compensated(sema, original_file, &it))
909 } else {
910 ItemListKind::Impl
911 },
912 _ => return None
913 }
914 },
915 None => return None,
916 } },
917 ast::ExternItemList(_) => PathKind::Item { kind: ItemListKind::ExternBlock },
918 ast::SourceFile(_) => PathKind::Item { kind: ItemListKind::SourceFile },
919 _ => return None,
920 }
921 };
922 Some(kind)
923 };
924 let make_path_kind_attr = |meta: ast::Meta| {
925 let attr = meta.parent_attr()?;
926 let kind = attr.kind();
927 let attached = attr.syntax().parent()?;
928 let is_trailing_outer_attr = kind != AttrKind::Inner
929 && non_trivia_sibling(attr.syntax().clone().into(), syntax::Direction::Next).is_none();
930 let annotated_item_kind = if is_trailing_outer_attr { None } else { Some(attached.kind()) };
931 Some(PathKind::Attr { attr_ctx: AttrCtx { kind, annotated_item_kind } })
932 };
933
934 // Infer the path kind
935 let parent = path.syntax().parent()?;
936 let kind = match_ast! {
937 match parent {
938 ast::PathType(it) => make_path_kind_type(it.into()),
939 ast::PathExpr(it) => {
940 if let Some(p) = it.syntax().parent() {
941 if ast::ExprStmt::can_cast(p.kind()) {
942 if let Some(kind) = inbetween_body_and_decl_check(p) {
943 return Some(make_res(NameRefKind::Keyword(kind)));
944 }
945 }
946 }
947
948 path_ctx.has_call_parens = it.syntax().parent().map_or(false, |it| ast::CallExpr::can_cast(it.kind()));
949
950 make_path_kind_expr(it.into())
951 },
952 ast::TupleStructPat(it) => {
953 path_ctx.has_call_parens = true;
954 PathKind::Pat { pat_ctx: pattern_context_for(sema, original_file, it.into()) }
955 },
956 ast::RecordPat(it) => {
957 path_ctx.has_call_parens = true;
958 PathKind::Pat { pat_ctx: pattern_context_for(sema, original_file, it.into()) }
959 },
960 ast::PathPat(it) => {
961 PathKind::Pat { pat_ctx: pattern_context_for(sema, original_file, it.into())}
962 },
963 ast::MacroCall(it) => {
964 // A macro call in this position is usually a result of parsing recovery, so check that
965 if let Some(kind) = inbetween_body_and_decl_check(it.syntax().clone()) {
966 return Some(make_res(NameRefKind::Keyword(kind)));
967 }
968
969 kind_macro_call(it)?
970 },
971 ast::Meta(meta) => make_path_kind_attr(meta)?,
972 ast::Visibility(it) => PathKind::Vis { has_in_token: it.in_token().is_some() },
973 ast::UseTree(_) => PathKind::Use,
974 // completing inside a qualifier
975 ast::Path(parent) => {
976 path_ctx.parent = Some(parent.clone());
977 let parent = iter::successors(Some(parent), |it| it.parent_path()).last()?.syntax().parent()?;
978 match_ast! {
979 match parent {
980 ast::PathType(it) => make_path_kind_type(it.into()),
981 ast::PathExpr(it) => {
982 path_ctx.has_call_parens = it.syntax().parent().map_or(false, |it| ast::CallExpr::can_cast(it.kind()));
983
984 make_path_kind_expr(it.into())
985 },
986 ast::TupleStructPat(it) => {
987 path_ctx.has_call_parens = true;
988 PathKind::Pat { pat_ctx: pattern_context_for(sema, original_file, it.into()) }
989 },
990 ast::RecordPat(it) => {
991 path_ctx.has_call_parens = true;
992 PathKind::Pat { pat_ctx: pattern_context_for(sema, original_file, it.into()) }
993 },
994 ast::PathPat(it) => {
995 PathKind::Pat { pat_ctx: pattern_context_for(sema, original_file, it.into())}
996 },
997 ast::MacroCall(it) => {
998 kind_macro_call(it)?
999 },
1000 ast::Meta(meta) => make_path_kind_attr(meta)?,
1001 ast::Visibility(it) => PathKind::Vis { has_in_token: it.in_token().is_some() },
1002 ast::UseTree(_) => PathKind::Use,
1003 ast::RecordExpr(it) => make_path_kind_expr(it.into()),
1004 _ => return None,
1005 }
1006 }
1007 },
1008 ast::RecordExpr(it) => make_path_kind_expr(it.into()),
1009 _ => return None,
1010 }
1011 };
1012
1013 path_ctx.kind = kind;
1014 path_ctx.has_type_args = segment.generic_arg_list().is_some();
1015
1016 // calculate the qualifier context
1017 if let Some((qualifier, use_tree_parent)) = path_or_use_tree_qualifier(&path) {
1018 path_ctx.use_tree_parent = use_tree_parent;
1019 if !use_tree_parent && segment.coloncolon_token().is_some() {
1020 path_ctx.qualified = Qualified::Absolute;
1021 } else {
1022 let qualifier = qualifier
1023 .segment()
1024 .and_then(|it| find_node_in_file(original_file, &it))
1025 .map(|it| it.parent_path());
1026 if let Some(qualifier) = qualifier {
1027 let type_anchor = match qualifier.segment().and_then(|it| it.kind()) {
1028 Some(ast::PathSegmentKind::Type { type_ref: Some(type_ref), trait_ref })
1029 if qualifier.qualifier().is_none() =>
1030 {
1031 Some((type_ref, trait_ref))
1032 }
1033 _ => None,
1034 };
1035
1036 path_ctx.qualified = if let Some((ty, trait_ref)) = type_anchor {
1037 let ty = match ty {
1038 ast::Type::InferType(_) => None,
1039 ty => sema.resolve_type(&ty),
1040 };
1041 let trait_ = trait_ref.and_then(|it| sema.resolve_trait(&it.path()?));
1042 Qualified::TypeAnchor { ty, trait_ }
1043 } else {
1044 let res = sema.resolve_path(&qualifier);
1045
1046 // For understanding how and why super_chain_len is calculated the way it
1047 // is check the documentation at it's definition
1048 let mut segment_count = 0;
1049 let super_count = iter::successors(Some(qualifier.clone()), |p| p.qualifier())
1050 .take_while(|p| {
1051 p.segment()
1052 .and_then(|s| {
1053 segment_count += 1;
1054 s.super_token()
1055 })
1056 .is_some()
1057 })
1058 .count();
1059
1060 let super_chain_len =
1061 if segment_count > super_count { None } else { Some(super_count) };
1062
1063 Qualified::With { path: qualifier, resolution: res, super_chain_len }
1064 }
1065 };
1066 }
1067 } else if let Some(segment) = path.segment() {
1068 if segment.coloncolon_token().is_some() {
1069 path_ctx.qualified = Qualified::Absolute;
1070 }
1071 }
1072
1073 let mut qualifier_ctx = QualifierCtx::default();
1074 if path_ctx.is_trivial_path() {
1075 // fetch the full expression that may have qualifiers attached to it
1076 let top_node = match path_ctx.kind {
1077 PathKind::Expr { expr_ctx: ExprCtx { in_block_expr: true, .. } } => {
1078 parent.ancestors().find(|it| ast::PathExpr::can_cast(it.kind())).and_then(|p| {
1079 let parent = p.parent()?;
1080 if ast::StmtList::can_cast(parent.kind()) {
1081 Some(p)
1082 } else if ast::ExprStmt::can_cast(parent.kind()) {
1083 Some(parent)
1084 } else {
1085 None
1086 }
1087 })
1088 }
1089 PathKind::Item { .. } => {
1090 parent.ancestors().find(|it| ast::MacroCall::can_cast(it.kind()))
1091 }
1092 _ => None,
1093 };
1094 if let Some(top) = top_node {
1095 if let Some(NodeOrToken::Node(error_node)) =
1096 syntax::algo::non_trivia_sibling(top.clone().into(), syntax::Direction::Prev)
1097 {
1098 if error_node.kind() == SyntaxKind::ERROR {
1099 qualifier_ctx.unsafe_tok = error_node
1100 .children_with_tokens()
1101 .filter_map(NodeOrToken::into_token)
1102 .find(|it| it.kind() == T![unsafe]);
1103 qualifier_ctx.vis_node = error_node.children().find_map(ast::Visibility::cast);
1104 }
1105 }
1106
1107 if let PathKind::Item { .. } = path_ctx.kind {
1108 if qualifier_ctx.none() {
1109 if let Some(t) = top.first_token() {
1110 if let Some(prev) = t
1111 .prev_token()
1112 .and_then(|t| syntax::algo::skip_trivia_token(t, Direction::Prev))
1113 {
1114 if ![T![;], T!['}'], T!['{']].contains(&prev.kind()) {
1115 // This was inferred to be an item position path, but it seems
1116 // to be part of some other broken node which leaked into an item
1117 // list
1118 return None;
1119 }
1120 }
1121 }
1122 }
1123 }
1124 }
1125 }
1126 Some((NameRefContext { nameref, kind: NameRefKind::Path(path_ctx) }, qualifier_ctx))
1127 }
1128
pattern_context_for( sema: &Semantics<'_, RootDatabase>, original_file: &SyntaxNode, pat: ast::Pat, ) -> PatternContext1129 fn pattern_context_for(
1130 sema: &Semantics<'_, RootDatabase>,
1131 original_file: &SyntaxNode,
1132 pat: ast::Pat,
1133 ) -> PatternContext {
1134 let mut param_ctx = None;
1135
1136 let mut missing_variants = vec![];
1137
1138 let (refutability, has_type_ascription) =
1139 pat
1140 .syntax()
1141 .ancestors()
1142 .skip_while(|it| ast::Pat::can_cast(it.kind()))
1143 .next()
1144 .map_or((PatternRefutability::Irrefutable, false), |node| {
1145 let refutability = match_ast! {
1146 match node {
1147 ast::LetStmt(let_) => return (PatternRefutability::Irrefutable, let_.ty().is_some()),
1148 ast::Param(param) => {
1149 let has_type_ascription = param.ty().is_some();
1150 param_ctx = (|| {
1151 let fake_param_list = param.syntax().parent().and_then(ast::ParamList::cast)?;
1152 let param_list = find_node_in_file_compensated(sema, original_file, &fake_param_list)?;
1153 let param_list_owner = param_list.syntax().parent()?;
1154 let kind = match_ast! {
1155 match param_list_owner {
1156 ast::ClosureExpr(closure) => ParamKind::Closure(closure),
1157 ast::Fn(fn_) => ParamKind::Function(fn_),
1158 _ => return None,
1159 }
1160 };
1161 Some(ParamContext {
1162 param_list, param, kind
1163 })
1164 })();
1165 return (PatternRefutability::Irrefutable, has_type_ascription)
1166 },
1167 ast::MatchArm(match_arm) => {
1168 let missing_variants_opt = match_arm
1169 .syntax()
1170 .parent()
1171 .and_then(ast::MatchArmList::cast)
1172 .and_then(|match_arm_list| {
1173 match_arm_list
1174 .syntax()
1175 .parent()
1176 .and_then(ast::MatchExpr::cast)
1177 .and_then(|match_expr| {
1178 let expr_opt = find_opt_node_in_file(&original_file, match_expr.expr());
1179
1180 expr_opt.and_then(|expr| {
1181 sema.type_of_expr(&expr)?
1182 .adjusted()
1183 .autoderef(sema.db)
1184 .find_map(|ty| match ty.as_adt() {
1185 Some(hir::Adt::Enum(e)) => Some(e),
1186 _ => None,
1187 }).and_then(|enum_| {
1188 Some(enum_.variants(sema.db))
1189 })
1190 })
1191 }).and_then(|variants| {
1192 Some(variants.iter().filter_map(|variant| {
1193 let variant_name = variant.name(sema.db).display(sema.db).to_string();
1194
1195 let variant_already_present = match_arm_list.arms().any(|arm| {
1196 arm.pat().and_then(|pat| {
1197 let pat_already_present = pat.syntax().to_string().contains(&variant_name);
1198 pat_already_present.then(|| pat_already_present)
1199 }).is_some()
1200 });
1201
1202 (!variant_already_present).then_some(variant.clone())
1203 }).collect::<Vec<Variant>>())
1204 })
1205 });
1206
1207 if let Some(missing_variants_) = missing_variants_opt {
1208 missing_variants = missing_variants_;
1209 };
1210
1211 PatternRefutability::Refutable
1212 },
1213 ast::LetExpr(_) => PatternRefutability::Refutable,
1214 ast::ForExpr(_) => PatternRefutability::Irrefutable,
1215 _ => PatternRefutability::Irrefutable,
1216 }
1217 };
1218 (refutability, false)
1219 });
1220 let (ref_token, mut_token) = match &pat {
1221 ast::Pat::IdentPat(it) => (it.ref_token(), it.mut_token()),
1222 _ => (None, None),
1223 };
1224
1225 PatternContext {
1226 refutability,
1227 param_ctx,
1228 has_type_ascription,
1229 parent_pat: pat.syntax().parent().and_then(ast::Pat::cast),
1230 mut_token,
1231 ref_token,
1232 record_pat: None,
1233 impl_: fetch_immediate_impl(sema, original_file, pat.syntax()),
1234 missing_variants,
1235 }
1236 }
1237
fetch_immediate_impl( sema: &Semantics<'_, RootDatabase>, original_file: &SyntaxNode, node: &SyntaxNode, ) -> Option<ast::Impl>1238 fn fetch_immediate_impl(
1239 sema: &Semantics<'_, RootDatabase>,
1240 original_file: &SyntaxNode,
1241 node: &SyntaxNode,
1242 ) -> Option<ast::Impl> {
1243 let mut ancestors = ancestors_in_file_compensated(sema, original_file, node)?
1244 .filter_map(ast::Item::cast)
1245 .filter(|it| !matches!(it, ast::Item::MacroCall(_)));
1246
1247 match ancestors.next()? {
1248 ast::Item::Const(_) | ast::Item::Fn(_) | ast::Item::TypeAlias(_) => (),
1249 ast::Item::Impl(it) => return Some(it),
1250 _ => return None,
1251 }
1252 match ancestors.next()? {
1253 ast::Item::Impl(it) => Some(it),
1254 _ => None,
1255 }
1256 }
1257
1258 /// Attempts to find `node` inside `syntax` via `node`'s text range.
1259 /// If the fake identifier has been inserted after this node or inside of this node use the `_compensated` version instead.
find_opt_node_in_file<N: AstNode>(syntax: &SyntaxNode, node: Option<N>) -> Option<N>1260 fn find_opt_node_in_file<N: AstNode>(syntax: &SyntaxNode, node: Option<N>) -> Option<N> {
1261 find_node_in_file(syntax, &node?)
1262 }
1263
1264 /// Attempts to find `node` inside `syntax` via `node`'s text range.
1265 /// If the fake identifier has been inserted after this node or inside of this node use the `_compensated` version instead.
find_node_in_file<N: AstNode>(syntax: &SyntaxNode, node: &N) -> Option<N>1266 fn find_node_in_file<N: AstNode>(syntax: &SyntaxNode, node: &N) -> Option<N> {
1267 let syntax_range = syntax.text_range();
1268 let range = node.syntax().text_range();
1269 let intersection = range.intersect(syntax_range)?;
1270 syntax.covering_element(intersection).ancestors().find_map(N::cast)
1271 }
1272
1273 /// Attempts to find `node` inside `syntax` via `node`'s text range while compensating
1274 /// for the offset introduced by the fake ident.
1275 /// This is wrong if `node` comes before the insertion point! Use `find_node_in_file` instead.
find_node_in_file_compensated<N: AstNode>( sema: &Semantics<'_, RootDatabase>, in_file: &SyntaxNode, node: &N, ) -> Option<N>1276 fn find_node_in_file_compensated<N: AstNode>(
1277 sema: &Semantics<'_, RootDatabase>,
1278 in_file: &SyntaxNode,
1279 node: &N,
1280 ) -> Option<N> {
1281 ancestors_in_file_compensated(sema, in_file, node.syntax())?.find_map(N::cast)
1282 }
1283
ancestors_in_file_compensated<'sema>( sema: &'sema Semantics<'_, RootDatabase>, in_file: &SyntaxNode, node: &SyntaxNode, ) -> Option<impl Iterator<Item = SyntaxNode> + 'sema>1284 fn ancestors_in_file_compensated<'sema>(
1285 sema: &'sema Semantics<'_, RootDatabase>,
1286 in_file: &SyntaxNode,
1287 node: &SyntaxNode,
1288 ) -> Option<impl Iterator<Item = SyntaxNode> + 'sema> {
1289 let syntax_range = in_file.text_range();
1290 let range = node.text_range();
1291 let end = range.end().checked_sub(TextSize::try_from(COMPLETION_MARKER.len()).ok()?)?;
1292 if end < range.start() {
1293 return None;
1294 }
1295 let range = TextRange::new(range.start(), end);
1296 // our inserted ident could cause `range` to go outside of the original syntax, so cap it
1297 let intersection = range.intersect(syntax_range)?;
1298 let node = match in_file.covering_element(intersection) {
1299 NodeOrToken::Node(node) => node,
1300 NodeOrToken::Token(tok) => tok.parent()?,
1301 };
1302 Some(sema.ancestors_with_macros(node))
1303 }
1304
1305 /// Attempts to find `node` inside `syntax` via `node`'s text range while compensating
1306 /// for the offset introduced by the fake ident..
1307 /// This is wrong if `node` comes before the insertion point! Use `find_node_in_file` instead.
find_opt_node_in_file_compensated<N: AstNode>( sema: &Semantics<'_, RootDatabase>, syntax: &SyntaxNode, node: Option<N>, ) -> Option<N>1308 fn find_opt_node_in_file_compensated<N: AstNode>(
1309 sema: &Semantics<'_, RootDatabase>,
1310 syntax: &SyntaxNode,
1311 node: Option<N>,
1312 ) -> Option<N> {
1313 find_node_in_file_compensated(sema, syntax, &node?)
1314 }
1315
path_or_use_tree_qualifier(path: &ast::Path) -> Option<(ast::Path, bool)>1316 fn path_or_use_tree_qualifier(path: &ast::Path) -> Option<(ast::Path, bool)> {
1317 if let Some(qual) = path.qualifier() {
1318 return Some((qual, false));
1319 }
1320 let use_tree_list = path.syntax().ancestors().find_map(ast::UseTreeList::cast)?;
1321 let use_tree = use_tree_list.syntax().parent().and_then(ast::UseTree::cast)?;
1322 Some((use_tree.path()?, true))
1323 }
1324
is_in_token_of_for_loop(path: &ast::Path) -> bool1325 fn is_in_token_of_for_loop(path: &ast::Path) -> bool {
1326 // oh my ...
1327 (|| {
1328 let expr = path.syntax().parent().and_then(ast::PathExpr::cast)?;
1329 let for_expr = expr.syntax().parent().and_then(ast::ForExpr::cast)?;
1330 if for_expr.in_token().is_some() {
1331 return Some(false);
1332 }
1333 let pat = for_expr.pat()?;
1334 let next_sibl = next_non_trivia_sibling(pat.syntax().clone().into())?;
1335 Some(match next_sibl {
1336 syntax::NodeOrToken::Node(n) => {
1337 n.text_range().start() == path.syntax().text_range().start()
1338 }
1339 syntax::NodeOrToken::Token(t) => {
1340 t.text_range().start() == path.syntax().text_range().start()
1341 }
1342 })
1343 })()
1344 .unwrap_or(false)
1345 }
1346
is_in_loop_body(node: &SyntaxNode) -> bool1347 fn is_in_loop_body(node: &SyntaxNode) -> bool {
1348 node.ancestors()
1349 .take_while(|it| it.kind() != SyntaxKind::FN && it.kind() != SyntaxKind::CLOSURE_EXPR)
1350 .find_map(|it| {
1351 let loop_body = match_ast! {
1352 match it {
1353 ast::ForExpr(it) => it.loop_body(),
1354 ast::WhileExpr(it) => it.loop_body(),
1355 ast::LoopExpr(it) => it.loop_body(),
1356 _ => None,
1357 }
1358 };
1359 loop_body.filter(|it| it.syntax().text_range().contains_range(node.text_range()))
1360 })
1361 .is_some()
1362 }
1363
previous_non_trivia_token(e: impl Into<SyntaxElement>) -> Option<SyntaxToken>1364 fn previous_non_trivia_token(e: impl Into<SyntaxElement>) -> Option<SyntaxToken> {
1365 let mut token = match e.into() {
1366 SyntaxElement::Node(n) => n.first_token()?,
1367 SyntaxElement::Token(t) => t,
1368 }
1369 .prev_token();
1370 while let Some(inner) = token {
1371 if !inner.kind().is_trivia() {
1372 return Some(inner);
1373 } else {
1374 token = inner.prev_token();
1375 }
1376 }
1377 None
1378 }
1379
next_non_trivia_token(e: impl Into<SyntaxElement>) -> Option<SyntaxToken>1380 fn next_non_trivia_token(e: impl Into<SyntaxElement>) -> Option<SyntaxToken> {
1381 let mut token = match e.into() {
1382 SyntaxElement::Node(n) => n.last_token()?,
1383 SyntaxElement::Token(t) => t,
1384 }
1385 .next_token();
1386 while let Some(inner) = token {
1387 if !inner.kind().is_trivia() {
1388 return Some(inner);
1389 } else {
1390 token = inner.next_token();
1391 }
1392 }
1393 None
1394 }
1395
next_non_trivia_sibling(ele: SyntaxElement) -> Option<SyntaxElement>1396 fn next_non_trivia_sibling(ele: SyntaxElement) -> Option<SyntaxElement> {
1397 let mut e = ele.next_sibling_or_token();
1398 while let Some(inner) = e {
1399 if !inner.kind().is_trivia() {
1400 return Some(inner);
1401 } else {
1402 e = inner.next_sibling_or_token();
1403 }
1404 }
1405 None
1406 }
1407