Try caching macro calls more aggressively in Semantics #17004

Draft · wants to merge 1 commit into master
13 changes: 10 additions & 3 deletions crates/hir-def/src/body.rs
@@ -10,9 +10,10 @@ use std::ops::Index;

use base_db::CrateId;
use cfg::{CfgExpr, CfgOptions};
use hir_expand::{name::Name, HirFileId, InFile};
use hir_expand::{name::Name, InFile};
use la_arena::{Arena, ArenaMap};
use rustc_hash::FxHashMap;
use span::MacroFileId;
use syntax::{ast, AstPtr, SyntaxNodePtr};
use triomphe::Arc;

@@ -98,7 +99,7 @@ pub struct BodySourceMap {

format_args_template_map: FxHashMap<ExprId, Vec<(syntax::TextRange, Name)>>,

expansions: FxHashMap<InFile<AstPtr<ast::MacroCall>>, HirFileId>,
expansions: FxHashMap<InFile<AstPtr<ast::MacroCall>>, MacroFileId>,

/// Diagnostics accumulated during body lowering. These contain `AstPtr`s and so are stored in
/// the source map (since they're just as volatile).
@@ -349,7 +350,7 @@ impl BodySourceMap {
self.expr_map.get(&src).cloned()
}

pub fn node_macro_file(&self, node: InFile<&ast::MacroCall>) -> Option<HirFileId> {
pub fn node_macro_file(&self, node: InFile<&ast::MacroCall>) -> Option<MacroFileId> {
let src = node.map(AstPtr::new);
self.expansions.get(&src).cloned()
}
@@ -388,6 +389,12 @@ impl BodySourceMap {
self.expr_map.get(&src).copied()
}

pub fn expansions(
&self,
) -> impl Iterator<Item = (&InFile<AstPtr<ast::MacroCall>>, &MacroFileId)> {
self.expansions.iter()
}

pub fn implicit_format_args(
&self,
node: InFile<&ast::FormatArgsExpr>,
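The new `expansions()` accessor above exposes every macro call recorded while lowering a body, keyed by `AstPtr`. A minimal caller-side sketch of how that iterator could be used to pre-fill a macro-expansion cache; the helper name and the cache type are illustrative assumptions, not part of this PR:

    use hir_def::body::BodySourceMap;
    use hir_expand::InFile;
    use rustc_hash::FxHashMap;
    use span::MacroFileId;
    use syntax::{ast, AstPtr};

    // Hypothetical helper: copy each (macro call -> expansion) pair recorded during
    // body lowering into a caller-owned cache, so later lookups by pointer do not
    // have to re-run the expansion machinery.
    fn cache_body_expansions(
        source_map: &BodySourceMap,
        cache: &mut FxHashMap<InFile<AstPtr<ast::MacroCall>>, MacroFileId>,
    ) {
        for (call_ptr, &macro_file) in source_map.expansions() {
            cache.insert(call_ptr.clone(), macro_file);
        }
    }
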
8 changes: 6 additions & 2 deletions crates/hir-def/src/body/lower.rs
@@ -12,6 +12,7 @@ use intern::Interned;
use rustc_hash::FxHashMap;
use smallvec::SmallVec;
use span::AstIdMap;
use stdx::never;
use syntax::{
ast::{
self, ArrayExprKind, AstChildren, BlockExpr, HasArgList, HasAttrs, HasLoopBody, HasName,
@@ -480,7 +481,8 @@ impl ExprCollector<'_> {
} else if e.const_token().is_some() {
Mutability::Shared
} else {
unreachable!("parser only remaps to raw_token() if matching mutability token follows")
never!("parser only remaps to raw_token() if matching mutability token follows");
Mutability::Shared
}
} else {
Mutability::from_mutable(e.mut_token().is_some())
@@ -1006,7 +1008,9 @@ impl ExprCollector<'_> {
Some((mark, expansion)) => {
// Keep collecting even with expansion errors so we can provide completions and
// other services in incomplete macro expressions.
self.source_map.expansions.insert(macro_call_ptr, self.expander.current_file_id());
if let Some(macro_file) = self.expander.current_file_id().macro_file() {
self.source_map.expansions.insert(macro_call_ptr, macro_file);
}
let prev_ast_id_map = mem::replace(
&mut self.ast_id_map,
self.db.ast_id_map(self.expander.current_file_id()),
54 changes: 34 additions & 20 deletions crates/hir-def/src/child_by_source.rs
@@ -6,7 +6,7 @@

use either::Either;
use hir_expand::{attrs::collect_attrs, HirFileId};
use syntax::ast;
use syntax::{ast, AstPtr};

use crate::{
db::DefDatabase,
@@ -23,6 +23,8 @@ use crate::{
VariantId,
};

// FIXME: There should be a cache param for the parse trees, as each entry causes a parse otherwise
// thrashing the macro lru cache
pub trait ChildBySource {
fn child_by_source(&self, db: &dyn DefDatabase, file_id: HirFileId) -> DynMap {
let mut res = DynMap::default();
@@ -38,7 +40,7 @@ impl ChildBySource for TraitId {

data.attribute_calls().filter(|(ast_id, _)| ast_id.file_id == file_id).for_each(
|(ast_id, call_id)| {
res[keys::ATTR_MACRO_CALL].insert(ast_id.to_node(db.upcast()), call_id);
res[keys::ATTR_MACRO_CALL].insert(ast_id.to_ptr(db.upcast()), call_id);
},
);
data.items.iter().for_each(|&(_, item)| {
@@ -50,9 +52,10 @@ impl ChildBySource for TraitId {
impl ChildBySource for ImplId {
fn child_by_source_to(&self, db: &dyn DefDatabase, res: &mut DynMap, file_id: HirFileId) {
let data = db.impl_data(*self);
// FIXME: Macro calls
data.attribute_calls().filter(|(ast_id, _)| ast_id.file_id == file_id).for_each(
|(ast_id, call_id)| {
res[keys::ATTR_MACRO_CALL].insert(ast_id.to_node(db.upcast()), call_id);
res[keys::ATTR_MACRO_CALL].insert(ast_id.to_ptr(db.upcast()), call_id);
},
);
data.items.iter().for_each(|&item| {
@@ -80,15 +83,15 @@ impl ChildBySource for ItemScope {
.for_each(|konst| insert_item_loc(db, res, file_id, konst, keys::CONST));
self.attr_macro_invocs().filter(|(id, _)| id.file_id == file_id).for_each(
|(ast_id, call_id)| {
res[keys::ATTR_MACRO_CALL].insert(ast_id.to_node(db.upcast()), call_id);
res[keys::ATTR_MACRO_CALL].insert(ast_id.to_ptr(db.upcast()), call_id);
},
);
self.legacy_macros().for_each(|(_, ids)| {
ids.iter().for_each(|&id| {
if let MacroId::MacroRulesId(id) = id {
let loc = id.lookup(db);
if loc.id.file_id() == file_id {
res[keys::MACRO_RULES].insert(loc.source(db).value, id);
res[keys::MACRO_RULES].insert(loc.ast_ptr(db).value, id);
}
}
})
@@ -100,12 +103,18 @@ impl ChildBySource for ItemScope {
if let Some((_, Either::Left(attr))) =
collect_attrs(&adt).nth(attr_id.ast_index())
{
res[keys::DERIVE_MACRO_CALL].insert(attr, (attr_id, call_id, calls.into()));
res[keys::DERIVE_MACRO_CALL]
.insert(AstPtr::new(&attr), (attr_id, call_id, calls.into()));
}
});
},
);

self.iter_macro_invoc().filter(|(id, _)| id.file_id == file_id).for_each(
|(ast_id, &call)| {
let ast = ast_id.to_ptr(db.upcast());
res[keys::MACRO_CALL].insert(ast, call);
},
);
fn add_module_def(
db: &dyn DefDatabase,
map: &mut DynMap,
@@ -155,8 +164,8 @@ impl ChildBySource for VariantId {
for (local_id, source) in arena_map.value.iter() {
let id = FieldId { parent, local_id };
match source.clone() {
Either::Left(source) => res[keys::TUPLE_FIELD].insert(source, id),
Either::Right(source) => res[keys::RECORD_FIELD].insert(source, id),
Either::Left(source) => res[keys::TUPLE_FIELD].insert(AstPtr::new(&source), id),
Either::Right(source) => res[keys::RECORD_FIELD].insert(AstPtr::new(&source), id),
}
}
}
@@ -171,29 +180,30 @@ impl ChildBySource for EnumId {

let tree = loc.id.item_tree(db);
let ast_id_map = db.ast_id_map(loc.id.file_id());
let root = db.parse_or_expand(loc.id.file_id());

db.enum_data(*self).variants.iter().for_each(|&(variant, _)| {
res[keys::ENUM_VARIANT].insert(
ast_id_map.get(tree[variant.lookup(db).id.value].ast_id).to_node(&root),
variant,
);
res[keys::ENUM_VARIANT]
.insert(ast_id_map.get(tree[variant.lookup(db).id.value].ast_id), variant);
});
}
}

impl ChildBySource for DefWithBodyId {
fn child_by_source_to(&self, db: &dyn DefDatabase, res: &mut DynMap, file_id: HirFileId) {
let body = db.body(*self);
let (body, sm) = db.body_with_source_map(*self);
if let &DefWithBodyId::VariantId(v) = self {
VariantId::EnumVariantId(v).child_by_source_to(db, res, file_id)
}

sm.expansions().filter(|(ast, _)| ast.file_id == file_id).for_each(|(ast, &exp_id)| {
res[keys::MACRO_CALL].insert(ast.value, exp_id.macro_call_id);
});

for (block, def_map) in body.blocks(db) {
// All block expressions are merged into the same map, because they logically all add
// inner items to the containing `DefWithBodyId`.
def_map[DefMap::ROOT].scope.child_by_source_to(db, res, file_id);
res[keys::BLOCK].insert(block.lookup(db).ast_id.to_node(db.upcast()), block);
res[keys::BLOCK].insert(block.lookup(db).ast_id.to_ptr(db.upcast()), block);
}
}
}
Expand All @@ -220,13 +230,17 @@ impl ChildBySource for GenericDefId {
{
let id = TypeOrConstParamId { parent: *self, local_id };
match ast_param {
ast::TypeOrConstParam::Type(a) => res[keys::TYPE_PARAM].insert(a, id),
ast::TypeOrConstParam::Const(a) => res[keys::CONST_PARAM].insert(a, id),
ast::TypeOrConstParam::Type(a) => {
res[keys::TYPE_PARAM].insert(AstPtr::new(&a), id)
}
ast::TypeOrConstParam::Const(a) => {
res[keys::CONST_PARAM].insert(AstPtr::new(&a), id)
}
}
}
for (local_id, ast_param) in lts_idx_iter.zip(generic_params_list.lifetime_params()) {
let id = LifetimeParamId { parent: *self, local_id };
res[keys::LIFETIME_PARAM].insert(ast_param, id);
res[keys::LIFETIME_PARAM].insert(AstPtr::new(&ast_param), id);
}
}
}
@@ -246,7 +260,7 @@ fn insert_item_loc<ID, N, Data>(
{
let loc = id.lookup(db);
if loc.item_tree_id().file_id() == file_id {
res[key].insert(loc.source(db).value, id)
res[key].insert(loc.ast_ptr(db).value, id)
}
}

13 changes: 6 additions & 7 deletions crates/hir-def/src/dyn_map/keys.rs
@@ -13,7 +13,7 @@ use crate::{
TraitId, TypeAliasId, TypeOrConstParamId, UnionId, UseId,
};

pub type Key<K, V> = crate::dyn_map::Key<K, V, AstPtrPolicy<K, V>>;
pub type Key<K, V> = crate::dyn_map::Key<AstPtr<K>, V, AstPtrPolicy<K, V>>;

pub const BLOCK: Key<ast::BlockExpr, BlockId> = Key::new();
pub const FUNCTION: Key<ast::Fn, FunctionId> = Key::new();
@@ -39,6 +39,7 @@ pub const LIFETIME_PARAM: Key<ast::LifetimeParam, LifetimeParamId> = Key::new();
pub const MACRO_RULES: Key<ast::MacroRules, MacroRulesId> = Key::new();
pub const MACRO2: Key<ast::MacroDef, Macro2Id> = Key::new();
pub const PROC_MACRO: Key<ast::Fn, ProcMacroId> = Key::new();
pub const MACRO_CALL: Key<ast::MacroCall, MacroCallId> = Key::new();
pub const ATTR_MACRO_CALL: Key<ast::Item, MacroCallId> = Key::new();
pub const DERIVE_MACRO_CALL: Key<ast::Attr, (AttrId, MacroCallId, Box<[Option<MacroCallId>]>)> =
Key::new();
@@ -54,18 +55,16 @@ pub struct AstPtrPolicy<AST, ID> {
}

impl<AST: AstNode + 'static, ID: 'static> Policy for AstPtrPolicy<AST, ID> {
type K = AST;
type K = AstPtr<AST>;
type V = ID;
fn insert(map: &mut DynMap, key: AST, value: ID) {
let key = AstPtr::new(&key);
fn insert(map: &mut DynMap, key: AstPtr<AST>, value: ID) {
map.map
.entry::<FxHashMap<AstPtr<AST>, ID>>()
.or_insert_with(Default::default)
.insert(key, value);
}
fn get<'a>(map: &'a DynMap, key: &AST) -> Option<&'a ID> {
let key = AstPtr::new(key);
map.map.get::<FxHashMap<AstPtr<AST>, ID>>()?.get(&key)
fn get<'a>(map: &'a DynMap, key: &AstPtr<AST>) -> Option<&'a ID> {
map.map.get::<FxHashMap<AstPtr<AST>, ID>>()?.get(key)
}
fn is_empty(map: &DynMap) -> bool {
map.map.get::<FxHashMap<AstPtr<AST>, ID>>().map_or(true, |it| it.is_empty())
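With `Policy::K` now being `AstPtr<AST>`, dyn-map consumers index by syntax pointer instead of by a full AST node. A rough read-side sketch under that assumption; the free function and its import paths are illustrative, not part of this PR:

    use hir_def::dyn_map::{keys, DynMap};
    use hir_expand::MacroCallId;
    use syntax::{ast, AstPtr};

    // Illustrative lookup: resolve a macro-call pointer to the MacroCallId that
    // was inserted under keys::MACRO_CALL by a ChildBySource implementation.
    fn resolve_macro_call(map: &DynMap, ptr: &AstPtr<ast::MacroCall>) -> Option<MacroCallId> {
        map[keys::MACRO_CALL].get(ptr).copied()
    }
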
31 changes: 7 additions & 24 deletions crates/hir-expand/src/files.rs
@@ -1,6 +1,4 @@
//! Things to wrap other things in file ids.
use std::iter;

use either::Either;
use span::{
AstIdNode, ErasedFileAstId, FileAstId, FileId, FileRange, HirFileId, HirFileIdRepr,
@@ -150,31 +148,16 @@ impl<FileId: Copy, N: AstNode> InFileWrapper<FileId, N> {
}
}

impl<FileId: Copy, N: AstNode> InFileWrapper<FileId, &N> {
// unfortunately `syntax` collides with the impl above, because `&_` is fundamental
pub fn syntax_ref(&self) -> InFileWrapper<FileId, &SyntaxNode> {
self.with_value(self.value.syntax())
}
}

// region:specific impls

impl InFile<&SyntaxNode> {
/// Skips the attributed item that caused the macro invocation we are climbing up
pub fn ancestors_with_macros_skip_attr_item(
self,
db: &dyn db::ExpandDatabase,
) -> impl Iterator<Item = InFile<SyntaxNode>> + '_ {
let succ = move |node: &InFile<SyntaxNode>| match node.value.parent() {
Some(parent) => Some(node.with_value(parent)),
None => {
let macro_file_id = node.file_id.macro_file()?;
let parent_node = macro_file_id.call_node(db);
if macro_file_id.is_attr_macro(db) {
// macro call was an attributed item, skip it
// FIXME: does this fail if this is a direct expansion of another macro?
parent_node.map(|node| node.parent()).transpose()
} else {
Some(parent_node)
}
}
};
iter::successors(succ(&self.cloned()), succ)
}

/// Falls back to the macro call range if the node cannot be mapped up fully.
///
/// For attributes and derives, this will point back to the attribute only.