incr.comp.: Load cached diagnostics lazily and allow more things in the cache. #46338

Merged: 7 commits, Dec 1, 2017.
10 changes: 5 additions & 5 deletions src/librustc/dep_graph/graph.rs
@@ -461,10 +461,10 @@ impl DepGraph {
         self.data.as_ref().and_then(|data| data.colors.borrow().get(dep_node).cloned())
     }

-    pub fn try_mark_green(&self,
-                          tcx: TyCtxt,
-                          dep_node: &DepNode)
-                          -> Option<DepNodeIndex> {
+    pub fn try_mark_green<'tcx>(&self,
+                                tcx: TyCtxt<'_, 'tcx, 'tcx>,
+                                dep_node: &DepNode)
+                                -> Option<DepNodeIndex> {
         debug!("try_mark_green({:?}) - BEGIN", dep_node);
         let data = self.data.as_ref().unwrap();

@@ -621,7 +621,7 @@ impl DepGraph {
         // ... emitting any stored diagnostic ...
         {
             let diagnostics = tcx.on_disk_query_result_cache
-                                 .load_diagnostics(prev_dep_node_index);
+                                 .load_diagnostics(tcx, prev_dep_node_index);

             if diagnostics.len() > 0 {
                 let handle = tcx.sess.diagnostic();
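The hunk above is truncated. For orientation only, here is a rough, hypothetical sketch of the lazy-diagnostics idea (all names and types below are invented stand-ins, not rustc's actual API): diagnostics recorded for a previously serialized dep node are only loaded, and then replayed, when that node is re-used (marked green) instead of being recomputed.

```rust
use std::collections::HashMap;

/// Toy stand-in for the on-disk query result cache: diagnostics are keyed by
/// the previously serialized dep-node index and only looked up on demand.
struct DiagnosticsCache {
    by_node: HashMap<usize, Vec<String>>,
}

impl DiagnosticsCache {
    /// Lazily "load" the diagnostics recorded for one dep node.
    /// Returns an empty Vec if nothing was stored for it.
    fn load_diagnostics(&self, prev_dep_node_index: usize) -> Vec<String> {
        self.by_node.get(&prev_dep_node_index).cloned().unwrap_or_default()
    }
}

fn main() {
    let cache = DiagnosticsCache {
        by_node: HashMap::from([(7, vec!["unused variable: `x`".to_string()])]),
    };

    // Pretend dep node 7 was just marked green: its query will not be re-run,
    // so any diagnostics it produced last time must be replayed from the cache.
    let diagnostics = cache.load_diagnostics(7);
    if !diagnostics.is_empty() {
        for diag in diagnostics {
            println!("warning (replayed from cache): {}", diag);
        }
    }
}
```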
77 changes: 33 additions & 44 deletions src/librustc/ich/hcx.rs
@@ -28,7 +28,7 @@ use syntax::attr;
 use syntax::codemap::CodeMap;
 use syntax::ext::hygiene::SyntaxContext;
 use syntax::symbol::Symbol;
-use syntax_pos::Span;
+use syntax_pos::{Span, DUMMY_SP};

 use rustc_data_structures::stable_hasher::{HashStable, StableHashingContextProvider,
                                            StableHasher, StableHasherResult,
@@ -362,64 +362,53 @@ impl<'gcx> HashStable<StableHashingContext<'gcx>> for Span {
     fn hash_stable<W: StableHasherResult>(&self,
                                           hcx: &mut StableHashingContext<'gcx>,
                                           hasher: &mut StableHasher<W>) {
-        use syntax_pos::Pos;
+        const TAG_VALID_SPAN: u8 = 0;
+        const TAG_INVALID_SPAN: u8 = 1;
+        const TAG_EXPANSION: u8 = 0;
+        const TAG_NO_EXPANSION: u8 = 1;

         if !hcx.hash_spans {
             return
         }

+        if *self == DUMMY_SP {
+            return std_hash::Hash::hash(&TAG_INVALID_SPAN, hasher);
+        }
+
         // If this is not an empty or invalid span, we want to hash the last
         // position that belongs to it, as opposed to hashing the first
         // position past it.
         let span = self.data();
-        let span_hi = if span.hi > span.lo {
-            // We might end up in the middle of a multibyte character here,
-            // but that's OK, since we are not trying to decode anything at
-            // this position.
-            span.hi - ::syntax_pos::BytePos(1)
-        } else {
-            span.hi
-        };
-
-        {
-            let loc1 = hcx.codemap().byte_pos_to_line_and_col(span.lo);
-            let loc1 = loc1.as_ref()
-                           .map(|&(ref fm, line, col)| (&fm.name[..], line, col.to_usize()))
-                           .unwrap_or(("???", 0, 0));
-
-            let loc2 = hcx.codemap().byte_pos_to_line_and_col(span_hi);
-            let loc2 = loc2.as_ref()
-                           .map(|&(ref fm, line, col)| (&fm.name[..], line, col.to_usize()))
-                           .unwrap_or(("???", 0, 0));
-
-            if loc1.0 == loc2.0 {
-                std_hash::Hash::hash(&0u8, hasher);
-
-                std_hash::Hash::hash(loc1.0, hasher);
-                std_hash::Hash::hash(&loc1.1, hasher);
-                std_hash::Hash::hash(&loc1.2, hasher);
-
-                // Do not hash the file name twice
-                std_hash::Hash::hash(&loc2.1, hasher);
-                std_hash::Hash::hash(&loc2.2, hasher);
-            } else {
-                std_hash::Hash::hash(&1u8, hasher);
-
-                std_hash::Hash::hash(loc1.0, hasher);
-                std_hash::Hash::hash(&loc1.1, hasher);
-                std_hash::Hash::hash(&loc1.2, hasher);
-
-                std_hash::Hash::hash(loc2.0, hasher);
-                std_hash::Hash::hash(&loc2.1, hasher);
-                std_hash::Hash::hash(&loc2.2, hasher);
-            }
-        }
+
+        if span.hi < span.lo {
+            return std_hash::Hash::hash(&TAG_INVALID_SPAN, hasher);
+        }
+
+        let (file_lo, line_lo, col_lo) = match hcx.codemap()
+                                                  .byte_pos_to_line_and_col(span.lo) {
+            Some(pos) => pos,
+            None => {
+                return std_hash::Hash::hash(&TAG_INVALID_SPAN, hasher);
+            }
+        };
+
+        if !file_lo.contains(span.hi) {
+            return std_hash::Hash::hash(&TAG_INVALID_SPAN, hasher);
+        }
+
+        let len = span.hi - span.lo;
+
+        std_hash::Hash::hash(&TAG_VALID_SPAN, hasher);
+        std_hash::Hash::hash(&file_lo.name, hasher);
+        std_hash::Hash::hash(&line_lo, hasher);
+        std_hash::Hash::hash(&col_lo, hasher);
+        std_hash::Hash::hash(&len, hasher);

         if span.ctxt == SyntaxContext::empty() {
-            0u8.hash_stable(hcx, hasher);
+            TAG_NO_EXPANSION.hash_stable(hcx, hasher);
         } else {
-            1u8.hash_stable(hcx, hasher);
-            self.source_callsite().hash_stable(hcx, hasher);
+            TAG_EXPANSION.hash_stable(hcx, hasher);
+            span.ctxt.outer().expn_info().hash_stable(hcx, hasher);
         }
     }
 }
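To summarize the new scheme in isolation (a simplified sketch with toy types, not the compiler's actual `Span`/`CodeMap`/`HashStable` machinery): a dummy or malformed span hashes only an invalid-span tag, while a valid span hashes a valid-span tag together with the file name, the line and column of its start, and its length.

```rust
use std::collections::hash_map::DefaultHasher;
use std::hash::{Hash, Hasher};

const TAG_VALID_SPAN: u8 = 0;
const TAG_INVALID_SPAN: u8 = 1;

// Toy stand-ins for rustc's span and file-map types (invented for illustration).
struct Span { lo: u32, hi: u32 }
struct FileMap { name: String, start: u32, end: u32 }

impl FileMap {
    fn contains(&self, pos: u32) -> bool {
        pos >= self.start && pos <= self.end
    }
}

/// Hash the file name, the start line/column, and the span length;
/// hash only an invalid-span tag if the span looks broken.
fn hash_span(span: &Span, file: &FileMap, line_lo: usize, col_lo: usize, hasher: &mut impl Hasher) {
    if span.hi < span.lo || !file.contains(span.hi) {
        TAG_INVALID_SPAN.hash(hasher);
        return;
    }
    let len = span.hi - span.lo;
    TAG_VALID_SPAN.hash(hasher);
    file.name.hash(hasher);
    line_lo.hash(hasher);
    col_lo.hash(hasher);
    len.hash(hasher);
}

fn main() {
    let file = FileMap { name: "lib.rs".into(), start: 0, end: 100 };
    let span = Span { lo: 10, hi: 22 };
    let mut hasher = DefaultHasher::new();
    hash_span(&span, &file, 2, 4, &mut hasher); // line 2, column 4 (made up)
    println!("stable-ish span hash: {:x}", hasher.finish());
}
```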
24 changes: 24 additions & 0 deletions src/librustc/ich/impls_syntax.rs
@@ -347,6 +347,30 @@ impl_stable_hash_for!(enum ::syntax::ast::MetaItemKind {
     NameValue(lit)
 });

+impl_stable_hash_for!(struct ::syntax_pos::hygiene::ExpnInfo {
+    call_site,
+    callee
+});
+
+impl_stable_hash_for!(struct ::syntax_pos::hygiene::NameAndSpan {
+    format,
+    allow_internal_unstable,
+    allow_internal_unsafe,
+    span
+});
+
+impl_stable_hash_for!(enum ::syntax_pos::hygiene::ExpnFormat {
+    MacroAttribute(sym),
+    MacroBang(sym),
+    CompilerDesugaring(kind)
+});
+
+impl_stable_hash_for!(enum ::syntax_pos::hygiene::CompilerDesugaringKind {
+    BackArrow,
+    DotFill,
+    QuestionMark
+});
+
 impl<'gcx> HashStable<StableHashingContext<'gcx>> for FileMap {
     fn hash_stable<W: StableHasherResult>(&self,
                                           hcx: &mut StableHashingContext<'gcx>,
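For context: the new `Span::hash_stable` above hashes `span.ctxt.outer().expn_info()`, and these `impl_stable_hash_for!` invocations appear to be what make the expansion-related hygiene types hashable for that purpose. Conceptually, the macro produces an impl that hashes each listed field or variant in order. A toy, hand-written analogue using `std::hash` instead of rustc's `HashStable` (all types here are invented stand-ins) might look like:

```rust
use std::collections::hash_map::DefaultHasher;
use std::hash::{Hash, Hasher};

// Toy analogues of the hygiene types named above; the field names mirror the
// macro invocations, everything else is invented for illustration.
struct NameAndSpan {
    format: String,
    allow_internal_unstable: bool,
    allow_internal_unsafe: bool,
    span: (u32, u32),
}

struct ExpnInfo {
    call_site: (u32, u32),
    callee: NameAndSpan,
}

// What `impl_stable_hash_for!(struct ... { a, b, ... })` provides, conceptually:
// an impl that hashes each listed field in the order it was listed.
impl Hash for NameAndSpan {
    fn hash<H: Hasher>(&self, state: &mut H) {
        self.format.hash(state);
        self.allow_internal_unstable.hash(state);
        self.allow_internal_unsafe.hash(state);
        self.span.hash(state);
    }
}

impl Hash for ExpnInfo {
    fn hash<H: Hasher>(&self, state: &mut H) {
        self.call_site.hash(state);
        self.callee.hash(state);
    }
}

fn main() {
    let info = ExpnInfo {
        call_site: (42, 50),
        callee: NameAndSpan {
            format: "MacroBang".to_string(),
            allow_internal_unstable: false,
            allow_internal_unsafe: false,
            span: (40, 60),
        },
    };
    let mut hasher = DefaultHasher::new();
    info.hash(&mut hasher);
    println!("expansion info hash: {:x}", hasher.finish());
}
```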
2 changes: 1 addition & 1 deletion src/librustc/ty/context.rs
@@ -1235,7 +1235,7 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> {
                                            -> Result<(), E::Error>
         where E: ty::codec::TyEncoder
     {
-        self.on_disk_query_result_cache.serialize(self.global_tcx(), self.cstore, encoder)
+        self.on_disk_query_result_cache.serialize(self.global_tcx(), encoder)
     }

 }
16 changes: 9 additions & 7 deletions src/librustc/ty/maps/config.rs
@@ -31,9 +31,9 @@ pub(super) trait QueryDescription<'tcx>: QueryConfig {
         false
     }

-    fn load_from_disk<'a>(_: TyCtxt<'a, 'tcx, 'tcx>,
+    fn try_load_from_disk(_: TyCtxt<'_, 'tcx, 'tcx>,
                           _: SerializedDepNodeIndex)
-                          -> Self::Value {
+                          -> Option<Self::Value> {
         bug!("QueryDescription::load_from_disk() called for unsupport query.")
     }
 }
@@ -556,12 +556,14 @@ impl<'tcx> QueryDescription<'tcx> for queries::typeck_tables_of<'tcx> {
         def_id.is_local()
     }

-    fn load_from_disk<'a>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
+    fn try_load_from_disk(tcx: TyCtxt<'_, 'tcx, 'tcx>,
                           id: SerializedDepNodeIndex)
-                          -> Self::Value {
-        let typeck_tables: ty::TypeckTables<'tcx> = tcx.on_disk_query_result_cache
-                                                       .load_query_result(tcx, id);
-        tcx.alloc_tables(typeck_tables)
+                          -> Option<Self::Value> {
+        let typeck_tables: Option<ty::TypeckTables<'tcx>> = tcx
+            .on_disk_query_result_cache
+            .try_load_query_result(tcx, id);
+
+        typeck_tables.map(|tables| tcx.alloc_tables(tables))
     }
 }

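To illustrate the shape of this change with a self-contained toy (the real trait, `TyCtxt`, and arena plumbing are rustc-internal and not reproduced here): `try_load_from_disk` now returns an `Option`, so a caller can fall back to recomputing the query when the on-disk cache has nothing stored for that dep-node index, instead of treating a miss as impossible.

```rust
use std::collections::HashMap;

// Invented stand-ins for the query system, for illustration only.
type SerializedDepNodeIndex = usize;

struct DiskCache {
    results: HashMap<SerializedDepNodeIndex, String>,
}

impl DiskCache {
    /// Returns `None` when no result was cached for this dep node,
    /// instead of treating that as a hard error.
    fn try_load_query_result(&self, id: SerializedDepNodeIndex) -> Option<String> {
        self.results.get(&id).cloned()
    }
}

/// The caller handles the cache-miss case explicitly by recomputing.
fn get_query_result(cache: &DiskCache, id: SerializedDepNodeIndex) -> String {
    cache
        .try_load_query_result(id)
        .unwrap_or_else(|| format!("recomputed result for node {}", id))
}

fn main() {
    let cache = DiskCache {
        results: HashMap::from([(3, "cached result for node 3".to_string())]),
    };
    println!("{}", get_query_result(&cache, 3)); // hit: comes from the cache
    println!("{}", get_query_result(&cache, 9)); // miss: falls back to recomputation
}
```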