1use std::num::NonZeroUsize;
2use std::ops::DerefMut;
3use std::sync::OnceLock;
4use std::sync::atomic::{AtomicU64, Ordering};
5use std::{collections::HashSet, ops::Deref};
6
7use comemo::{Track, Tracked};
8use lsp_types::Url;
9use parking_lot::Mutex;
10use rustc_hash::FxHashMap;
11use tinymist_analysis::docs::DocString;
12use tinymist_analysis::stats::AllocStats;
13use tinymist_analysis::syntax::classify_def_loosely;
14use tinymist_analysis::ty::{BuiltinTy, InsTy, term_value};
15use tinymist_analysis::{analyze_expr_, analyze_import_};
16use tinymist_lint::{KnownIssues, LintInfo};
17use tinymist_project::{LspComputeGraph, LspWorld, TaskWhen};
18use tinymist_std::hash::{FxDashMap, hash128};
19use tinymist_std::typst::TypstDocument;
20use tinymist_world::debug_loc::DataSource;
21use tinymist_world::vfs::{PathResolution, WorkspaceResolver};
22use tinymist_world::{DETACHED_ENTRY, EntryReader};
23use typst::diag::{At, FileError, FileResult, SourceDiagnostic, SourceResult, StrResult};
24use typst::foundations::{Bytes, IntoValue, Module, StyleChain, Styles};
25use typst::introspection::Introspector;
26use typst::layout::Position;
27use typst::model::BibliographyElem;
28use typst::syntax::package::{PackageManifest, PackageSpec};
29use typst::syntax::{Span, VirtualPath};
30use typst_shim::eval::{Eval, eval_compat};
31
32use super::{LspQuerySnapshot, TypeEnv};
33use crate::adt::revision::{RevisionLock, RevisionManager, RevisionManagerLike, RevisionSlot};
34use crate::analysis::prelude::*;
35use crate::analysis::{
36 AnalysisStats, BibInfo, CompletionFeat, Definition, PathKind, QueryStatGuard,
37 SemanticTokenCache, SemanticTokenContext, SemanticTokens, Signature, SignatureTarget, Ty,
38 TypeInfo, analyze_signature, bib_info, definition, post_type_check,
39};
40use crate::docs::{DefDocs, TidyModuleDocs};
41use crate::syntax::{
42 Decl, DefKind, ExprInfo, ExprRoute, LexicalScope, ModuleDependency, SyntaxClass,
43 classify_syntax, construct_module_dependencies, is_mark, resolve_id_by_path,
44 scan_workspace_files,
45};
46use crate::upstream::{Tooltip, tooltip_};
47use crate::{
48 ColorTheme, CompilerQueryRequest, LspPosition, LspRange, LspWorldExt, PositionEncoding,
49};
50
/// Declares a function-local `static` holding a lazily-initialized interned
/// string, so repeated calls share a single `Interned<str>` allocation.
macro_rules! interned_str {
    ($name:ident, $value:expr) => {
        static $name: LazyLock<Interned<str>> = LazyLock::new(|| $value.into());
    };
}
56
/// The shared analysis configuration and global state used to serve language
/// queries. Cloning is cheap: all heavy members are behind `Arc`s.
#[derive(Default, Clone)]
pub struct Analysis {
    /// The position encoding (UTF-8/UTF-16) negotiated with the client.
    pub position_encoding: PositionEncoding,
    /// Whether the client accepts overlapping semantic tokens.
    pub allow_overlapping_token: bool,
    /// Whether the client accepts semantic tokens spanning multiple lines.
    pub allow_multiline_token: bool,
    /// Whether to strip HTML comments from markup sent to the client
    /// (see `SharedContext::remove_html`).
    pub remove_html: bool,
    /// Whether extended (non-standard) code actions are enabled.
    pub extended_code_action: bool,
    /// Completion feature flags (trigger commands, snippet behavior, …).
    pub completion_feat: CompletionFeat,
    /// The editor's color theme, used when rendering previews/decorations.
    pub color_theme: ColorTheme,
    /// When to run the linter (see `TaskWhen`).
    pub lint: TaskWhen,
    /// Optional provider rendering document previews at a position.
    pub periscope: Option<Arc<dyn PeriscopeProvider + Send + Sync>>,
    /// Global rate limiters for expensive analyses (import/expr/tooltip).
    pub workers: Arc<AnalysisGlobalWorkers>,
    /// Cache of semantic-token results, keyed per file across revisions.
    pub tokens_caches: Arc<Mutex<SemanticTokenCache>>,
    /// Global, lifetime-stamped analysis caches (signatures, docstrings, …).
    pub caches: AnalysisGlobalCaches,
    /// Revision-scoped incremental caches (expr/type/lint stages).
    pub analysis_rev_cache: Arc<Mutex<AnalysisRevCache>>,
    /// Query statistics for cache hit/miss reporting.
    pub stats: Arc<AnalysisStats>,
}
95
impl Analysis {
    /// Enters an analysis session over the given compute-graph snapshot,
    /// locking the current revision without a specific request.
    pub fn enter(&self, g: LspComputeGraph) -> LocalContextGuard {
        self.enter_(g, self.lock_revision(None))
    }

    /// Enters an analysis session with an already-acquired revision lock.
    ///
    /// Advances the global lifetime counter (used to stamp cache entries for
    /// GC) and binds the revision slot matching the world's revision.
    pub(crate) fn enter_(&self, g: LspComputeGraph, mut lg: AnalysisRevLock) -> LocalContextGuard {
        let lifetime = self.caches.lifetime.fetch_add(1, Ordering::SeqCst);
        let slot = self
            .analysis_rev_cache
            .lock()
            .find_revision(g.world().revision(), &lg);
        // Move the semantic-token context (if any) out of the lock and into
        // the local context, where `cached_tokens` will fill it.
        let tokens = lg.tokens.take();
        LocalContextGuard {
            _rev_lock: lg,
            local: LocalContext {
                tokens,
                caches: AnalysisLocalCaches::default(),
                shared: Arc::new(SharedContext {
                    slot,
                    lifetime,
                    graph: g,
                    analysis: self.clone(),
                }),
            },
        }
    }

    /// Captures a query snapshot: the compute graph plus a revision lock
    /// appropriate for the given request (if any).
    pub fn query_snapshot(
        self: Arc<Self>,
        snap: LspComputeGraph,
        req: Option<&CompilerQueryRequest>,
    ) -> LspQuerySnapshot {
        let rev_lock = self.lock_revision(req);
        LspQuerySnapshot {
            snap,
            analysis: self,
            rev_lock,
        }
    }

    /// Locks the current revision for analysis. Semantic-token requests also
    /// acquire a token cache slot (delta requests referencing the previous
    /// result id).
    #[must_use]
    pub fn lock_revision(&self, req: Option<&CompilerQueryRequest>) -> AnalysisRevLock {
        let mut grid = self.analysis_rev_cache.lock();

        AnalysisRevLock {
            tokens: match req {
                Some(CompilerQueryRequest::SemanticTokensFull(req)) => Some(
                    SemanticTokenCache::acquire(self.tokens_caches.clone(), &req.path, None),
                ),
                Some(CompilerQueryRequest::SemanticTokensDelta(req)) => {
                    Some(SemanticTokenCache::acquire(
                        self.tokens_caches.clone(),
                        &req.path,
                        Some(&req.previous_result_id),
                    ))
                }
                _ => None,
            },
            inner: grid.manager.lock_estimated(),
            grid: self.analysis_rev_cache.clone(),
        }
    }

    /// Clears all global analysis caches (signatures, docstrings, tokens,
    /// and revision-scoped caches).
    pub fn clear_cache(&self) {
        self.caches.signatures.clear();
        self.caches.docstrings.clear();
        self.caches.def_signatures.clear();
        self.caches.static_signatures.clear();
        self.caches.terms.clear();
        self.tokens_caches.lock().clear();
        self.analysis_rev_cache.lock().clear();
    }

    /// Reports per-query statistics (cache hits/misses) as text.
    pub fn report_query_stats(&self) -> String {
        self.stats.report()
    }

    /// Reports allocator statistics as text.
    pub fn report_alloc_stats(&self) -> String {
        AllocStats::report()
    }

    /// The editor command to trigger suggestion after a completion is
    /// accepted, if enabled and requested by `context`.
    pub fn trigger_suggest(&self, context: bool) -> Option<Interned<str>> {
        interned_str!(INTERNED, "editor.action.triggerSuggest");

        (self.completion_feat.trigger_suggest && context).then(|| INTERNED.clone())
    }

    /// The editor command to trigger parameter hints, if enabled and
    /// requested by `context`.
    pub fn trigger_parameter_hints(&self, context: bool) -> Option<Interned<str>> {
        interned_str!(INTERNED, "editor.action.triggerParameterHints");
        (self.completion_feat.trigger_parameter_hints && context).then(|| INTERNED.clone())
    }

    /// Like [`Self::trigger_suggest`], but only when triggering inside
    /// snippet placeholders is enabled.
    pub fn trigger_on_snippet(&self, context: bool) -> Option<Interned<str>> {
        if !self.completion_feat.trigger_on_snippet_placeholders {
            return None;
        }

        self.trigger_suggest(context)
    }

    /// The combined suggest-and-parameter-hints command for snippet
    /// placeholders; falls back to plain parameter hints when placeholder
    /// triggering is disabled.
    pub fn trigger_on_snippet_with_param_hint(&self, context: bool) -> Option<Interned<str>> {
        interned_str!(INTERNED, "tinymist.triggerSuggestAndParameterHints");
        if !self.completion_feat.trigger_on_snippet_placeholders {
            return self.trigger_parameter_hints(context);
        }

        (self.completion_feat.trigger_suggest_and_parameter_hints && context)
            .then(|| INTERNED.clone())
    }
}
222
/// A provider that renders a small preview ("periscope") of the document at a
/// given position. The default implementation renders nothing.
pub trait PeriscopeProvider {
    /// Resolves the periscope preview at `_pos` in `_doc`, returned as a
    /// string (presumably markup/image content — confirm with implementors).
    fn periscope_at(
        &self,
        _ctx: &mut LocalContext,
        _doc: &TypstDocument,
        _pos: Position,
    ) -> Option<String> {
        None
    }
}
235
/// A guard over a [`LocalContext`]: holds the revision lock for the lifetime
/// of the analysis session and garbage-collects stale cache entries on drop.
pub struct LocalContextGuard {
    /// The guarded local analysis context.
    pub local: LocalContext,
    /// Keeps the analysis revision alive while the guard exists.
    _rev_lock: AnalysisRevLock,
}
243
// The guard transparently dereferences to its local context.
impl Deref for LocalContextGuard {
    type Target = LocalContext;

    fn deref(&self) -> &Self::Target {
        &self.local
    }
}
251
impl DerefMut for LocalContextGuard {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.local
    }
}
257
// Sweep stale global cache entries when the analysis session ends.
impl Drop for LocalContextGuard {
    fn drop(&mut self) {
        self.gc();
    }
}
264
265impl LocalContextGuard {
266 fn gc(&self) {
267 let lifetime = self.lifetime;
268 loop {
269 let latest_clear_lifetime = self.analysis.caches.clear_lifetime.load(Ordering::Relaxed);
270 if latest_clear_lifetime >= lifetime {
271 return;
272 }
273
274 if self.analysis.caches.clear_lifetime.compare_exchange(
275 latest_clear_lifetime,
276 lifetime,
277 Ordering::SeqCst,
278 Ordering::SeqCst,
279 ) != Ok(latest_clear_lifetime)
280 {
281 continue;
282 }
283
284 break;
285 }
286
287 let retainer = |l: u64| lifetime.saturating_sub(l) < 60;
288 let caches = &self.analysis.caches;
289 caches.def_signatures.retain(|(l, _)| retainer(*l));
290 caches.static_signatures.retain(|(l, _)| retainer(*l));
291 caches.terms.retain(|(l, _)| retainer(*l));
292 caches.signatures.retain(|(l, _)| retainer(*l));
293 caches.docstrings.retain(|(l, _)| retainer(*l));
294 }
295}
296
/// The mutable, per-session analysis context. Wraps the immutable
/// [`SharedContext`] with session-local (single-threaded) caches.
pub struct LocalContext {
    /// Semantic-token context carried over from the revision lock, if the
    /// current request is a semantic-token query.
    pub(crate) tokens: Option<SemanticTokenContext>,
    /// Lazily-computed caches local to this session.
    pub caches: AnalysisLocalCaches,
    /// The thread-shareable part of the context.
    pub shared: Arc<SharedContext>,
}
307
// The local context transparently dereferences to the shared context.
impl Deref for LocalContext {
    type Target = Arc<SharedContext>;

    fn deref(&self) -> &Self::Target {
        &self.shared
    }
}
315
impl DerefMut for LocalContext {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.shared
    }
}
321
322impl LocalContext {
323 #[cfg(test)]
325 pub fn test_package_list(&mut self, f: impl FnOnce() -> Vec<(PackageSpec, Option<EcoString>)>) {
326 self.world().registry.test_package_list(f);
327 }
328
329 #[cfg(test)]
331 pub fn test_completion_files(&mut self, f: impl FnOnce() -> Vec<TypstFileId>) {
332 self.caches.completion_files.get_or_init(f);
333 }
334
335 #[cfg(test)]
337 pub fn test_files(&mut self, f: impl FnOnce() -> Vec<TypstFileId>) {
338 self.caches.root_files.get_or_init(f);
339 }
340
341 pub(crate) fn completion_files(&self, pref: &PathKind) -> impl Iterator<Item = &TypstFileId> {
343 let regexes = pref.ext_matcher();
344 self.caches
345 .completion_files
346 .get_or_init(|| {
347 if let Some(root) = self.world().entry_state().workspace_root() {
348 scan_workspace_files(&root, PathKind::Special.ext_matcher(), |path| {
349 WorkspaceResolver::workspace_file(Some(&root), VirtualPath::new(path))
350 })
351 } else {
352 vec![]
353 }
354 })
355 .iter()
356 .filter(move |fid| {
357 fid.vpath()
358 .as_rooted_path()
359 .extension()
360 .and_then(|path| path.to_str())
361 .is_some_and(|path| regexes.is_match(path))
362 })
363 }
364
365 pub fn source_files(&self) -> &Vec<TypstFileId> {
367 self.caches.root_files.get_or_init(|| {
368 self.completion_files(&PathKind::Source {
369 allow_package: false,
370 })
371 .copied()
372 .collect()
373 })
374 }
375
376 pub fn module_dependencies(&mut self) -> &HashMap<TypstFileId, ModuleDependency> {
378 if self.caches.module_deps.get().is_some() {
379 self.caches.module_deps.get().unwrap()
380 } else {
381 let deps = construct_module_dependencies(self);
384 self.caches.module_deps.get_or_init(|| deps)
385 }
386 }
387
388 pub fn depended_source_files(&self) -> EcoVec<TypstFileId> {
390 let mut ids = self.depended_files();
391 let preference = PathKind::Source {
392 allow_package: false,
393 };
394 ids.retain(|id| preference.is_match(id.vpath().as_rooted_path()));
395 ids
396 }
397
398 pub fn depended_files(&self) -> EcoVec<TypstFileId> {
401 self.world().depended_files()
402 }
403
404 pub fn shared(&self) -> &Arc<SharedContext> {
406 &self.shared
407 }
408
409 pub fn shared_(&self) -> Arc<SharedContext> {
411 self.shared.clone()
412 }
413
414 pub fn fork_for_search(&mut self) -> SearchCtx<'_> {
416 SearchCtx {
417 ctx: self,
418 searched: Default::default(),
419 worklist: Default::default(),
420 }
421 }
422
423 pub(crate) fn preload_package(&self, entry_point: TypstFileId) {
424 self.shared_().preload_package(entry_point);
425 }
426
427 pub(crate) fn with_vm<T>(&self, f: impl FnOnce(&mut typst_shim::eval::Vm) -> T) -> T {
428 crate::upstream::with_vm((self.world() as &dyn World).track(), f)
429 }
430
431 pub(crate) fn const_eval(&self, rr: ast::Expr<'_>) -> Option<Value> {
432 SharedContext::const_eval(rr)
433 }
434
435 pub(crate) fn mini_eval(&self, rr: ast::Expr<'_>) -> Option<Value> {
436 self.const_eval(rr)
437 .or_else(|| self.with_vm(|vm| rr.eval(vm).ok()))
438 }
439
440 pub(crate) fn cached_tokens(&mut self, source: &Source) -> (SemanticTokens, Option<String>) {
441 let tokens = crate::analysis::semantic_tokens::get_semantic_tokens(self, source);
442
443 let result_id = self.tokens.as_ref().map(|t| {
444 let id = t.next.revision;
445 t.next
446 .data
447 .set(tokens.clone())
448 .unwrap_or_else(|_| panic!("unexpected slot overwrite {id}"));
449 id.to_string()
450 });
451 (tokens, result_id)
452 }
453
454 pub(crate) fn expr_stage_by_id(&mut self, fid: TypstFileId) -> Option<ExprInfo> {
456 Some(self.expr_stage(&self.source_by_id(fid).ok()?))
457 }
458
459 pub(crate) fn expr_stage(&mut self, source: &Source) -> ExprInfo {
461 let id = source.id();
462 let cache = &self.caches.modules.entry(id).or_default().expr_stage;
463 cache.get_or_init(|| self.shared.expr_stage(source)).clone()
464 }
465
466 pub(crate) fn type_check(&mut self, source: &Source) -> Arc<TypeInfo> {
468 let id = source.id();
469 let cache = &self.caches.modules.entry(id).or_default().type_check;
470 cache.get_or_init(|| self.shared.type_check(source)).clone()
471 }
472
473 pub(crate) fn lint(
474 &mut self,
475 source: &Source,
476 known_issues: &KnownIssues,
477 ) -> EcoVec<SourceDiagnostic> {
478 self.shared.lint(source, known_issues).diagnostics
479 }
480
481 pub(crate) fn type_check_by_id(&mut self, id: TypstFileId) -> Arc<TypeInfo> {
483 let cache = &self.caches.modules.entry(id).or_default().type_check;
484 cache
485 .clone()
486 .get_or_init(|| {
487 let source = self.source_by_id(id).ok();
488 source
489 .map(|s| self.shared.type_check(&s))
490 .unwrap_or_default()
491 })
492 .clone()
493 }
494
495 pub(crate) fn type_of_span(&mut self, s: Span) -> Option<Ty> {
496 let scheme = self.type_check_by_id(s.id()?);
497 let ty = scheme.type_of_span(s)?;
498 Some(scheme.simplify(ty, false))
499 }
500
501 pub(crate) fn def_docs(&mut self, def: &Definition) -> Option<DefDocs> {
502 match def.decl.kind() {
505 DefKind::Function => {
506 let sig = self.sig_of_def(def.clone())?;
507 let docs = crate::docs::sig_docs(&sig)?;
508 Some(DefDocs::Function(Box::new(docs)))
509 }
510 DefKind::Struct | DefKind::Constant | DefKind::Variable => {
511 let docs = crate::docs::var_docs(self, def.decl.span())?;
512 Some(DefDocs::Variable(docs))
513 }
514 DefKind::Module => {
515 let ei = self.expr_stage_by_id(def.decl.file_id()?)?;
516 Some(DefDocs::Module(TidyModuleDocs {
517 docs: ei.module_docstring.docs.clone().unwrap_or_default(),
518 }))
519 }
520 DefKind::Reference => None,
521 }
522 }
523}
524
/// The immutable, thread-shareable part of an analysis session.
pub struct SharedContext {
    /// The lifetime stamp of this session; used to mark global cache entries
    /// for garbage collection in `LocalContextGuard::gc`.
    pub lifetime: u64,
    /// The compute-graph snapshot (world + compiled artifacts) under analysis.
    pub graph: LspComputeGraph,
    /// The analysis configuration and global caches.
    pub analysis: Analysis,
    /// The revision slot holding revision-scoped incremental caches.
    slot: Arc<RevisionSlot<AnalysisRevSlot>>,
}
538
impl SharedContext {
    /// The analysis revision this context is bound to.
    pub fn revision(&self) -> usize {
        self.slot.revision
    }

    /// The negotiated position encoding.
    pub(crate) fn position_encoding(&self) -> PositionEncoding {
        self.analysis.position_encoding
    }

    /// The world snapshot under analysis.
    pub fn world(&self) -> &LspWorld {
        self.graph.world()
    }

    /// The last successfully compiled document, if any.
    pub fn success_doc(&self) -> Option<&TypstDocument> {
        self.graph.snap.success_doc.as_ref()
    }

    /// Converts an LSP position to a byte offset in `src`.
    pub fn to_typst_pos(&self, position: LspPosition, src: &Source) -> Option<usize> {
        crate::to_typst_position(position, self.analysis.position_encoding, src)
    }

    /// Converts an LSP position to a byte offset, shifted right by `shift`
    /// and clamped forward to the next char boundary.
    pub fn to_typst_pos_offset(
        &self,
        source: &Source,
        position: LspPosition,
        shift: usize,
    ) -> Option<usize> {
        let offset = self.to_typst_pos(position, source)?;
        Some(ceil_char_boundary(source.text(), offset + shift))
    }

    /// Converts a byte offset in `src` to an LSP position.
    pub fn to_lsp_pos(&self, typst_offset: usize, src: &Source) -> LspPosition {
        crate::to_lsp_position(typst_offset, self.analysis.position_encoding, src)
    }

    /// Converts an LSP range to a byte range in `src`.
    pub fn to_typst_range(&self, position: LspRange, src: &Source) -> Option<Range<usize>> {
        crate::to_typst_range(position, self.analysis.position_encoding, src)
    }

    /// Converts a byte range in `src` to an LSP range.
    pub fn to_lsp_range(&self, position: Range<usize>, src: &Source) -> LspRange {
        crate::to_lsp_range(position, src, self.analysis.position_encoding)
    }

    /// Converts a byte range in the file `fid` to an LSP range. Non-typst
    /// files (yaml/yml/bib) are mapped via a raw line-index of their bytes.
    pub fn to_lsp_range_(&self, position: Range<usize>, fid: TypstFileId) -> Option<LspRange> {
        let ext = fid
            .vpath()
            .as_rootless_path()
            .extension()
            .and_then(|ext| ext.to_str());
        if matches!(ext, Some("yaml" | "yml" | "bib")) {
            let bytes = self.file_by_id(fid).ok()?;
            let bytes_len = bytes.len();
            let loc = loc_info(bytes)?;
            // binary search the line (feature-wise)
            let start = find_loc(bytes_len, &loc, position.start, self.position_encoding())?;
            let end = find_loc(bytes_len, &loc, position.end, self.position_encoding())?;
            return Some(LspRange { start, end });
        }

        let source = self.source_by_id(fid).ok()?;

        Some(self.to_lsp_range(position, &source))
    }

    /// Resolves the file-system path of a file id.
    pub fn path_for_id(&self, id: TypstFileId) -> Result<PathResolution, FileError> {
        self.world().path_for_id(id)
    }

    /// Resolves the client URI of a file id.
    pub fn uri_for_id(&self, fid: TypstFileId) -> Result<Url, FileError> {
        self.world().uri_for_id(fid)
    }

    /// Resolves the file id of a file-system path.
    pub fn file_id_by_path(&self, path: &Path) -> FileResult<TypstFileId> {
        self.world().file_id_by_path(path)
    }

    /// The raw content of a file.
    pub fn file_by_id(&self, fid: TypstFileId) -> FileResult<Bytes> {
        self.world().file(fid)
    }

    /// The parsed source of a file.
    pub fn source_by_id(&self, fid: TypstFileId) -> FileResult<Source> {
        self.world().source(fid)
    }

    /// The parsed source of a file, looked up by path.
    pub fn source_by_path(&self, path: &Path) -> FileResult<Source> {
        self.source_by_id(self.file_id_by_path(path)?)
    }

    /// Classifies the syntax under a span.
    pub fn classify_span<'s>(&self, source: &'s Source, span: Span) -> Option<SyntaxClass<'s>> {
        let node = LinkedNode::new(source.root()).find(span)?;
        // +1 to place the cursor *inside* the found token rather than at its
        // left edge.
        let cursor = node.offset() + 1;
        classify_syntax(node, cursor)
    }

    /// Classifies the syntax under an LSP position for definition purposes.
    /// If the cursor sits at the left edge of a mark token (e.g. punctuation)
    /// with an adjacent previous leaf, the previous leaf is classified
    /// instead.
    pub fn classify_for_decl<'s>(
        &self,
        source: &'s Source,
        position: LspPosition,
    ) -> Option<SyntaxClass<'s>> {
        let cursor = self.to_typst_pos_offset(source, position, 1)?;
        let mut node = LinkedNode::new(source.root()).leaf_at_compat(cursor)?;

        if cursor == node.offset() + 1 && is_mark(node.kind()) {
            let prev_leaf = node.prev_leaf();
            if let Some(prev_leaf) = prev_leaf
                && prev_leaf.range().end == node.offset()
            {
                node = prev_leaf;
            }
        }

        classify_syntax(node, cursor)
    }

    /// Describes where a font was loaded from, if known.
    pub fn font_info(&self, font: typst::text::Font) -> Option<Arc<DataSource>> {
        self.world().font_resolver.describe_font(&font)
    }

    /// Lists packages in the `local` namespace of the registry.
    #[cfg(feature = "local-registry")]
    pub fn local_packages(&self) -> EcoVec<PackageSpec> {
        crate::package::list_package_by_namespace(&self.world().registry, eco_format!("local"))
            .into_iter()
            .map(|(_, spec)| spec)
            .collect()
    }

    /// Local-registry support is disabled; no local packages.
    #[cfg(not(feature = "local-registry"))]
    pub fn local_packages(&self) -> EcoVec<PackageSpec> {
        eco_vec![]
    }

    /// Evaluates a literal expression to a value without a VM; returns `None`
    /// for anything non-literal.
    pub(crate) fn const_eval(rr: ast::Expr<'_>) -> Option<Value> {
        Some(match rr {
            ast::Expr::None(_) => Value::None,
            ast::Expr::Auto(_) => Value::Auto,
            ast::Expr::Bool(v) => Value::Bool(v.get()),
            ast::Expr::Int(v) => Value::Int(v.get()),
            ast::Expr::Float(v) => Value::Float(v.get()),
            ast::Expr::Numeric(v) => Value::numeric(v.get()),
            ast::Expr::Str(v) => Value::Str(v.get().into()),
            _ => return None,
        })
    }

    /// Evaluates a file as a module.
    pub fn module_by_id(&self, fid: TypstFileId) -> SourceResult<Module> {
        let source = self.source_by_id(fid).at(Span::detached())?;
        self.module_by_src(source)
    }

    /// Evaluates a string (as a detached source) as a module.
    pub fn module_by_str(&self, rr: String) -> Option<Module> {
        let src = Source::new(*DETACHED_ENTRY, rr);
        self.module_by_src(src).ok()
    }

    /// Evaluates a source as a module.
    pub fn module_by_src(&self, source: Source) -> SourceResult<Module> {
        eval_compat(&self.world(), &source)
    }

    /// The module value referenced by an import-like syntax node, if it can
    /// be resolved.
    pub fn module_by_syntax(self: &Arc<Self>, source: &SyntaxNode) -> Option<Value> {
        self.module_term_by_syntax(source, true)
            .and_then(|ty| ty.value())
    }

    /// The module *term* referenced by an import-like syntax node. When
    /// `value` is true, the module is fully evaluated into a value type;
    /// otherwise only a declaration-level module type is produced.
    pub fn module_term_by_syntax(self: &Arc<Self>, source: &SyntaxNode, value: bool) -> Option<Ty> {
        let (src, scope) = self.analyze_import(source);
        if let Some(scope) = scope {
            return Some(match scope {
                Value::Module(m) if m.file_id().is_some() => {
                    Ty::Builtin(BuiltinTy::Module(Decl::module(m.file_id()?).into()))
                }
                scope => Ty::Value(InsTy::new(scope)),
            });
        }

        match src {
            Some(Value::Str(s)) => {
                let id = resolve_id_by_path(self.world(), source.span().id()?, s.as_str())?;

                Some(if value {
                    Ty::Value(InsTy::new(Value::Module(self.module_by_id(id).ok()?)))
                } else {
                    Ty::Builtin(BuiltinTy::Module(Decl::module(id).into()))
                })
            }
            _ => None,
        }
    }

    /// The expression stage of a file, or `None` if it cannot be read.
    pub(crate) fn expr_stage_by_id(self: &Arc<Self>, fid: TypstFileId) -> Option<ExprInfo> {
        Some(self.expr_stage(&self.source_by_id(fid).ok()?))
    }

    /// The expression stage of a source, with a fresh import route.
    pub(crate) fn expr_stage(self: &Arc<Self>, source: &Source) -> ExprInfo {
        let mut route = ExprRoute::default();
        self.expr_stage_(source, &mut route)
    }

    /// The expression stage of a source, memoized per revision by the
    /// source's content hash. `route` tracks the in-progress import chain to
    /// break cycles.
    pub(crate) fn expr_stage_(
        self: &Arc<Self>,
        source: &Source,
        route: &mut ExprRoute,
    ) -> ExprInfo {
        use crate::syntax::expr_of;
        let guard = self.query_stat(source.id(), "expr_stage");
        self.slot.expr_stage.compute(hash128(&source), |prev| {
            expr_of(self.clone(), source.clone(), route, guard, prev)
        })
    }

    /// The lexical exports of a source. Returns the in-progress entry from
    /// `route` if the module is currently being analyzed (cycle).
    pub(crate) fn exports_of(
        self: &Arc<Self>,
        source: &Source,
        route: &mut ExprRoute,
    ) -> Option<Arc<LazyHash<LexicalScope>>> {
        if let Some(s) = route.get(&source.id()) {
            return s.clone();
        }

        Some(self.expr_stage_(source, route).exports.clone())
    }

    /// The type information of a source, with a fresh type environment.
    pub(crate) fn type_check(self: &Arc<Self>, source: &Source) -> Arc<TypeInfo> {
        let mut route = TypeEnv::default();
        self.type_check_(source, &mut route)
    }

    /// The type information of a source, memoized per revision by the hash
    /// of its expression stage. A cached result is reused only when its
    /// recorded revision matches the expression stage's revision.
    pub(crate) fn type_check_(
        self: &Arc<Self>,
        source: &Source,
        route: &mut TypeEnv,
    ) -> Arc<TypeInfo> {
        use crate::analysis::type_check;

        let ei = self.expr_stage(source);
        let guard = self.query_stat(source.id(), "type_check");
        self.slot.type_check.compute(hash128(&ei), |prev| {
            if let Some(cache_hint) = prev.filter(|prev| prev.revision == ei.revision) {
                return cache_hint;
            }

            guard.miss();
            type_check(self.clone(), ei, route)
        })
    }

    /// Lints a source, memoized per revision by the hash of its expression
    /// stage, type info, and the known-issue set.
    #[typst_macros::time(span = source.root().span())]
    pub(crate) fn lint(self: &Arc<Self>, source: &Source, issues: &KnownIssues) -> LintInfo {
        let ei = self.expr_stage(source);
        let ti = self.type_check(source);
        let guard = self.query_stat(source.id(), "lint");
        self.slot.lint.compute(hash128(&(&ei, &ti, issues)), |_| {
            guard.miss();
            tinymist_lint::lint_file(self.world(), &ei, ti, issues.clone())
        })
    }

    /// The signature of a runtime function value. Conversion of a runtime
    /// `Func` is assumed infallible here, hence the `unwrap`.
    pub(crate) fn type_of_func(self: &Arc<Self>, func: Func) -> Signature {
        crate::log_debug_ct!("convert runtime func {func:?}");
        analyze_signature(self, SignatureTarget::Convert(func)).unwrap()
    }

    /// The term (type) of a runtime value, cached globally by value hash.
    /// The cached pair stores the value itself to guard against hash
    /// collisions.
    pub(crate) fn type_of_value(self: &Arc<Self>, val: &Value) -> Ty {
        crate::log_debug_ct!("convert runtime value {val:?}");

        // todo: check performance on peeking signature source frequently
        let cache_key = val;
        let cached = self
            .analysis
            .caches
            .terms
            .m
            .get(&hash128(&cache_key))
            .and_then(|slot| (cache_key == &slot.1.0).then_some(slot.1.1.clone()));
        if let Some(cached) = cached {
            return cached;
        }

        let res = term_value(val);

        self.analysis
            .caches
            .terms
            .m
            .entry(hash128(&cache_key))
            .or_insert_with(|| (self.lifetime, (cache_key.clone(), res.clone())));

        res
    }

    /// The definition referenced at a span, resolved via syntax
    /// classification.
    pub(crate) fn def_of_span(self: &Arc<Self>, source: &Source, span: Span) -> Option<Definition> {
        let syntax = self.classify_span(source, span)?;
        definition(self, source, syntax)
    }

    /// The definition of a declaration; only function declarations resolve
    /// to a definition here.
    pub(crate) fn def_of_decl(&self, decl: &Interned<Decl>) -> Option<Definition> {
        match decl.as_ref() {
            Decl::Func(..) => Some(Definition::new(decl.clone(), None)),
            Decl::Module(..) => None,
            _ => None,
        }
    }

    /// The definition for a classified syntax node (static resolution only).
    pub(crate) fn def_of_syntax(
        self: &Arc<Self>,
        source: &Source,
        syntax: SyntaxClass,
    ) -> Option<Definition> {
        definition(self, source, syntax)
    }

    /// The definition for a classified syntax node, falling back to dynamic
    /// (runtime-value) resolution when static resolution yields a variable
    /// whose type is unknown. The dynamic value's own definition site is
    /// re-resolved statically when possible.
    pub(crate) fn def_of_syntax_or_dyn(
        self: &Arc<Self>,
        source: &Source,
        syntax: SyntaxClass,
    ) -> Option<Definition> {
        let def = self.def_of_syntax(source, syntax.clone());
        match def.as_ref().map(|d| d.decl.kind()) {
            // These kinds are already resolved precisely; no dynamic fallback.
            Some(DefKind::Reference | DefKind::Module | DefKind::Function) => return def,
            Some(DefKind::Struct | DefKind::Constant | DefKind::Variable) | None => {}
        }

        // If the static type at the definition is concrete (not `Any`), the
        // static resolution is trusted.
        let know_ty_well = def
            .as_ref()
            .and_then(|d| self.simplified_type_of_span(d.decl.span()))
            .filter(|ty| !matches!(ty, Ty::Any))
            .is_some();
        if know_ty_well {
            return def;
        }

        let def_ref = def.as_ref();
        let def_name = || Some(def_ref?.name().clone());
        let dyn_def = self
            .analyze_expr(syntax.node())
            .iter()
            .find_map(|(value, _)| {
                let def = Definition::from_value(value.clone(), def_name)?;
                None.or_else(|| {
                    // Prefer the statically-resolved definition at the
                    // runtime value's own declaration site.
                    let source = self.source_by_id(def.decl.file_id()?).ok()?;
                    let node = LinkedNode::new(source.root()).find(def.decl.span())?;
                    let def_at_the_span = classify_def_loosely(node)?;
                    self.def_of_span(&source, def_at_the_span.name()?.span())
                })
                .or(Some(def))
            });

        dyn_def.or(def)
    }

    /// The simplified type at a span, if known.
    pub(crate) fn simplified_type_of_span(self: &Arc<Self>, span: Span) -> Option<Ty> {
        let source = self.source_by_id(span.id()?).ok()?;
        let (ti, ty) = self.type_of_span_(&source, span)?;
        Some(ti.simplify(ty, false))
    }

    /// The (unsimplified) type at a span, if known.
    pub(crate) fn type_of_span(self: &Arc<Self>, span: Span) -> Option<Ty> {
        let source = self.source_by_id(span.id()?).ok()?;
        Some(self.type_of_span_(&source, span)?.1)
    }

    /// The type at a span together with the module's type info.
    pub(crate) fn type_of_span_(
        self: &Arc<Self>,
        source: &Source,
        span: Span,
    ) -> Option<(Arc<TypeInfo>, Ty)> {
        let ti = self.type_check(source);
        let ty = ti.type_of_span(span)?;
        Some((ti, ty))
    }

    /// The type of a node refined by post-type-check analysis (e.g. call-site
    /// narrowing), falling back to the span's recorded type.
    pub(crate) fn post_type_of_node(self: &Arc<Self>, node: LinkedNode) -> Option<Ty> {
        let id = node.span().id()?;
        let source = self.source_by_id(id).ok()?;
        let ty_chk = self.type_check(&source);

        let ty = post_type_check(self.clone(), &ty_chk, node.clone())
            .or_else(|| ty_chk.type_of_span(node.span()))?;
        Some(ty_chk.simplify(ty, false))
    }

    /// The signature of a definition (typically a function definition).
    pub(crate) fn sig_of_def(self: &Arc<Self>, def: Definition) -> Option<Signature> {
        crate::log_debug_ct!("check definition func {def:?}");
        let source = def.decl.file_id().and_then(|id| self.source_by_id(id).ok());
        analyze_signature(self, SignatureTarget::Def(source, def))
    }

    /// The signature described by a type, if it is callable.
    pub(crate) fn sig_of_type(self: &Arc<Self>, ti: &TypeInfo, ty: Ty) -> Option<Signature> {
        super::sig_of_type(self, ti, ty)
    }

    /// The signature of a callee: from its static type when possible,
    /// otherwise from a runtime function value of the callee expression.
    pub(crate) fn sig_of_type_or_dyn(
        self: &Arc<Self>,
        ti: &TypeInfo,
        callee_ty: Ty,
        callee: &SyntaxNode,
    ) -> Option<Signature> {
        self.sig_of_type(ti, callee_ty).or_else(|| {
            self.analyze_expr(callee).iter().find_map(|(value, _)| {
                let Value::Func(callee) = value else {
                    return None;
                };

                analyze_signature(self, SignatureTarget::Runtime(callee.clone()))
            })
        })
    }

    /// Analyzes an import-like expression, returning `(source value,
    /// imported scope value)`. Constant expressions short-circuit without
    /// running the (rate-limited) import analyzer.
    pub fn analyze_import(&self, source: &SyntaxNode) -> (Option<Value>, Option<Value>) {
        if let Some(v) = source.cast::<ast::Expr>().and_then(Self::const_eval) {
            return (Some(v), None);
        }
        let token = &self.analysis.workers.import;
        token.enter(|| analyze_import_(self.world(), source))
    }

    /// Analyzes an expression's possible runtime values (with their styles),
    /// rate-limited by the global expression worker.
    pub fn analyze_expr(&self, source: &SyntaxNode) -> EcoVec<(Value, Option<Styles>)> {
        let token = &self.analysis.workers.expression;
        token.enter(|| analyze_expr_(self.world(), source))
    }

    /// Analyzes bibliography information of the compiled document.
    pub fn analyze_bib(&self, introspector: &Introspector) -> Option<Arc<BibInfo>> {
        let world = self.world();
        let world = (world as &dyn World).track();

        analyze_bib(world, introspector.track())
    }

    /// Computes the tooltip at a cursor, rate-limited by the global tooltip
    /// worker.
    pub fn tooltip(&self, source: &Source, cursor: usize) -> Option<Tooltip> {
        let token = &self.analysis.workers.tooltip;
        token.enter(|| tooltip_(self.world(), source, cursor))
    }

    /// Reads and parses the package manifest identified by `toml_id`.
    pub fn get_manifest(&self, toml_id: TypstFileId) -> StrResult<PackageManifest> {
        crate::package::get_manifest(self.world(), toml_id)
    }

    /// Computes (or fetches from the lifetime-stamped global cache) the
    /// signature for a target. The cache family and key depend on the kind
    /// of target.
    pub fn compute_signature(
        self: &Arc<Self>,
        func: SignatureTarget,
        compute: impl FnOnce(&Arc<Self>) -> Option<Signature> + Send + Sync + 'static,
    ) -> Option<Signature> {
        let res = match func {
            SignatureTarget::Def(src, def) => self
                .analysis
                .caches
                .def_signatures
                .entry(hash128(&(src, def.clone())), self.lifetime),
            SignatureTarget::SyntaxFast(source, span) => {
                // The extra `true` distinguishes fast keys from full
                // `Syntax` keys over the same (source, span).
                let cache_key = (source, span, true);
                self.analysis
                    .caches
                    .static_signatures
                    .entry(hash128(&cache_key), self.lifetime)
            }
            SignatureTarget::Syntax(source, span) => {
                let cache_key = (source, span);
                self.analysis
                    .caches
                    .static_signatures
                    .entry(hash128(&cache_key), self.lifetime)
            }
            SignatureTarget::Convert(rt) => self
                .analysis
                .caches
                .signatures
                .entry(hash128(&(&rt, true)), self.lifetime),
            SignatureTarget::Runtime(rt) => self
                .analysis
                .caches
                .signatures
                .entry(hash128(&rt), self.lifetime),
        };
        res.get_or_init(|| compute(self)).clone()
    }

    /// Computes (or fetches from the global cache) the parsed docstring of a
    /// definition.
    pub(crate) fn compute_docstring(
        self: &Arc<Self>,
        fid: TypstFileId,
        docs: String,
        kind: DefKind,
    ) -> Option<Arc<DocString>> {
        let res = self
            .analysis
            .caches
            .docstrings
            .entry(hash128(&(fid, &docs, kind)), self.lifetime);
        res.get_or_init(|| {
            crate::syntax::docs::do_compute_docstring(self, fid, docs, kind).map(Arc::new)
        })
        .clone()
    }

    /// Strips HTML comments from markup if the `remove_html` option is set;
    /// otherwise returns the markup unchanged.
    pub fn remove_html(&self, markup: EcoString) -> EcoString {
        if !self.analysis.remove_html {
            return markup;
        }

        static REMOVE_HTML_COMMENT_REGEX: LazyLock<regex::Regex> =
            LazyLock::new(|| regex::Regex::new(r#"<!--[\s\S]*?-->"#).unwrap());
        REMOVE_HTML_COMMENT_REGEX
            .replace_all(&markup, "")
            .trim()
            .into()
    }

    /// A statistics guard for one (file, query-kind) pair.
    fn query_stat(&self, id: TypstFileId, query: &'static str) -> QueryStatGuard {
        self.analysis.stats.stat(id, query)
    }

    /// Intentionally a no-op: background type-check prefetching is currently
    /// disabled. NOTE(review): elided original body may have spawned work —
    /// confirm against upstream history.
    pub(crate) fn prefetch_type_check(self: &Arc<Self>, _fid: TypstFileId) {
    }

    /// Eagerly runs expression and type analysis over `entry_point` and,
    /// transitively, every module it imports (each module analyzed once).
    pub(crate) fn preload_package(self: Arc<Self>, entry_point: TypstFileId) {
        crate::log_debug_ct!("preload package start {entry_point:?}");

        #[derive(Clone)]
        struct Preloader {
            shared: Arc<SharedContext>,
            analyzed: Arc<Mutex<HashSet<TypstFileId>>>,
        }

        impl Preloader {
            fn work(&self, fid: TypstFileId) {
                crate::log_debug_ct!("preload package {fid:?}");
                let source = self.shared.source_by_id(fid).ok().unwrap();
                let exprs = self.shared.expr_stage(&source);
                self.shared.type_check(&source);
                exprs.imports.iter().for_each(|(fid, _)| {
                    // `insert` returning false means already analyzed.
                    if !self.analyzed.lock().insert(*fid) {
                        return;
                    }
                    self.work(*fid);
                })
            }
        }

        let preloader = Preloader {
            shared: self,
            analyzed: Arc::default(),
        };

        preloader.work(entry_point);
    }
}
1167
1168type DeferredCompute<T> = Arc<OnceLock<T>>;
1170
/// An incremental, revisioned memoization map.
///
/// Lookups in the current revision (`next`) consult the previous revision
/// (`prev`) and a cross-revision `global` store as warm-start hints before
/// recomputing.
#[derive(Clone)]
struct IncrCacheMap<K, V> {
    /// The revision this snapshot of the map belongs to.
    revision: usize,
    /// Cross-revision store: the newest value per key, stamped with the
    /// revision that produced it.
    global: Arc<Mutex<FxDashMap<K, (usize, V)>>>,
    /// Completed (or in-flight) slots from the previous revision.
    prev: Arc<Mutex<FxHashMap<K, DeferredCompute<V>>>>,
    /// Slots being filled in the current revision.
    next: Arc<Mutex<FxHashMap<K, DeferredCompute<V>>>>,
}
1178
1179impl<K: Eq + Hash, V> Default for IncrCacheMap<K, V> {
1180 fn default() -> Self {
1181 Self {
1182 revision: 0,
1183 global: Arc::default(),
1184 prev: Arc::default(),
1185 next: Arc::default(),
1186 }
1187 }
1188}
1189
impl<K, V> IncrCacheMap<K, V> {
    /// Memoizes `compute(prev)` for `key` within the current revision.
    ///
    /// The previous value passed to `compute` is looked up first in the
    /// previous revision's slots, then in the cross-revision `global` store,
    /// so incremental computations can reuse earlier results. The fresh
    /// result is published to `global` unless a newer revision already wrote
    /// there.
    fn compute(&self, key: K, compute: impl FnOnce(Option<V>) -> V) -> V
    where
        K: Clone + Eq + Hash,
        V: Clone,
    {
        let next = self.next.lock().entry(key.clone()).or_default().clone();

        next.get_or_init(|| {
            let prev = self.prev.lock().get(&key).cloned();
            let prev = prev.and_then(|prev| prev.get().cloned());
            let prev = prev.or_else(|| {
                let global = self.global.lock();
                global.get(&key).map(|global| global.1.clone())
            });

            let res = compute(prev);

            // Publish to the cross-revision store, keeping the entry from
            // the newest revision.
            let global = self.global.lock();
            let entry = global.entry(key.clone());
            use dashmap::mapref::entry::Entry;
            match entry {
                Entry::Occupied(mut entry) => {
                    let (revision, _) = entry.get();
                    if *revision < self.revision {
                        entry.insert((self.revision, res.clone()));
                    }
                }
                Entry::Vacant(entry) => {
                    entry.insert((self.revision, res.clone()));
                }
            }

            res
        })
        .clone()
    }

    /// Advances to `revision`: the current `next` slots become the new
    /// `prev`, the `global` store is carried over, and `next` starts empty.
    fn crawl(&self, revision: usize) -> Self {
        Self {
            revision,
            prev: self.next.clone(),
            global: self.global.clone(),
            next: Default::default(),
        }
    }
}
1237
/// A shared concurrent cache keyed by 128-bit hashes.
#[derive(Clone)]
struct CacheMap<T> {
    /// Each value is tagged with the `u64` lifetime passed at insertion
    /// (see `entry`); `retain` can inspect and evict by that tag.
    m: Arc<FxDashMap<u128, (u64, T)>>,
}
1243
1244impl<T> Default for CacheMap<T> {
1245 fn default() -> Self {
1246 Self {
1247 m: Default::default(),
1248 }
1250 }
1251}
1252
impl<T> CacheMap<T> {
    /// Removes all cached entries.
    fn clear(&self) {
        self.m.clear();
    }

    /// Keeps only the entries for which `f` returns `true`; `f` may also
    /// mutate the stored `(lifetime, value)` pair in place.
    fn retain(&self, mut f: impl FnMut(&mut (u64, T)) -> bool) {
        self.m.retain(|_k, v| f(v));
    }
}
1262
1263impl<T: Default + Clone> CacheMap<T> {
1264 fn entry(&self, key: u128, lifetime: u64) -> T {
1265 let entry = self.m.entry(key);
1266 let entry = entry.or_insert_with(|| (lifetime, T::default()));
1267 entry.1.clone()
1268 }
1269}
1270
/// Rate limiters shared across analysis snapshots; each one serializes a
/// class of expensive tasks (see [`RateLimiter::enter`]).
#[derive(Default)]
pub struct AnalysisGlobalWorkers {
    /// Serializes import analysis tasks.
    import: RateLimiter,
    /// Serializes expression analysis tasks.
    expression: RateLimiter,
    /// Serializes tooltip analysis tasks.
    tooltip: RateLimiter,
}
1281
/// Analysis caches shared globally across snapshots and revisions.
#[derive(Default, Clone)]
pub struct AnalysisGlobalCaches {
    /// Shared counter; presumably supplies the `lifetime` tag for new cache
    /// entries (see `CacheMap::entry`) — usage not visible in this chunk.
    lifetime: Arc<AtomicU64>,
    /// The lifetime value at which the caches were last cleared —
    /// NOTE(review): confirm against the cache-eviction call sites.
    clear_lifetime: Arc<AtomicU64>,
    /// Lazily computed signatures for definitions.
    def_signatures: CacheMap<DeferredCompute<Option<Signature>>>,
    /// Lazily computed signatures for static call targets.
    static_signatures: CacheMap<DeferredCompute<Option<Signature>>>,
    /// Lazily computed signatures (general case).
    signatures: CacheMap<DeferredCompute<Option<Signature>>>,
    /// Lazily computed docstrings.
    docstrings: CacheMap<DeferredCompute<Option<Arc<DocString>>>>,
    /// Cached value-to-type (term) results.
    terms: CacheMap<(Value, Ty)>,
}
1294
/// Analysis caches local to a single query context, computed lazily at most
/// once per context.
#[derive(Default)]
pub struct AnalysisLocalCaches {
    /// Per-module analysis caches.
    modules: HashMap<TypstFileId, ModuleAnalysisLocalCache>,
    /// Candidate files for completion, computed on first use.
    completion_files: OnceLock<Vec<TypstFileId>>,
    /// Files under the workspace root, computed on first use.
    root_files: OnceLock<Vec<TypstFileId>>,
    /// The module dependency graph, computed on first use.
    module_deps: OnceLock<HashMap<TypstFileId, ModuleDependency>>,
}
1307
/// Per-module analysis results, each computed lazily at most once.
#[derive(Default)]
pub struct ModuleAnalysisLocalCache {
    /// The module's expression analysis result.
    expr_stage: OnceLock<ExprInfo>,
    /// The module's type-check result.
    type_check: OnceLock<Arc<TypeInfo>>,
}
1317
/// Revision-scoped analysis caches managed by a [`RevisionManager`].
#[derive(Default)]
pub struct AnalysisRevCache {
    /// The fallback slot cloned when a new revision has no base slot to
    /// derive from (see `find_revision`).
    default_slot: AnalysisRevSlot,
    /// Tracks live revisions and decides when slots can be collected.
    manager: RevisionManager<AnalysisRevSlot>,
}
1325
1326impl RevisionManagerLike for AnalysisRevCache {
1327 fn gc(&mut self, rev: usize) {
1328 self.manager.gc(rev);
1329
1330 {
1332 let mut max_ei = FxHashMap::default();
1333 let es = self.default_slot.expr_stage.global.lock();
1334 for r in es.iter() {
1335 let rev: &mut usize = max_ei.entry(r.1.fid).or_default();
1336 *rev = (*rev).max(r.1.revision);
1337 }
1338 es.retain(|_, r| r.1.revision == *max_ei.get(&r.1.fid).unwrap_or(&0));
1339 }
1340
1341 {
1342 let mut max_ti = FxHashMap::default();
1343 let ts = self.default_slot.type_check.global.lock();
1344 for r in ts.iter() {
1345 let rev: &mut usize = max_ti.entry(r.1.fid).or_default();
1346 *rev = (*rev).max(r.1.revision);
1347 }
1348 ts.retain(|_, r| r.1.revision == *max_ti.get(&r.1.fid).unwrap_or(&0));
1349 }
1350
1351 {
1352 let mut max_li = FxHashMap::default();
1353 let ts = self.default_slot.lint.global.lock();
1354 for r in ts.iter() {
1355 let rev: &mut usize = max_li.entry(r.1.fid).or_default();
1356 *rev = (*rev).max(r.1.revision);
1357 }
1358 ts.retain(|_, r| r.1.revision == *max_li.get(&r.1.fid).unwrap_or(&0));
1359 }
1360 }
1361}
1362
impl AnalysisRevCache {
    /// Drops all revision slots and resets the default slot.
    fn clear(&mut self) {
        self.manager.clear();
        self.default_slot = Default::default();
    }

    /// Gets or creates the cache slot for `revision`, registering the access
    /// on the lock `lg` first.
    ///
    /// A newly created slot is derived (`crawl`ed) from the base slot chosen
    /// by the revision manager so caches are reused incrementally; when there
    /// is no base, it starts from a clone of `default_slot`.
    fn find_revision(
        &mut self,
        revision: NonZeroUsize,
        lg: &AnalysisRevLock,
    ) -> Arc<RevisionSlot<AnalysisRevSlot>> {
        lg.inner.access(revision);
        self.manager.find_revision(revision, |slot_base| {
            log::debug!("analysis revision {} is created", revision.get());
            slot_base
                .map(|slot| AnalysisRevSlot {
                    revision: slot.revision,
                    expr_stage: slot.data.expr_stage.crawl(revision.get()),
                    type_check: slot.data.type_check.crawl(revision.get()),
                    lint: slot.data.lint.crawl(revision.get()),
                })
                .unwrap_or_else(|| self.default_slot.clone())
        })
    }
}
1389
/// A guard holding an analysis revision alive; dropping it may trigger
/// garbage collection of stale revisions (see the `Drop` impl).
pub struct AnalysisRevLock {
    /// The underlying lock handle from the revision manager.
    inner: RevisionLock,
    /// NOTE(review): appears to carry semantic-token state for this revision;
    /// its usage is not visible in this chunk — confirm at call sites.
    tokens: Option<SemanticTokenContext>,
    /// The cache to garbage-collect once this lock is released.
    grid: Arc<Mutex<AnalysisRevCache>>,
}
1396
impl Drop for AnalysisRevLock {
    fn drop(&mut self) {
        let mut mu = self.grid.lock();
        // Release our revision; `unlock` yields a revision number when a
        // garbage-collection pass should run.
        let gc_revision = mu.manager.unlock(&mut self.inner);

        if let Some(gc_revision) = gc_revision {
            let grid = self.grid.clone();
            // Collect on a background thread so dropping the lock stays
            // cheap; the cache is re-locked there.
            rayon::spawn(move || {
                grid.lock().gc(gc_revision);
            });
        }
    }
}
1410
/// The analysis caches belonging to a single revision.
#[derive(Default, Clone)]
struct AnalysisRevSlot {
    /// The revision this slot serves.
    revision: usize,
    /// Incremental cache of expression analysis results, keyed by a 128-bit
    /// hash.
    expr_stage: IncrCacheMap<u128, ExprInfo>,
    /// Incremental cache of type-check results, keyed by a 128-bit hash.
    type_check: IncrCacheMap<u128, Arc<TypeInfo>>,
    /// Incremental cache of lint results, keyed by a 128-bit hash.
    lint: IncrCacheMap<u128, LintInfo>,
}
1418
impl Drop for AnalysisRevSlot {
    fn drop(&mut self) {
        // Trace slot teardown; pairs with the creation log in `find_revision`.
        log::debug!("analysis revision {} is dropped", self.revision);
    }
}
1424
/// Rounds `cursor` up to the nearest UTF-8 character boundary in `text`,
/// clamping the result to `text.len()`.
fn ceil_char_boundary(text: &str, cursor: usize) -> usize {
    let end = text.len();
    // The first boundary at or after `cursor`; a past-the-end cursor clamps
    // to the string length, which is always a valid boundary.
    (cursor..end)
        .find(|&idx| text.is_char_boundary(idx))
        .unwrap_or(end)
}
1433
/// Extracts bibliography information from a document containing a
/// bibliography element; memoized so repeated queries against the same
/// world/introspector pair are free.
#[typst_macros::time]
#[comemo::memoize]
fn analyze_bib(
    world: Tracked<dyn World + '_>,
    introspector: Tracked<Introspector>,
) -> Option<Arc<BibInfo>> {
    // Locate the bibliography element; bail out if the document has none.
    let bib_elem = BibliographyElem::find(introspector).ok()?;

    // The CSL style resolved from the element's `style` argument.
    let csl_style = bib_elem.style.get_cloned(StyleChain::default()).derived;

    // The `sources` argument must evaluate to an array of paths.
    let Value::Array(paths) = bib_elem.sources.clone().into_value() else {
        return None;
    };
    // Source paths are resolved relative to the file containing the element.
    let elem_fid = bib_elem.span().id()?;
    let files = paths
        .into_iter()
        .flat_map(|path| path.cast().ok())
        .flat_map(|bib_path: EcoString| {
            let bib_fid = resolve_id_by_path(world.deref(), elem_fid, &bib_path)?;
            // Unresolvable or unreadable sources are skipped, not fatal.
            Some((bib_fid, world.file(bib_fid).ok()?))
        });

    bib_info(csl_style, files)
}
1460
1461#[comemo::memoize]
1462fn loc_info(bytes: Bytes) -> Option<EcoVec<(usize, String)>> {
1463 let mut loc = EcoVec::new();
1464 let mut offset = 0;
1465 for line in bytes.split(|byte| *byte == b'\n') {
1466 loc.push((offset, String::from_utf8(line.to_owned()).ok()?));
1467 offset += line.len() + 1;
1468 }
1469 Some(loc)
1470}
1471
1472fn find_loc(
1473 len: usize,
1474 loc: &EcoVec<(usize, String)>,
1475 mut offset: usize,
1476 encoding: PositionEncoding,
1477) -> Option<LspPosition> {
1478 if offset > len {
1479 offset = len;
1480 }
1481
1482 let r = match loc.binary_search_by_key(&offset, |line| line.0) {
1483 Ok(i) => i,
1484 Err(i) => i - 1,
1485 };
1486
1487 let (start, s) = loc.get(r)?;
1488 let byte_offset = offset.saturating_sub(*start);
1489
1490 let column_prefix = if byte_offset <= s.len() {
1491 &s[..byte_offset]
1492 } else {
1493 let line = (r + 1) as u32;
1494 return Some(LspPosition { line, character: 0 });
1495 };
1496
1497 let line = r as u32;
1498 let character = match encoding {
1499 PositionEncoding::Utf8 => column_prefix.chars().count(),
1500 PositionEncoding::Utf16 => column_prefix.chars().map(|ch| ch.len_utf16()).sum(),
1501 } as u32;
1502
1503 Some(LspPosition { line, character })
1504}
1505
/// A worklist-based context for searching files transitively, ensuring each
/// file is visited at most once.
pub struct SearchCtx<'a> {
    /// The local analysis context, used to resolve module dependencies.
    pub ctx: &'a mut LocalContext,
    /// Files that have already been scheduled.
    pub searched: HashSet<TypstFileId>,
    /// Files waiting to be processed.
    pub worklist: Vec<TypstFileId>,
}
1515
1516impl SearchCtx<'_> {
1517 pub fn push(&mut self, fid: TypstFileId) -> bool {
1519 if self.searched.insert(fid) {
1520 self.worklist.push(fid);
1521 true
1522 } else {
1523 false
1524 }
1525 }
1526
1527 pub fn push_dependents(&mut self, fid: TypstFileId) {
1529 let deps = self.ctx.module_dependencies().get(&fid);
1530 let dependents = deps.map(|dep| dep.dependents.clone()).into_iter().flatten();
1531 for dep in dependents {
1532 self.push(dep);
1533 }
1534 }
1535}
1536
/// A mutual-exclusion based limiter: at most one task guarded by the same
/// limiter runs at any time.
#[derive(Default)]
pub struct RateLimiter {
    /// The token serializing entry into [`Self::enter`].
    token: std::sync::Mutex<()>,
}

impl RateLimiter {
    /// Runs `f` while holding the limiter's token, so calls through the same
    /// limiter execute one at a time, and returns `f`'s result.
    #[must_use]
    pub fn enter<T>(&self, f: impl FnOnce() -> T) -> T {
        let _token = self.token.lock().unwrap();
        f()
    }
}