use std::num::NonZeroUsize;
use std::ops::DerefMut;
use std::sync::OnceLock;
use std::sync::atomic::{AtomicU64, Ordering};
use std::{collections::HashSet, ops::Deref};

use comemo::{Track, Tracked};
use lsp_types::Url;
use parking_lot::Mutex;
use rustc_hash::FxHashMap;
use tinymist_analysis::docs::DocString;
use tinymist_analysis::stats::AllocStats;
use tinymist_analysis::syntax::classify_def_loosely;
use tinymist_analysis::ty::term_value;
use tinymist_analysis::{analyze_expr_, analyze_import_};
use tinymist_lint::LintInfo;
use tinymist_project::{LspComputeGraph, LspWorld, TaskWhen};
use tinymist_std::hash::{FxDashMap, hash128};
use tinymist_std::typst::TypstDocument;
use tinymist_world::debug_loc::DataSource;
use tinymist_world::vfs::{PathResolution, WorkspaceResolver};
use tinymist_world::{DETACHED_ENTRY, EntryReader};
use typst::diag::{At, FileError, FileResult, SourceDiagnostic, SourceResult, StrResult};
use typst::foundations::{Bytes, IntoValue, Module, StyleChain, Styles};
use typst::introspection::Introspector;
use typst::layout::Position;
use typst::model::BibliographyElem;
use typst::syntax::package::{PackageManifest, PackageSpec};
use typst::syntax::{Span, VirtualPath};
use typst_shim::eval::{Eval, eval_compat};

use super::{LspQuerySnapshot, TypeEnv};
use crate::adt::revision::{RevisionLock, RevisionManager, RevisionManagerLike, RevisionSlot};
use crate::analysis::prelude::*;
use crate::analysis::{
    AnalysisStats, BibInfo, CompletionFeat, Definition, PathKind, QueryStatGuard,
    SemanticTokenCache, SemanticTokenContext, SemanticTokens, Signature, SignatureTarget, Ty,
    TypeInfo, analyze_signature, bib_info, definition, post_type_check,
};
use crate::docs::{DefDocs, TidyModuleDocs};
use crate::syntax::{
    Decl, DefKind, ExprInfo, ExprRoute, LexicalScope, ModuleDependency, SyntaxClass,
    classify_syntax, construct_module_dependencies, is_mark, resolve_id_by_path,
    scan_workspace_files,
};
use crate::upstream::{Tooltip, tooltip_};
use crate::{
    ColorTheme, CompilerQueryRequest, LspPosition, LspRange, LspWorldExt, PositionEncoding,
};

macro_rules! interned_str {
    ($name:ident, $value:expr) => {
        static $name: LazyLock<Interned<str>> = LazyLock::new(|| $value.into());
    };
}

/// The analysis configuration and shared caches for language queries.
#[derive(Default, Clone)]
pub struct Analysis {
    /// The position encoding for the client.
    pub position_encoding: PositionEncoding,
    /// Whether to allow overlapping semantic tokens.
    pub allow_overlapping_token: bool,
    /// Whether to allow multiline semantic tokens.
    pub allow_multiline_token: bool,
    /// Whether to remove HTML comments from markup content.
    pub remove_html: bool,
    /// Whether extended code actions are enabled.
    pub extended_code_action: bool,
    /// The completion feature configuration.
    pub completion_feat: CompletionFeat,
    /// The editor's color theme.
    pub color_theme: ColorTheme,
    /// When to run lint checks.
    pub lint: TaskWhen,
    /// The optional periscope provider, rendering a preview of the document
    /// at a position.
    pub periscope: Option<Arc<dyn PeriscopeProvider + Send + Sync>>,
    /// The global workers that rate-limit heavy analyses.
    pub workers: Arc<AnalysisGlobalWorkers>,
    /// The semantic token caches.
    pub tokens_caches: Arc<Mutex<SemanticTokenCache>>,
    /// The global caches shared across revisions.
    pub caches: AnalysisGlobalCaches,
    /// The revisioned analysis caches.
    pub analysis_rev_cache: Arc<Mutex<AnalysisRevCache>>,
    /// The query statistics.
    pub stats: Arc<AnalysisStats>,
}

impl Analysis {
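    /// Enters a local analysis context against the given world snapshot,
    /// locking the current revision of the analysis caches.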
    pub fn enter(&self, world: LspWorld) -> LocalContextGuard {
        self.enter_(world, self.lock_revision(None))
    }

    pub(crate) fn enter_(&self, world: LspWorld, mut lg: AnalysisRevLock) -> LocalContextGuard {
        let lifetime = self.caches.lifetime.fetch_add(1, Ordering::SeqCst);
        let slot = self
            .analysis_rev_cache
            .lock()
            .find_revision(world.revision(), &lg);
        let tokens = lg.tokens.take();
        LocalContextGuard {
            _rev_lock: lg,
            local: LocalContext {
                tokens,
                caches: AnalysisLocalCaches::default(),
                shared: Arc::new(SharedContext {
                    slot,
                    lifetime,
                    world,
                    analysis: self.clone(),
                }),
            },
        }
    }

    pub fn query_snapshot(
        self: Arc<Self>,
        snap: LspComputeGraph,
        req: Option<&CompilerQueryRequest>,
    ) -> LspQuerySnapshot {
        let rev_lock = self.lock_revision(req);
        LspQuerySnapshot {
            snap,
            analysis: self,
            rev_lock,
        }
    }

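    /// Locks a revision of the analysis caches so that revisioned data is
    /// retained while the returned lock is alive.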
    #[must_use]
    pub fn lock_revision(&self, req: Option<&CompilerQueryRequest>) -> AnalysisRevLock {
        let mut grid = self.analysis_rev_cache.lock();

        AnalysisRevLock {
            tokens: match req {
                Some(CompilerQueryRequest::SemanticTokensFull(req)) => Some(
                    SemanticTokenCache::acquire(self.tokens_caches.clone(), &req.path, None),
                ),
                Some(CompilerQueryRequest::SemanticTokensDelta(req)) => {
                    Some(SemanticTokenCache::acquire(
                        self.tokens_caches.clone(),
                        &req.path,
                        Some(&req.previous_result_id),
                    ))
                }
                _ => None,
            },
            inner: grid.manager.lock_estimated(),
            grid: self.analysis_rev_cache.clone(),
        }
    }

    pub fn clear_cache(&self) {
        self.caches.signatures.clear();
        self.caches.docstrings.clear();
        self.caches.def_signatures.clear();
        self.caches.static_signatures.clear();
        self.caches.terms.clear();
        self.tokens_caches.lock().clear();
        self.analysis_rev_cache.lock().clear();
    }

    pub fn report_query_stats(&self) -> String {
        self.stats.report()
    }

    pub fn report_alloc_stats(&self) -> String {
        AllocStats::report()
    }

    pub fn trigger_suggest(&self, context: bool) -> Option<Interned<str>> {
        interned_str!(INTERNED, "editor.action.triggerSuggest");

        (self.completion_feat.trigger_suggest && context).then(|| INTERNED.clone())
    }

    pub fn trigger_parameter_hints(&self, context: bool) -> Option<Interned<str>> {
        interned_str!(INTERNED, "editor.action.triggerParameterHints");
        (self.completion_feat.trigger_parameter_hints && context).then(|| INTERNED.clone())
    }

    pub fn trigger_on_snippet(&self, context: bool) -> Option<Interned<str>> {
        if !self.completion_feat.trigger_on_snippet_placeholders {
            return None;
        }

        self.trigger_suggest(context)
    }

    pub fn trigger_on_snippet_with_param_hint(&self, context: bool) -> Option<Interned<str>> {
        interned_str!(INTERNED, "tinymist.triggerSuggestAndParameterHints");
        if !self.completion_feat.trigger_on_snippet_placeholders {
            return self.trigger_parameter_hints(context);
        }

        (self.completion_feat.trigger_suggest_and_parameter_hints && context)
            .then(|| INTERNED.clone())
    }
}

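/// A provider that renders a periscope (in-editor document preview) for a
/// position in a compiled document.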
pub trait PeriscopeProvider {
    /// Resolves the periscope content at the given position in the document.
    fn periscope_at(
        &self,
        _ctx: &mut LocalContext,
        _doc: &TypstDocument,
        _pos: Position,
    ) -> Option<String> {
        None
    }
}

/// A guard holding a [`LocalContext`]; the revision lock is released when the
/// guard is dropped.
pub struct LocalContextGuard {
    /// The guarded local context.
    pub local: LocalContext,
    /// The revision lock kept alive for the lifetime of the guard.
    _rev_lock: AnalysisRevLock,
}

impl Deref for LocalContextGuard {
    type Target = LocalContext;

    fn deref(&self) -> &Self::Target {
        &self.local
    }
}

impl DerefMut for LocalContextGuard {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.local
    }
}

impl Drop for LocalContextGuard {
    fn drop(&mut self) {
        self.gc();
    }
}

impl LocalContextGuard {
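    /// Sweeps long-unused entries from the global caches. Entries that have
    /// not been touched for roughly 60 lifetime ticks are evicted.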
    fn gc(&self) {
        let lifetime = self.lifetime;
        loop {
            let latest_clear_lifetime = self.analysis.caches.clear_lifetime.load(Ordering::Relaxed);
            if latest_clear_lifetime >= lifetime {
                return;
            }

            if self.analysis.caches.clear_lifetime.compare_exchange(
                latest_clear_lifetime,
                lifetime,
                Ordering::SeqCst,
                Ordering::SeqCst,
            ) != Ok(latest_clear_lifetime)
            {
                continue;
            }

            break;
        }

        let retainer = |l: u64| lifetime.saturating_sub(l) < 60;
        let caches = &self.analysis.caches;
        caches.def_signatures.retain(|(l, _)| retainer(*l));
        caches.static_signatures.retain(|(l, _)| retainer(*l));
        caches.terms.retain(|(l, _)| retainer(*l));
        caches.signatures.retain(|(l, _)| retainer(*l));
        caches.docstrings.retain(|(l, _)| retainer(*l));
    }
}

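/// A local analysis context owned by a single query. It layers per-query
/// caches on top of the [`SharedContext`].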
pub struct LocalContext {
    /// The semantic token context carried over from the request, if any.
    pub(crate) tokens: Option<SemanticTokenContext>,
    /// The per-query caches.
    pub caches: AnalysisLocalCaches,
    /// The shared context.
    pub shared: Arc<SharedContext>,
}

impl Deref for LocalContext {
    type Target = Arc<SharedContext>;

    fn deref(&self) -> &Self::Target {
        &self.shared
    }
}

impl DerefMut for LocalContext {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.shared
    }
}

impl LocalContext {
    #[cfg(test)]
    pub fn test_package_list(&mut self, f: impl FnOnce() -> Vec<(PackageSpec, Option<EcoString>)>) {
        self.world.registry.test_package_list(f);
    }

    #[cfg(test)]
    pub fn test_completion_files(&mut self, f: impl FnOnce() -> Vec<TypstFileId>) {
        self.caches.completion_files.get_or_init(f);
    }

    #[cfg(test)]
    pub fn test_files(&mut self, f: impl FnOnce() -> Vec<TypstFileId>) {
        self.caches.root_files.get_or_init(f);
    }

    pub(crate) fn completion_files(&self, pref: &PathKind) -> impl Iterator<Item = &TypstFileId> {
        let regexes = pref.ext_matcher();
        self.caches
            .completion_files
            .get_or_init(|| {
                if let Some(root) = self.world.entry_state().workspace_root() {
                    scan_workspace_files(&root, PathKind::Special.ext_matcher(), |path| {
                        WorkspaceResolver::workspace_file(Some(&root), VirtualPath::new(path))
                    })
                } else {
                    vec![]
                }
            })
            .iter()
            .filter(move |fid| {
                fid.vpath()
                    .as_rooted_path()
                    .extension()
                    .and_then(|path| path.to_str())
                    .is_some_and(|path| regexes.is_match(path))
            })
    }

    pub fn source_files(&self) -> &Vec<TypstFileId> {
        self.caches.root_files.get_or_init(|| {
            self.completion_files(&PathKind::Source {
                allow_package: false,
            })
            .copied()
            .collect()
        })
    }

    pub fn module_dependencies(&mut self) -> &HashMap<TypstFileId, ModuleDependency> {
        if self.caches.module_deps.get().is_some() {
            self.caches.module_deps.get().unwrap()
        } else {
            let deps = construct_module_dependencies(self);
            self.caches.module_deps.get_or_init(|| deps)
        }
    }

    pub fn depended_source_files(&self) -> EcoVec<TypstFileId> {
        let mut ids = self.depended_files();
        let preference = PathKind::Source {
            allow_package: false,
        };
        ids.retain(|id| preference.is_match(id.vpath().as_rooted_path()));
        ids
    }

    pub fn depended_files(&self) -> EcoVec<TypstFileId> {
        self.world.depended_files()
    }

    pub fn world(&self) -> &LspWorld {
        &self.shared.world
    }

    pub fn shared(&self) -> &Arc<SharedContext> {
        &self.shared
    }

    pub fn shared_(&self) -> Arc<SharedContext> {
        self.shared.clone()
    }

    pub fn fork_for_search(&mut self) -> SearchCtx<'_> {
        SearchCtx {
            ctx: self,
            searched: Default::default(),
            worklist: Default::default(),
        }
    }

    pub(crate) fn preload_package(&self, entry_point: TypstFileId) {
        self.shared_().preload_package(entry_point);
    }

    pub(crate) fn with_vm<T>(&self, f: impl FnOnce(&mut typst_shim::eval::Vm) -> T) -> T {
        crate::upstream::with_vm((self.world() as &dyn World).track(), f)
    }

    pub(crate) fn const_eval(&self, rr: ast::Expr<'_>) -> Option<Value> {
        SharedContext::const_eval(rr)
    }

    pub(crate) fn mini_eval(&self, rr: ast::Expr<'_>) -> Option<Value> {
        self.const_eval(rr)
            .or_else(|| self.with_vm(|vm| rr.eval(vm).ok()))
    }

    pub(crate) fn cached_tokens(&mut self, source: &Source) -> (SemanticTokens, Option<String>) {
        let tokens = crate::analysis::semantic_tokens::get_semantic_tokens(self, source);

        let result_id = self.tokens.as_ref().map(|t| {
            let id = t.next.revision;
            t.next
                .data
                .set(tokens.clone())
                .unwrap_or_else(|_| panic!("unexpected slot overwrite {id}"));
            id.to_string()
        });
        (tokens, result_id)
    }

    pub(crate) fn expr_stage_by_id(&mut self, fid: TypstFileId) -> Option<ExprInfo> {
        Some(self.expr_stage(&self.source_by_id(fid).ok()?))
    }

    pub(crate) fn expr_stage(&mut self, source: &Source) -> ExprInfo {
        let id = source.id();
        let cache = &self.caches.modules.entry(id).or_default().expr_stage;
        cache.get_or_init(|| self.shared.expr_stage(source)).clone()
    }

    pub(crate) fn type_check(&mut self, source: &Source) -> Arc<TypeInfo> {
        let id = source.id();
        let cache = &self.caches.modules.entry(id).or_default().type_check;
        cache.get_or_init(|| self.shared.type_check(source)).clone()
    }

    pub(crate) fn lint(&mut self, source: &Source) -> EcoVec<SourceDiagnostic> {
        self.shared.lint(source).diagnostics
    }

    pub(crate) fn type_check_by_id(&mut self, id: TypstFileId) -> Arc<TypeInfo> {
        let cache = &self.caches.modules.entry(id).or_default().type_check;
        cache
            .clone()
            .get_or_init(|| {
                let source = self.source_by_id(id).ok();
                source
                    .map(|s| self.shared.type_check(&s))
                    .unwrap_or_default()
            })
            .clone()
    }

    pub(crate) fn type_of_span(&mut self, s: Span) -> Option<Ty> {
        let scheme = self.type_check_by_id(s.id()?);
        let ty = scheme.type_of_span(s)?;
        Some(scheme.simplify(ty, false))
    }

    pub(crate) fn def_docs(&mut self, def: &Definition) -> Option<DefDocs> {
        match def.decl.kind() {
            DefKind::Function => {
                let sig = self.sig_of_def(def.clone())?;
                let docs = crate::docs::sig_docs(&sig)?;
                Some(DefDocs::Function(Box::new(docs)))
            }
            DefKind::Struct | DefKind::Constant | DefKind::Variable => {
                let docs = crate::docs::var_docs(self, def.decl.span())?;
                Some(DefDocs::Variable(docs))
            }
            DefKind::Module => {
                let ei = self.expr_stage_by_id(def.decl.file_id()?)?;
                Some(DefDocs::Module(TidyModuleDocs {
                    docs: ei.module_docstring.docs.clone().unwrap_or_default(),
                }))
            }
            DefKind::Reference => None,
        }
    }
}

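/// The analysis context shared (via `Arc`) by queries running against the
/// same world revision.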
pub struct SharedContext {
    /// The caches lifetime tick at the time this context was created.
    pub lifetime: u64,
    /// The world surface for the Typst compiler.
    pub world: LspWorld,
    /// The analysis configuration and global caches.
    pub analysis: Analysis,
    /// The revisioned cache slot backing this context.
    slot: Arc<RevisionSlot<AnalysisRevSlot>>,
}

impl SharedContext {
    pub fn revision(&self) -> usize {
        self.slot.revision
    }

    pub(crate) fn position_encoding(&self) -> PositionEncoding {
        self.analysis.position_encoding
    }

    pub fn to_typst_pos(&self, position: LspPosition, src: &Source) -> Option<usize> {
        crate::to_typst_position(position, self.analysis.position_encoding, src)
    }

    pub fn to_typst_pos_offset(
        &self,
        source: &Source,
        position: LspPosition,
        shift: usize,
    ) -> Option<usize> {
        let offset = self.to_typst_pos(position, source)?;
        Some(ceil_char_boundary(source.text(), offset + shift))
    }

    pub fn to_lsp_pos(&self, typst_offset: usize, src: &Source) -> LspPosition {
        crate::to_lsp_position(typst_offset, self.analysis.position_encoding, src)
    }

    pub fn to_typst_range(&self, position: LspRange, src: &Source) -> Option<Range<usize>> {
        crate::to_typst_range(position, self.analysis.position_encoding, src)
    }

    pub fn to_lsp_range(&self, position: Range<usize>, src: &Source) -> LspRange {
        crate::to_lsp_range(position, src, self.analysis.position_encoding)
    }

    pub fn to_lsp_range_(&self, position: Range<usize>, fid: TypstFileId) -> Option<LspRange> {
        let ext = fid
            .vpath()
            .as_rootless_path()
            .extension()
            .and_then(|ext| ext.to_str());
        if matches!(ext, Some("yaml" | "yml" | "bib")) {
            let bytes = self.file_by_id(fid).ok()?;
            let bytes_len = bytes.len();
            let loc = loc_info(bytes)?;
            let start = find_loc(bytes_len, &loc, position.start, self.position_encoding())?;
            let end = find_loc(bytes_len, &loc, position.end, self.position_encoding())?;
            return Some(LspRange { start, end });
        }

        let source = self.source_by_id(fid).ok()?;

        Some(self.to_lsp_range(position, &source))
    }

    pub fn path_for_id(&self, id: TypstFileId) -> Result<PathResolution, FileError> {
        self.world.path_for_id(id)
    }

    pub fn uri_for_id(&self, fid: TypstFileId) -> Result<Url, FileError> {
        self.world.uri_for_id(fid)
    }

    pub fn file_id_by_path(&self, path: &Path) -> FileResult<TypstFileId> {
        self.world.file_id_by_path(path)
    }

    pub fn file_by_id(&self, fid: TypstFileId) -> FileResult<Bytes> {
        self.world.file(fid)
    }

    pub fn source_by_id(&self, fid: TypstFileId) -> FileResult<Source> {
        self.world.source(fid)
    }

    pub fn source_by_path(&self, path: &Path) -> FileResult<Source> {
        self.source_by_id(self.file_id_by_path(path)?)
    }

    pub fn classify_span<'s>(&self, source: &'s Source, span: Span) -> Option<SyntaxClass<'s>> {
        let node = LinkedNode::new(source.root()).find(span)?;
        let cursor = node.offset() + 1;
        classify_syntax(node, cursor)
    }

    pub fn classify_for_decl<'s>(
        &self,
        source: &'s Source,
        position: LspPosition,
    ) -> Option<SyntaxClass<'s>> {
        let cursor = self.to_typst_pos_offset(source, position, 1)?;
        let mut node = LinkedNode::new(source.root()).leaf_at_compat(cursor)?;

        if cursor == node.offset() + 1 && is_mark(node.kind()) {
            let prev_leaf = node.prev_leaf();
            if let Some(prev_leaf) = prev_leaf
                && prev_leaf.range().end == node.offset()
            {
                node = prev_leaf;
            }
        }

        classify_syntax(node, cursor)
    }

    pub fn font_info(&self, font: typst::text::Font) -> Option<Arc<DataSource>> {
        self.world.font_resolver.describe_font(&font)
    }

    #[cfg(feature = "local-registry")]
    pub fn local_packages(&self) -> EcoVec<PackageSpec> {
        crate::package::list_package_by_namespace(&self.world.registry, eco_format!("local"))
            .into_iter()
            .map(|(_, spec)| spec)
            .collect()
    }

    #[cfg(not(feature = "local-registry"))]
    pub fn local_packages(&self) -> EcoVec<PackageSpec> {
        eco_vec![]
    }

    pub(crate) fn const_eval(rr: ast::Expr<'_>) -> Option<Value> {
        Some(match rr {
            ast::Expr::None(_) => Value::None,
            ast::Expr::Auto(_) => Value::Auto,
            ast::Expr::Bool(v) => Value::Bool(v.get()),
            ast::Expr::Int(v) => Value::Int(v.get()),
            ast::Expr::Float(v) => Value::Float(v.get()),
            ast::Expr::Numeric(v) => Value::numeric(v.get()),
            ast::Expr::Str(v) => Value::Str(v.get().into()),
            _ => return None,
        })
    }

    pub fn module_by_id(&self, fid: TypstFileId) -> SourceResult<Module> {
        let source = self.source_by_id(fid).at(Span::detached())?;
        self.module_by_src(source)
    }

    pub fn module_by_str(&self, rr: String) -> Option<Module> {
        let src = Source::new(*DETACHED_ENTRY, rr);
        self.module_by_src(src).ok()
    }

    pub fn module_by_src(&self, source: Source) -> SourceResult<Module> {
        eval_compat(&self.world, &source)
    }

    pub fn module_by_syntax(&self, source: &SyntaxNode) -> Option<Value> {
        let (src, scope) = self.analyze_import(source);
        if let Some(scope) = scope {
            return Some(scope);
        }

        match src {
            Some(Value::Str(s)) => {
                let id = resolve_id_by_path(&self.world, source.span().id()?, s.as_str())?;
                self.module_by_id(id).ok().map(Value::Module)
            }
            _ => None,
        }
    }

    pub(crate) fn expr_stage_by_id(self: &Arc<Self>, fid: TypstFileId) -> Option<ExprInfo> {
        Some(self.expr_stage(&self.source_by_id(fid).ok()?))
    }

    pub(crate) fn expr_stage(self: &Arc<Self>, source: &Source) -> ExprInfo {
        let mut route = ExprRoute::default();
        self.expr_stage_(source, &mut route)
    }

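    /// Computes the expression information of a source file, reusing the
    /// incremental cache slot of the current revision when possible.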
    pub(crate) fn expr_stage_(
        self: &Arc<Self>,
        source: &Source,
        route: &mut ExprRoute,
    ) -> ExprInfo {
        use crate::syntax::expr_of;
        let guard = self.query_stat(source.id(), "expr_stage");
        self.slot.expr_stage.compute(hash128(&source), |prev| {
            expr_of(self.clone(), source.clone(), route, guard, prev)
        })
    }

    pub(crate) fn exports_of(
        self: &Arc<Self>,
        source: &Source,
        route: &mut ExprRoute,
    ) -> Option<Arc<LazyHash<LexicalScope>>> {
        if let Some(s) = route.get(&source.id()) {
            return s.clone();
        }

        Some(self.expr_stage_(source, route).exports.clone())
    }

    pub(crate) fn type_check(self: &Arc<Self>, source: &Source) -> Arc<TypeInfo> {
        let mut route = TypeEnv::default();
        self.type_check_(source, &mut route)
    }

    pub(crate) fn type_check_(
        self: &Arc<Self>,
        source: &Source,
        route: &mut TypeEnv,
    ) -> Arc<TypeInfo> {
        use crate::analysis::type_check;

        let ei = self.expr_stage(source);
        let guard = self.query_stat(source.id(), "type_check");
        self.slot.type_check.compute(hash128(&ei), |prev| {
            if let Some(cache_hint) = prev.filter(|prev| prev.revision == ei.revision) {
                return cache_hint;
            }

            guard.miss();
            type_check(self.clone(), ei, route)
        })
    }

    #[typst_macros::time(span = source.root().span())]
    pub(crate) fn lint(self: &Arc<Self>, source: &Source) -> LintInfo {
        let ei = self.expr_stage(source);
        let ti = self.type_check(source);
        let guard = self.query_stat(source.id(), "lint");
        self.slot.lint.compute(hash128(&(&ei, &ti)), |_prev| {
            guard.miss();
            tinymist_lint::lint_file(&self.world, &ei, ti)
        })
    }

    pub(crate) fn type_of_func(self: &Arc<Self>, func: Func) -> Signature {
        crate::log_debug_ct!("convert runtime func {func:?}");
        analyze_signature(self, SignatureTarget::Convert(func)).unwrap()
    }

    pub(crate) fn type_of_value(self: &Arc<Self>, val: &Value) -> Ty {
        crate::log_debug_ct!("convert runtime value {val:?}");

        let cache_key = val;
        let cached = self
            .analysis
            .caches
            .terms
            .m
            .get(&hash128(&cache_key))
            .and_then(|slot| (cache_key == &slot.1.0).then_some(slot.1.1.clone()));
        if let Some(cached) = cached {
            return cached;
        }

        let res = term_value(val);

        self.analysis
            .caches
            .terms
            .m
            .entry(hash128(&cache_key))
            .or_insert_with(|| (self.lifetime, (cache_key.clone(), res.clone())));

        res
    }

    pub(crate) fn def_of_span(
        self: &Arc<Self>,
        source: &Source,
        doc: Option<&TypstDocument>,
        span: Span,
    ) -> Option<Definition> {
        let syntax = self.classify_span(source, span)?;
        definition(self, source, doc, syntax)
    }

    pub(crate) fn def_of_decl(&self, decl: &Interned<Decl>) -> Option<Definition> {
        match decl.as_ref() {
            Decl::Func(..) => Some(Definition::new(decl.clone(), None)),
            Decl::Module(..) => None,
            _ => None,
        }
    }

    pub(crate) fn def_of_syntax(
        self: &Arc<Self>,
        source: &Source,
        doc: Option<&TypstDocument>,
        syntax: SyntaxClass,
    ) -> Option<Definition> {
        definition(self, source, doc, syntax)
    }

    pub(crate) fn def_of_syntax_or_dyn(
        self: &Arc<Self>,
        source: &Source,
        doc: Option<&TypstDocument>,
        syntax: SyntaxClass,
    ) -> Option<Definition> {
        let def = self.def_of_syntax(source, doc, syntax.clone());
        match def.as_ref().map(|d| d.decl.kind()) {
            Some(DefKind::Reference | DefKind::Module | DefKind::Function) => return def,
            Some(DefKind::Struct | DefKind::Constant | DefKind::Variable) | None => {}
        }

        let know_ty_well = def
            .as_ref()
            .and_then(|d| self.simplified_type_of_span(d.decl.span()))
            .filter(|ty| !matches!(ty, Ty::Any))
            .is_some();
        if know_ty_well {
            return def;
        }

        let def_ref = def.as_ref();
        let def_name = || Some(def_ref?.name().clone());
        let dyn_def = self
            .analyze_expr(syntax.node())
            .iter()
            .find_map(|(value, _)| {
                let def = Definition::from_value(value.clone(), def_name)?;
                None.or_else(|| {
                    let source = self.source_by_id(def.decl.file_id()?).ok()?;
                    let node = LinkedNode::new(source.root()).find(def.decl.span())?;
                    let def_at_the_span = classify_def_loosely(node)?;
                    self.def_of_span(&source, doc, def_at_the_span.name()?.span())
                })
                .or(Some(def))
            });

        dyn_def.or(def)
    }

    pub(crate) fn simplified_type_of_span(self: &Arc<Self>, span: Span) -> Option<Ty> {
        let source = self.source_by_id(span.id()?).ok()?;
        let (ti, ty) = self.type_of_span_(&source, span)?;
        Some(ti.simplify(ty, false))
    }

    pub(crate) fn type_of_span(self: &Arc<Self>, span: Span) -> Option<Ty> {
        let source = self.source_by_id(span.id()?).ok()?;
        Some(self.type_of_span_(&source, span)?.1)
    }

    pub(crate) fn type_of_span_(
        self: &Arc<Self>,
        source: &Source,
        span: Span,
    ) -> Option<(Arc<TypeInfo>, Ty)> {
        let ti = self.type_check(source);
        let ty = ti.type_of_span(span)?;
        Some((ti, ty))
    }

    pub(crate) fn post_type_of_node(self: &Arc<Self>, node: LinkedNode) -> Option<Ty> {
        let id = node.span().id()?;
        let source = self.source_by_id(id).ok()?;
        let ty_chk = self.type_check(&source);

        let ty = post_type_check(self.clone(), &ty_chk, node.clone())
            .or_else(|| ty_chk.type_of_span(node.span()))?;
        Some(ty_chk.simplify(ty, false))
    }

    pub(crate) fn sig_of_def(self: &Arc<Self>, def: Definition) -> Option<Signature> {
        crate::log_debug_ct!("check definition func {def:?}");
        let source = def.decl.file_id().and_then(|id| self.source_by_id(id).ok());
        analyze_signature(self, SignatureTarget::Def(source, def))
    }

    pub(crate) fn sig_of_type(self: &Arc<Self>, ti: &TypeInfo, ty: Ty) -> Option<Signature> {
        super::sig_of_type(self, ti, ty)
    }

    pub(crate) fn sig_of_type_or_dyn(
        self: &Arc<Self>,
        ti: &TypeInfo,
        callee_ty: Ty,
        callee: &SyntaxNode,
    ) -> Option<Signature> {
        self.sig_of_type(ti, callee_ty).or_else(|| {
            self.analyze_expr(callee).iter().find_map(|(value, _)| {
                let Value::Func(callee) = value else {
                    return None;
                };

                analyze_signature(self, SignatureTarget::Runtime(callee.clone()))
            })
        })
    }

    pub fn analyze_import(&self, source: &SyntaxNode) -> (Option<Value>, Option<Value>) {
        if let Some(v) = source.cast::<ast::Expr>().and_then(Self::const_eval) {
            return (Some(v), None);
        }
        let token = &self.analysis.workers.import;
        token.enter(|| analyze_import_(&self.world, source))
    }

    pub fn analyze_expr(&self, source: &SyntaxNode) -> EcoVec<(Value, Option<Styles>)> {
        let token = &self.analysis.workers.expression;
        token.enter(|| analyze_expr_(&self.world, source))
    }

    pub fn analyze_bib(&self, introspector: &Introspector) -> Option<Arc<BibInfo>> {
        let world = &self.world;
        let world = (world as &dyn World).track();

        analyze_bib(world, introspector.track())
    }

    pub fn tooltip(&self, source: &Source, cursor: usize) -> Option<Tooltip> {
        let token = &self.analysis.workers.tooltip;
        token.enter(|| tooltip_(&self.world, source, cursor))
    }

    pub fn get_manifest(&self, toml_id: TypstFileId) -> StrResult<PackageManifest> {
        crate::package::get_manifest(&self.world, toml_id)
    }

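    /// Gets or computes the signature for the given target, caching the
    /// result in the global signature caches keyed by the target's hash.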
    pub fn compute_signature(
        self: &Arc<Self>,
        func: SignatureTarget,
        compute: impl FnOnce(&Arc<Self>) -> Option<Signature> + Send + Sync + 'static,
    ) -> Option<Signature> {
        let res = match func {
            SignatureTarget::Def(src, def) => self
                .analysis
                .caches
                .def_signatures
                .entry(hash128(&(src, def.clone())), self.lifetime),
            SignatureTarget::SyntaxFast(source, span) => {
                let cache_key = (source, span, true);
                self.analysis
                    .caches
                    .static_signatures
                    .entry(hash128(&cache_key), self.lifetime)
            }
            SignatureTarget::Syntax(source, span) => {
                let cache_key = (source, span);
                self.analysis
                    .caches
                    .static_signatures
                    .entry(hash128(&cache_key), self.lifetime)
            }
            SignatureTarget::Convert(rt) => self
                .analysis
                .caches
                .signatures
                .entry(hash128(&(&rt, true)), self.lifetime),
            SignatureTarget::Runtime(rt) => self
                .analysis
                .caches
                .signatures
                .entry(hash128(&rt), self.lifetime),
        };
        res.get_or_init(|| compute(self)).clone()
    }

    pub(crate) fn compute_docstring(
        self: &Arc<Self>,
        fid: TypstFileId,
        docs: String,
        kind: DefKind,
    ) -> Option<Arc<DocString>> {
        let res = self
            .analysis
            .caches
            .docstrings
            .entry(hash128(&(fid, &docs, kind)), self.lifetime);
        res.get_or_init(|| {
            crate::syntax::docs::do_compute_docstring(self, fid, docs, kind).map(Arc::new)
        })
        .clone()
    }

    pub fn remove_html(&self, markup: EcoString) -> EcoString {
        if !self.analysis.remove_html {
            return markup;
        }

        static REMOVE_HTML_COMMENT_REGEX: LazyLock<regex::Regex> =
            LazyLock::new(|| regex::Regex::new(r#"<!--[\s\S]*?-->"#).unwrap());
        REMOVE_HTML_COMMENT_REGEX
            .replace_all(&markup, "")
            .trim()
            .into()
    }

    fn query_stat(&self, id: TypstFileId, query: &'static str) -> QueryStatGuard {
        let stats = &self.analysis.stats.query_stats;
        let entry = stats.entry(id).or_default();
        let entry = entry.entry(query).or_default();
        QueryStatGuard {
            bucket: entry.clone(),
            since: tinymist_std::time::Instant::now(),
        }
    }

    pub(crate) fn prefetch_type_check(self: &Arc<Self>, _fid: TypstFileId) {
    }

    pub(crate) fn preload_package(self: Arc<Self>, entry_point: TypstFileId) {
        crate::log_debug_ct!("preload package start {entry_point:?}");

        #[derive(Clone)]
        struct Preloader {
            shared: Arc<SharedContext>,
            analyzed: Arc<Mutex<HashSet<TypstFileId>>>,
        }

        impl Preloader {
            fn work(&self, fid: TypstFileId) {
                crate::log_debug_ct!("preload package {fid:?}");
                let source = self.shared.source_by_id(fid).ok().unwrap();
                let exprs = self.shared.expr_stage(&source);
                self.shared.type_check(&source);
                exprs.imports.iter().for_each(|(fid, _)| {
                    if !self.analyzed.lock().insert(*fid) {
                        return;
                    }
                    self.work(*fid);
                })
            }
        }

        let preloader = Preloader {
            shared: self,
            analyzed: Arc::default(),
        };

        preloader.work(entry_point);
    }
}

type DeferredCompute<T> = Arc<OnceLock<T>>;

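/// An incremental cache for computation results across revisions.
///
/// A lookup in [`IncrCacheMap::compute`] first consults the `next` map of the
/// current revision, then the `prev` map inherited from the previous
/// revision, and finally the `global` map shared by all revisions; the
/// computed value is then published to `global` if the stored revision there
/// is older than the current one.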
#[derive(Clone)]
struct IncrCacheMap<K, V> {
    revision: usize,
    global: Arc<Mutex<FxDashMap<K, (usize, V)>>>,
    prev: Arc<Mutex<FxHashMap<K, DeferredCompute<V>>>>,
    next: Arc<Mutex<FxHashMap<K, DeferredCompute<V>>>>,
}

impl<K: Eq + Hash, V> Default for IncrCacheMap<K, V> {
    fn default() -> Self {
        Self {
            revision: 0,
            global: Arc::default(),
            prev: Arc::default(),
            next: Arc::default(),
        }
    }
}

impl<K, V> IncrCacheMap<K, V> {
    fn compute(&self, key: K, compute: impl FnOnce(Option<V>) -> V) -> V
    where
        K: Clone + Eq + Hash,
        V: Clone,
    {
        let next = self.next.lock().entry(key.clone()).or_default().clone();

        next.get_or_init(|| {
            let prev = self.prev.lock().get(&key).cloned();
            let prev = prev.and_then(|prev| prev.get().cloned());
            let prev = prev.or_else(|| {
                let global = self.global.lock();
                global.get(&key).map(|global| global.1.clone())
            });

            let res = compute(prev);

            let global = self.global.lock();
            let entry = global.entry(key.clone());
            use dashmap::mapref::entry::Entry;
            match entry {
                Entry::Occupied(mut entry) => {
                    let (revision, _) = entry.get();
                    if *revision < self.revision {
                        entry.insert((self.revision, res.clone()));
                    }
                }
                Entry::Vacant(entry) => {
                    entry.insert((self.revision, res.clone()));
                }
            }

            res
        })
        .clone()
    }

    fn crawl(&self, revision: usize) -> Self {
        Self {
            revision,
            prev: self.next.clone(),
            global: self.global.clone(),
            next: Default::default(),
        }
    }
}

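/// A lifetime-tagged cache shared across analysis contexts, keyed by a
/// 128-bit hash of the cache key.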
#[derive(Clone)]
struct CacheMap<T> {
    m: Arc<FxDashMap<u128, (u64, T)>>,
}

impl<T> Default for CacheMap<T> {
    fn default() -> Self {
        Self {
            m: Default::default(),
        }
    }
}

impl<T> CacheMap<T> {
    fn clear(&self) {
        self.m.clear();
    }

    fn retain(&self, mut f: impl FnMut(&mut (u64, T)) -> bool) {
        self.m.retain(|_k, v| f(v));
    }
}

impl<T: Default + Clone> CacheMap<T> {
    fn entry(&self, key: u128, lifetime: u64) -> T {
        let entry = self.m.entry(key);
        let entry = entry.or_insert_with(|| (lifetime, T::default()));
        entry.1.clone()
    }
}

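/// Rate limiters that serialize heavy upstream analyses so that only one of
/// each kind runs at a time.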
#[derive(Default)]
pub struct AnalysisGlobalWorkers {
    /// Limits import analysis.
    import: RateLimiter,
    /// Limits expression analysis.
    expression: RateLimiter,
    /// Limits tooltip analysis.
    tooltip: RateLimiter,
}

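/// The global caches of analysis results, shared across revisions and swept
/// by [`LocalContextGuard::gc`] based on the lifetime counter.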
#[derive(Default, Clone)]
pub struct AnalysisGlobalCaches {
    lifetime: Arc<AtomicU64>,
    clear_lifetime: Arc<AtomicU64>,
    def_signatures: CacheMap<DeferredCompute<Option<Signature>>>,
    static_signatures: CacheMap<DeferredCompute<Option<Signature>>>,
    signatures: CacheMap<DeferredCompute<Option<Signature>>>,
    docstrings: CacheMap<DeferredCompute<Option<Arc<DocString>>>>,
    terms: CacheMap<(Value, Ty)>,
}

/// The per-query caches owned by a [`LocalContext`].
#[derive(Default)]
pub struct AnalysisLocalCaches {
    modules: HashMap<TypstFileId, ModuleAnalysisLocalCache>,
    completion_files: OnceLock<Vec<TypstFileId>>,
    root_files: OnceLock<Vec<TypstFileId>>,
    module_deps: OnceLock<HashMap<TypstFileId, ModuleDependency>>,
}

/// The per-module analysis results cached within a single query.
#[derive(Default)]
pub struct ModuleAnalysisLocalCache {
    expr_stage: OnceLock<ExprInfo>,
    type_check: OnceLock<Arc<TypeInfo>>,
}

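/// The revision-aware cache of analysis results, managed by a
/// [`RevisionManager`] so that each compiler revision gets its own slot.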
#[derive(Default)]
pub struct AnalysisRevCache {
    default_slot: AnalysisRevSlot,
    manager: RevisionManager<AnalysisRevSlot>,
}

impl RevisionManagerLike for AnalysisRevCache {
    fn gc(&mut self, rev: usize) {
        self.manager.gc(rev);

        {
            let mut max_ei = FxHashMap::default();
            let es = self.default_slot.expr_stage.global.lock();
            for r in es.iter() {
                let rev: &mut usize = max_ei.entry(r.1.fid).or_default();
                *rev = (*rev).max(r.1.revision);
            }
            es.retain(|_, r| r.1.revision == *max_ei.get(&r.1.fid).unwrap_or(&0));
        }

        {
            let mut max_ti = FxHashMap::default();
            let ts = self.default_slot.type_check.global.lock();
            for r in ts.iter() {
                let rev: &mut usize = max_ti.entry(r.1.fid).or_default();
                *rev = (*rev).max(r.1.revision);
            }
            ts.retain(|_, r| r.1.revision == *max_ti.get(&r.1.fid).unwrap_or(&0));
        }

        {
            let mut max_li = FxHashMap::default();
            let ts = self.default_slot.lint.global.lock();
            for r in ts.iter() {
                let rev: &mut usize = max_li.entry(r.1.fid).or_default();
                *rev = (*rev).max(r.1.revision);
            }
            ts.retain(|_, r| r.1.revision == *max_li.get(&r.1.fid).unwrap_or(&0));
        }
    }
}

impl AnalysisRevCache {
    fn clear(&mut self) {
        self.manager.clear();
        self.default_slot = Default::default();
    }

    fn find_revision(
        &mut self,
        revision: NonZeroUsize,
        lg: &AnalysisRevLock,
    ) -> Arc<RevisionSlot<AnalysisRevSlot>> {
        lg.inner.access(revision);
        self.manager.find_revision(revision, |slot_base| {
            log::debug!("analysis revision {} is created", revision.get());
            slot_base
                .map(|slot| AnalysisRevSlot {
                    revision: slot.revision,
                    expr_stage: slot.data.expr_stage.crawl(revision.get()),
                    type_check: slot.data.type_check.crawl(revision.get()),
                    lint: slot.data.lint.crawl(revision.get()),
                })
                .unwrap_or_else(|| self.default_slot.clone())
        })
    }
}

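/// A lock on a revision of the analysis cache. Dropping the lock may trigger
/// garbage collection of stale revisions on a background thread.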
pub struct AnalysisRevLock {
    inner: RevisionLock,
    tokens: Option<SemanticTokenContext>,
    grid: Arc<Mutex<AnalysisRevCache>>,
}

impl Drop for AnalysisRevLock {
    fn drop(&mut self) {
        let mut mu = self.grid.lock();
        let gc_revision = mu.manager.unlock(&mut self.inner);

        if let Some(gc_revision) = gc_revision {
            let grid = self.grid.clone();
            rayon::spawn(move || {
                grid.lock().gc(gc_revision);
            });
        }
    }
}

#[derive(Default, Clone)]
struct AnalysisRevSlot {
    revision: usize,
    expr_stage: IncrCacheMap<u128, ExprInfo>,
    type_check: IncrCacheMap<u128, Arc<TypeInfo>>,
    lint: IncrCacheMap<u128, LintInfo>,
}

impl Drop for AnalysisRevSlot {
    fn drop(&mut self) {
        log::debug!("analysis revision {} is dropped", self.revision);
    }
}

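/// Rounds a byte offset up to the nearest UTF-8 character boundary, clamped
/// to the text length. For example, for the two-byte character `é` in `"é!"`,
/// offset 1 falls inside the character and is rounded up to 2.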
fn ceil_char_boundary(text: &str, mut cursor: usize) -> usize {
    while cursor < text.len() && !text.is_char_boundary(cursor) {
        cursor += 1;
    }

    cursor.min(text.len())
}

#[typst_macros::time]
#[comemo::memoize]
fn analyze_bib(
    world: Tracked<dyn World + '_>,
    introspector: Tracked<Introspector>,
) -> Option<Arc<BibInfo>> {
    let bib_elem = BibliographyElem::find(introspector).ok()?;

    let csl_style = bib_elem.style(StyleChain::default()).derived;

    let Value::Array(paths) = bib_elem.sources.clone().into_value() else {
        return None;
    };
    let elem_fid = bib_elem.span().id()?;
    let files = paths
        .into_iter()
        .flat_map(|path| path.cast().ok())
        .flat_map(|bib_path: EcoString| {
            let bib_fid = resolve_id_by_path(world.deref(), elem_fid, &bib_path)?;
            Some((bib_fid, world.file(bib_fid).ok()?))
        });

    bib_info(csl_style, files)
}

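/// Builds a byte-offset-to-line table for a raw file, used for mapping
/// positions in non-Typst files such as YAML and BibTeX; `find_loc` below
/// converts a byte offset into an LSP position using this table.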
#[comemo::memoize]
fn loc_info(bytes: Bytes) -> Option<EcoVec<(usize, String)>> {
    let mut loc = EcoVec::new();
    let mut offset = 0;
    for line in bytes.split(|byte| *byte == b'\n') {
        loc.push((offset, String::from_utf8(line.to_owned()).ok()?));
        offset += line.len() + 1;
    }
    Some(loc)
}

fn find_loc(
    len: usize,
    loc: &EcoVec<(usize, String)>,
    mut offset: usize,
    encoding: PositionEncoding,
) -> Option<LspPosition> {
    if offset > len {
        offset = len;
    }

    let r = match loc.binary_search_by_key(&offset, |line| line.0) {
        Ok(i) => i,
        Err(i) => i - 1,
    };

    let (start, s) = loc.get(r)?;
    let byte_offset = offset.saturating_sub(*start);

    let column_prefix = if byte_offset <= s.len() {
        &s[..byte_offset]
    } else {
        let line = (r + 1) as u32;
        return Some(LspPosition { line, character: 0 });
    };

    let line = r as u32;
    let character = match encoding {
        PositionEncoding::Utf8 => column_prefix.chars().count(),
        PositionEncoding::Utf16 => column_prefix.chars().map(|ch| ch.len_utf16()).sum(),
    } as u32;

    Some(LspPosition { line, character })
}

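/// A worklist-based context for searching across files, such as collecting
/// the dependents of a module, without visiting a file twice.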
pub struct SearchCtx<'a> {
    /// The local analysis context to work on.
    pub ctx: &'a mut LocalContext,
    /// The files that have already been searched.
    pub searched: HashSet<TypstFileId>,
    /// The files that are still to be searched.
    pub worklist: Vec<TypstFileId>,
}

impl SearchCtx<'_> {
    /// Pushes a file to the worklist and returns whether it was newly added.
    pub fn push(&mut self, fid: TypstFileId) -> bool {
        if self.searched.insert(fid) {
            self.worklist.push(fid);
            true
        } else {
            false
        }
    }

    /// Pushes the dependents of a file to the worklist.
    pub fn push_dependents(&mut self, fid: TypstFileId) {
        let deps = self.ctx.module_dependencies().get(&fid);
        let dependents = deps.map(|dep| dep.dependents.clone()).into_iter().flatten();
        for dep in dependents {
            self.push(dep);
        }
    }
}

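/// A mutex-based rate limiter: at most one caller of [`RateLimiter::enter`]
/// runs at a time, and other callers block until the token is released.
///
/// A minimal usage sketch (names other than `RateLimiter` are illustrative):
///
/// ```ignore
/// let limiter = RateLimiter::default();
/// let result = limiter.enter(|| run_expensive_analysis());
/// ```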
#[derive(Default)]
pub struct RateLimiter {
    token: std::sync::Mutex<()>,
}

impl RateLimiter {
    #[must_use]
    pub fn enter<T>(&self, f: impl FnOnce() -> T) -> T {
        let _c = self.token.lock().unwrap();
        f()
    }
}