use std::num::NonZeroUsize;
use std::ops::DerefMut;
use std::sync::OnceLock;
use std::sync::atomic::{AtomicU64, Ordering};
use std::{collections::HashSet, ops::Deref};

use comemo::{Track, Tracked};
use lsp_types::Url;
use parking_lot::Mutex;
use rustc_hash::FxHashMap;
use tinymist_analysis::docs::DocString;
use tinymist_analysis::stats::AllocStats;
use tinymist_analysis::syntax::classify_def_loosely;
use tinymist_analysis::ty::{BuiltinTy, InsTy, term_value};
use tinymist_analysis::{analyze_expr_, analyze_import_};
use tinymist_lint::{KnownIssues, LintInfo};
use tinymist_project::{LspComputeGraph, LspWorld, TaskWhen};
use tinymist_std::hash::{FxDashMap, hash128};
use tinymist_std::typst::TypstDocument;
use tinymist_world::debug_loc::DataSource;
use tinymist_world::vfs::{PathResolution, WorkspaceResolver};
use tinymist_world::{DETACHED_ENTRY, EntryReader};
use typst::diag::{At, FileError, FileResult, SourceDiagnostic, SourceResult, StrResult};
use typst::foundations::{Bytes, IntoValue, Module, StyleChain, Styles};
use typst::introspection::Introspector;
use typst::layout::Position;
use typst::model::BibliographyElem;
use typst::syntax::package::{PackageManifest, PackageSpec};
use typst::syntax::{Span, VirtualPath};
use typst_shim::eval::{Eval, eval_compat};

use super::{LspQuerySnapshot, TypeEnv};
use crate::adt::revision::{RevisionLock, RevisionManager, RevisionManagerLike, RevisionSlot};
use crate::analysis::prelude::*;
use crate::analysis::{
    AnalysisStats, BibInfo, CompletionFeat, Definition, PathKind, QueryStatGuard,
    SemanticTokenCache, SemanticTokenContext, SemanticTokens, Signature, SignatureTarget, Ty,
    TypeInfo, analyze_signature, bib_info, definition, post_type_check,
};
use crate::docs::{DefDocs, TidyModuleDocs};
use crate::syntax::{
    Decl, DefKind, ExprInfo, ExprRoute, LexicalScope, ModuleDependency, SyntaxClass,
    classify_syntax, construct_module_dependencies, is_mark, resolve_id_by_path,
    scan_workspace_files,
};
use crate::upstream::{Tooltip, tooltip_};
use crate::{
    ColorTheme, CompilerQueryRequest, LspPosition, LspRange, LspWorldExt, PositionEncoding,
};

macro_rules! interned_str {
    ($name:ident, $value:expr) => {
        static $name: LazyLock<Interned<str>> = LazyLock::new(|| $value.into());
    };
}

/// The analysis context that holds the configuration and shared caches used
/// by all analyzers.
#[derive(Default, Clone)]
pub struct Analysis {
    /// The position encoding for the workspace.
    pub position_encoding: PositionEncoding,
    /// Whether to allow overlapping semantic tokens.
    pub allow_overlapping_token: bool,
    /// Whether to allow multiline semantic tokens.
    pub allow_multiline_token: bool,
    /// Whether to remove HTML from markup content in responses.
    pub remove_html: bool,
    /// Whether the client supports code lenses.
    pub support_client_codelens: bool,
    /// Whether to enable extended code actions.
    pub extended_code_action: bool,
    /// The completion feature configuration.
    pub completion_feat: CompletionFeat,
    /// The editor's color theme.
    pub color_theme: ColorTheme,
    /// When to run the linter.
    pub lint: TaskWhen,
    /// The optional periscope provider for rendering previews at positions.
    pub periscope: Option<Arc<dyn PeriscopeProvider + Send + Sync>>,
    /// The global analysis workers (rate limiters).
    pub workers: Arc<AnalysisGlobalWorkers>,
    /// The semantic token cache.
    pub tokens_caches: Arc<Mutex<SemanticTokenCache>>,
    /// The global caches shared across revisions.
    pub caches: AnalysisGlobalCaches,
    /// The revision-scoped analysis caches.
    pub analysis_rev_cache: Arc<Mutex<AnalysisRevCache>>,
    /// The statistics about analysis queries.
    pub stats: Arc<AnalysisStats>,
}

impl Analysis {
    /// Enters the analysis context with a compute graph.
    pub fn enter(&self, g: LspComputeGraph) -> LocalContextGuard {
        self.enter_(g, self.lock_revision(None))
    }

    /// Enters the analysis context with a compute graph and a revision lock.
    pub(crate) fn enter_(&self, g: LspComputeGraph, mut lg: AnalysisRevLock) -> LocalContextGuard {
        let lifetime = self.caches.lifetime.fetch_add(1, Ordering::SeqCst);
        let slot = self
            .analysis_rev_cache
            .lock()
            .find_revision(g.world().revision(), &lg);
        let tokens = lg.tokens.take();
        LocalContextGuard {
            _rev_lock: lg,
            local: LocalContext {
                tokens,
                caches: AnalysisLocalCaches::default(),
                shared: Arc::new(SharedContext {
                    slot,
                    lifetime,
                    graph: g,
                    analysis: self.clone(),
                }),
            },
        }
    }

    /// Creates a query snapshot, locking the revision for the requested
    /// query.
    pub fn query_snapshot(
        self: Arc<Self>,
        snap: LspComputeGraph,
        req: Option<&CompilerQueryRequest>,
    ) -> LspQuerySnapshot {
        let rev_lock = self.lock_revision(req);
        LspQuerySnapshot {
            snap,
            analysis: self,
            rev_lock,
        }
    }

    /// Locks the revision-scoped caches for the given request.
    #[must_use]
    pub fn lock_revision(&self, req: Option<&CompilerQueryRequest>) -> AnalysisRevLock {
        let mut grid = self.analysis_rev_cache.lock();

        AnalysisRevLock {
            tokens: match req {
                Some(CompilerQueryRequest::SemanticTokensFull(req)) => Some(
                    SemanticTokenCache::acquire(self.tokens_caches.clone(), &req.path, None),
                ),
                Some(CompilerQueryRequest::SemanticTokensDelta(req)) => {
                    Some(SemanticTokenCache::acquire(
                        self.tokens_caches.clone(),
                        &req.path,
                        Some(&req.previous_result_id),
                    ))
                }
                _ => None,
            },
            inner: grid.manager.lock_estimated(),
            grid: self.analysis_rev_cache.clone(),
        }
    }

    /// Clears all analysis caches.
    pub fn clear_cache(&self) {
        self.caches.signatures.clear();
        self.caches.docstrings.clear();
        self.caches.def_signatures.clear();
        self.caches.static_signatures.clear();
        self.caches.terms.clear();
        self.tokens_caches.lock().clear();
        self.analysis_rev_cache.lock().clear();
    }

    /// Reports the statistics of analysis queries.
    pub fn report_query_stats(&self) -> String {
        self.stats.report()
    }

    /// Reports the allocation statistics.
    pub fn report_alloc_stats(&self) -> String {
        AllocStats::report()
    }

    /// Returns the command to trigger suggestions, if enabled in this
    /// context.
    pub fn trigger_suggest(&self, context: bool) -> Option<Interned<str>> {
        interned_str!(INTERNED, "editor.action.triggerSuggest");

        (self.completion_feat.trigger_suggest && context).then(|| INTERNED.clone())
    }

    /// Returns the command to trigger parameter hints, if enabled in this
    /// context.
    pub fn trigger_parameter_hints(&self, context: bool) -> Option<Interned<str>> {
        interned_str!(INTERNED, "editor.action.triggerParameterHints");
        (self.completion_feat.trigger_parameter_hints && context).then(|| INTERNED.clone())
    }

    /// Returns the suggest command to run after a snippet placeholder is
    /// inserted, if enabled.
    pub fn trigger_on_snippet(&self, context: bool) -> Option<Interned<str>> {
        if !self.completion_feat.trigger_on_snippet_placeholders {
            return None;
        }

        self.trigger_suggest(context)
    }

    /// Returns the combined suggest-and-parameter-hints command for snippet
    /// placeholders, falling back to parameter hints when placeholders are
    /// disabled.
    pub fn trigger_on_snippet_with_param_hint(&self, context: bool) -> Option<Interned<str>> {
        interned_str!(INTERNED, "tinymist.triggerSuggestAndParameterHints");
        if !self.completion_feat.trigger_on_snippet_placeholders {
            return self.trigger_parameter_hints(context);
        }

        (self.completion_feat.trigger_suggest_and_parameter_hints && context)
            .then(|| INTERNED.clone())
    }
}

/// A provider that renders a "periscope" preview of the document at a given
/// position, e.g. for hover responses.
pub trait PeriscopeProvider {
    /// Resolves the periscope preview at the given position in the document.
    fn periscope_at(
        &self,
        _ctx: &mut LocalContext,
        _doc: &TypstDocument,
        _pos: Position,
    ) -> Option<String> {
        None
    }
}

/// A guard that bundles a [`LocalContext`] with the revision lock keeping its
/// caches alive, and garbage-collects stale cache entries on drop.
pub struct LocalContextGuard {
    /// The guarded local context.
    pub local: LocalContext,
    /// The lock over the analysis revision.
    _rev_lock: AnalysisRevLock,
}

impl Deref for LocalContextGuard {
    type Target = LocalContext;

    fn deref(&self) -> &Self::Target {
        &self.local
    }
}

impl DerefMut for LocalContextGuard {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.local
    }
}

impl Drop for LocalContextGuard {
    fn drop(&mut self) {
        self.gc();
    }
}

impl LocalContextGuard {
    /// Sweeps cache entries that have not been touched for a while. The
    /// compare-exchange loop advances the clear mark monotonically so that
    /// concurrent guards do not repeat the sweep.
    fn gc(&self) {
        let lifetime = self.lifetime;
        loop {
            let latest_clear_lifetime = self.analysis.caches.clear_lifetime.load(Ordering::Relaxed);
            if latest_clear_lifetime >= lifetime {
                return;
            }

            if self.analysis.caches.clear_lifetime.compare_exchange(
                latest_clear_lifetime,
                lifetime,
                Ordering::SeqCst,
                Ordering::SeqCst,
            ) != Ok(latest_clear_lifetime)
            {
                continue;
            }

            break;
        }

        // Keep entries that were used within the last 60 lifetimes.
        let retainer = |l: u64| lifetime.saturating_sub(l) < 60;
        let caches = &self.analysis.caches;
        caches.def_signatures.retain(|(l, _)| retainer(*l));
        caches.static_signatures.retain(|(l, _)| retainer(*l));
        caches.terms.retain(|(l, _)| retainer(*l));
        caches.signatures.retain(|(l, _)| retainer(*l));
        caches.docstrings.retain(|(l, _)| retainer(*l));
    }
}

/// The local analysis context, combining the shared context with caches that
/// live only as long as one query.
pub struct LocalContext {
    /// The semantic token context acquired for this query, if any.
    pub(crate) tokens: Option<SemanticTokenContext>,
    /// The caches local to this query.
    pub caches: AnalysisLocalCaches,
    /// The shared analysis context.
    pub shared: Arc<SharedContext>,
}

impl Deref for LocalContext {
    type Target = Arc<SharedContext>;

    fn deref(&self) -> &Self::Target {
        &self.shared
    }
}

impl DerefMut for LocalContext {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.shared
    }
}

impl LocalContext {
    #[cfg(test)]
    pub fn test_package_list(&mut self, f: impl FnOnce() -> Vec<(PackageSpec, Option<EcoString>)>) {
        self.world().registry.test_package_list(f);
    }

    #[cfg(test)]
    pub fn test_completion_files(&mut self, f: impl FnOnce() -> Vec<TypstFileId>) {
        self.caches.completion_files.get_or_init(f);
    }

    #[cfg(test)]
    pub fn test_files(&mut self, f: impl FnOnce() -> Vec<TypstFileId>) {
        self.caches.root_files.get_or_init(f);
    }

    /// Gets the workspace files matching the given path preference, for
    /// completion.
    pub(crate) fn completion_files(&self, pref: &PathKind) -> impl Iterator<Item = &TypstFileId> {
        let regexes = pref.ext_matcher();
        self.caches
            .completion_files
            .get_or_init(|| {
                if let Some(root) = self.world().entry_state().workspace_root() {
                    scan_workspace_files(&root, PathKind::Special.ext_matcher(), |path| {
                        WorkspaceResolver::workspace_file(Some(&root), VirtualPath::new(path))
                    })
                } else {
                    vec![]
                }
            })
            .iter()
            .filter(move |fid| {
                fid.vpath()
                    .as_rooted_path()
                    .extension()
                    .and_then(|path| path.to_str())
                    .is_some_and(|path| regexes.is_match(path))
            })
    }

    /// Gets all source files in the workspace.
    pub fn source_files(&self) -> &Vec<TypstFileId> {
        self.caches.root_files.get_or_init(|| {
            self.completion_files(&PathKind::Source {
                allow_package: false,
            })
            .copied()
            .collect()
        })
    }

    /// Gets the module dependency graph of the workspace.
    pub fn module_dependencies(&mut self) -> &HashMap<TypstFileId, ModuleDependency> {
        if self.caches.module_deps.get().is_some() {
            self.caches.module_deps.get().unwrap()
        } else {
            // The construction needs `&mut self`, so it cannot run inside the
            // `get_or_init` closure below.
            let deps = construct_module_dependencies(self);
            self.caches.module_deps.get_or_init(|| deps)
        }
    }

    pub fn depended_source_files(&self) -> EcoVec<TypstFileId> {
        let mut ids = self.depended_files();
        let preference = PathKind::Source {
            allow_package: false,
        };
        ids.retain(|id| preference.is_match(id.vpath().as_rooted_path()));
        ids
    }

    pub fn depended_files(&self) -> EcoVec<TypstFileId> {
        self.world().depended_files()
    }

    pub fn shared(&self) -> &Arc<SharedContext> {
        &self.shared
    }

    pub fn shared_(&self) -> Arc<SharedContext> {
        self.shared.clone()
    }

    pub fn fork_for_search(&mut self) -> SearchCtx<'_> {
        SearchCtx {
            ctx: self,
            searched: Default::default(),
            worklist: Default::default(),
        }
    }

    pub(crate) fn preload_package(&self, entry_point: TypstFileId) {
        self.shared_().preload_package(entry_point);
    }

    pub(crate) fn with_vm<T>(&self, f: impl FnOnce(&mut typst_shim::eval::Vm) -> T) -> T {
        crate::upstream::with_vm((self.world() as &dyn World).track(), f)
    }

    pub(crate) fn const_eval(&self, rr: ast::Expr<'_>) -> Option<Value> {
        SharedContext::const_eval(rr)
    }

    pub(crate) fn mini_eval(&self, rr: ast::Expr<'_>) -> Option<Value> {
        self.const_eval(rr)
            .or_else(|| self.with_vm(|vm| rr.eval(vm).ok()))
    }

    pub(crate) fn cached_tokens(&mut self, source: &Source) -> (SemanticTokens, Option<String>) {
        let tokens = crate::analysis::semantic_tokens::get_semantic_tokens(self, source);

        let result_id = self.tokens.as_ref().map(|t| {
            let id = t.next.revision;
            t.next
                .data
                .set(tokens.clone())
                .unwrap_or_else(|_| panic!("unexpected slot overwrite {id}"));
            id.to_string()
        });
        (tokens, result_id)
    }

    pub(crate) fn expr_stage_by_id(&mut self, fid: TypstFileId) -> Option<ExprInfo> {
        Some(self.expr_stage(&self.source_by_id(fid).ok()?))
    }

    pub(crate) fn expr_stage(&mut self, source: &Source) -> ExprInfo {
        let id = source.id();
        let cache = &self.caches.modules.entry(id).or_default().expr_stage;
        cache.get_or_init(|| self.shared.expr_stage(source)).clone()
    }

    pub(crate) fn type_check(&mut self, source: &Source) -> Arc<TypeInfo> {
        let id = source.id();
        let cache = &self.caches.modules.entry(id).or_default().type_check;
        cache.get_or_init(|| self.shared.type_check(source)).clone()
    }

    pub(crate) fn lint(
        &mut self,
        source: &Source,
        known_issues: &KnownIssues,
    ) -> EcoVec<SourceDiagnostic> {
        self.shared.lint(source, known_issues).diagnostics
    }

    pub(crate) fn type_check_by_id(&mut self, id: TypstFileId) -> Arc<TypeInfo> {
        let cache = &self.caches.modules.entry(id).or_default().type_check;
        cache
            .clone()
            .get_or_init(|| {
                let source = self.source_by_id(id).ok();
                source
                    .map(|s| self.shared.type_check(&s))
                    .unwrap_or_default()
            })
            .clone()
    }

    pub(crate) fn type_of_span(&mut self, s: Span) -> Option<Ty> {
        let scheme = self.type_check_by_id(s.id()?);
        let ty = scheme.type_of_span(s)?;
        Some(scheme.simplify(ty, false))
    }

    pub(crate) fn def_docs(&mut self, def: &Definition) -> Option<DefDocs> {
        match def.decl.kind() {
            DefKind::Function => {
                let sig = self.sig_of_def(def.clone())?;
                let docs = crate::docs::sig_docs(&sig)?;
                Some(DefDocs::Function(Box::new(docs)))
            }
            DefKind::Struct | DefKind::Constant | DefKind::Variable => {
                let docs = crate::docs::var_docs(self, def.decl.span())?;
                Some(DefDocs::Variable(docs))
            }
            DefKind::Module => {
                let ei = self.expr_stage_by_id(def.decl.file_id()?)?;
                Some(DefDocs::Module(TidyModuleDocs {
                    docs: ei.module_docstring.docs.clone().unwrap_or_default(),
                }))
            }
            DefKind::Reference => None,
        }
    }
}

/// The analysis context shared by all queries against the same revision.
pub struct SharedContext {
    /// The caching lifetime of this context.
    pub lifetime: u64,
    /// The compute graph of the project.
    pub graph: LspComputeGraph,
    /// The analysis configuration and global caches.
    pub analysis: Analysis,
    /// The revision slot holding the revision-scoped caches.
    slot: Arc<RevisionSlot<AnalysisRevSlot>>,
}

impl SharedContext {
    /// The revision of the analysis this context belongs to.
    pub fn revision(&self) -> usize {
        self.slot.revision
    }

    /// Gets the position encoding of the session.
    pub(crate) fn position_encoding(&self) -> PositionEncoding {
        self.analysis.position_encoding
    }

    /// Gets the world of the compute graph.
    pub fn world(&self) -> &LspWorld {
        self.graph.world()
    }

    /// Gets the document from the last successful compilation, if any.
    pub fn success_doc(&self) -> Option<&TypstDocument> {
        self.graph.snap.success_doc.as_ref()
    }
560 }
561
562 pub fn to_typst_pos(&self, position: LspPosition, src: &Source) -> Option<usize> {
564 crate::to_typst_position(position, self.analysis.position_encoding, src)
565 }
566
567 pub fn to_typst_pos_offset(
569 &self,
570 source: &Source,
571 position: LspPosition,
572 shift: usize,
573 ) -> Option<usize> {
574 let offset = self.to_typst_pos(position, source)?;
575 Some(ceil_char_boundary(source.text(), offset + shift))
576 }
577
578 pub fn to_lsp_pos(&self, typst_offset: usize, src: &Source) -> LspPosition {
580 crate::to_lsp_position(typst_offset, self.analysis.position_encoding, src)
581 }
582
583 pub fn to_typst_range(&self, position: LspRange, src: &Source) -> Option<Range<usize>> {
585 crate::to_typst_range(position, self.analysis.position_encoding, src)
586 }
587
588 pub fn to_lsp_range(&self, position: Range<usize>, src: &Source) -> LspRange {
590 crate::to_lsp_range(position, src, self.analysis.position_encoding)
591 }
592
593 pub fn to_lsp_range_(&self, position: Range<usize>, fid: TypstFileId) -> Option<LspRange> {
595 let ext = fid
596 .vpath()
597 .as_rootless_path()
598 .extension()
599 .and_then(|ext| ext.to_str());
600 if matches!(ext, Some("yaml" | "yml" | "bib")) {
602 let bytes = self.file_by_id(fid).ok()?;
603 let bytes_len = bytes.len();
604 let loc = loc_info(bytes)?;
605 let start = find_loc(bytes_len, &loc, position.start, self.position_encoding())?;
607 let end = find_loc(bytes_len, &loc, position.end, self.position_encoding())?;
608 return Some(LspRange { start, end });
609 }
610
611 let source = self.source_by_id(fid).ok()?;
612
613 Some(self.to_lsp_range(position, &source))
614 }
615
616 pub fn path_for_id(&self, id: TypstFileId) -> Result<PathResolution, FileError> {
618 self.world().path_for_id(id)
619 }
620
621 pub fn uri_for_id(&self, fid: TypstFileId) -> Result<Url, FileError> {
623 self.world().uri_for_id(fid)
624 }
625
626 pub fn file_id_by_path(&self, path: &Path) -> FileResult<TypstFileId> {
628 self.world().file_id_by_path(path)
629 }
630
631 pub fn file_by_id(&self, fid: TypstFileId) -> FileResult<Bytes> {
633 self.world().file(fid)
634 }
635
636 pub fn source_by_id(&self, fid: TypstFileId) -> FileResult<Source> {
638 self.world().source(fid)
639 }
640
641 pub fn source_by_path(&self, path: &Path) -> FileResult<Source> {
643 self.source_by_id(self.file_id_by_path(path)?)
644 }
645
646 pub fn classify_span<'s>(&self, source: &'s Source, span: Span) -> Option<SyntaxClass<'s>> {
649 let node = LinkedNode::new(source.root()).find(span)?;
650 let cursor = node.offset() + 1;
651 classify_syntax(node, cursor)
652 }

    /// Classifies the syntax under an LSP position for finding declarations.
    /// When the cursor sits on a mark token, the adjacent previous leaf is
    /// preferred.
    pub fn classify_for_decl<'s>(
        &self,
        source: &'s Source,
        position: LspPosition,
    ) -> Option<SyntaxClass<'s>> {
        let cursor = self.to_typst_pos_offset(source, position, 1)?;
        let mut node = LinkedNode::new(source.root()).leaf_at_compat(cursor)?;

        if cursor == node.offset() + 1 && is_mark(node.kind()) {
            let prev_leaf = node.prev_leaf();
            if let Some(prev_leaf) = prev_leaf
                && prev_leaf.range().end == node.offset()
            {
                node = prev_leaf;
            }
        }

        classify_syntax(node, cursor)
    }

    /// Gets the data source of a font.
    pub fn font_info(&self, font: typst::text::Font) -> Option<Arc<DataSource>> {
        self.world().font_resolver.describe_font(&font)
    }

    /// Gets the packages in the `@local` namespace.
    #[cfg(feature = "local-registry")]
    pub fn local_packages(&self) -> EcoVec<PackageSpec> {
        crate::package::list_package_by_namespace(&self.world().registry, eco_format!("local"))
            .into_iter()
            .map(|(_, spec)| spec)
            .collect()
    }

    /// Gets the packages in the `@local` namespace (always empty without the
    /// `local-registry` feature).
    #[cfg(not(feature = "local-registry"))]
    pub fn local_packages(&self) -> EcoVec<PackageSpec> {
        eco_vec![]
    }

    /// Evaluates a literal expression without any context; returns `None` for
    /// non-literal expressions.
    pub(crate) fn const_eval(rr: ast::Expr<'_>) -> Option<Value> {
        Some(match rr {
            ast::Expr::None(_) => Value::None,
            ast::Expr::Auto(_) => Value::Auto,
            ast::Expr::Bool(v) => Value::Bool(v.get()),
            ast::Expr::Int(v) => Value::Int(v.get()),
            ast::Expr::Float(v) => Value::Float(v.get()),
            ast::Expr::Numeric(v) => Value::numeric(v.get()),
            ast::Expr::Str(v) => Value::Str(v.get().into()),
            _ => return None,
        })
    }

    /// Evaluates a module by file id.
    pub fn module_by_id(&self, fid: TypstFileId) -> SourceResult<Module> {
        let source = self.source_by_id(fid).at(Span::detached())?;
        self.module_by_src(source)
    }

    /// Evaluates a module from a string of source code.
    pub fn module_by_str(&self, rr: String) -> Option<Module> {
        let src = Source::new(*DETACHED_ENTRY, rr);
        self.module_by_src(src).ok()
    }

    /// Evaluates a module from a source.
    pub fn module_by_src(&self, source: Source) -> SourceResult<Module> {
        eval_compat(&self.world(), &source)
    }

    /// Resolves the value of a module from an import syntax node.
    pub fn module_by_syntax(self: &Arc<Self>, source: &SyntaxNode) -> Option<Value> {
        self.module_term_by_syntax(source, true)
            .and_then(|ty| ty.value())
    }

    /// Resolves the term of a module from an import syntax node. When `value`
    /// is true, modules resolved by path are evaluated to values; otherwise
    /// only their declarations are referenced.
    pub fn module_term_by_syntax(self: &Arc<Self>, source: &SyntaxNode, value: bool) -> Option<Ty> {
        let (src, scope) = self.analyze_import(source);
        if let Some(scope) = scope {
            return Some(match scope {
                Value::Module(m) if m.file_id().is_some() => {
                    Ty::Builtin(BuiltinTy::Module(Decl::module(m.file_id()?).into()))
                }
                scope => Ty::Value(InsTy::new(scope)),
            });
        }

        match src {
            Some(Value::Str(s)) => {
                let id = resolve_id_by_path(self.world(), source.span().id()?, s.as_str())?;

                Some(if value {
                    Ty::Value(InsTy::new(Value::Module(self.module_by_id(id).ok()?)))
                } else {
                    Ty::Builtin(BuiltinTy::Module(Decl::module(id).into()))
                })
            }
            _ => None,
        }
    }

    pub(crate) fn expr_stage_by_id(self: &Arc<Self>, fid: TypstFileId) -> Option<ExprInfo> {
        Some(self.expr_stage(&self.source_by_id(fid).ok()?))
    }

    pub(crate) fn expr_stage(self: &Arc<Self>, source: &Source) -> ExprInfo {
        let mut route = ExprRoute::default();
        self.expr_stage_(source, &mut route)
    }

    pub(crate) fn expr_stage_(
        self: &Arc<Self>,
        source: &Source,
        route: &mut ExprRoute,
    ) -> ExprInfo {
        use crate::syntax::expr_of;
        let guard = self.query_stat(source.id(), "expr_stage");
        self.slot.expr_stage.compute(hash128(&source), |prev| {
            expr_of(self.clone(), source.clone(), route, guard, prev)
        })
    }

    pub(crate) fn exports_of(
        self: &Arc<Self>,
        source: &Source,
        route: &mut ExprRoute,
    ) -> Option<Arc<LazyHash<LexicalScope>>> {
        if let Some(s) = route.get(&source.id()) {
            return s.clone();
        }

        Some(self.expr_stage_(source, route).exports.clone())
    }

    pub(crate) fn type_check(self: &Arc<Self>, source: &Source) -> Arc<TypeInfo> {
        let mut route = TypeEnv::default();
        self.type_check_(source, &mut route)
    }

    pub(crate) fn type_check_(
        self: &Arc<Self>,
        source: &Source,
        route: &mut TypeEnv,
    ) -> Arc<TypeInfo> {
        use crate::analysis::type_check;

        let ei = self.expr_stage(source);
        let guard = self.query_stat(source.id(), "type_check");
        self.slot.type_check.compute(hash128(&ei), |prev| {
            // Reuse the previous result if the expression information has the
            // same revision.
            if let Some(cache_hint) = prev.filter(|prev| prev.revision == ei.revision) {
                return cache_hint;
            }

            guard.miss();
            type_check(self.clone(), ei, route)
        })
    }

    /// Lints a source file.
    #[typst_macros::time(span = source.root().span())]
    pub(crate) fn lint(self: &Arc<Self>, source: &Source, issues: &KnownIssues) -> LintInfo {
        let ei = self.expr_stage(source);
        let ti = self.type_check(source);
        let guard = self.query_stat(source.id(), "lint");
        self.slot.lint.compute(hash128(&(&ei, &ti, issues)), |_| {
            guard.miss();
            tinymist_lint::lint_file(self.world(), &ei, ti, issues.clone())
        })
    }

    pub(crate) fn type_of_func(self: &Arc<Self>, func: Func) -> Signature {
        crate::log_debug_ct!("convert runtime func {func:?}");
        analyze_signature(self, SignatureTarget::Convert(func)).unwrap()
    }

    pub(crate) fn type_of_value(self: &Arc<Self>, val: &Value) -> Ty {
        crate::log_debug_ct!("convert runtime value {val:?}");

        // Checks the term cache for the value first.
        let cache_key = val;
        let cached = self
            .analysis
            .caches
            .terms
            .m
            .get(&hash128(&cache_key))
            .and_then(|slot| (cache_key == &slot.1.0).then_some(slot.1.1.clone()));
        if let Some(cached) = cached {
            return cached;
        }

        let res = term_value(val);

        self.analysis
            .caches
            .terms
            .m
            .entry(hash128(&cache_key))
            .or_insert_with(|| (self.lifetime, (cache_key.clone(), res.clone())));

        res
    }

    pub(crate) fn def_of_span(self: &Arc<Self>, source: &Source, span: Span) -> Option<Definition> {
        let syntax = self.classify_span(source, span)?;
        definition(self, source, syntax)
    }

    pub(crate) fn def_of_decl(&self, decl: &Interned<Decl>) -> Option<Definition> {
        match decl.as_ref() {
            Decl::Func(..) => Some(Definition::new(decl.clone(), None)),
            Decl::Module(..) => None,
            _ => None,
        }
    }

    /// Gets the definition of the classified syntax.
    pub(crate) fn def_of_syntax(
        self: &Arc<Self>,
        source: &Source,
        syntax: SyntaxClass,
    ) -> Option<Definition> {
        definition(self, source, syntax)
    }

    /// Gets the definition of the classified syntax, falling back to dynamic
    /// analysis of the expression when static analysis cannot determine its
    /// type well.
    pub(crate) fn def_of_syntax_or_dyn(
        self: &Arc<Self>,
        source: &Source,
        syntax: SyntaxClass,
    ) -> Option<Definition> {
        let def = self.def_of_syntax(source, syntax.clone());
        match def.as_ref().map(|d| d.decl.kind()) {
            Some(DefKind::Reference | DefKind::Module | DefKind::Function) => return def,
            Some(DefKind::Struct | DefKind::Constant | DefKind::Variable) | None => {}
        }

        let know_ty_well = def
            .as_ref()
            .and_then(|d| self.simplified_type_of_span(d.decl.span()))
            .filter(|ty| !matches!(ty, Ty::Any))
            .is_some();
        if know_ty_well {
            return def;
        }

        let def_ref = def.as_ref();
        let def_name = || Some(def_ref?.name().clone());
        let dyn_def = self
            .analyze_expr(syntax.node())
            .iter()
            .find_map(|(value, _)| {
                let def = Definition::from_value(value.clone(), def_name)?;
                None.or_else(|| {
                    let source = self.source_by_id(def.decl.file_id()?).ok()?;
                    let node = LinkedNode::new(source.root()).find(def.decl.span())?;
                    let def_at_the_span = classify_def_loosely(node)?;
                    self.def_of_span(&source, def_at_the_span.name()?.span())
                })
                .or(Some(def))
            });

        dyn_def.or(def)
    }

    pub(crate) fn simplified_type_of_span(self: &Arc<Self>, span: Span) -> Option<Ty> {
        let source = self.source_by_id(span.id()?).ok()?;
        let (ti, ty) = self.type_of_span_(&source, span)?;
        Some(ti.simplify(ty, false))
    }

    pub(crate) fn type_of_span(self: &Arc<Self>, span: Span) -> Option<Ty> {
        let source = self.source_by_id(span.id()?).ok()?;
        Some(self.type_of_span_(&source, span)?.1)
    }

    pub(crate) fn type_of_span_(
        self: &Arc<Self>,
        source: &Source,
        span: Span,
    ) -> Option<(Arc<TypeInfo>, Ty)> {
        let ti = self.type_check(source);
        let ty = ti.type_of_span(span)?;
        Some((ti, ty))
    }

    pub(crate) fn post_type_of_node(self: &Arc<Self>, node: LinkedNode) -> Option<Ty> {
        let id = node.span().id()?;
        let source = self.source_by_id(id).ok()?;
        let ty_chk = self.type_check(&source);

        let ty = post_type_check(self.clone(), &ty_chk, node.clone())
            .or_else(|| ty_chk.type_of_span(node.span()))?;
        Some(ty_chk.simplify(ty, false))
    }

    pub(crate) fn sig_of_def(self: &Arc<Self>, def: Definition) -> Option<Signature> {
        crate::log_debug_ct!("check definition func {def:?}");
        let source = def.decl.file_id().and_then(|id| self.source_by_id(id).ok());
        analyze_signature(self, SignatureTarget::Def(source, def))
    }

    pub(crate) fn sig_of_type(self: &Arc<Self>, ti: &TypeInfo, ty: Ty) -> Option<Signature> {
        super::sig_of_type(self, ti, ty)
    }

    pub(crate) fn sig_of_type_or_dyn(
        self: &Arc<Self>,
        ti: &TypeInfo,
        callee_ty: Ty,
        callee: &SyntaxNode,
    ) -> Option<Signature> {
        self.sig_of_type(ti, callee_ty).or_else(|| {
            self.analyze_expr(callee).iter().find_map(|(value, _)| {
                let Value::Func(callee) = value else {
                    return None;
                };

                analyze_signature(self, SignatureTarget::Runtime(callee.clone()))
            })
        })
    }

    /// Performs import analysis on a syntax node, returning the value of the
    /// import source and the evaluated scope, if any.
    pub fn analyze_import(&self, source: &SyntaxNode) -> (Option<Value>, Option<Value>) {
        if let Some(v) = source.cast::<ast::Expr>().and_then(Self::const_eval) {
            return (Some(v), None);
        }
        let token = &self.analysis.workers.import;
        token.enter(|| analyze_import_(self.world(), source))
    }

    /// Performs dynamic analysis on an expression node, returning the values
    /// it may take together with the styles they were observed with.
    pub fn analyze_expr(&self, source: &SyntaxNode) -> EcoVec<(Value, Option<Styles>)> {
        let token = &self.analysis.workers.expression;
        token.enter(|| analyze_expr_(self.world(), source))
    }

    /// Gets the bibliography information of the compiled document.
    pub fn analyze_bib(&self, introspector: &Introspector) -> Option<Arc<BibInfo>> {
        let world = self.world();
        let world = (world as &dyn World).track();

        analyze_bib(world, introspector.track())
    }

    /// Describes the item under the cursor.
    pub fn tooltip(&self, source: &Source, cursor: usize) -> Option<Tooltip> {
        let token = &self.analysis.workers.tooltip;
        token.enter(|| tooltip_(self.world(), source, cursor))
    }

    /// Gets the manifest of a package by the file id of its `typst.toml`.
    pub fn get_manifest(&self, toml_id: TypstFileId) -> StrResult<PackageManifest> {
        crate::package::get_manifest(self.world(), toml_id)
    }

    /// Computes the signature for the given target, caching the result keyed
    /// by the target.
    pub fn compute_signature(
        self: &Arc<Self>,
        func: SignatureTarget,
        compute: impl FnOnce(&Arc<Self>) -> Option<Signature> + Send + Sync + 'static,
    ) -> Option<Signature> {
        let res = match func {
            SignatureTarget::Def(src, def) => self
                .analysis
                .caches
                .def_signatures
                .entry(hash128(&(src, def.clone())), self.lifetime),
            SignatureTarget::SyntaxFast(source, span) => {
                let cache_key = (source, span, true);
                self.analysis
                    .caches
                    .static_signatures
                    .entry(hash128(&cache_key), self.lifetime)
            }
            SignatureTarget::Syntax(source, span) => {
                let cache_key = (source, span);
                self.analysis
                    .caches
                    .static_signatures
                    .entry(hash128(&cache_key), self.lifetime)
            }
            SignatureTarget::Convert(rt) => self
                .analysis
                .caches
                .signatures
                .entry(hash128(&(&rt, true)), self.lifetime),
            SignatureTarget::Runtime(rt) => self
                .analysis
                .caches
                .signatures
                .entry(hash128(&rt), self.lifetime),
        };
        res.get_or_init(|| compute(self)).clone()
    }

    pub(crate) fn compute_docstring(
        self: &Arc<Self>,
        fid: TypstFileId,
        docs: String,
        kind: DefKind,
    ) -> Option<Arc<DocString>> {
        let res = self
            .analysis
            .caches
            .docstrings
            .entry(hash128(&(fid, &docs, kind)), self.lifetime);
        res.get_or_init(|| {
            crate::syntax::docs::do_compute_docstring(self, fid, docs, kind).map(Arc::new)
        })
        .clone()
    }

    /// Removes HTML comments from the markup if `remove_html` is enabled.
    pub fn remove_html(&self, markup: EcoString) -> EcoString {
        if !self.analysis.remove_html {
            return markup;
        }

        static REMOVE_HTML_COMMENT_REGEX: LazyLock<regex::Regex> =
            LazyLock::new(|| regex::Regex::new(r#"<!--[\s\S]*?-->"#).unwrap());
        REMOVE_HTML_COMMENT_REGEX
            .replace_all(&markup, "")
            .trim()
            .into()
    }

    fn query_stat(&self, id: TypstFileId, query: &'static str) -> QueryStatGuard {
        self.analysis.stats.stat(Some(id), query)
    }

    /// Prefetches the type-checking information of a file. Currently a no-op.
    pub(crate) fn prefetch_type_check(self: &Arc<Self>, _fid: TypstFileId) {}

    /// Preloads a package by walking its import graph from the entry point
    /// and type-checking each module once.
    pub(crate) fn preload_package(self: Arc<Self>, entry_point: TypstFileId) {
        crate::log_debug_ct!("preload package start {entry_point:?}");

        #[derive(Clone)]
        struct Preloader {
            shared: Arc<SharedContext>,
            analyzed: Arc<Mutex<HashSet<TypstFileId>>>,
        }

        impl Preloader {
            fn work(&self, fid: TypstFileId) {
                crate::log_debug_ct!("preload package {fid:?}");
                let source = self.shared.source_by_id(fid).ok().unwrap();
                let exprs = self.shared.expr_stage(&source);
                self.shared.type_check(&source);
                exprs.imports.iter().for_each(|(fid, _)| {
                    if !self.analyzed.lock().insert(*fid) {
                        return;
                    }
                    self.work(*fid);
                })
            }
        }

        let preloader = Preloader {
            shared: self,
            analyzed: Arc::default(),
        };

        preloader.work(entry_point);
    }
}

/// A deferred, lazily computed value shared behind an `Arc`.
type DeferredCompute<T> = Arc<OnceLock<T>>;

/// An incremental cache that can pass the result computed in the previous
/// revision as a hint when recomputing a key in the current revision.
#[derive(Clone)]
struct IncrCacheMap<K, V> {
    revision: usize,
    global: Arc<Mutex<FxDashMap<K, (usize, V)>>>,
    prev: Arc<Mutex<FxHashMap<K, DeferredCompute<V>>>>,
    next: Arc<Mutex<FxHashMap<K, DeferredCompute<V>>>>,
}

impl<K: Eq + Hash, V> Default for IncrCacheMap<K, V> {
    fn default() -> Self {
        Self {
            revision: 0,
            global: Arc::default(),
            prev: Arc::default(),
            next: Arc::default(),
        }
    }
}

impl<K, V> IncrCacheMap<K, V> {
    fn compute(&self, key: K, compute: impl FnOnce(Option<V>) -> V) -> V
    where
        K: Clone + Eq + Hash,
        V: Clone,
    {
        let next = self.next.lock().entry(key.clone()).or_default().clone();

        next.get_or_init(|| {
            let prev = self.prev.lock().get(&key).cloned();
            let prev = prev.and_then(|prev| prev.get().cloned());
            let prev = prev.or_else(|| {
                let global = self.global.lock();
                global.get(&key).map(|global| global.1.clone())
            });

            let res = compute(prev);

            // Publish the result to the global map, keeping only the result
            // of the newest revision per key.
            let global = self.global.lock();
            let entry = global.entry(key.clone());
            use dashmap::mapref::entry::Entry;
            match entry {
                Entry::Occupied(mut entry) => {
                    let (revision, _) = entry.get();
                    if *revision < self.revision {
                        entry.insert((self.revision, res.clone()));
                    }
                }
                Entry::Vacant(entry) => {
                    entry.insert((self.revision, res.clone()));
                }
            }

            res
        })
        .clone()
    }

    /// Creates the cache for the next revision, moving `next` to `prev`.
    fn crawl(&self, revision: usize) -> Self {
        Self {
            revision,
            prev: self.next.clone(),
            global: self.global.clone(),
            next: Default::default(),
        }
    }
}

/// A cache map keyed by 128-bit hashes, with each entry tagged by the
/// lifetime that created it.
#[derive(Clone)]
struct CacheMap<T> {
    m: Arc<FxDashMap<u128, (u64, T)>>,
}

impl<T> Default for CacheMap<T> {
    fn default() -> Self {
        Self {
            m: Default::default(),
        }
    }
}

impl<T> CacheMap<T> {
    fn clear(&self) {
        self.m.clear();
    }

    fn retain(&self, mut f: impl FnMut(&mut (u64, T)) -> bool) {
        self.m.retain(|_k, v| f(v));
    }
}

impl<T: Default + Clone> CacheMap<T> {
    fn entry(&self, key: u128, lifetime: u64) -> T {
        let entry = self.m.entry(key);
        let entry = entry.or_insert_with(|| (lifetime, T::default()));
        entry.1.clone()
    }
}
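
// Minimal, illustrative tests for the two cache containers above. The keys,
// values, and revision numbers are arbitrary; they only exercise the caching
// behavior (reuse of a previous revision's result, lifetime tagging), not any
// real analysis data.
#[cfg(test)]
mod cache_map_tests {
    use super::*;

    #[test]
    fn incr_cache_reuses_previous_revision() {
        let rev0: IncrCacheMap<u128, u64> = IncrCacheMap::default();

        // First computation: no previous value is available as a hint.
        let first = rev0.compute(1, |prev| prev.unwrap_or(0) + 1);
        assert_eq!(first, 1);

        // Crawling to the next revision exposes the old result as the hint.
        let rev1 = rev0.crawl(1);
        let second = rev1.compute(1, |prev| prev.unwrap_or(0) + 1);
        assert_eq!(second, 2);
    }

    #[test]
    fn cache_map_entry_is_tagged_with_lifetime() {
        let cache: CacheMap<u64> = CacheMap::default();

        // The entry is created with the default value and the given lifetime.
        assert_eq!(cache.entry(42, 7), 0);

        // Entries outside the retention window can then be dropped.
        cache.retain(|(lifetime, _)| *lifetime > 7);
        assert_eq!(cache.m.len(), 0);
    }
}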

/// Shared workers that rate-limit expensive dynamic analyses.
#[derive(Default)]
pub struct AnalysisGlobalWorkers {
    /// The rate limiter for import analysis.
    import: RateLimiter,
    /// The rate limiter for expression analysis.
    expression: RateLimiter,
    /// The rate limiter for tooltip analysis.
    tooltip: RateLimiter,
}

/// The global caches shared across analysis revisions.
#[derive(Default, Clone)]
pub struct AnalysisGlobalCaches {
    lifetime: Arc<AtomicU64>,
    clear_lifetime: Arc<AtomicU64>,
    def_signatures: CacheMap<DeferredCompute<Option<Signature>>>,
    static_signatures: CacheMap<DeferredCompute<Option<Signature>>>,
    signatures: CacheMap<DeferredCompute<Option<Signature>>>,
    docstrings: CacheMap<DeferredCompute<Option<Arc<DocString>>>>,
    terms: CacheMap<(Value, Ty)>,
}

/// The caches that live as long as a single query request.
#[derive(Default)]
pub struct AnalysisLocalCaches {
    modules: HashMap<TypstFileId, ModuleAnalysisLocalCache>,
    completion_files: OnceLock<Vec<TypstFileId>>,
    root_files: OnceLock<Vec<TypstFileId>>,
    module_deps: OnceLock<HashMap<TypstFileId, ModuleDependency>>,
}

/// The per-module caches that live as long as a single query request.
#[derive(Default)]
pub struct ModuleAnalysisLocalCache {
    expr_stage: OnceLock<ExprInfo>,
    type_check: OnceLock<Arc<TypeInfo>>,
}

/// The revision-scoped cache store for analysis results.
#[derive(Default)]
pub struct AnalysisRevCache {
    default_slot: AnalysisRevSlot,
    manager: RevisionManager<AnalysisRevSlot>,
}

impl RevisionManagerLike for AnalysisRevCache {
    fn gc(&mut self, rev: usize) {
        self.manager.gc(rev);

        // For each cached stage, keep only the entries of the newest revision
        // seen per file.
        {
            let mut max_ei = FxHashMap::default();
            let es = self.default_slot.expr_stage.global.lock();
            for r in es.iter() {
                let rev: &mut usize = max_ei.entry(r.1.fid).or_default();
                *rev = (*rev).max(r.1.revision);
            }
            es.retain(|_, r| r.1.revision == *max_ei.get(&r.1.fid).unwrap_or(&0));
        }

        {
            let mut max_ti = FxHashMap::default();
            let ts = self.default_slot.type_check.global.lock();
            for r in ts.iter() {
                let rev: &mut usize = max_ti.entry(r.1.fid).or_default();
                *rev = (*rev).max(r.1.revision);
            }
            ts.retain(|_, r| r.1.revision == *max_ti.get(&r.1.fid).unwrap_or(&0));
        }

        {
            let mut max_li = FxHashMap::default();
            let ts = self.default_slot.lint.global.lock();
            for r in ts.iter() {
                let rev: &mut usize = max_li.entry(r.1.fid).or_default();
                *rev = (*rev).max(r.1.revision);
            }
            ts.retain(|_, r| r.1.revision == *max_li.get(&r.1.fid).unwrap_or(&0));
        }
    }
}

impl AnalysisRevCache {
    fn clear(&mut self) {
        self.manager.clear();
        self.default_slot = Default::default();
    }

    /// Finds the revision slot for the given revision, deriving it from an
    /// existing slot when possible.
    fn find_revision(
        &mut self,
        revision: NonZeroUsize,
        lg: &AnalysisRevLock,
    ) -> Arc<RevisionSlot<AnalysisRevSlot>> {
        lg.inner.access(revision);
        self.manager.find_revision(revision, |slot_base| {
            log::debug!("analysis revision {} is created", revision.get());
            slot_base
                .map(|slot| AnalysisRevSlot {
                    revision: slot.revision,
                    expr_stage: slot.data.expr_stage.crawl(revision.get()),
                    type_check: slot.data.type_check.crawl(revision.get()),
                    lint: slot.data.lint.crawl(revision.get()),
                })
                .unwrap_or_else(|| self.default_slot.clone())
        })
    }
}

/// A lock over an analysis revision, also carrying the semantic token context
/// requested for the query.
pub struct AnalysisRevLock {
    inner: RevisionLock,
    tokens: Option<SemanticTokenContext>,
    grid: Arc<Mutex<AnalysisRevCache>>,
}

impl Drop for AnalysisRevLock {
    fn drop(&mut self) {
        let mut mu = self.grid.lock();
        let gc_revision = mu.manager.unlock(&mut self.inner);

        if let Some(gc_revision) = gc_revision {
            let grid = self.grid.clone();
            rayon::spawn(move || {
                grid.lock().gc(gc_revision);
            });
        }
    }
}

#[derive(Default, Clone)]
struct AnalysisRevSlot {
    revision: usize,
    expr_stage: IncrCacheMap<u128, ExprInfo>,
    type_check: IncrCacheMap<u128, Arc<TypeInfo>>,
    lint: IncrCacheMap<u128, LintInfo>,
}

impl Drop for AnalysisRevSlot {
    fn drop(&mut self) {
        log::debug!("analysis revision {} is dropped", self.revision);
    }
}

fn ceil_char_boundary(text: &str, mut cursor: usize) -> usize {
    while cursor < text.len() && !text.is_char_boundary(cursor) {
        cursor += 1;
    }

    cursor.min(text.len())
}
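
// A small sanity check for `ceil_char_boundary`: offsets inside a multi-byte
// character are rounded up to the next boundary and offsets past the end are
// clamped. The sample string is arbitrary.
#[cfg(test)]
mod char_boundary_tests {
    use super::ceil_char_boundary;

    #[test]
    fn rounds_up_and_clamps() {
        let text = "aéb"; // 'é' occupies bytes 1..3
        assert_eq!(ceil_char_boundary(text, 0), 0);
        assert_eq!(ceil_char_boundary(text, 2), 3); // inside 'é'
        assert_eq!(ceil_char_boundary(text, 10), text.len()); // clamped
    }
}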

#[typst_macros::time]
#[comemo::memoize]
fn analyze_bib(
    world: Tracked<dyn World + '_>,
    introspector: Tracked<Introspector>,
) -> Option<Arc<BibInfo>> {
    let bib_elem = BibliographyElem::find(introspector).ok()?;

    let csl_style = bib_elem.style.get_cloned(StyleChain::default()).derived;

    let Value::Array(paths) = bib_elem.sources.clone().into_value() else {
        return None;
    };
    let elem_fid = bib_elem.span().id()?;
    let files = paths
        .into_iter()
        .flat_map(|path| path.cast().ok())
        .flat_map(|bib_path: EcoString| {
            let bib_fid = resolve_id_by_path(world.deref(), elem_fid, &bib_path)?;
            Some((bib_fid, world.file(bib_fid).ok()?))
        });

    bib_info(csl_style, files)
}

#[comemo::memoize]
fn loc_info(bytes: Bytes) -> Option<EcoVec<(usize, String)>> {
    let mut loc = EcoVec::new();
    let mut offset = 0;
    for line in bytes.split(|byte| *byte == b'\n') {
        loc.push((offset, String::from_utf8(line.to_owned()).ok()?));
        offset += line.len() + 1;
    }
    Some(loc)
}

fn find_loc(
    len: usize,
    loc: &EcoVec<(usize, String)>,
    mut offset: usize,
    encoding: PositionEncoding,
) -> Option<LspPosition> {
    if offset > len {
        offset = len;
    }

    let r = match loc.binary_search_by_key(&offset, |line| line.0) {
        Ok(i) => i,
        Err(i) => i - 1,
    };

    let (start, s) = loc.get(r)?;
    let byte_offset = offset.saturating_sub(*start);

    let column_prefix = if byte_offset <= s.len() {
        &s[..byte_offset]
    } else {
        let line = (r + 1) as u32;
        return Some(LspPosition { line, character: 0 });
    };

    let line = r as u32;
    let character = match encoding {
        PositionEncoding::Utf8 => column_prefix.chars().count(),
        PositionEncoding::Utf16 => column_prefix.chars().map(|ch| ch.len_utf16()).sum(),
    } as u32;

    Some(LspPosition { line, character })
}
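
// An illustrative test of `find_loc` over a hand-built line table of the same
// shape that `loc_info` produces. The content is arbitrary; it checks that a
// byte offset on the second line maps to the expected line/character pair for
// both supported encodings.
#[cfg(test)]
mod find_loc_tests {
    use super::*;

    #[test]
    fn maps_byte_offsets_to_positions() {
        // Two lines: "hello\n" occupies bytes 0..6, "wörld" occupies 6..12.
        let loc: EcoVec<(usize, String)> =
            vec![(0, "hello".to_owned()), (6, "wörld".to_owned())]
                .into_iter()
                .collect();
        let len = 12;

        // Offset 9 points right after the two-byte 'ö' on the second line.
        let pos = find_loc(len, &loc, 9, PositionEncoding::Utf8).unwrap();
        assert_eq!((pos.line, pos.character), (1, 2));

        let pos = find_loc(len, &loc, 9, PositionEncoding::Utf16).unwrap();
        assert_eq!((pos.line, pos.character), (1, 2));
    }
}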

/// A context for searching files across the workspace.
pub struct SearchCtx<'a> {
    /// The local analysis context.
    pub ctx: &'a mut LocalContext,
    /// The files that have already been searched.
    pub searched: HashSet<TypstFileId>,
    /// The files that still need to be searched.
    pub worklist: Vec<TypstFileId>,
}

impl SearchCtx<'_> {
    /// Pushes a file onto the worklist if it has not been searched yet, and
    /// returns whether it was pushed.
    pub fn push(&mut self, fid: TypstFileId) -> bool {
        if self.searched.insert(fid) {
            self.worklist.push(fid);
            true
        } else {
            false
        }
    }

    /// Pushes the dependents of a file onto the worklist.
    pub fn push_dependents(&mut self, fid: TypstFileId) {
        let deps = self.ctx.module_dependencies().get(&fid);
        let dependents = deps.map(|dep| dep.dependents.clone()).into_iter().flatten();
        for dep in dependents {
            self.push(dep);
        }
    }
}

/// A rate limiter that runs tasks one at a time.
#[derive(Default)]
pub struct RateLimiter {
    token: std::sync::Mutex<()>,
}

impl RateLimiter {
    /// Executes the task while holding the limiter's token.
    #[must_use]
    pub fn enter<T>(&self, f: impl FnOnce() -> T) -> T {
        let _c = self.token.lock().unwrap();
        f()
    }
}
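
// A minimal usage sketch for `RateLimiter`: closures passed to `enter` run one
// at a time, which keeps the expensive dynamic analyses above from piling up.
// The closure here is arbitrary.
#[cfg(test)]
mod rate_limiter_tests {
    use super::RateLimiter;

    #[test]
    fn enter_runs_the_closure_and_returns_its_value() {
        let limiter = RateLimiter::default();
        let value = limiter.enter(|| 21 * 2);
        assert_eq!(value, 42);
    }
}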