tvix_eval/compiler/
mod.rs

//! This module implements a compiler for compiling the rnix AST
//! representation to Tvix bytecode.
//!
//! A note on `unwrap()`: This module contains a lot of calls to
//! `unwrap()` or `expect(...)` on data structures returned by `rnix`.
//! The reason for this is that rnix uses the same data structures to
//! represent broken and correct ASTs, so all typed AST variants have
//! the ability to represent an incorrect node.
//!
//! However, at the time that the AST is passed to the compiler we
//! have verified that `rnix` considers the code to be correct, so all
//! of these accesses are safe. In cases where an invariant is guaranteed
//! by the code in this module, `debug_assert!` has been used to catch
//! mistakes early during development.

mod bindings;
mod import;
mod optimiser;
mod scope;

use codemap::Span;
use rnix::ast::{self, AstToken};
use rustc_hash::FxHashMap;
use smol_str::SmolStr;
use std::collections::BTreeMap;
use std::path::{Path, PathBuf};
use std::rc::{Rc, Weak};

use crate::chunk::Chunk;
use crate::errors::{CatchableErrorKind, Error, ErrorKind, EvalResult};
use crate::observer::CompilerObserver;
use crate::opcode::{CodeIdx, Op, Position, UpvalueIdx};
use crate::spans::ToSpan;
use crate::value::{Closure, Formals, Lambda, NixAttrs, Thunk, Value};
use crate::warnings::{EvalWarning, WarningKind};
use crate::CoercionKind;
use crate::SourceCode;

use self::scope::{LocalIdx, LocalPosition, Scope, Upvalue, UpvalueKind};

/// Represents the result of compiling a piece of Nix code. If
/// compilation was successful, the resulting bytecode can be passed
/// to the VM.
pub struct CompilationOutput {
    pub lambda: Rc<Lambda>,
    pub warnings: Vec<EvalWarning>,
    pub errors: Vec<Error>,
}

/// Represents the lambda currently being compiled.
struct LambdaCtx {
    lambda: Lambda,
    scope: Scope,
    captures_with_stack: bool,
}

impl LambdaCtx {
    fn new() -> Self {
        LambdaCtx {
            lambda: Lambda::default(),
            scope: Default::default(),
            captures_with_stack: false,
        }
    }

    fn inherit(&self) -> Self {
        LambdaCtx {
            lambda: Lambda::default(),
            scope: self.scope.inherit(),
            captures_with_stack: false,
        }
    }
}

/// When compiling functions with an argument attribute set destructuring pattern,
/// we need to do multiple passes over the declared formal arguments when setting
/// up their local bindings (similarly to `let … in` expressions and recursive
/// attribute sets). For this purpose, this enum is used to represent the two
/// kinds of formal arguments:
///
/// - `TrackedFormal::NoDefault` is a formal argument that is always required and
///   causes an evaluation error if the corresponding attribute is missing in a
///   function call.
/// - `TrackedFormal::WithDefault` may be missing in the passed attribute set,
///   in which case a `default_expr` will be evaluated and placed in the formal
///   argument's local variable slot.
enum TrackedFormal {
    NoDefault {
        local_idx: LocalIdx,
        pattern_entry: ast::PatEntry,
    },
    WithDefault {
        local_idx: LocalIdx,
        /// Extra phantom local used for coordinating runtime dispatching not observable to
        /// the language user. Detailed description in `compile_param_pattern()`.
        finalise_request_idx: LocalIdx,
        default_expr: ast::Expr,
        pattern_entry: ast::PatEntry,
    },
}

impl TrackedFormal {
    fn pattern_entry(&self) -> &ast::PatEntry {
        match self {
            TrackedFormal::NoDefault { pattern_entry, .. } => pattern_entry,
            TrackedFormal::WithDefault { pattern_entry, .. } => pattern_entry,
        }
    }

    fn local_idx(&self) -> LocalIdx {
        match self {
            TrackedFormal::NoDefault { local_idx, .. } => *local_idx,
            TrackedFormal::WithDefault { local_idx, .. } => *local_idx,
        }
    }
}

/// The map of globally available functions and other values that
/// should implicitly be resolvable in the global scope.
pub type GlobalsMap = FxHashMap<&'static str, Value>;

/// Set of builtins that (if they exist) should be made available in
/// the global scope, meaning that they can be accessed not just
/// through `builtins.<name>`, but directly as `<name>`. This is not
/// configurable; it is based on what Nix 2.3 exposed.
const GLOBAL_BUILTINS: &[&str] = &[
    "abort",
    "baseNameOf",
    "derivation",
    "derivationStrict",
    "dirOf",
    "fetchGit",
    "fetchMercurial",
    "fetchTarball",
    "fromTOML",
    "import",
    "isNull",
    "map",
    "placeholder",
    "removeAttrs",
    "scopedImport",
    "throw",
    "toString",
    "__curPos",
];

pub struct Compiler<'source, 'observer> {
    contexts: Vec<LambdaCtx>,
    warnings: Vec<EvalWarning>,
    errors: Vec<Error>,
    root_dir: PathBuf,

    /// Carries all known global tokens, the full set of which is
    /// created when the compiler is invoked.
    ///
    /// Each global has an associated token, which when encountered as
    /// an identifier is resolved against the scope poisoning logic,
    /// and a function that should emit code for the token.
    globals: Rc<GlobalsMap>,

    /// Reference to the struct holding all of the source code, which
    /// is used for error creation.
    source: &'source SourceCode,

    /// File reference in the source map for the current file, which
    /// is used for creating spans.
    file: &'source codemap::File,

    /// Carry an observer for the compilation process, which is called
    /// whenever a chunk is emitted.
    observer: &'observer mut dyn CompilerObserver,

    /// Carry a count of nested scopes which have requested the
    /// compiler not to emit anything. This is used for compiling dead
    /// code branches to catch errors & warnings in them.
    dead_scope: usize,
}

impl Compiler<'_, '_> {
    pub(super) fn span_for<S: ToSpan>(&self, to_span: &S) -> Span {
        to_span.span_for(self.file)
    }
}

/// Compiler construction
impl<'source, 'observer> Compiler<'source, 'observer> {
    pub(crate) fn new(
        location: Option<PathBuf>,
        globals: Rc<GlobalsMap>,
        env: Option<&FxHashMap<SmolStr, Value>>,
        source: &'source SourceCode,
        file: &'source codemap::File,
        observer: &'observer mut dyn CompilerObserver,
    ) -> EvalResult<Self> {
        let mut root_dir = match location {
            Some(dir) if cfg!(target_arch = "wasm32") || dir.is_absolute() => Ok(dir),
            _ => {
                let current_dir = std::env::current_dir().map_err(|e| {
                    Error::new(
                        ErrorKind::RelativePathResolution(format!(
                            "could not determine current directory: {e}"
                        )),
                        file.span,
                        source.clone(),
                    )
                })?;
                if let Some(dir) = location {
                    Ok(current_dir.join(dir))
                } else {
                    Ok(current_dir)
                }
            }
        }?;

        // If the path passed from the caller points to a file, the
        // filename itself needs to be truncated as this must point to a
        // directory.
        if root_dir.is_file() {
            root_dir.pop();
        }

        #[cfg(not(target_arch = "wasm32"))]
        debug_assert!(root_dir.is_absolute());

        let mut compiler = Self {
            root_dir,
            source,
            file,
            observer,
            globals,
            contexts: vec![LambdaCtx::new()],
            warnings: vec![],
            errors: vec![],
            dead_scope: 0,
        };

        if let Some(env) = env {
            compiler.compile_env(env);
        }

        Ok(compiler)
    }
}

// Helper functions for emitting code and metadata to the internal
// structures of the compiler.
impl Compiler<'_, '_> {
    fn context(&self) -> &LambdaCtx {
        self.contexts.last().unwrap()
    }

    fn context_mut(&mut self) -> &mut LambdaCtx {
        self.contexts.last_mut().unwrap()
    }

    fn chunk(&mut self) -> &mut Chunk {
        &mut self.context_mut().lambda.chunk
    }

    fn scope(&self) -> &Scope {
        &self.context().scope
    }

    fn scope_mut(&mut self) -> &mut Scope {
        &mut self.context_mut().scope
    }

    /// Push a single instruction to the current bytecode chunk and
    /// track the source span from which it was compiled.
    fn push_op<T: ToSpan>(&mut self, data: Op, node: &T) -> CodeIdx {
        if self.dead_scope > 0 {
            return CodeIdx(0);
        }

        let span = self.span_for(node);
        CodeIdx(self.chunk().push_op(data, span))
    }

    fn push_u8(&mut self, data: u8) {
        if self.dead_scope > 0 {
            return;
        }

        self.chunk().code.push(data);
    }

    fn push_uvarint(&mut self, data: u64) {
        if self.dead_scope > 0 {
            return;
        }

        self.chunk().push_uvarint(data);
    }

    fn push_u16(&mut self, data: u16) {
        if self.dead_scope > 0 {
            return;
        }

        self.chunk().push_u16(data);
    }

    /// Emit a single constant to the current bytecode chunk and track
    /// the source span from which it was compiled.
    pub(super) fn emit_constant<T: ToSpan>(&mut self, value: Value, node: &T) {
        if self.dead_scope > 0 {
            return;
        }

        let idx = self.chunk().push_constant(value);
        self.push_op(Op::Constant, node);
        self.push_uvarint(idx.0 as u64);
    }
}

// Actual code-emitting AST traversal methods.
impl Compiler<'_, '_> {
    fn compile(&mut self, slot: LocalIdx, expr: ast::Expr) {
        let expr = optimiser::optimise_expr(self, slot, expr);

        match &expr {
            ast::Expr::Literal(literal) => self.compile_literal(literal),
            ast::Expr::Path(path) => self.compile_path(slot, path),
            ast::Expr::Str(s) => self.compile_str(slot, s),

            ast::Expr::UnaryOp(op) => self.thunk(slot, op, move |c, s| c.compile_unary_op(s, op)),

            ast::Expr::BinOp(binop) => {
                self.thunk(slot, binop, move |c, s| c.compile_binop(s, binop))
            }

            ast::Expr::HasAttr(has_attr) => {
                self.thunk(slot, has_attr, move |c, s| c.compile_has_attr(s, has_attr))
            }

            ast::Expr::List(list) => self.thunk(slot, list, move |c, s| c.compile_list(s, list)),

            ast::Expr::AttrSet(attrs) => {
                self.thunk(slot, attrs, move |c, s| c.compile_attr_set(s, attrs))
            }

            ast::Expr::Select(select) => {
                self.thunk(slot, select, move |c, s| c.compile_select(s, select))
            }

            ast::Expr::Assert(assert) => {
                self.thunk(slot, assert, move |c, s| c.compile_assert(s, assert))
            }

            ast::Expr::IfElse(if_else) => {
                self.thunk(slot, if_else, move |c, s| c.compile_if_else(s, if_else))
            }

            ast::Expr::LetIn(let_in) => {
                self.thunk(slot, let_in, move |c, s| c.compile_let_in(s, let_in))
            }

            ast::Expr::Ident(ident) => self.compile_ident(slot, ident),
            ast::Expr::With(with) => self.thunk(slot, with, |c, s| c.compile_with(s, with)),
            ast::Expr::Lambda(lambda) => self.thunk(slot, lambda, move |c, s| {
                c.compile_lambda_or_thunk(false, s, lambda, |c, s| c.compile_lambda(s, lambda))
            }),
            ast::Expr::Apply(apply) => {
                self.thunk(slot, apply, move |c, s| c.compile_apply(s, apply))
            }

            // Parenthesized expressions are simply unwrapped, leaving
            // their value on the stack.
            ast::Expr::Paren(paren) => self.compile(slot, paren.expr().unwrap()),

            ast::Expr::LegacyLet(legacy_let) => self.thunk(slot, legacy_let, move |c, s| {
                c.compile_legacy_let(s, legacy_let)
            }),

            ast::Expr::Root(_) => unreachable!("there cannot be more than one root"),
            ast::Expr::Error(_) => unreachable!("compile is only called on validated trees"),
        }
    }

    /// Compiles an expression, but does not emit any code for it as
    /// it is considered dead. This will still catch errors and
    /// warnings in that expression.
    ///
    /// A warning about the code being dead is assumed to have already
    /// been emitted by the caller.
    fn compile_dead_code(&mut self, slot: LocalIdx, node: ast::Expr) {
        self.dead_scope += 1;
        self.compile(slot, node);
        self.dead_scope -= 1;
    }

    fn compile_literal(&mut self, node: &ast::Literal) {
        let value = match node.kind() {
            ast::LiteralKind::Float(f) => Value::Float(f.value().unwrap()),
            ast::LiteralKind::Integer(i) => match i.value() {
                Ok(v) => Value::Integer(v),
                Err(err) => return self.emit_error(node, err.into()),
            },

            ast::LiteralKind::Uri(u) => {
                self.emit_warning(node, WarningKind::DeprecatedLiteralURL);
                Value::from(u.syntax().text())
            }
        };

        self.emit_constant(value, node);
    }

    fn compile_path(&mut self, slot: LocalIdx, node: &ast::Path) {
        // TODO(tazjin): placeholder implementation while waiting for
        // https://github.com/nix-community/rnix-parser/pull/96

        let raw_path = node.to_string();
        let path = if raw_path.starts_with('/') {
            Path::new(&raw_path).to_owned()
        } else if raw_path.starts_with('~') {
            // We assume that home paths start with ~/ or fail to parse
            // TODO: this should be checked using a parse-fail test.
            debug_assert!(raw_path.len() > 2 && raw_path.starts_with("~/"));

            let home_relative_path = &raw_path[2..];
            self.emit_constant(
                Value::UnresolvedPath(Box::new(home_relative_path.into())),
                node,
            );
            self.push_op(Op::ResolveHomePath, node);
            return;
        } else if raw_path.starts_with('<') {
            // TODO: decide what to do with findFile
            if raw_path.len() == 2 {
                return self.emit_constant(
                    Value::Catchable(Box::new(CatchableErrorKind::NixPathResolution(
                        "Empty <> path not allowed".into(),
                    ))),
                    node,
                );
            }
            let path = &raw_path[1..(raw_path.len() - 1)];
            // Make a thunk to resolve the path (without using `findFile`, at least for now?)
            return self.thunk(slot, node, move |c, _| {
                c.emit_constant(Value::UnresolvedPath(Box::new(path.into())), node);
                c.push_op(Op::FindFile, node);
            });
        } else {
            let mut buf = self.root_dir.clone();
            buf.push(&raw_path);
            buf
        };

        // TODO: Use https://github.com/rust-lang/rfcs/issues/2208
        // once it is available
        let value = Value::Path(Box::new(crate::value::canon_path(path)));
        self.emit_constant(value, node);
    }

    /// Helper that compiles the given string parts strictly. The caller
    /// (`compile_str`) needs to figure out if the result of compiling this
    /// needs to be thunked or not.
    fn compile_str_parts(
        &mut self,
        slot: LocalIdx,
        parent_node: &ast::Str,
        parts: Vec<ast::InterpolPart<String>>,
    ) {
        // The string parts are produced in literal order; however,
        // they need to be reversed on the stack in order to
        // efficiently create the real string in case of
        // interpolation.
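        //
        // For example (a sketch): "a${b}c" pushes "c", then the
        // coerced value of `b`, then "a", and Op::Interpolate with an
        // operand of 3 then assembles them in source order.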
        for part in parts.iter().rev() {
            match part {
                // Interpolated expressions are compiled as normal and
                // dealt with by the VM before being assembled into
                // the final string. We need to coerce them here,
                // so OpInterpolate definitely has a string to consume.
                ast::InterpolPart::Interpolation(ipol) => {
                    self.compile(slot, ipol.expr().unwrap());
                    // implicitly forces as well
                    self.push_op(Op::CoerceToString, ipol);

                    let encoded: u8 = CoercionKind {
                        strong: false,
                        import_paths: true,
                    }
                    .into();

                    self.push_u8(encoded);
                }

                ast::InterpolPart::Literal(lit) => {
                    self.emit_constant(Value::from(lit.as_str()), parent_node);
                }
            }
        }

        if parts.len() != 1 {
            self.push_op(Op::Interpolate, parent_node);
            self.push_uvarint(parts.len() as u64);
        }
    }

    fn compile_str(&mut self, slot: LocalIdx, node: &ast::Str) {
        let parts = node.normalized_parts();

        // We need to thunk string expressions if they are the result of
        // interpolation. A string that only consists of a single part (`"${foo}"`)
        // can't desugar to the enclosed expression (`foo`) because we need to
        // coerce the result to a string value. This would require forcing the
        // value of the inner expression, so we need to wrap it in another thunk.
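        //
        // E.g. "${foo}" consists of a single interpolation part and
        // must be thunked, while a plain literal such as "foo" can be
        // compiled strictly.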
        if parts.len() != 1 || matches!(&parts[0], ast::InterpolPart::Interpolation(_)) {
            self.thunk(slot, node, move |c, s| {
                c.compile_str_parts(s, node, parts);
            });
        } else {
            self.compile_str_parts(slot, node, parts);
        }
    }

    fn compile_unary_op(&mut self, slot: LocalIdx, op: &ast::UnaryOp) {
        self.compile(slot, op.expr().unwrap());
        self.emit_force(op);

        let opcode = match op.operator().unwrap() {
            ast::UnaryOpKind::Invert => Op::Invert,
            ast::UnaryOpKind::Negate => Op::Negate,
        };

        self.push_op(opcode, op);
    }

    fn compile_binop(&mut self, slot: LocalIdx, op: &ast::BinOp) {
        use ast::BinOpKind;

        // Short-circuiting and other strange operators, which are
        // under the same node type as NODE_BIN_OP, but need to be
        // handled separately (i.e. before compiling the expressions
        // used for standard binary operators).

        match op.operator().unwrap() {
            BinOpKind::And => return self.compile_and(slot, op),
            BinOpKind::Or => return self.compile_or(slot, op),
            BinOpKind::Implication => return self.compile_implication(slot, op),
            _ => {}
        };

        // For all other operators, the two values need to be left on
        // the stack in the correct order before pushing the
        // instruction for the operation itself.
        self.compile(slot, op.lhs().unwrap());
        self.emit_force(&op.lhs().unwrap());

        self.compile(slot, op.rhs().unwrap());
        self.emit_force(&op.rhs().unwrap());

        match op.operator().unwrap() {
            BinOpKind::Add => self.push_op(Op::Add, op),
            BinOpKind::Sub => self.push_op(Op::Sub, op),
            BinOpKind::Mul => self.push_op(Op::Mul, op),
            BinOpKind::Div => self.push_op(Op::Div, op),
            BinOpKind::Update => self.push_op(Op::AttrsUpdate, op),
            BinOpKind::Equal => self.push_op(Op::Equal, op),
            BinOpKind::Less => self.push_op(Op::Less, op),
            BinOpKind::LessOrEq => self.push_op(Op::LessOrEq, op),
            BinOpKind::More => self.push_op(Op::More, op),
            BinOpKind::MoreOrEq => self.push_op(Op::MoreOrEq, op),
            BinOpKind::Concat => self.push_op(Op::Concat, op),

            BinOpKind::NotEqual => {
                self.push_op(Op::Equal, op);
                self.push_op(Op::Invert, op)
            }

            // Handled by separate branch above.
            BinOpKind::And | BinOpKind::Implication | BinOpKind::Or => {
                unreachable!()
            }
        };
    }

    fn compile_and(&mut self, slot: LocalIdx, node: &ast::BinOp) {
        debug_assert!(
            matches!(node.operator(), Some(ast::BinOpKind::And)),
            "compile_and called with wrong operator kind: {:?}",
            node.operator(),
        );
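
        // A sketch of the code this emits for `a && b`:
        //
        //    <lhs> Force
        //    JumpIfCatchable ───────┐
        //    JumpIfFalse ────┐      │
        //    Pop             │      │
        //    <rhs> Force     │      │
        //    AssertBool ←────┘      │
        //    ... ←──────────────────┘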

        // Leave left-hand side value on the stack.
        self.compile(slot, node.lhs().unwrap());
        self.emit_force(&node.lhs().unwrap());

        let throw_idx = self.push_op(Op::JumpIfCatchable, node);
        self.push_u16(0);
        // If this value is false, jump over the right-hand side - the
        // whole expression is false.
        let end_idx = self.push_op(Op::JumpIfFalse, node);
        self.push_u16(0);

        // Otherwise, remove the previous value and leave the
        // right-hand side on the stack. Its result is now the value
        // of the whole expression.
        self.push_op(Op::Pop, node);
        self.compile(slot, node.rhs().unwrap());
        self.emit_force(&node.rhs().unwrap());

        self.patch_jump(end_idx);
        self.push_op(Op::AssertBool, node);
        self.patch_jump(throw_idx);
    }

    fn compile_or(&mut self, slot: LocalIdx, node: &ast::BinOp) {
        debug_assert!(
            matches!(node.operator(), Some(ast::BinOpKind::Or)),
            "compile_or called with wrong operator kind: {:?}",
            node.operator(),
        );

        // Leave left-hand side value on the stack.
        self.compile(slot, node.lhs().unwrap());
        self.emit_force(&node.lhs().unwrap());

        let throw_idx = self.push_op(Op::JumpIfCatchable, node);
        self.push_u16(0);
        // Opposite of above: If this value is **true**, we can
        // short-circuit the right-hand side.
        let end_idx = self.push_op(Op::JumpIfTrue, node);
        self.push_u16(0);
        self.push_op(Op::Pop, node);
        self.compile(slot, node.rhs().unwrap());
        self.emit_force(&node.rhs().unwrap());

        self.patch_jump(end_idx);
        self.push_op(Op::AssertBool, node);
        self.patch_jump(throw_idx);
    }

    fn compile_implication(&mut self, slot: LocalIdx, node: &ast::BinOp) {
        debug_assert!(
            matches!(node.operator(), Some(ast::BinOpKind::Implication)),
            "compile_implication called with wrong operator kind: {:?}",
            node.operator(),
        );

        // Leave left-hand side value on the stack and invert it.
        self.compile(slot, node.lhs().unwrap());
        self.emit_force(&node.lhs().unwrap());
        let throw_idx = self.push_op(Op::JumpIfCatchable, node);
        self.push_u16(0);
        self.push_op(Op::Invert, node);

        // Exactly as `||` (because `a -> b` = `!a || b`).
        let end_idx = self.push_op(Op::JumpIfTrue, node);
        self.push_u16(0);

        self.push_op(Op::Pop, node);
        self.compile(slot, node.rhs().unwrap());
        self.emit_force(&node.rhs().unwrap());

        self.patch_jump(end_idx);
        self.push_op(Op::AssertBool, node);
        self.patch_jump(throw_idx);
    }

    /// Compile list literals into equivalent bytecode. List
    /// construction is fairly simple, consisting of pushing code for
    /// each literal element and an instruction with the element
    /// count.
    ///
    /// The VM, after evaluating the code for each element, simply
    /// constructs the list from the given number of elements.
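    ///
    /// For example, `[ a b c ]` compiles to the code for each of the
    /// three elements, followed by `Op::List` with an element count
    /// operand of 3.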
    fn compile_list(&mut self, slot: LocalIdx, node: &ast::List) {
        let mut count = 0;

        // Open a temporary scope to correctly account for stack items
        // that exist during the construction.
        self.scope_mut().begin_scope();

        for item in node.items() {
            // Start tracing new stack slots from the second list
            // element onwards. The first list element is located in
            // the stack slot of the list itself.
            let item_slot = match count {
                0 => slot,
                _ => {
                    let item_span = self.span_for(&item);
                    self.scope_mut().declare_phantom(item_span, false)
                }
            };

            count += 1;
            self.compile(item_slot, item);
            self.scope_mut().mark_initialised(item_slot);
        }

        self.push_op(Op::List, node);
        self.push_uvarint(count as u64);
        self.scope_mut().end_scope();
    }

    fn compile_attr(&mut self, slot: LocalIdx, node: &ast::Attr) {
        match node {
            ast::Attr::Dynamic(dynamic) => {
                self.compile(slot, dynamic.expr().unwrap());
                self.emit_force(&dynamic.expr().unwrap());
            }

            ast::Attr::Str(s) => {
                self.compile_str(slot, s);
                self.emit_force(s);
            }

            ast::Attr::Ident(ident) => self.emit_literal_ident(ident),
        }
    }

    fn compile_has_attr(&mut self, slot: LocalIdx, node: &ast::HasAttr) {
        // Put the attribute set on the stack.
        self.compile(slot, node.expr().unwrap());
        self.emit_force(node);

        // Push all path fragments with an operation for fetching the
        // next nested element, for all fragments except the last one.
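        //
        // E.g. `s ? a.b` emits (a sketch):
        //
        //    <s> Force "a" AttrsTrySelect Force "b" HasAttr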
        for (count, fragment) in node.attrpath().unwrap().attrs().enumerate() {
            if count > 0 {
                self.push_op(Op::AttrsTrySelect, &fragment);
                self.emit_force(&fragment);
            }

            self.compile_attr(slot, &fragment);
        }

        // After the last fragment, emit the actual instruction that
        // leaves a boolean on the stack.
        self.push_op(Op::HasAttr, node);
    }

    /// When compiling select or select_or expressions, an optimisation is
    /// possible if compiling the set emitted a constant attribute set: the
    /// constant can be immediately replaced with the actual value.
    ///
    /// We take care not to emit an error here, as that would interfere with
    /// thunking behaviour (there can be perfectly valid Nix code that accesses
    /// a statically known attribute set that is lacking a key, because that
    /// thunk is never evaluated). If anything is missing, just inform the
    /// caller that the optimisation did not take place and move on. We may want
    /// to emit warnings here in the future.
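    ///
    /// For example, for the expression `{ a = 1; }.a`, the emitted
    /// constant attribute set is rewritten in place to the constant
    /// `1`, avoiding the runtime select entirely.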
    fn optimise_select(&mut self, path: &ast::Attrpath) -> bool {
        // If compiling the set emitted a constant attribute set, the
        // associated constant can immediately be replaced with the
        // actual value (see the doc comment above for why no error is
        // emitted when the key is missing).
        if let Some((Op::Constant, op_idx)) = self.chunk().last_op() {
            let (idx, _) = self.chunk().read_uvarint(op_idx + 1);
            let constant = &mut self.chunk().constants[idx as usize];
            if let Value::Attrs(attrs) = constant {
                let mut path_iter = path.attrs();

                // Only do this optimisation if there is a *single*
                // element in the attribute path. It is extremely
                // unlikely that we'd have a static nested set.
                if let (Some(attr), None) = (path_iter.next(), path_iter.next()) {
                    // Only do this optimisation for statically known attrs.
                    if let Some(ident) = expr_static_attr_str(&attr) {
                        if let Some(selected_value) = attrs.select(ident.as_bytes()) {
                            *constant = selected_value.clone();
                            return true;
                        }
                    }
                }
            }
        }

        false
    }

    fn compile_select(&mut self, slot: LocalIdx, node: &ast::Select) {
        let set = node.expr().unwrap();
        let path = node.attrpath().unwrap();

        if node.or_token().is_some() {
            return self.compile_select_or(slot, set, path, node.default_expr().unwrap());
        }

        // Push the set onto the stack
        self.compile(slot, set.clone());
        if self.optimise_select(&path) {
            return;
        }

        // Compile each key fragment and emit access instructions.
        //
        // TODO: multi-select instruction to avoid re-pushing attrs on
        // nested selects.
        for fragment in path.attrs() {
            // Force the current set value.
            self.emit_force(&set);

            self.compile_attr(slot, &fragment);
            self.push_op(Op::AttrsSelect, &fragment);
        }
    }

    /// Compile an `or` expression into a chunk of conditional jumps.
    ///
    /// If at any point during attribute set traversal a key is
    /// missing, the `OpAttrOrNotFound` instruction will leave a
    /// special sentinel value on the stack.
    ///
    /// After each access, a conditional jump evaluates the top of the
    /// stack and short-circuits to the default value if it sees the
    /// sentinel.
    ///
    /// Code like `{ a.b = 1; }.a.c or 42` yields this bytecode and
    /// runtime stack:
    ///
    /// ```notrust
    ///            Bytecode                     Runtime stack
    ///  ┌────────────────────────────┐   ┌─────────────────────────┐
    ///  │    ...                     │   │ ...                     │
    ///  │ 5  OP_ATTRS(1)             │ → │ 5  [ { a.b = 1; }     ] │
    ///  │ 6  OP_CONSTANT("a")        │ → │ 6  [ { a.b = 1; } "a" ] │
    ///  │ 7  OP_ATTR_OR_NOT_FOUND    │ → │ 7  [ { b = 1; }       ] │
    ///  │ 8  JUMP_IF_NOT_FOUND(13)   │ → │ 8  [ { b = 1; }       ] │
    ///  │ 9  OP_CONSTANT("c")        │ → │ 9  [ { b = 1; } "c"   ] │
    ///  │ 10 OP_ATTR_OR_NOT_FOUND    │ → │ 10 [ NOT_FOUND        ] │
    ///  │ 11 JUMP_IF_NOT_FOUND(13)   │ → │ 11 [                  ] │
    ///  │ 12 JUMP(14)                │   │ ..     jumped over      │
    ///  │ 13 CONSTANT(42)            │ → │ 12 [ 42 ]               │
    ///  │ 14 ...                     │   │ ..   ....               │
    ///  └────────────────────────────┘   └─────────────────────────┘
    /// ```
    fn compile_select_or(
        &mut self,
        slot: LocalIdx,
        set: ast::Expr,
        path: ast::Attrpath,
        default: ast::Expr,
    ) {
        self.compile(slot, set);
        if self.optimise_select(&path) {
            return;
        }

        let mut jumps = vec![];

        for fragment in path.attrs() {
            self.emit_force(&fragment);
            self.compile_attr(slot, &fragment);
            self.push_op(Op::AttrsTrySelect, &fragment);
            jumps.push(self.push_op(Op::JumpIfNotFound, &fragment));
            self.push_u16(0);
        }

        let final_jump = self.push_op(Op::Jump, &path);
        self.push_u16(0);

        for jump in jumps {
            self.patch_jump(jump);
        }

        // Compile the default value expression and patch the final
        // jump to point *beyond* it.
        self.compile(slot, default);
        self.patch_jump(final_jump);
    }

    /// Compile `assert` expressions using jumping instructions in the VM.
    ///
    /// ```notrust
    ///                        ┌─────────────────────┐
    ///                        │ 0  [ conditional ]  │
    ///                        │ 1   JUMP_IF_FALSE  →┼─┐
    ///                        │ 2  [  main body  ]  │ │ Jump to else body if
    ///                       ┌┼─3─←     JUMP        │ │ condition is false.
    ///  Jump over else body  ││ 4   OP_ASSERT_FAIL ←┼─┘
    ///  if condition is true.└┼─5─→     ...         │
    ///                        └─────────────────────┘
    /// ```
    fn compile_assert(&mut self, slot: LocalIdx, node: &ast::Assert) {
        // Compile the assertion condition to leave its value on the stack.
        self.compile(slot, node.condition().unwrap());
        self.emit_force(&node.condition().unwrap());

        let throw_idx = self.push_op(Op::JumpIfCatchable, node);
        self.push_u16(0);

        let then_idx = self.push_op(Op::JumpIfFalse, node);
        self.push_u16(0);

        self.push_op(Op::Pop, node);
        self.compile(slot, node.body().unwrap());

        let else_idx = self.push_op(Op::Jump, node);
        self.push_u16(0);

        self.patch_jump(then_idx);
        self.push_op(Op::Pop, node);
        self.push_op(Op::AssertFail, &node.condition().unwrap());

        self.patch_jump(else_idx);
        self.patch_jump(throw_idx);
    }

    /// Compile conditional expressions using jumping instructions in the VM.
    ///
    /// ```notrust
    ///                        ┌────────────────────┐
    ///                        │ 0  [ conditional ] │
    ///                        │ 1   JUMP_IF_FALSE →┼─┐
    ///                        │ 2  [  main body  ] │ │ Jump to else body if
    ///                       ┌┼─3─←     JUMP       │ │ condition is false.
    ///  Jump over else body  ││ 4  [  else body  ]←┼─┘
    ///  if condition is true.└┼─5─→     ...        │
    ///                        └────────────────────┘
    /// ```
    fn compile_if_else(&mut self, slot: LocalIdx, node: &ast::IfElse) {
        self.compile(slot, node.condition().unwrap());
        self.emit_force(&node.condition().unwrap());

        let throw_idx = self.push_op(Op::JumpIfCatchable, &node.condition().unwrap());
        self.push_u16(0);

        let then_idx = self.push_op(Op::JumpIfFalse, &node.condition().unwrap());
        self.push_u16(0);

        self.push_op(Op::Pop, node); // discard condition value
        self.compile(slot, node.body().unwrap());

        let else_idx = self.push_op(Op::Jump, node);
        self.push_u16(0);

        self.patch_jump(then_idx); // patch jump *to* else body
        self.push_op(Op::Pop, node); // discard condition value
        self.compile(slot, node.else_body().unwrap());

        self.patch_jump(else_idx); // patch jump *over* else body
        self.patch_jump(throw_idx); // patch jump for catchable condition values
    }

    /// Compile `with` expressions by emitting instructions that push
    /// and pop the indices of attribute sets that are implicitly in
    /// scope through `with` on the "with-stack".
    fn compile_with(&mut self, slot: LocalIdx, node: &ast::With) {
        self.scope_mut().begin_scope();
        // TODO: Detect if the namespace is just an identifier, and
        // resolve that directly (thus avoiding duplication on the
        // stack).
        self.compile(slot, node.namespace().unwrap());

        let span = self.span_for(&node.namespace().unwrap());

        // The attribute set from which `with` inherits values
        // occupies a slot on the stack, but this stack slot is not
        // directly accessible. As it must be accounted for to
        // calculate correct offsets, what we call a "phantom" local
        // is declared here.
        let local_idx = self.scope_mut().declare_phantom(span, true);
        let with_idx = self.scope().stack_index(local_idx);

        self.scope_mut().push_with();

        self.push_op(Op::PushWith, &node.namespace().unwrap());
        self.push_uvarint(with_idx.0 as u64);

        self.compile(slot, node.body().unwrap());

        self.push_op(Op::PopWith, node);
        self.scope_mut().pop_with();
        self.cleanup_scope(node);
    }

    /// Compiles pattern function arguments, such as `{ a, b }: ...`.
    ///
    /// These patterns are treated as a special case of locals binding
    /// where the attribute set itself is placed on the first stack
    /// slot of the call frame (either as a phantom, or named in case
    /// of an `@` binding), and the function call sets up the rest of
    /// the stack as if the parameters were rewritten into a `let`
    /// binding.
    ///
    /// For example:
    ///
    /// ```nix
    /// ({ a, b ? 2, c ? a * b, ... }@args: <body>)  { a = 10; }
    /// ```
    ///
    /// would be compiled similarly to a binding such as
    ///
    /// ```nix
    /// let args = { a = 10; };
    /// in let a = args.a;
    ///        b = args.b or 2;
    ///        c = args.c or a * b;
    ///    in <body>
    /// ```
    ///
    /// However, there are two properties of pattern function arguments that
    /// cannot be compiled by desugaring in this way:
    ///
    /// 1. Bindings have to fail if too many arguments are provided. This is
    ///    done by emitting a special instruction that checks the set of keys
    ///    from a constant containing the expected keys.
    /// 2. Formal arguments with a default expression are (as an optimisation
    ///    and because it is simpler) not wrapped in another thunk, instead
    ///    compiled and accessed separately. This means that the default
    ///    expression may never make it into the local's stack slot if the
    ///    argument is provided by the caller. We need to take this into
    ///    account and skip any operations specific to the expression, like
    ///    thunk finalisation, in such cases.
    fn compile_param_pattern(&mut self, pattern: &ast::Pattern) -> (Formals, CodeIdx) {
        let span = self.span_for(pattern);

        let (set_idx, pat_bind_name) = match pattern.pat_bind() {
            Some(name) => {
                let pat_bind_name = name.ident().unwrap().to_string();
                (
                    self.declare_local(&name, pat_bind_name.clone()),
                    Some(pat_bind_name),
                )
            }
            None => (self.scope_mut().declare_phantom(span, true), None),
        };

        // At call time, the attribute set is already at the top of the stack.
        self.scope_mut().mark_initialised(set_idx);
        self.emit_force(pattern);
        let throw_idx = self.push_op(Op::JumpIfCatchable, pattern);
        self.push_u16(0);

        // Evaluation fails on a type error, even if the argument(s) are unused.
        self.push_op(Op::AssertAttrs, pattern);

        let ellipsis = pattern.ellipsis_token().is_some();
        if !ellipsis {
            self.push_op(Op::ValidateClosedFormals, pattern);
        }

        // Similar to `let ... in ...`, we now do multiple passes over
        // the bindings to first declare them, then populate them, and
        // then finalise any necessary recursion into the scope.
        let mut entries: Vec<TrackedFormal> = vec![];
        let mut arguments = BTreeMap::default();

        for entry in pattern.pat_entries() {
            let ident = entry.ident().unwrap();
            let idx = self.declare_local(&ident, ident.to_string());

            arguments.insert(ident.into(), entry.default().is_some());

            if let Some(default_expr) = entry.default() {
                entries.push(TrackedFormal::WithDefault {
                    local_idx: idx,
                    // This phantom is used to track at runtime (!) whether we need to
                    // finalise the local's stack slot or not. The relevant instructions are
                    // emitted in the second pass where the mechanism is explained as well.
                    finalise_request_idx: {
                        let span = self.span_for(&default_expr);
                        self.scope_mut().declare_phantom(span, false)
                    },
                    default_expr,
                    pattern_entry: entry,
                });
            } else {
                entries.push(TrackedFormal::NoDefault {
                    local_idx: idx,
                    pattern_entry: entry,
                });
            }
        }

        // For each of the bindings, push the set on the stack and
        // attempt to select from it.
        let stack_idx = self.scope().stack_index(set_idx);
        for tracked_formal in entries.iter() {
            self.push_op(Op::GetLocal, pattern);
            self.push_uvarint(stack_idx.0 as u64);
            self.emit_literal_ident(&tracked_formal.pattern_entry().ident().unwrap());

            let idx = tracked_formal.local_idx();

            // Use the same mechanism as `compile_select_or` if a
            // default value was provided, or simply select otherwise.
            match tracked_formal {
                TrackedFormal::WithDefault {
                    default_expr,
                    pattern_entry,
                    ..
                } => {
                    // The tricky bit about compiling a formal argument with a default value
                    // is that the default may be a thunk that may depend on the value of
                    // other formal arguments, i.e. may need to be finalised. This
                    // finalisation can only happen if we are actually using the default
                    // value—otherwise OpFinalise will crash on an already finalised (or
                    // non-thunk) value.
                    //
                    // Thus we use an additional local to track whether we wound up
                    // defaulting or not. `FinaliseRequest(false)` indicates that we should
                    // not finalise, as we did not default.
                    //
                    // We are being wasteful with VM stack space in case of default
                    // expressions that don't end up needing to be finalised. Unfortunately
                    // we only know better after compiling the default expression, so
                    // avoiding unnecessary locals would mean we'd need to modify the chunk
                    // after the fact.
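                    //
                    // A sketch of the code emitted per defaulted formal:
                    //
                    //    AttrsTrySelect
                    //    JumpIfNotFound ─────────┐
                    //    FinaliseRequest(false)  │
                    //    Jump ────────────┐      │
                    //    <default expr> ←─┼──────┘
                    //    FinaliseRequest(true)
                    //    ... ←────────────┘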
                    self.push_op(Op::AttrsTrySelect, &pattern_entry.ident().unwrap());
                    let jump_to_default = self.push_op(Op::JumpIfNotFound, default_expr);
                    self.push_u16(0);

                    self.emit_constant(Value::FinaliseRequest(false), default_expr);

                    let jump_over_default = self.push_op(Op::Jump, default_expr);
                    self.push_u16(0);

                    self.patch_jump(jump_to_default);

                    // Does not need to be thunked since compile() already does so when necessary
                    self.compile(idx, default_expr.clone());

                    self.emit_constant(Value::FinaliseRequest(true), default_expr);

                    self.patch_jump(jump_over_default);
                }
                TrackedFormal::NoDefault { pattern_entry, .. } => {
                    self.push_op(Op::AttrsSelect, &pattern_entry.ident().unwrap());
                }
            }

            self.scope_mut().mark_initialised(idx);
            if let TrackedFormal::WithDefault {
                finalise_request_idx,
                ..
            } = tracked_formal
            {
                self.scope_mut().mark_initialised(*finalise_request_idx);
            }
        }

        for tracked_formal in entries.iter() {
            if self.scope()[tracked_formal.local_idx()].needs_finaliser {
                let stack_idx = self.scope().stack_index(tracked_formal.local_idx());
                match tracked_formal {
                    TrackedFormal::NoDefault { .. } => {
                        panic!("Tvix bug: local for pattern formal needs finaliser, but has no default expr")
                    }
                    TrackedFormal::WithDefault {
                        finalise_request_idx,
                        ..
                    } => {
                        let finalise_request_stack_idx =
                            self.scope().stack_index(*finalise_request_idx);

                        // TODO(sterni): better spans
                        self.push_op(Op::GetLocal, pattern);
                        self.push_uvarint(finalise_request_stack_idx.0 as u64);
                        let jump_over_finalise =
                            self.push_op(Op::JumpIfNoFinaliseRequest, pattern);
                        self.push_u16(0);
                        self.push_op(Op::Finalise, pattern);
                        self.push_uvarint(stack_idx.0 as u64);
                        self.patch_jump(jump_over_finalise);
                        // Get rid of the finaliser request value on the stack
                        self.push_op(Op::Pop, pattern);
                    }
                }
            }
        }

        (
            Formals {
                arguments,
                ellipsis,
                span,
                name: pat_bind_name,
            },
            throw_idx,
        )
    }

    fn compile_lambda(&mut self, slot: LocalIdx, node: &ast::Lambda) -> Option<CodeIdx> {
        // Compile the function itself, recording its formal arguments (if any)
        // for later use.
        let formals = match node.param().unwrap() {
            ast::Param::Pattern(pat) => Some(self.compile_param_pattern(&pat)),

            ast::Param::IdentParam(param) => {
                let name = param
                    .ident()
                    .unwrap()
                    .ident_token()
                    .unwrap()
                    .text()
                    .to_string();

                let idx = self.declare_local(&param, &name);
                self.scope_mut().mark_initialised(idx);
                None
            }
        };

        self.compile(slot, node.body().unwrap());
        if let Some((formals, throw_idx)) = formals {
            self.context_mut().lambda.formals = Some(formals);
            Some(throw_idx)
        } else {
            self.context_mut().lambda.formals = None;
            None
        }
    }

    fn thunk<N, F>(&mut self, outer_slot: LocalIdx, node: &N, content: F)
    where
        N: ToSpan,
        F: FnOnce(&mut Compiler, LocalIdx),
    {
        self.compile_lambda_or_thunk(true, outer_slot, node, |comp, idx| {
            content(comp, idx);
            None
        })
    }

    /// Compile an expression into a runtime closure or thunk.
    fn compile_lambda_or_thunk<N, F>(
        &mut self,
        is_suspended_thunk: bool,
        outer_slot: LocalIdx,
        node: &N,
        content: F,
    ) where
        N: ToSpan,
        F: FnOnce(&mut Compiler, LocalIdx) -> Option<CodeIdx>,
    {
        let name = self.scope()[outer_slot].name();
        self.new_context();

        // Set the (optional) name of the current slot on the lambda that is
        // being compiled.
        self.context_mut().lambda.name = name;

        let span = self.span_for(node);
        let slot = self.scope_mut().declare_phantom(span, false);
        self.scope_mut().begin_scope();

        let throw_idx = content(self, slot);
        self.cleanup_scope(node);
        if let Some(throw_idx) = throw_idx {
            self.patch_jump(throw_idx);
        }

        // Pop the lambda context back off, and emit the finished
        // lambda as a constant.
        let mut compiled = self.contexts.pop().unwrap();

        // Emit an instruction to inform the VM that the chunk has ended.
        compiled
            .lambda
            .chunk
            .push_op(Op::Return, self.span_for(node));

        let lambda = Rc::new(compiled.lambda);
        if is_suspended_thunk {
            self.observer.observe_compiled_thunk(&lambda);
        } else {
            self.observer.observe_compiled_lambda(&lambda);
        }

        // If no upvalues are captured, emit directly and move on.
        if lambda.upvalue_count == 0 && !compiled.captures_with_stack {
            self.emit_constant(
                if is_suspended_thunk {
                    Value::Thunk(Thunk::new_suspended(lambda, span))
                } else {
                    Value::Closure(Rc::new(Closure::new(lambda)))
                },
                node,
            );
            return;
        }

        // Otherwise, we need to emit the variable number of
        // operands that allow the runtime to close over the
        // upvalues and leave a blueprint in the constant index from
        // which the result can be constructed.
        let blueprint_idx = self.chunk().push_constant(Value::Blueprint(lambda));

        let code_idx = self.push_op(
            if is_suspended_thunk {
                Op::ThunkSuspended
            } else {
                Op::ThunkClosure
            },
            node,
        );
        self.push_uvarint(blueprint_idx.0 as u64);

        self.emit_upvalue_data(
            outer_slot,
            node,
            compiled.scope.upvalues,
            compiled.captures_with_stack,
        );

        if !is_suspended_thunk && !self.scope()[outer_slot].needs_finaliser {
            if !self.scope()[outer_slot].must_thunk {
                // The closure has upvalues, but is not recursive. Therefore no
                // thunk is required, which saves us the overhead of
                // Rc<RefCell<>>.
                self.chunk().code[code_idx.0] = Op::Closure as u8;
            } else {
                // This case occurs when a closure has upvalue-references to
                // itself but does not need a finaliser. Since no OpFinalise
                // will be emitted later on we synthesize one here. It is needed
                // here only to set [`Closure::is_finalised`] which is used for
                // sanity checks.
                #[cfg(debug_assertions)]
                {
                    self.push_op(Op::Finalise, &self.span_for(node));
                    self.push_uvarint(self.scope().stack_index(outer_slot).0 as u64);
                }
            }
        }
    }

    fn compile_apply(&mut self, slot: LocalIdx, node: &ast::Apply) {
        // To call a function, we leave its arguments on the stack,
        // followed by the function expression itself, and then emit a
        // call instruction. This way, the stack is perfectly laid out
        // to enter the function call straight away.
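        //
        // E.g. `f x` compiles to (a sketch):
        //
        //    <x> <f> Force Call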
        self.compile(slot, node.argument().unwrap());
        self.compile(slot, node.lambda().unwrap());
        self.emit_force(&node.lambda().unwrap());
        self.push_op(Op::Call, node);
    }

    /// Emit the data instructions that the runtime needs to correctly
    /// assemble the upvalues struct.
    fn emit_upvalue_data<T: ToSpan>(
        &mut self,
        slot: LocalIdx,
        _: &T, // TODO
        upvalues: Vec<Upvalue>,
        capture_with: bool,
    ) {
        // Push the count of arguments to be expected, with one bit set to
        // indicate whether the with stack needs to be captured.
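        //
        // For example, two upvalues with a captured with-stack encode
        // as (2 << 1) | 1 = 5.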
        let mut count = (upvalues.len() as u64) << 1;
        if capture_with {
            count |= 1;
        }
        self.push_uvarint(count);

        for upvalue in upvalues {
            match upvalue.kind {
                UpvalueKind::Local(idx) => {
                    let target = &self.scope()[idx];
                    let stack_idx = self.scope().stack_index(idx);

                    // If the target is not yet initialised, we need to defer
                    // the local access.
                    if !target.initialised {
                        self.push_uvarint(Position::deferred_local(stack_idx).0);
                        self.scope_mut().mark_needs_finaliser(slot);
                    } else {
                        // a self-reference
                        if slot == idx {
                            self.scope_mut().mark_must_thunk(slot);
                        }
                        self.push_uvarint(Position::stack_index(stack_idx).0);
                    }
                }

                UpvalueKind::Upvalue(idx) => {
                    self.push_uvarint(Position::upvalue_index(idx).0);
                }
            };
        }
    }

    /// Emit the literal string value of an identifier. Required for
    /// several operations related to attribute sets, where
    /// identifiers are used as string keys.
    fn emit_literal_ident(&mut self, ident: &ast::Ident) {
        self.emit_constant(Value::String(ident.clone().into()), ident);
    }

    /// Patch the jump instruction at the given index, replacing its
    /// placeholder offset with the actual jump target (the current
    /// code position).
    ///
    /// This is required because the actual target offset of jumps is
    /// not known at the time when the jump operation itself is
    /// emitted.
    fn patch_jump(&mut self, idx: CodeIdx) {
        self.chunk().patch_jump(idx.0);
    }
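
    // Sketch of the usual flow (names illustrative): when compiling a
    // conditional, the compiler emits a jump with a placeholder offset
    // and keeps its `CodeIdx`; after compiling the branch body it
    // calls `patch_jump(idx)`, which rewrites the placeholder to point
    // at the current end of the chunk.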

    /// Decrease scope depth of the current function and emit
    /// instructions to clean up the stack at runtime.
    fn cleanup_scope<N: ToSpan>(&mut self, node: &N) {
        // When ending a scope, all corresponding locals need to be
        // removed, but the value of the body needs to remain on the
        // stack. This is implemented by a separate instruction.
        let (popcount, unused_spans) = self.scope_mut().end_scope();

        for span in &unused_spans {
            self.emit_warning(span, WarningKind::UnusedBinding);
        }

        if popcount > 0 {
            self.push_op(Op::CloseScope, node);
            self.push_uvarint(popcount as u64);
        }
    }
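
    // Illustrative example: for `let a = 1; b = 2; in a`, ending the
    // scope pops the two locals, so this emits `Op::CloseScope` with a
    // uvarint operand of 2 (and an unused-binding warning for `b`),
    // while the body's value stays on top of the stack.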

    /// Open a new lambda context within which to compile a function,
    /// closure or thunk.
    fn new_context(&mut self) {
        self.contexts.push(self.context().inherit());
    }

    /// Declare a local variable known in the scope that is being
    /// compiled by pushing it to the locals. This is used to
    /// determine the stack offset of variables.
    fn declare_local<S: Into<String>, N: ToSpan>(&mut self, node: &N, name: S) -> LocalIdx {
        let name = name.into();
        let depth = self.scope().scope_depth();

        // Do this little dance to turn `name: &'a str` into the same
        // string with a `&'static` lifetime, as required by
        // `WarningKind`.
        if let Some((global_ident, _)) = self.globals.get_key_value(name.as_str()) {
            self.emit_warning(node, WarningKind::ShadowedGlobal(global_ident));
        }

        let span = self.span_for(node);
        let (idx, shadowed) = self.scope_mut().declare_local(name, span);

        if let Some(shadow_idx) = shadowed {
            let other = &self.scope()[shadow_idx];
            if other.depth == depth {
                self.emit_error(node, ErrorKind::VariableAlreadyDefined(other.span));
            }
        }

        idx
    }
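
    // Illustrative behaviour of the checks above: declaring a local
    // named `true` shadows a global and emits a `ShadowedGlobal`
    // warning, while declaring the same name twice at the same scope
    // depth reports `VariableAlreadyDefined`.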

    /// Determine whether the current lambda context has any ancestors
    /// that use dynamic scope resolution, and mark contexts as
    /// needing to capture their enclosing `with`-stack in their
    /// upvalues.
    fn has_dynamic_ancestor(&mut self) -> bool {
        let mut ancestor_has_with = false;

        for ctx in self.contexts.iter_mut() {
            if ancestor_has_with {
                // If an ancestor has an active with stack, mark this
                // lambda context as needing to capture it.
                ctx.captures_with_stack = true;
            } else {
                // Otherwise, check this context and move on.
                ancestor_has_with = ctx.scope.has_with();
            }
        }

        ancestor_has_with
    }
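
    // Example of when this matters (illustrative): in
    // `with { a = 1; }; (_: a)`, the inner lambda refers to `a`, which
    // can only be resolved dynamically through the enclosing `with`
    // scope, so the lambda's context must capture the with stack.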

    fn emit_force<N: ToSpan>(&mut self, node: &N) {
        self.push_op(Op::Force, node);
    }

    fn emit_warning<N: ToSpan>(&mut self, node: &N, kind: WarningKind) {
        let span = self.span_for(node);
        self.warnings.push(EvalWarning { kind, span })
    }

    fn emit_error<N: ToSpan>(&mut self, node: &N, kind: ErrorKind) {
        let span = self.span_for(node);
        self.errors
            .push(Error::new(kind, span, self.source.clone()))
    }
}

/// Convert a non-dynamic string expression to a string if possible.
fn expr_static_str(node: &ast::Str) -> Option<SmolStr> {
    let mut parts = node.normalized_parts();

    if parts.len() != 1 {
        return None;
    }

    if let Some(ast::InterpolPart::Literal(lit)) = parts.pop() {
        return Some(SmolStr::new(lit));
    }

    None
}
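
// Illustrative behaviour: a literal like `"foo"` has exactly one
// normalised literal part and yields `Some("foo")`, while an
// interpolated string like `"a${b}"` has more than one part and
// yields `None`.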

/// Convert the provided `ast::Attr` into a statically known string if
/// possible.
fn expr_static_attr_str(node: &ast::Attr) -> Option<SmolStr> {
    match node {
        ast::Attr::Ident(ident) => Some(ident.ident_token().unwrap().text().into()),
        ast::Attr::Str(s) => expr_static_str(s),

        // The dynamic node type is just a wrapper. C++ Nix does not care
        // about the dynamic wrapper when determining whether the node
        // itself is dynamic; that depends solely on the expression inside
        // (i.e. `let ${"a"} = 1; in a` is valid).
        ast::Attr::Dynamic(ref dynamic) => match dynamic.expr().unwrap() {
            ast::Expr::Str(s) => expr_static_str(&s),
            _ => None,
        },
    }
}
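
// Illustrative behaviour: the attribute `${"a"}` statically resolves
// to `Some("a")` because its inner expression is a non-dynamic string,
// while `${toString 1}` yields `None` and remains dynamic.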

/// Create a delayed source-only builtin compilation, for a builtin
/// which is written in Nix code.
///
/// **Important:** tvix *panics* if a builtin with invalid source code
/// is supplied. This is because there is no user-friendly way to
/// thread the errors out of this function right now.
fn compile_src_builtin(
    name: &'static str,
    code: &str,
    source: SourceCode,
    weak: &Weak<GlobalsMap>,
) -> Value {
    use std::fmt::Write;

    let parsed = rnix::ast::Root::parse(code);

    if !parsed.errors().is_empty() {
        let mut out = format!("BUG: code for source-builtin '{name}' had parser errors");
        for error in parsed.errors() {
            writeln!(out, "{error}").unwrap();
        }

        panic!("{}", out);
    }

    let file = source.add_file(format!("<src-builtins/{name}.nix>"), code.to_string());
    let weak = weak.clone();

    Value::Thunk(Thunk::new_suspended_native(Box::new(move || {
        let result = compile(
            &parsed.tree().expr().unwrap(),
            None,
            weak.upgrade().unwrap(),
            None,
            &source,
            &file,
            &mut crate::observer::NoOpObserver {},
        )
        .map_err(|e| ErrorKind::NativeError {
            gen_type: "derivation",
            err: Box::new(e),
        })?;

        if !result.errors.is_empty() {
            return Err(ErrorKind::ImportCompilerError {
                path: format!("src-builtins/{name}.nix").into(),
                errors: result.errors,
            });
        }

        Ok(Value::Thunk(Thunk::new_suspended(result.lambda, file.span)))
    })))
}
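
// Illustrative note: parsing happens eagerly above (and panics on
// parse errors), while compilation is deferred into the suspended
// native thunk and only runs when the builtin is first forced. A
// hypothetical call would look like
// `compile_src_builtin("greet", "name: \"hello ${name}\"", source, &weak)`.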

/// Prepare the full set of globals available in evaluated code. These
/// are constructed from the set of builtins supplied by the caller,
/// which are made available globally under the `builtins` identifier.
///
/// A subset of builtins (specified by [`GLOBAL_BUILTINS`]) is
/// additionally elevated to the global scope *iff* those builtins are
/// present in the supplied set.
///
/// Optionally adds the `import` feature if desired by the caller.
pub fn prepare_globals(
    builtins: Vec<(&'static str, Value)>,
    src_builtins: Vec<(&'static str, &'static str)>,
    source: SourceCode,
    enable_import: bool,
) -> Rc<GlobalsMap> {
    Rc::new_cyclic(Box::new(move |weak: &Weak<GlobalsMap>| {
        // First step is to construct the builtins themselves as
        // `NixAttrs`.
        let mut builtins: GlobalsMap = FxHashMap::from_iter(builtins);

        // At this point, optionally insert `import` if enabled. To
        // "tie the knot" of `import` needing the full set of globals
        // to instantiate its compiler, the `Weak` reference is passed
        // here.
        if enable_import {
            let import = Value::Builtin(import::builtins_import(weak, source.clone()));
            builtins.insert("import", import);
        }

        // Next, the actual map of globals which the compiler will use
        // to resolve identifiers is constructed.
        let mut globals: GlobalsMap = FxHashMap::default();

        // builtins contain themselves (`builtins.builtins`), which we
        // can resolve by manually constructing a suspended thunk that
        // dereferences the same weak pointer as above.
        let weak_globals = weak.clone();
        builtins.insert(
            "builtins",
            Value::Thunk(Thunk::new_suspended_native(Box::new(move || {
                Ok(weak_globals
                    .upgrade()
                    .unwrap()
                    .get("builtins")
                    .cloned()
                    .unwrap())
            }))),
        );

        // Insert top-level static value builtins.
        globals.insert("true", Value::Bool(true));
        globals.insert("false", Value::Bool(false));
        globals.insert("null", Value::Null);

        // If "source builtins" were supplied, compile them and insert
        // them.
        builtins.extend(src_builtins.into_iter().map(move |(name, code)| {
            let compiled = compile_src_builtin(name, code, source.clone(), weak);
            (name, compiled)
        }));

        // Construct the actual `builtins` attribute set and insert it
        // in the global scope.
        globals.insert(
            "builtins",
            Value::attrs(NixAttrs::from_iter(builtins.clone())),
        );

        // Finally, the builtins that should be globally available are
        // "elevated" to the outer scope.
        for global in GLOBAL_BUILTINS {
            if let Some(builtin) = builtins.get(global).cloned() {
                globals.insert(global, builtin);
            }
        }

        globals
    }))
}

pub fn compile(
    expr: &ast::Expr,
    location: Option<PathBuf>,
    globals: Rc<GlobalsMap>,
    env: Option<&FxHashMap<SmolStr, Value>>,
    source: &SourceCode,
    file: &codemap::File,
    observer: &mut dyn CompilerObserver,
) -> EvalResult<CompilationOutput> {
    let mut c = Compiler::new(location, globals.clone(), env, source, file, observer)?;

    let root_span = c.span_for(expr);
    let root_slot = c.scope_mut().declare_phantom(root_span, false);
    c.compile(root_slot, expr.clone());

    // The final operation of any top-level Nix program must always be
    // `Op::Force`. A thunk should not be returned to the user in an
    // unevaluated state (though in practice, a value *containing* a
    // thunk might be returned).
    c.emit_force(expr);
    if let Some(env) = env {
        if !env.is_empty() {
            c.push_op(Op::CloseScope, &root_span);
            c.push_uvarint(env.len() as u64);
        }
    }
    c.push_op(Op::Return, &root_span);

    let lambda = Rc::new(c.contexts.pop().unwrap().lambda);
    c.observer.observe_compiled_toplevel(&lambda);

    Ok(CompilationOutput {
        lambda,
        warnings: c.warnings,
        errors: c.errors,
    })
}
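
// A minimal end-to-end sketch (hypothetical test, not part of the
// compiler): it mirrors the pattern used by `compile_src_builtin`
// above, assuming `SourceCode` implements `Default`. No builtins are
// supplied and `import` is disabled.
#[cfg(test)]
mod compile_example {
    use super::*;

    #[test]
    fn compiles_simple_expression() {
        let source = SourceCode::default();
        let file = source.add_file("<example>".into(), "1 + 2".into());

        let parsed = rnix::ast::Root::parse("1 + 2");
        assert!(parsed.errors().is_empty());

        let globals = prepare_globals(vec![], vec![], source.clone(), false);
        let result = compile(
            &parsed.tree().expr().unwrap(),
            None,
            globals,
            None,
            &source,
            &file,
            &mut crate::observer::NoOpObserver {},
        )
        .expect("compiler setup should not fail");

        assert!(result.errors.is_empty());
    }
}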