// tvix_eval/vm/mod.rs
1//! This module implements the abstract/virtual machine that runs Tvix
2//! bytecode.
3//!
4//! The operation of the VM is facilitated by the [`Frame`] type,
5//! which controls the current execution state of the VM and is
6//! processed within the VM's operating loop.
7//!
8//! A [`VM`] is used by instantiating it with an initial [`Frame`],
9//! then triggering its execution and waiting for the VM to return or
10//! yield an error.
11
12pub mod generators;
13mod macros;
14
15use bstr::{BString, ByteSlice, ByteVec};
16use codemap::Span;
17use rustc_hash::FxHashMap;
18use serde_json::json;
19use std::{cmp::Ordering, ops::DerefMut, path::PathBuf, rc::Rc};
20
21use crate::{
22 NixString, SourceCode, arithmetic_op,
23 chunk::Chunk,
24 cmp_op,
25 compiler::GlobalsMap,
26 errors::{CatchableErrorKind, Error, ErrorKind, EvalResult},
27 io::EvalIO,
28 lifted_pop,
29 nix_search_path::NixSearchPath,
30 observer::RuntimeObserver,
31 opcode::{CodeIdx, Op, Position, UpvalueIdx},
32 upvalues::Upvalues,
33 value::{
34 Builtin, BuiltinResult, Closure, CoercionKind, Lambda, NixAttrs, NixContext, NixList,
35 PointerEquality, Thunk, Value,
36 },
37 vm::generators::GenCo,
38 warnings::{EvalWarning, WarningKind},
39};
40
41use generators::{Generator, GeneratorState, call_functor};
42
43use self::generators::{VMRequest, VMResponse};
44
/// Internal helper trait for taking a span from a variety of types, to make use
/// of `WithSpan` (defined below) more ergonomic at call sites.
trait GetSpan {
    /// Produce the [`Span`] represented by `self`, consuming it.
    fn get_span(self) -> Span;
}
50
impl GetSpan for &VM<'_> {
    // Fall back to the span the VM currently considers "reasonable",
    // which is updated whenever control flow changes.
    fn get_span(self) -> Span {
        self.reasonable_span
    }
}
56
impl GetSpan for &CallFrame {
    // Use the span of the instruction this frame is currently executing.
    fn get_span(self) -> Span {
        self.current_span()
    }
}
62
impl GetSpan for &Span {
    // Spans are `Copy`, so a borrowed span is simply dereferenced.
    fn get_span(self) -> Span {
        *self
    }
}
68
impl GetSpan for Span {
    // An owned span is already the value we want.
    fn get_span(self) -> Span {
        self
    }
}
74
/// Internal helper trait for ergonomically converting from a `Result<T,
/// ErrorKind>` to a `Result<T, Error>` using the current span of a call frame,
/// and chaining the VM's frame stack around it for printing a cause chain.
trait WithSpan<T, S: GetSpan> {
    /// Attach `top_span` to any error in `self`, and wrap it once per frame
    /// currently on the VM's frame stack to build a cause chain.
    fn with_span(self, top_span: S, vm: &VM) -> Result<T, Error>;
}
81
82impl<T, S: GetSpan> WithSpan<T, S> for Result<T, ErrorKind> {
83 fn with_span(self, top_span: S, vm: &VM) -> Result<T, Error> {
84 match self {
85 Ok(something) => Ok(something),
86 Err(kind) => {
87 let mut error = Error::new(kind, top_span.get_span(), vm.source.clone());
88
89 // Wrap the top-level error in chaining errors for each element
90 // of the frame stack.
91 for frame in vm.frames.iter().rev() {
92 match frame {
93 Frame::CallFrame { span, .. } => {
94 error = Error::new(
95 ErrorKind::BytecodeError(Box::new(error)),
96 *span,
97 vm.source.clone(),
98 );
99 }
100 Frame::Generator { name, span, .. } => {
101 error = Error::new(
102 ErrorKind::NativeError {
103 err: Box::new(error),
104 gen_type: name,
105 },
106 *span,
107 vm.source.clone(),
108 );
109 }
110 }
111 }
112
113 Err(error)
114 }
115 }
116 }
117}
118
/// Execution state of a single thunk, function or closure: the code being
/// run, its captured environment, and its view into the value stack.
struct CallFrame {
    /// The lambda currently being executed.
    lambda: Rc<Lambda>,

    /// Optional captured upvalues of this frame (if a thunk or
    /// closure is being evaluated).
    upvalues: Rc<Upvalues>,

    /// Instruction pointer to the instruction currently being
    /// executed.
    ip: CodeIdx,

    /// Stack offset, i.e. the frame's "view" into the VM's full stack.
    /// Locals are addressed relative to this offset.
    stack_offset: usize,
}
134
135impl CallFrame {
136 /// Retrieve an upvalue from this frame at the given index.
137 fn upvalue(&self, idx: UpvalueIdx) -> &Value {
138 &self.upvalues[idx]
139 }
140
141 /// Borrow the chunk of this frame's lambda.
142 fn chunk(&self) -> &Chunk {
143 &self.lambda.chunk
144 }
145
146 /// Increment this frame's instruction pointer and return the operation that
147 /// the pointer moved past.
148 fn inc_ip(&mut self) -> Op {
149 debug_assert!(
150 self.ip.0 < self.chunk().code.len(),
151 "out of bounds code at IP {} in {:p}",
152 self.ip.0,
153 self.lambda
154 );
155
156 let op = self.chunk().code[self.ip.0];
157 self.ip += 1;
158 op.into()
159 }
160
161 /// Read a varint-encoded operand and return it. The frame pointer is
162 /// incremented internally.
163 fn read_uvarint(&mut self) -> u64 {
164 let (arg, size) = self.chunk().read_uvarint(self.ip.0);
165 self.ip += size;
166 arg
167 }
168
169 /// Read a fixed-size u16 and increment the frame pointer.
170 fn read_u16(&mut self) -> u16 {
171 let arg = self.chunk().read_u16(self.ip.0);
172 self.ip += 2;
173 arg
174 }
175
176 /// Construct an error result from the given ErrorKind and the source span
177 /// of the current instruction.
178 pub fn error<T>(&self, vm: &VM, kind: ErrorKind) -> Result<T, Error> {
179 Err(kind).with_span(self, vm)
180 }
181
182 /// Returns the current span. This is potentially expensive and should only
183 /// be used when actually constructing an error or warning.
184 pub fn current_span(&self) -> Span {
185 self.chunk().get_span(self.ip - 1)
186 }
187}
188
/// A frame represents an execution state of the VM. The VM has a stack of
/// frames representing the nesting of execution inside of the VM, and operates
/// on the frame at the top.
///
/// When a frame has been fully executed, it is removed from the VM's frame
/// stack and expected to leave a result [`Value`] on the top of the stack.
enum Frame {
    /// CallFrame represents the execution of Tvix bytecode within a thunk,
    /// function or closure.
    CallFrame {
        /// The call frame itself, separated out into another type to pass it
        /// around easily.
        call_frame: CallFrame,

        /// Span from which the call frame was launched.
        span: Span,
    },

    /// Generator represents a frame that can yield further
    /// instructions to the VM while its execution is being driven.
    ///
    /// A generator is essentially an asynchronous function that can
    /// be suspended while waiting for the VM to do something (e.g.
    /// thunk forcing), and resume at the same point.
    Generator {
        /// Human-readable description of the generator, used in error chains
        /// and observer output.
        name: &'static str,

        /// Span from which the generator was launched.
        span: Span,

        /// Current run/suspend state of the generator.
        state: GeneratorState,

        /// Generator itself, which can be resumed with `.resume()`.
        generator: Generator,
    },
}
226
227impl Frame {
228 pub fn span(&self) -> Span {
229 match self {
230 Frame::CallFrame { span, .. } | Frame::Generator { span, .. } => *span,
231 }
232 }
233}
234
235#[derive(Default)]
236struct ImportCache(FxHashMap<PathBuf, Value>);
237
238/// The `ImportCache` holds the `Value` resulting from `import`ing a certain
239/// file, so that the same file doesn't need to be re-evaluated multiple times.
240/// Currently the real path of the imported file (determined using
241/// [`std::fs::canonicalize()`], not to be confused with our
242/// [`crate::value::canon_path()`]) is used to identify the file,
243/// just like C++ Nix does.
244///
245/// Errors while determining the real path are currently just ignored, since we
246/// pass around some fake paths like `/__corepkgs__/fetchurl.nix`.
247///
248/// In the future, we could use something more sophisticated, like file hashes.
249/// However, a consideration is that the eval cache is observable via impurities
250/// like pointer equality and `builtins.trace`.
251impl ImportCache {
252 fn get(&self, path: PathBuf) -> Option<&Value> {
253 let path = match std::fs::canonicalize(path.as_path()).map_err(ErrorKind::from) {
254 Ok(path) => path,
255 Err(_) => path,
256 };
257 self.0.get(&path)
258 }
259
260 fn insert(&mut self, path: PathBuf, value: Value) -> Option<Value> {
261 self.0.insert(
262 match std::fs::canonicalize(path.as_path()).map_err(ErrorKind::from) {
263 Ok(path) => path,
264 Err(_) => path,
265 },
266 value,
267 )
268 }
269}
270
/// The virtual machine itself: frame and value stacks plus all ambient state
/// (I/O, search path, observers, warnings) needed during evaluation.
struct VM<'o> {
    /// VM's frame stack, representing the execution contexts the VM is working
    /// through. Elements are usually pushed when functions are called, or
    /// thunks are being forced.
    frames: Vec<Frame>,

    /// The VM's top-level value stack. Within this stack, each code-executing
    /// frame holds a "view" of the stack representing the slice of the
    /// top-level stack that is relevant to its operation. This is done to avoid
    /// allocating a new `Vec` for each frame's stack.
    pub(crate) stack: Vec<Value>,

    /// Stack indices (absolute indexes into `stack`) of attribute
    /// sets from which variables should be dynamically resolved
    /// (`with`).
    with_stack: Vec<usize>,

    /// Runtime warnings collected during evaluation.
    warnings: Vec<EvalWarning>,

    /// Import cache, mapping absolute file paths to the value that
    /// they compile to. Note that this reuses thunks, too!
    // TODO: should probably be based on a file hash
    pub import_cache: ImportCache,

    /// Data structure holding all source code evaluated in this VM,
    /// used for pretty error reporting.
    source: SourceCode,

    /// Parsed Nix search path, which is used to resolve `<...>`
    /// references.
    nix_search_path: NixSearchPath,

    /// Implementation of I/O operations used for impure builtins and
    /// features like `import`.
    io_handle: Rc<dyn EvalIO>,

    /// Runtime observer which can print traces of runtime operations.
    observer: &'o mut dyn RuntimeObserver,

    /// Strong reference to the globals, guaranteeing that they are
    /// kept alive for the duration of evaluation.
    ///
    /// This is important because recursive builtins (specifically
    /// `import`) hold a weak reference to the builtins, while the
    /// original strong reference is held by the compiler which does
    /// not exist anymore at runtime.
    #[allow(dead_code)]
    globals: Rc<GlobalsMap>,

    /// A reasonably applicable span that can be used for errors in each
    /// execution situation.
    ///
    /// The VM should update this whenever control flow changes take place (i.e.
    /// entering or exiting a frame to yield control somewhere).
    reasonable_span: Span,

    /// This field is responsible for handling `builtins.tryEval`. When that
    /// builtin is encountered, it sends a special message to the VM which
    /// pushes the frame index that requested to be informed of catchable
    /// errors in this field.
    ///
    /// The frame stack is then laid out like this:
    ///
    /// ```notrust
    /// ┌──┬──────────────────────────┐
    /// │ 0│ `Result`-producing frame │
    /// ├──┼──────────────────────────┤
    /// │-1│ `builtins.tryEval` frame │
    /// ├──┼──────────────────────────┤
    /// │..│ ... other frames ...     │
    /// └──┴──────────────────────────┘
    /// ```
    ///
    /// Control is yielded to the outer VM loop, which evaluates the next frame
    /// and returns the result itself to the `builtins.tryEval` frame.
    try_eval_frames: Vec<usize>,
}
349
350impl<'o> VM<'o> {
351 pub fn new(
352 nix_search_path: NixSearchPath,
353 io_handle: Rc<dyn EvalIO>,
354 observer: &'o mut dyn RuntimeObserver,
355 source: SourceCode,
356 globals: Rc<GlobalsMap>,
357 reasonable_span: Span,
358 ) -> Self {
359 Self {
360 nix_search_path,
361 io_handle,
362 observer,
363 globals,
364 reasonable_span,
365 source,
366 frames: vec![],
367 stack: vec![],
368 with_stack: vec![],
369 warnings: vec![],
370 import_cache: Default::default(),
371 try_eval_frames: vec![],
372 }
373 }
374
    /// Push a call frame onto the frame stack, wrapping it together with the
    /// span it was launched from.
    fn push_call_frame(&mut self, span: Span, call_frame: CallFrame) {
        self.frames.push(Frame::CallFrame { span, call_frame })
    }
379
    /// Run the VM's primary (outer) execution loop, continuing execution based
    /// on the current frame at the top of the frame stack.
    ///
    /// Consumes the VM and, once the frame stack is exhausted, returns the
    /// value left on top of the value stack together with collected warnings.
    fn execute(mut self) -> EvalResult<RuntimeResult> {
        while let Some(frame) = self.frames.pop() {
            // Keep the "reasonable" error span in sync with the frame that is
            // about to run.
            self.reasonable_span = frame.span();
            // The frame was just popped, so its index equals the remaining
            // frame count; used only for observer reporting.
            let frame_id = self.frames.len();

            match frame {
                Frame::CallFrame { call_frame, span } => {
                    self.observer
                        .observe_enter_call_frame(0, &call_frame.lambda, frame_id);

                    // `Ok(true)`: ran to completion; `Ok(false)`: suspended
                    // itself (after re-enqueuing whatever should run next).
                    match self.execute_bytecode(span, call_frame) {
                        Ok(true) => self.observer.observe_exit_call_frame(frame_id, &self.stack),
                        Ok(false) => self
                            .observer
                            .observe_suspend_call_frame(frame_id, &self.stack),

                        Err(err) => return Err(err),
                    };
                }

                // Handle generator frames, which can request thunk forcing
                // during their execution.
                Frame::Generator {
                    name,
                    span,
                    state,
                    generator,
                } => {
                    self.observer
                        .observe_enter_generator(frame_id, name, &self.stack);

                    match self.run_generator(name, span, frame_id, state, generator, None) {
                        Ok(true) => {
                            self.observer
                                .observe_exit_generator(frame_id, name, &self.stack)
                        }
                        Ok(false) => {
                            self.observer
                                .observe_suspend_generator(frame_id, name, &self.stack)
                        }

                        Err(err) => return Err(err),
                    };
                }
            }
        }

        // Once no more frames are present, return the stack's top value as the
        // result.
        let value = self
            .stack
            .pop()
            .expect("tvix bug: runtime stack empty after execution");
        Ok(RuntimeResult {
            value,
            warnings: self.warnings,
        })
    }
440
    /// Run the VM's inner execution loop, processing Tvix bytecode from a
    /// chunk. This function returns if:
    ///
    /// 1. The code has run to the end, and has left a value on the top of the
    ///    stack. In this case, the frame is not returned to the frame stack.
    ///
    /// 2. The code encounters a generator, in which case the frame in its
    ///    current state is pushed back on the stack, and the generator is left
    ///    on top of it for the outer loop to execute.
    ///
    /// 3. An error is encountered.
    ///
    /// This function *must* ensure that it leaves the frame stack in the
    /// correct order, especially when re-enqueuing a frame to execute.
    ///
    /// The return value indicates whether the bytecode has been executed to
    /// completion, or whether it has been suspended in favour of a generator.
    fn execute_bytecode(&mut self, span: Span, mut frame: CallFrame) -> EvalResult<bool> {
        loop {
            let op = frame.inc_ip();
            self.observer.observe_execute_op(frame.ip, &op, &self.stack);

            match op {
                Op::ThunkSuspended | Op::ThunkClosure => {
                    // Operand 1: constant index of the code blueprint.
                    let idx = frame.read_uvarint() as usize;

                    let blueprint = match &frame.chunk().constants[idx] {
                        Value::Blueprint(lambda) => lambda.clone(),
                        _ => panic!("compiler bug: non-blueprint in blueprint slot"),
                    };

                    // Operand 2: upvalue count. NOTE(review): the checks below
                    // suggest the compiler packs a flag into the lowest bit and
                    // the actual count into the remaining bits — confirm
                    // against `emit_upvalue_data` in the compiler.
                    let upvalue_count = frame.read_uvarint();

                    debug_assert!(
                        (upvalue_count >> 1) == blueprint.upvalue_count as u64,
                        "TODO: new upvalue count not correct",
                    );

                    let thunk = if op == Op::ThunkClosure {
                        debug_assert!(
                            (((upvalue_count >> 1) > 0) || (upvalue_count & 0b1 == 1)),
                            "OpThunkClosure should not be called for plain lambdas",
                        );
                        Thunk::new_closure(blueprint)
                    } else {
                        Thunk::new_suspended(blueprint, frame.current_span())
                    };
                    let upvalues = thunk.upvalues_mut();
                    self.stack.push(Value::Thunk(thunk.clone()));

                    // From this point on we internally mutate the
                    // upvalues. The closure (if `is_closure`) is
                    // already in its stack slot, which means that it
                    // can capture itself as an upvalue for
                    // self-recursion.
                    self.populate_upvalues(&mut frame, upvalue_count, upvalues)?;
                }

                Op::Force => {
                    if let Some(Value::Thunk(_)) = self.stack.last() {
                        let thunk = match self.stack_pop() {
                            Value::Thunk(t) => t,
                            _ => unreachable!(),
                        };

                        if !thunk.is_forced() {
                            let gen_span = frame.current_span();

                            // Suspend this frame and let a generator force the
                            // thunk; the generator leaves the forced value on
                            // the stack for us when this frame resumes.
                            self.push_call_frame(span, frame);
                            self.enqueue_generator("force", gen_span, |co| {
                                Thunk::force(thunk, co, gen_span)
                            });

                            return Ok(false);
                        }

                        // Already forced: replace the thunk on the stack with
                        // its inner value directly.
                        self.stack.push(thunk.unwrap_or_clone());
                    }
                }

                Op::GetUpvalue => {
                    let idx = UpvalueIdx(frame.read_uvarint() as usize);
                    let value = frame.upvalue(idx).clone();
                    self.stack.push(value);
                }

                // Discard the current frame.
                Op::Return => {
                    // TODO(amjoseph): I think this should assert `==` rather
                    // than `<=` but it fails with the stricter condition.
                    debug_assert!(self.stack.len() - 1 <= frame.stack_offset);
                    return Ok(true);
                }

                Op::Constant => {
                    let idx = frame.read_uvarint() as usize;

                    debug_assert!(
                        idx < frame.chunk().constants.len(),
                        "out of bounds constant at IP {} in {:p}",
                        frame.ip.0,
                        frame.lambda
                    );

                    let c = frame.chunk().constants[idx].clone();
                    self.stack.push(c);
                }

                Op::Call => {
                    let callable = self.stack_pop();
                    // `call_value` re-enqueues this frame (and whatever should
                    // run on top of it) itself, hence passing it by value.
                    self.call_value(frame.current_span(), Some((span, frame)), callable)?;

                    // exit this loop and let the outer loop enter the new call
                    return Ok(true);
                }

                // Remove the given number of elements from the stack,
                // but retain the top value.
                Op::CloseScope => {
                    let count = frame.read_uvarint() as usize;
                    // Immediately move the top value into the right
                    // position.
                    let target_idx = self.stack.len() - 1 - count;
                    self.stack[target_idx] = self.stack_pop();

                    // Then drop the remaining values.
                    for _ in 0..(count - 1) {
                        self.stack.pop();
                    }
                }

                Op::Closure => {
                    let idx = frame.read_uvarint() as usize;
                    let blueprint = match &frame.chunk().constants[idx] {
                        Value::Blueprint(lambda) => lambda.clone(),
                        _ => panic!("compiler bug: non-blueprint in blueprint slot"),
                    };

                    // Same packed encoding as in Op::ThunkClosure above.
                    let upvalue_count = frame.read_uvarint();

                    debug_assert!(
                        (upvalue_count >> 1) == blueprint.upvalue_count as u64,
                        "TODO: new upvalue count not correct in closure",
                    );

                    debug_assert!(
                        ((upvalue_count >> 1) > 0 || (upvalue_count & 0b1 == 1)),
                        "OpClosure should not be called for plain lambdas"
                    );

                    let mut upvalues = Upvalues::with_capacity(blueprint.upvalue_count);
                    self.populate_upvalues(&mut frame, upvalue_count, &mut upvalues)?;
                    self.stack
                        .push(Value::Closure(Rc::new(Closure::new_with_upvalues(
                            Rc::new(upvalues),
                            blueprint,
                        ))));
                }

                Op::AttrsSelect => lifted_pop! {
                    self(key, attrs) => {
                        let key = key.to_str().with_span(&frame, self)?;
                        let attrs = attrs.to_attrs().with_span(&frame, self)?;

                        match attrs.select(&key) {
                            Some(value) => self.stack.push(value.clone()),

                            None => {
                                return frame.error(
                                    self,
                                    ErrorKind::AttributeNotFound {
                                        name: key.to_str_lossy().into_owned()
                                    },
                                );
                            }
                        }
                    }
                },

                Op::JumpIfFalse => {
                    let offset = frame.read_u16() as usize;
                    debug_assert!(offset != 0);
                    if !self.stack_peek(0).as_bool().with_span(&frame, self)? {
                        frame.ip += offset;
                    }
                }

                Op::JumpIfCatchable => {
                    let offset = frame.read_u16() as usize;
                    debug_assert!(offset != 0);
                    if self.stack_peek(0).is_catchable() {
                        frame.ip += offset;
                    }
                }

                Op::JumpIfNoFinaliseRequest => {
                    let offset = frame.read_u16() as usize;
                    debug_assert!(offset != 0);
                    match self.stack_peek(0) {
                        Value::FinaliseRequest(finalise) => {
                            if !finalise {
                                frame.ip += offset;
                            }
                        }
                        val => panic!(
                            "Tvix bug: OpJumIfNoFinaliseRequest: expected FinaliseRequest, but got {}",
                            val.type_of()
                        ),
                    }
                }

                Op::Pop => {
                    self.stack.pop();
                }

                Op::AttrsTrySelect => {
                    let key = self.stack_pop().to_str().with_span(&frame, self)?;
                    // Unlike Op::AttrsSelect, a missing attribute (or a
                    // non-attrset operand) yields the AttrNotFound sentinel
                    // instead of an error.
                    let value = match self.stack_pop() {
                        Value::Attrs(attrs) => match attrs.select(&key) {
                            Some(value) => value.clone(),
                            None => Value::AttrNotFound,
                        },

                        _ => Value::AttrNotFound,
                    };

                    self.stack.push(value);
                }

                Op::GetLocal => {
                    // Locals are addressed relative to this frame's stack view.
                    let local_idx = frame.read_uvarint() as usize;
                    let idx = frame.stack_offset + local_idx;
                    self.stack.push(self.stack[idx].clone());
                }

                Op::JumpIfNotFound => {
                    let offset = frame.read_u16() as usize;
                    debug_assert!(offset != 0);
                    if matches!(self.stack_peek(0), Value::AttrNotFound) {
                        self.stack_pop();
                        frame.ip += offset;
                    }
                }

                Op::Jump => {
                    let offset = frame.read_u16() as usize;
                    debug_assert!(offset != 0);
                    frame.ip += offset;
                }

                Op::Equal => lifted_pop! {
                    self(b, a) => {
                        // Equality may need to force thunks, so it runs as a
                        // generator with this frame suspended beneath it.
                        let gen_span = frame.current_span();
                        self.push_call_frame(span, frame);
                        self.enqueue_generator("nix_eq", gen_span, |co| {
                            a.nix_eq_owned_genco(b, co, PointerEquality::ForbidAll, gen_span)
                        });
                        return Ok(false);
                    }
                },

                // These assertion operations error out if the stack
                // top is not of the expected type. This is necessary
                // to implement some specific behaviours of Nix
                // exactly.
                Op::AssertBool => {
                    let val = self.stack_peek(0);
                    // TODO(edef): propagate this into is_bool, since bottom values *are* values of any type
                    if !val.is_catchable() && !val.is_bool() {
                        return frame.error(
                            self,
                            ErrorKind::TypeError {
                                expected: "bool",
                                actual: val.type_of(),
                            },
                        );
                    }
                }

                Op::AssertAttrs => {
                    let val = self.stack_peek(0);
                    // TODO(edef): propagate this into is_attrs, since bottom values *are* values of any type
                    if !val.is_catchable() && !val.is_attrs() {
                        return frame.error(
                            self,
                            ErrorKind::TypeError {
                                expected: "set",
                                actual: val.type_of(),
                            },
                        );
                    }
                }

                Op::Attrs => self.run_attrset(frame.read_uvarint() as usize, &frame)?,

                Op::AttrsUpdate => lifted_pop! {
                    self(rhs, lhs) => {
                        let rhs = rhs.to_attrs().with_span(&frame, self)?;
                        let lhs = lhs.to_attrs().with_span(&frame, self)?;
                        self.stack.push(Value::attrs(lhs.update(*rhs)))
                    }
                },

                Op::Invert => lifted_pop! {
                    self(v) => {
                        let v = v.as_bool().with_span(&frame, self)?;
                        self.stack.push(Value::Bool(!v));
                    }
                },

                Op::List => {
                    // Collect the top `count` stack values into a list.
                    let count = frame.read_uvarint() as usize;
                    let list =
                        NixList::construct(count, self.stack.split_off(self.stack.len() - count));

                    self.stack.push(Value::List(list));
                }

                Op::JumpIfTrue => {
                    let offset = frame.read_u16() as usize;
                    debug_assert!(offset != 0);
                    if self.stack_peek(0).as_bool().with_span(&frame, self)? {
                        frame.ip += offset;
                    }
                }

                Op::HasAttr => lifted_pop! {
                    self(key, attrs) => {
                        let key = key.to_str().with_span(&frame, self)?;
                        let result = match attrs {
                            Value::Attrs(attrs) => attrs.contains(&key),

                            // Nix allows use of `?` on non-set types, but
                            // always returns false in those cases.
                            _ => false,
                        };

                        self.stack.push(Value::Bool(result));
                    }
                },

                Op::Concat => lifted_pop! {
                    self(rhs, lhs) => {
                        let rhs = rhs.to_list().with_span(&frame, self)?.into_inner();
                        let mut lhs = lhs.to_list().with_span(&frame, self)?.into_inner();
                        lhs.extend(rhs.into_iter());
                        self.stack.push(Value::List(lhs.into()))
                    }
                },

                Op::ResolveWith => {
                    let ident = self.stack_pop().to_str().with_span(&frame, self)?;

                    // Re-enqueue this frame.
                    let op_span = frame.current_span();
                    self.push_call_frame(span, frame);

                    // Construct a generator frame doing the lookup in constant
                    // stack space.
                    let with_stack_len = self.with_stack.len();
                    let closed_with_stack_len = self
                        .last_call_frame()
                        .map(|frame| frame.upvalues.with_stack_len())
                        .unwrap_or(0);

                    self.enqueue_generator("resolve_with", op_span, |co| {
                        resolve_with(co, ident, with_stack_len, closed_with_stack_len)
                    });

                    return Ok(false);
                }

                Op::Finalise => {
                    // Finalise the thunk in the given local slot against this
                    // frame's stack view.
                    let idx = frame.read_uvarint() as usize;
                    match &self.stack[frame.stack_offset + idx] {
                        Value::Closure(_) => panic!("attempted to finalise a closure"),
                        Value::Thunk(thunk) => thunk.finalise(&self.stack[frame.stack_offset..]),
                        _ => panic!("attempted to finalise a non-thunk"),
                    }
                }

                Op::CoerceToString => {
                    // The coercion kind is encoded as the next code byte.
                    let kind: CoercionKind = frame.chunk().code[frame.ip.0].into();
                    frame.ip.0 += 1;

                    let value = self.stack_pop();
                    let gen_span = frame.current_span();
                    self.push_call_frame(span, frame);

                    self.enqueue_generator("coerce_to_string", gen_span, |co| {
                        value.coerce_to_string(co, kind, gen_span)
                    });

                    return Ok(false);
                }

                Op::Interpolate => self.run_interpolate(frame.read_uvarint(), &frame)?,

                Op::ValidateClosedFormals => {
                    let formals = frame.lambda.formals.as_ref().expect(
                        "OpValidateClosedFormals called within the frame of a lambda without formals",
                    );

                    // Catchable values pass through unvalidated; they will be
                    // surfaced (or caught) later.
                    let peeked = self.stack_peek(0);
                    if peeked.is_catchable() {
                        continue;
                    }

                    let args = peeked.to_attrs().with_span(&frame, self)?;
                    for arg in args.keys() {
                        if !formals.contains(arg) {
                            return frame.error(
                                self,
                                ErrorKind::UnexpectedArgumentFormals {
                                    arg: arg.clone(),
                                    formals_span: formals.span,
                                },
                            );
                        }
                    }
                }

                Op::Add => lifted_pop! {
                    self(b, a) => {
                        let gen_span = frame.current_span();
                        self.push_call_frame(span, frame);

                        // OpAdd can add not just numbers, but also string-like
                        // things, which requires more VM logic. This operation is
                        // evaluated in a generator frame.
                        self.enqueue_generator("add_values", gen_span, |co| add_values(co, a, b));
                        return Ok(false);
                    }
                },

                Op::Sub => lifted_pop! {
                    self(b, a) => {
                        let result = arithmetic_op!(&a, &b, -).with_span(&frame, self)?;
                        self.stack.push(result);
                    }
                },

                Op::Mul => lifted_pop! {
                    self(b, a) => {
                        let result = arithmetic_op!(&a, &b, *).with_span(&frame, self)?;
                        self.stack.push(result);
                    }
                },

                Op::Div => lifted_pop! {
                    self(b, a) => {
                        match b {
                            Value::Integer(0) => return frame.error(self, ErrorKind::DivisionByZero),
                            Value::Float(0.0_f64) => {
                                return frame.error(self, ErrorKind::DivisionByZero)
                            }
                            _ => {}
                        };

                        let result = arithmetic_op!(&a, &b, /).with_span(&frame, self)?;
                        self.stack.push(result);
                    }
                },

                Op::Negate => match self.stack_pop() {
                    Value::Integer(i) => self.stack.push(Value::Integer(-i)),
                    Value::Float(f) => self.stack.push(Value::Float(-f)),
                    // Catchables propagate through negation untouched.
                    Value::Catchable(cex) => self.stack.push(Value::Catchable(cex)),
                    v => {
                        return frame.error(
                            self,
                            ErrorKind::TypeError {
                                expected: "number (either int or float)",
                                actual: v.type_of(),
                            },
                        );
                    }
                },

                Op::Less => cmp_op!(self, frame, span, <),
                Op::LessOrEq => cmp_op!(self, frame, span, <=),
                Op::More => cmp_op!(self, frame, span, >),
                Op::MoreOrEq => cmp_op!(self, frame, span, >=),

                Op::FindFile => match self.stack_pop() {
                    Value::UnresolvedPath(path) => {
                        // Resolve `<...>` references against NIX_PATH.
                        let resolved = self
                            .nix_search_path
                            .resolve(&self.io_handle, *path)
                            .with_span(&frame, self)?;
                        self.stack.push(resolved.into());
                    }

                    _ => panic!("tvix compiler bug: OpFindFile called on non-UnresolvedPath"),
                },

                Op::ResolveHomePath => match self.stack_pop() {
                    Value::UnresolvedPath(path) => {
                        match dirs::home_dir() {
                            None => {
                                return frame.error(
                                    self,
                                    ErrorKind::RelativePathResolution(
                                        "failed to determine home directory".into(),
                                    ),
                                );
                            }
                            Some(mut buf) => {
                                buf.push(*path);
                                self.stack.push(buf.into());
                            }
                        };
                    }

                    _ => {
                        panic!("tvix compiler bug: OpResolveHomePath called on non-UnresolvedPath")
                    }
                },

                Op::PushWith => self
                    .with_stack
                    .push(frame.stack_offset + frame.read_uvarint() as usize),

                Op::PopWith => {
                    self.with_stack.pop();
                }

                Op::AssertFail => {
                    self.stack
                        .push(Value::from(CatchableErrorKind::AssertionFailed));
                }

                // Encountering an invalid opcode is a critical error in the
                // VM/compiler.
                Op::Invalid => {
                    panic!("Tvix bug: attempted to execute invalid opcode")
                }
            }
        }
    }
981}
982
983/// Implementation of helper functions for the runtime logic above.
984impl VM<'_> {
    /// Pop the top value off the value stack. Panics if the stack is empty,
    /// which would indicate a compiler/VM bug rather than a user error.
    pub(crate) fn stack_pop(&mut self) -> Value {
        self.stack.pop().expect("runtime stack empty")
    }
988
989 fn stack_peek(&self, offset: usize) -> &Value {
990 &self.stack[self.stack.len() - 1 - offset]
991 }
992
993 fn run_attrset(&mut self, count: usize, frame: &CallFrame) -> EvalResult<()> {
994 let attrs = NixAttrs::construct(count, self.stack.split_off(self.stack.len() - count * 2))
995 .with_span(frame, self)?
996 .map(Value::attrs)
997 .into();
998
999 self.stack.push(attrs);
1000 Ok(())
1001 }
1002
1003 /// Access the last call frame present in the frame stack.
1004 fn last_call_frame(&self) -> Option<&CallFrame> {
1005 for frame in self.frames.iter().rev() {
1006 if let Frame::CallFrame { call_frame, .. } = frame {
1007 return Some(call_frame);
1008 }
1009 }
1010
1011 None
1012 }
1013
    /// Push an already constructed warning onto the VM's warning list,
    /// to be surfaced to the user after evaluation finishes.
    pub fn push_warning(&mut self, warning: EvalWarning) {
        self.warnings.push(warning);
    }
1018
1019 /// Emit a warning with the given WarningKind and the source span
1020 /// of the current instruction.
1021 pub fn emit_warning(&mut self, kind: WarningKind) {
1022 self.push_warning(EvalWarning {
1023 kind,
1024 span: self.get_span(),
1025 });
1026 }
1027
    /// Interpolate string fragments by popping the specified number of
    /// fragments of the stack, evaluating them to strings, and pushing
    /// the concatenated result string back on the stack.
    fn run_interpolate(&mut self, count: u64, frame: &CallFrame) -> EvalResult<()> {
        let mut out = BString::default();
        // Interpolation propagates the context and union them.
        let mut context: NixContext = NixContext::new();

        for i in 0..count {
            let val = self.stack_pop();
            if val.is_catchable() {
                // A catchable short-circuits the whole interpolation: drain
                // the remaining fragments from the stack and push the
                // catchable as the result instead.
                for _ in (i + 1)..count {
                    self.stack.pop();
                }
                self.stack.push(val);
                return Ok(());
            }
            let mut nix_string = val.to_contextful_str().with_span(frame, self)?;
            out.push_str(nix_string.as_bstr());
            if let Some(nix_string_ctx) = nix_string.take_context() {
                context.extend(nix_string_ctx.into_iter())
            }
        }

        self.stack
            .push(Value::String(NixString::new_context_from(context, out)));
        Ok(())
    }
1056
1057 /// Apply an argument from the stack to a builtin, and attempt to call it.
1058 ///
1059 /// All calls are tail-calls in Tvix, as every function application is a
1060 /// separate thunk and OpCall is thus the last result in the thunk.
1061 ///
1062 /// Due to this, once control flow exits this function, the generator will
1063 /// automatically be run by the VM.
1064 fn call_builtin(&mut self, span: Span, mut builtin: Builtin) -> EvalResult<()> {
1065 let builtin_name = builtin.name();
1066 self.observer.observe_enter_builtin(builtin_name);
1067
1068 builtin.apply_arg(self.stack_pop());
1069
1070 match builtin.call() {
1071 // Partially applied builtin is just pushed back on the stack.
1072 BuiltinResult::Partial(partial) => self.stack.push(Value::Builtin(partial)),
1073
1074 // Builtin is fully applied and the generator needs to be run by the VM.
1075 BuiltinResult::Called(name, generator) => self.frames.push(Frame::Generator {
1076 generator,
1077 span,
1078 name,
1079 state: GeneratorState::Running,
1080 }),
1081 }
1082
1083 Ok(())
1084 }
1085
    /// Call the given `callable` value with the argument(s) already on the
    /// stack, dispatching on the value's type. `parent` optionally carries the
    /// calling frame, which is re-enqueued beneath the callee for better
    /// error traces.
    fn call_value(
        &mut self,
        span: Span,
        parent: Option<(Span, CallFrame)>,
        callable: Value,
    ) -> EvalResult<()> {
        match callable {
            Value::Builtin(builtin) => self.call_builtin(span, builtin),
            // NOTE(review): this assumes the thunk is already forced —
            // `thunk.value()` is read directly; confirm callers guarantee
            // forcing before Op::Call.
            Value::Thunk(thunk) => self.call_value(span, parent, thunk.value().clone()),

            Value::Closure(closure) => {
                let lambda = closure.lambda();
                self.observer.observe_tail_call(self.frames.len(), &lambda);

                // The stack offset is always `stack.len() - arg_count`, and
                // since this branch handles native Nix functions (which always
                // take only a single argument and are curried), the offset is
                // `stack_len - 1`.
                let stack_offset = self.stack.len() - 1;

                // Reenqueue the parent frame, which should only have
                // `OpReturn` left. Not throwing it away leads to more
                // useful error traces.
                if let Some((parent_span, parent_frame)) = parent {
                    self.push_call_frame(parent_span, parent_frame);
                }

                self.push_call_frame(
                    span,
                    CallFrame {
                        lambda,
                        upvalues: closure.upvalues(),
                        ip: CodeIdx(0),
                        stack_offset,
                    },
                );

                Ok(())
            }

            // Attribute sets with a __functor attribute are callable.
            val @ Value::Attrs(_) => {
                if let Some((parent_span, parent_frame)) = parent {
                    self.push_call_frame(parent_span, parent_frame);
                }

                self.enqueue_generator("__functor call", span, |co| call_functor(co, val));
                Ok(())
            }

            val @ Value::Catchable(_) => {
                // the argument that we tried to apply a catchable to
                self.stack.pop();
                // applying a `throw` to anything is still a `throw`, so we just
                // push it back on the stack.
                self.stack.push(val);
                Ok(())
            }

            v => Err(ErrorKind::NotCallable(v.type_of())).with_span(span, self),
        }
    }
1148
    /// Populate the upvalue fields of a thunk or closure under construction.
    ///
    /// See the closely tied function `emit_upvalue_data` in the compiler
    /// implementation for details on the argument processing.
    ///
    /// `count` is bit-packed: the least-significant bit flags whether
    /// the with-stack must be captured, and the remaining bits hold the
    /// actual number of upvalues to read from the bytecode.
    fn populate_upvalues(
        &mut self,
        frame: &mut CallFrame,
        count: u64,
        mut upvalues: impl DerefMut<Target = Upvalues>,
    ) -> EvalResult<()> {
        // Determine whether to capture the with stack, and then shift the
        // actual count of upvalues back.
        let capture_with = count & 0b1 == 1;
        let count = count >> 1;
        if capture_with {
            // Start the captured with_stack off of the
            // current call frame's captured with_stack, ...
            let mut captured_with_stack = frame
                .upvalues
                .with_stack()
                .cloned()
                // ... or make an empty one if there isn't one already.
                .unwrap_or_else(|| Vec::with_capacity(self.with_stack.len()));

            // Append the values of the VM's live with-stack (held as
            // stack indices) on top of the inherited captures.
            for idx in &self.with_stack {
                captured_with_stack.push(self.stack[*idx].clone());
            }

            upvalues.deref_mut().set_with_stack(captured_with_stack);
        }

        for _ in 0..count {
            // Read the next varint-encoded capture position from the
            // frame's bytecode; this advances the frame past it.
            let pos = Position(frame.read_uvarint());

            // Case 1: capture a value that currently lives on the
            // runtime stack, addressed relative to this frame's offset.
            if let Some(stack_idx) = pos.runtime_stack_index() {
                let idx = frame.stack_offset + stack_idx.0;

                let val = match self.stack.get(idx) {
                    Some(val) => val.clone(),
                    None => {
                        // A missing slot indicates a compiler/VM
                        // mismatch, so surface a structured bug report
                        // with the relevant addresses for debugging.
                        return frame.error(
                            self,
                            ErrorKind::TvixBug {
                                msg: "upvalue to be captured was missing on stack",
                                metadata: Some(Rc::new(json!({
                                    "ip": format!("{:#x}", frame.ip.0 - 1),
                                    "stack_idx(relative)": stack_idx.0,
                                    "stack_idx(absolute)": idx,
                                }))),
                            },
                        );
                    }
                };

                upvalues.deref_mut().push(val);
                continue;
            }

            // Case 2: the local is not initialised yet; record a
            // deferred upvalue to be resolved later.
            if let Some(idx) = pos.runtime_deferred_local() {
                upvalues.deref_mut().push(Value::DeferredUpvalue(idx));
                continue;
            }

            // Case 3: capture an upvalue of the *enclosing* frame.
            if let Some(idx) = pos.runtime_upvalue_index() {
                upvalues.deref_mut().push(frame.upvalue(idx).clone());
                continue;
            }

            // The compiler only ever emits one of the three position
            // kinds above; anything else is a bytecode-level bug.
            panic!("Tvix bug: invalid capture position emitted")
        }

        Ok(())
    }
1222}
1223
1224// TODO(amjoseph): de-asyncify this
1225/// Resolve a dynamically bound identifier (through `with`) by looking
1226/// for matching values in the with-stacks carried at runtime.
1227async fn resolve_with(
1228 co: GenCo,
1229 ident: NixString,
1230 vm_with_len: usize,
1231 upvalue_with_len: usize,
1232) -> Result<Value, ErrorKind> {
1233 /// Fetch and force a value on the with-stack from the VM.
1234 async fn fetch_forced_with(co: &GenCo, idx: usize) -> Value {
1235 match co.yield_(VMRequest::WithValue(idx)).await {
1236 VMResponse::Value(value) => value,
1237 msg => panic!("Tvix bug: VM responded with incorrect generator message: {msg}"),
1238 }
1239 }
1240
1241 /// Fetch and force a value on the *captured* with-stack from the VM.
1242 async fn fetch_captured_with(co: &GenCo, idx: usize) -> Value {
1243 match co.yield_(VMRequest::CapturedWithValue(idx)).await {
1244 VMResponse::Value(value) => value,
1245 msg => panic!("Tvix bug: VM responded with incorrect generator message: {msg}"),
1246 }
1247 }
1248
1249 for with_stack_idx in (0..vm_with_len).rev() {
1250 // TODO(tazjin): is this branch still live with the current with-thunking?
1251 let with = fetch_forced_with(&co, with_stack_idx).await;
1252
1253 if with.is_catchable() {
1254 return Ok(with);
1255 }
1256
1257 match with.to_attrs()?.select(&ident) {
1258 None => continue,
1259 Some(val) => return Ok(val.clone()),
1260 }
1261 }
1262
1263 for upvalue_with_idx in (0..upvalue_with_len).rev() {
1264 let with = fetch_captured_with(&co, upvalue_with_idx).await;
1265
1266 if with.is_catchable() {
1267 return Ok(with);
1268 }
1269
1270 match with.to_attrs()?.select(&ident) {
1271 None => continue,
1272 Some(val) => return Ok(val.clone()),
1273 }
1274 }
1275
1276 Err(ErrorKind::UnknownDynamicVariable(ident.to_string()))
1277}
1278
// TODO(amjoseph): de-asyncify this
/// Add two Nix values, dispatching on their types: path + coerced
/// string yields a canonicalised path, string + string (or coercible)
/// yields a concatenated string, numbers use arithmetic addition, and
/// any other pair is attempted as string coercion of both sides.
/// Coercion failures are returned as catchable values, not errors.
async fn add_values(co: GenCo, a: Value, b: Value) -> Result<Value, ErrorKind> {
    // What we try to do is solely determined by the type of the first value!
    let result = match (a, b) {
        // Path + anything: coerce the right operand to a (weak) string
        // and append it to the path.
        (Value::Path(p), v) => {
            let mut path = p.into_os_string();
            match generators::request_string_coerce(
                &co,
                v,
                CoercionKind {
                    strong: false,

                    // Concatenating a Path with something else results in a
                    // Path, so we don't need to import any paths (paths
                    // imported by Nix always exist as a string, unless
                    // converted by the user). In C++ Nix they even may not
                    // contain any string context, the resulting error of such a
                    // case can not be replicated by us.
                    import_paths: false,
                    // FIXME(raitobezarius): per https://b.tvl.fyi/issues/364, this is a usecase
                    // for having a `reject_context: true` option here. This didn't occur yet in
                    // nixpkgs during my evaluations, therefore, I skipped it.
                },
            )
            .await
            {
                Ok(vs) => {
                    path.push(vs.to_os_str()?);
                    crate::value::canon_path(PathBuf::from(path)).into()
                }
                // Coercion failure stays catchable (e.g. under tryEval).
                Err(c) => Value::Catchable(Box::new(c)),
            }
        }
        // Fast path: both operands are already strings.
        (Value::String(s1), Value::String(s2)) => Value::String(s1.concat(&s2)),
        // String + anything: coerce the right operand like string
        // interpolation would.
        (Value::String(s1), v) => generators::request_string_coerce(
            &co,
            v,
            CoercionKind {
                strong: false,
                // Behaves the same as string interpolation
                import_paths: true,
            },
        )
        .await
        .map(|s2| Value::String(s1.concat(&s2)))
        .into(),
        // Numeric addition (integer/float promotion handled by the macro).
        (a @ Value::Integer(_), b) | (a @ Value::Float(_), b) => arithmetic_op!(&a, &b, +)?,
        // Last resort: coerce both operands to strings and concatenate.
        (a, b) => {
            let r1 = generators::request_string_coerce(
                &co,
                a,
                CoercionKind {
                    strong: false,
                    import_paths: false,
                },
            )
            .await;
            let r2 = generators::request_string_coerce(
                &co,
                b,
                CoercionKind {
                    strong: false,
                    import_paths: false,
                },
            )
            .await;
            match (r1, r2) {
                (Ok(s1), Ok(s2)) => Value::String(s1.concat(&s2)),
                // The first failing coercion wins; propagate it as a
                // catchable value rather than a hard error.
                (Err(c), _) => return Ok(Value::from(c)),
                (_, Err(c)) => return Ok(Value::from(c)),
            }
        }
    };

    Ok(result)
}
1355
/// The result of a VM's runtime evaluation.
pub struct RuntimeResult {
    /// The final value the evaluation produced.
    pub value: Value,
    /// Warnings collected while evaluating.
    pub warnings: Vec<EvalWarning>,
}
1361
1362// TODO(amjoseph): de-asyncify this
1363/// Generator that retrieves the final value from the stack, and deep-forces it
1364/// before returning.
1365async fn final_deep_force(co: GenCo) -> Result<Value, ErrorKind> {
1366 let value = generators::request_stack_pop(&co).await;
1367 Ok(generators::request_deep_force(&co, value).await)
1368}
1369
/// Specification for how to handle top-level values returned by evaluation.
///
/// Derives `PartialEq`/`Eq` (in addition to the usual set) so callers
/// can compare configured modes directly.
#[derive(Debug, Clone, Copy, Default, PartialEq, Eq)]
pub enum EvalMode {
    /// The default. Values are returned from evaluations as-is, without any extra forcing or
    /// special handling.
    #[default]
    Lazy,

    /// Strictly and deeply evaluate top-level values returned by evaluation.
    Strict,
}
1381
1382pub fn run_lambda(
1383 nix_search_path: NixSearchPath,
1384 io_handle: Rc<dyn EvalIO>,
1385 observer: &mut dyn RuntimeObserver,
1386 source: SourceCode,
1387 globals: Rc<GlobalsMap>,
1388 lambda: Rc<Lambda>,
1389 mode: EvalMode,
1390) -> EvalResult<RuntimeResult> {
1391 // Retain the top-level span of the expression in this lambda, as
1392 // synthetic "calls" in deep_force will otherwise not have a span
1393 // to fall back to.
1394 //
1395 // We exploit the fact that the compiler emits a final instruction
1396 // with the span of the entire file for top-level expressions.
1397 let root_span = lambda.chunk.get_span(CodeIdx(lambda.chunk.code.len() - 1));
1398
1399 let mut vm = VM::new(
1400 nix_search_path,
1401 io_handle,
1402 observer,
1403 source,
1404 globals,
1405 root_span,
1406 );
1407
1408 // When evaluating strictly, synthesise a frame that will instruct
1409 // the VM to deep-force the final value before returning it.
1410 match mode {
1411 EvalMode::Lazy => {}
1412 EvalMode::Strict => vm.enqueue_generator("final_deep_force", root_span, final_deep_force),
1413 }
1414
1415 vm.frames.push(Frame::CallFrame {
1416 span: root_span,
1417 call_frame: CallFrame {
1418 lambda,
1419 upvalues: Rc::new(Upvalues::with_capacity(0)),
1420 ip: CodeIdx(0),
1421 stack_offset: 0,
1422 },
1423 });
1424
1425 vm.execute()
1426}