stage2: Keep error return traces alive when storing to const

This change extends the "lifetime" of the error return trace associated
with an error to continue throughout the block of a `const` variable
that it is assigned to.

This is necessary to support patterns like this one in test_runner.zig:
```zig
const result = foo();
if (result) |_| {
    // ... success logic
} else |err| {
    // `foo()` should be included in the error trace here
    return error.TestFailed;
}
```

To make this happen, the majority of the error return trace popping logic
needed to move into Sema, since `const x = foo();` cannot be examined
syntactically to determine whether it modifies the error return trace. We
also have to make sure not to delete pertinent block information before it
makes it to Sema, so that Sema can pop/restore around blocks correctly.

**Why do this only for `const` and not `var`?**

There is room to relax things for `var`, but only a little bit. We could
do the same thing we do for const and keep the error trace alive for the
remainder of the block where the *assignment* happens. Any wider scope
would violate the stack discipline for traces, so it's not viable.

In the end, I decided the most consistent behavior for the user is just
to kill all error return traces assigned to a mutable `var`.
This commit is contained in:
Cody Tapscott 2022-09-25 19:51:38 -07:00
parent 597ead5318
commit d060cbbec7
9 changed files with 601 additions and 255 deletions

View File

@ -44,24 +44,23 @@ pub fn main() void {
if (!have_tty) {
std.debug.print("{d}/{d} {s}... ", .{ i + 1, test_fn_list.len, test_fn.name });
}
if (result: {
if (test_fn.async_frame_size) |size| switch (io_mode) {
.evented => {
if (async_frame_buffer.len < size) {
std.heap.page_allocator.free(async_frame_buffer);
async_frame_buffer = std.heap.page_allocator.alignedAlloc(u8, std.Target.stack_align, size) catch @panic("out of memory");
}
const casted_fn = @ptrCast(fn () callconv(.Async) anyerror!void, test_fn.func);
break :result await @asyncCall(async_frame_buffer, {}, casted_fn, .{});
},
.blocking => {
skip_count += 1;
test_node.end();
progress.log("SKIP (async test)\n", .{});
continue;
},
} else break :result test_fn.func();
}) |_| {
const result = if (test_fn.async_frame_size) |size| switch (io_mode) {
.evented => blk: {
if (async_frame_buffer.len < size) {
std.heap.page_allocator.free(async_frame_buffer);
async_frame_buffer = std.heap.page_allocator.alignedAlloc(u8, std.Target.stack_align, size) catch @panic("out of memory");
}
const casted_fn = @ptrCast(fn () callconv(.Async) anyerror!void, test_fn.func);
break :blk await @asyncCall(async_frame_buffer, {}, casted_fn, .{});
},
.blocking => {
skip_count += 1;
test_node.end();
progress.log("SKIP (async test)\n", .{});
continue;
},
} else test_fn.func();
if (result) |_| {
ok_count += 1;
test_node.end();
if (!have_tty) std.debug.print("OK\n", .{});

View File

@ -734,7 +734,7 @@ pub const Inst = struct {
addrspace_cast,
/// Saves the error return trace index, if any. Otherwise, returns 0.
/// Uses the `ty_op` field.
/// Uses the `ty_pl` field.
save_err_return_trace_index,
pub fn fromCmpOp(op: std.math.CompareOperator, optimized: bool) Tag {

View File

@ -337,6 +337,8 @@ pub const ResultInfo = struct {
shift_op,
/// The expression is an argument in a function call.
fn_arg,
/// The expression is the right-hand side of an initializer for a `const` variable
const_init,
/// No specific operator in particular.
none,
};
@ -1850,6 +1852,45 @@ fn comptimeExprAst(
return result;
}
/// Restore the error return trace index. Performs the restore only if the result is a non-error or
/// if the result location is a non-error-handling expression.
fn restoreErrRetIndex(
    gz: *GenZir,
    bt: GenZir.BranchTarget,
    ri: ResultInfo,
    node: Ast.Node.Index,
    result: Zir.Inst.Ref,
) !void {
    // `op` is the operand Sema will test for non-error-ness before restoring,
    // or `.none` to restore unconditionally.
    const op = switch (nodeMayEvalToError(gz.astgen.tree, node)) {
        .always => return, // never restore/pop
        .never => .none, // always restore/pop
        .maybe => switch (ri.ctx) {
            // The result feeds an error-handling construct (or a `const` init /
            // function argument, which propagate the trace), so only restore
            // when the result turns out to be a non-error.
            .error_handling_expr, .@"return", .fn_arg, .const_init => switch (ri.rl) {
                // For pointer result locations, the stored value (not `result`)
                // is what must be tested, so load it back out.
                .ptr => |ptr_res| try gz.addUnNode(.load, ptr_res.inst, node),
                .inferred_ptr => |ptr| try gz.addUnNode(.load, ptr, node),
                .block_ptr => |block_scope| if (block_scope.rvalue_rl_count != block_scope.break_count) b: {
                    // The result location may have been used by this expression, in which case
                    // the operand is not the result and we need to load the rl ptr.
                    switch (gz.astgen.instructions.items(.tag)[Zir.refToIndex(block_scope.rl_ptr).?]) {
                        .alloc_inferred, .alloc_inferred_mut => {
                            // This is a terrible workaround for Sema's inability to load from a .alloc_inferred ptr
                            // before its type has been resolved. The operand we use here instead is not guaranteed
                            // to be valid, and when it's not, we will pop error traces prematurely.
                            //
                            // TODO: Update this to do a proper load from the rl_ptr, once Sema can support it.
                            break :b result;
                        },
                        else => break :b try gz.addUnNode(.load, block_scope.rl_ptr, node),
                    }
                } else result,
                else => result,
            },
            else => .none, // always restore/pop
        },
    };

    _ = try gz.addRestoreErrRetIndex(bt, .{ .if_non_error = op });
}
fn breakExpr(parent_gz: *GenZir, parent_scope: *Scope, node: Ast.Node.Index) InnerError!Zir.Inst.Ref {
const astgen = parent_gz.astgen;
const tree = astgen.tree;
@ -1857,13 +1898,6 @@ fn breakExpr(parent_gz: *GenZir, parent_scope: *Scope, node: Ast.Node.Index) Inn
const break_label = node_datas[node].lhs;
const rhs = node_datas[node].rhs;
// Breaking out of a `catch { ... }` or `else |err| { ... }` block with a non-error value
// means that the corresponding error was correctly handled, and the error trace index
// needs to be restored so that any entries from the caught error are effectively "popped"
//
// Note: We only restore for the outermost block, since that will "pop" any nested blocks.
var err_trace_index_to_restore: Zir.Inst.Ref = .none;
// Look for the label in the scope.
var scope = parent_scope;
while (true) {
@ -1882,11 +1916,6 @@ fn breakExpr(parent_gz: *GenZir, parent_scope: *Scope, node: Ast.Node.Index) Inn
});
}
if (block_gz.saved_err_trace_index != .none) {
// We are breaking out of a `catch { ... }` or `else |err| { ... }`.
err_trace_index_to_restore = block_gz.saved_err_trace_index;
}
const block_inst = blk: {
if (break_label != 0) {
if (block_gz.label) |*label| {
@ -1913,10 +1942,8 @@ fn breakExpr(parent_gz: *GenZir, parent_scope: *Scope, node: Ast.Node.Index) Inn
try genDefers(parent_gz, scope, parent_scope, .normal_only);
// As our last action before the break, "pop" the error trace if needed
if (err_trace_index_to_restore != .none) {
// void is a non-error so we always pop - no need to call `popErrorReturnTrace`
_ = try parent_gz.addUnNode(.restore_err_ret_index, err_trace_index_to_restore, node);
}
if (!block_gz.force_comptime)
_ = try parent_gz.addRestoreErrRetIndex(.{ .block = block_inst }, .always);
_ = try parent_gz.addBreak(break_tag, block_inst, .void_value);
return Zir.Inst.Ref.unreachable_value;
@ -1929,17 +1956,8 @@ fn breakExpr(parent_gz: *GenZir, parent_scope: *Scope, node: Ast.Node.Index) Inn
try genDefers(parent_gz, scope, parent_scope, .normal_only);
// As our last action before the break, "pop" the error trace if needed
if (err_trace_index_to_restore != .none) {
// Pop the error trace, unless the operand is an error and breaking to an error-handling expr.
try popErrorReturnTrace(
parent_gz,
scope,
block_gz.break_result_info,
rhs,
operand,
err_trace_index_to_restore,
);
}
if (!block_gz.force_comptime)
try restoreErrRetIndex(parent_gz, .{ .block = block_inst }, block_gz.break_result_info, rhs, operand);
switch (block_gz.break_result_info.rl) {
.block_ptr => {
@ -2066,8 +2084,34 @@ fn blockExpr(
return labeledBlockExpr(gz, scope, ri, block_node, statements);
}
var sub_gz = gz.makeSubBlock(scope);
try blockExprStmts(&sub_gz, &sub_gz.base, statements);
if (!gz.force_comptime) {
// Since this block is unlabeled, its control flow is effectively linear and we
// can *almost* get away with inlining the block here. However, we actually need
// to preserve the .block for Sema, to properly pop the error return trace.
const block_tag: Zir.Inst.Tag = .block;
const block_inst = try gz.makeBlockInst(block_tag, block_node);
try gz.instructions.append(astgen.gpa, block_inst);
var block_scope = gz.makeSubBlock(scope);
defer block_scope.unstack();
try blockExprStmts(&block_scope, &block_scope.base, statements);
if (!block_scope.endsWithNoReturn()) {
// As our last action before the break, "pop" the error trace if needed
_ = try gz.addRestoreErrRetIndex(.{ .block = block_inst }, .always);
const break_tag: Zir.Inst.Tag = if (block_scope.force_comptime) .break_inline else .@"break";
_ = try block_scope.addBreak(break_tag, block_inst, .void_value);
}
try block_scope.setBlockBody(block_inst);
} else {
var sub_gz = gz.makeSubBlock(scope);
try blockExprStmts(&sub_gz, &sub_gz.base, statements);
}
return rvalue(gz, ri, .void_value, block_node);
}
@ -2141,6 +2185,9 @@ fn labeledBlockExpr(
try blockExprStmts(&block_scope, &block_scope.base, statements);
if (!block_scope.endsWithNoReturn()) {
// As our last action before the return, "pop" the error trace if needed
_ = try gz.addRestoreErrRetIndex(.{ .block = block_inst }, .always);
const break_tag: Zir.Inst.Tag = if (block_scope.force_comptime) .break_inline else .@"break";
_ = try block_scope.addBreak(break_tag, block_inst, .void_value);
}
@ -2164,7 +2211,8 @@ fn labeledBlockExpr(
return indexToRef(block_inst);
},
.break_operand => {
// All break operands are values that did not use the result location pointer.
// All break operands are values that did not use the result location pointer
// (except for a single .store_to_block_ptr inst which we re-write here).
// The break instructions need to have their operands coerced if the
// block's result location is a `ty`. In this case we overwrite the
// `store_to_block_ptr` instruction with an `as` instruction and repurpose
@ -2528,7 +2576,6 @@ fn addEnsureResult(gz: *GenZir, maybe_unused_result: Zir.Inst.Ref, statement: As
.try_ptr,
//.try_inline,
//.try_ptr_inline,
.save_err_ret_index,
=> break :b false,
.extended => switch (gz.astgen.instructions.items(.data)[inst].extended.opcode) {
@ -2591,6 +2638,7 @@ fn addEnsureResult(gz: *GenZir, maybe_unused_result: Zir.Inst.Ref, statement: As
.validate_array_init_ty,
.validate_struct_init_ty,
.validate_deref,
.save_err_ret_index,
.restore_err_ret_index,
=> break :b true,
@ -2877,7 +2925,8 @@ fn varDecl(
{
const result_info: ResultInfo = if (type_node != 0) .{
.rl = .{ .ty = try typeExpr(gz, scope, type_node) },
} else .{ .rl = .none };
.ctx = .const_init,
} else .{ .rl = .none, .ctx = .const_init };
const prev_anon_name_strategy = gz.anon_name_strategy;
gz.anon_name_strategy = .dbg_var;
const init_inst = try reachableExpr(gz, scope, result_info, var_decl.ast.init_node, node);
@ -2885,6 +2934,11 @@ fn varDecl(
try gz.addDbgVar(.dbg_var_val, ident_name, init_inst);
// The const init expression may have modified the error return trace, so signal
// to Sema that it should save the new index for restoring later.
if (nodeMayAppendToErrorTrace(tree, var_decl.ast.init_node))
_ = try gz.addSaveErrRetIndex(.{ .if_of_error_type = init_inst });
const sub_scope = try block_arena.create(Scope.LocalVal);
sub_scope.* = .{
.parent = scope,
@ -2950,9 +3004,14 @@ fn varDecl(
init_scope.rl_ptr = alloc;
init_scope.rl_ty_inst = .none;
}
const init_result_info: ResultInfo = .{ .rl = .{ .block_ptr = &init_scope } };
const init_result_info: ResultInfo = .{ .rl = .{ .block_ptr = &init_scope }, .ctx = .const_init };
const init_inst = try reachableExpr(&init_scope, &init_scope.base, init_result_info, var_decl.ast.init_node, node);
// The const init expression may have modified the error return trace, so signal
// to Sema that it should save the new index for restoring later.
if (nodeMayAppendToErrorTrace(tree, var_decl.ast.init_node))
_ = try init_scope.addSaveErrRetIndex(.{ .if_of_error_type = init_inst });
const zir_tags = astgen.instructions.items(.tag);
const zir_datas = astgen.instructions.items(.data);
@ -3775,6 +3834,9 @@ fn fnDecl(
try checkUsed(gz, &fn_gz.base, params_scope);
if (!fn_gz.endsWithNoReturn()) {
// As our last action before the return, "pop" the error trace if needed
_ = try gz.addRestoreErrRetIndex(.ret, .always);
// Since we are adding the return instruction here, we must handle the coercion.
// We do this by using the `ret_tok` instruction.
_ = try fn_gz.addUnTok(.ret_tok, .void_value, tree.lastToken(body_node));
@ -4217,6 +4279,10 @@ fn testDecl(
const block_result = try expr(&fn_block, &fn_block.base, .{ .rl = .none }, body_node);
if (fn_block.isEmpty() or !fn_block.refIsNoReturn(block_result)) {
// As our last action before the return, "pop" the error trace if needed
_ = try gz.addRestoreErrRetIndex(.ret, .always);
// Since we are adding the return instruction here, we must handle the coercion.
// We do this by using the `ret_tok` instruction.
_ = try fn_block.addUnTok(.ret_tok, .void_value, tree.lastToken(body_node));
@ -5196,76 +5262,6 @@ fn tryExpr(
}
}
/// Pops the error return trace, unless:
///  1. the result is a non-error, AND
///  2. the result location corresponds to an error-handling expression
///
/// For reference, the full list of error-handling expressions is:
///  - try X
///  - X catch ...
///  - if (X) |_| { ... } else |err| { ... }
///  - return X
///
fn popErrorReturnTrace(
    gz: *GenZir,
    scope: *Scope,
    ri: ResultInfo,
    node: Ast.Node.Index,
    result_inst: Zir.Inst.Ref,
    error_trace_index: Zir.Inst.Ref,
) InnerError!void {
    const astgen = gz.astgen;
    const tree = astgen.tree;
    // Syntactic classification of the result: .never / .maybe / .always an error.
    const result_is_err = nodeMayEvalToError(tree, node);

    // If we are breaking to a try/catch/error-union-if/return or a function call, the error trace propagates.
    const propagate_error_trace = switch (ri.ctx) {
        .error_handling_expr, .@"return", .fn_arg => true,
        else => false,
    };

    if (result_is_err == .never or !propagate_error_trace) {
        // We are returning a non-error, or returning to a non-error-handling operator.
        // In either case, we need to pop the error trace.
        _ = try gz.addUnNode(.restore_err_ret_index, error_trace_index, node);
    } else if (result_is_err == .maybe) {
        // We are returning to an error-handling operator with a maybe-error.
        // Restore only if it's a non-error, implying the catch was successfully handled.
        // This is emitted as a runtime conditional: block { condbr(is_non_err, restore, noop) }.
        var block_scope = gz.makeSubBlock(scope);
        block_scope.setBreakResultInfo(.{ .rl = .discard });
        defer block_scope.unstack();

        // Emit conditional branch for restoring error trace index
        const is_non_err = switch (ri.rl) {
            .ref => try block_scope.addUnNode(.is_non_err_ptr, result_inst, node),
            .ptr => |ptr| try block_scope.addUnNode(.is_non_err_ptr, ptr.inst, node),
            .ty, .none => try block_scope.addUnNode(.is_non_err, result_inst, node),
            else => unreachable, // Error-handling operators only generate the above result locations
        };
        const condbr = try block_scope.addCondBr(.condbr, node);

        const block = try gz.makeBlockInst(.block, node);
        try block_scope.setBlockBody(block);
        // block_scope unstacked now, can add new instructions to gz
        try gz.instructions.append(astgen.gpa, block);

        // "then" branch: the result is a non-error, so the error was handled;
        // pop the trace entries by restoring the saved index.
        var then_scope = block_scope.makeSubBlock(scope);
        defer then_scope.unstack();

        _ = try then_scope.addUnNode(.restore_err_ret_index, error_trace_index, node);
        const then_break = try then_scope.makeBreak(.@"break", block, .void_value);

        // "else" branch: the result is an error, so keep the trace intact.
        var else_scope = block_scope.makeSubBlock(scope);
        defer else_scope.unstack();
        const else_break = try else_scope.makeBreak(.@"break", block, .void_value);

        try setCondBrPayload(condbr, is_non_err, &then_scope, then_break, &else_scope, else_break);
    }
}
fn orelseCatchExpr(
parent_gz: *GenZir,
scope: *Scope,
@ -5287,8 +5283,6 @@ fn orelseCatchExpr(
block_scope.setBreakResultInfo(ri);
defer block_scope.unstack();
const saved_err_trace_index = if (do_err_trace) try parent_gz.addNode(.save_err_ret_index, node) else .none;
const operand_ri: ResultInfo = switch (block_scope.break_result_info.rl) {
.ref => .{ .rl = .ref, .ctx = if (do_err_trace) .error_handling_expr else .none },
else => .{ .rl = .none, .ctx = if (do_err_trace) .error_handling_expr else .none },
@ -5320,11 +5314,10 @@ fn orelseCatchExpr(
var else_scope = block_scope.makeSubBlock(scope);
defer else_scope.unstack();
// Any break (of a non-error value) that navigates out of this scope means
// that the error was handled successfully, so this index will be restored.
else_scope.saved_err_trace_index = saved_err_trace_index;
if (else_scope.outermost_err_trace_index == .none)
else_scope.outermost_err_trace_index = saved_err_trace_index;
// We know that the operand (almost certainly) modified the error return trace,
// so signal to Sema that it should save the new index for restoring later.
if (do_err_trace and nodeMayAppendToErrorTrace(tree, lhs))
_ = try else_scope.addSaveErrRetIndex(.always);
var err_val_scope: Scope.LocalVal = undefined;
const else_sub_scope = blk: {
@ -5352,16 +5345,9 @@ fn orelseCatchExpr(
if (!else_scope.endsWithNoReturn()) {
block_scope.break_count += 1;
if (do_err_trace) {
try popErrorReturnTrace(
&else_scope,
else_sub_scope,
block_scope.break_result_info,
rhs,
else_result,
saved_err_trace_index,
);
}
// As our last action before the break, "pop" the error trace if needed
if (do_err_trace)
try restoreErrRetIndex(&else_scope, .{ .block = block }, block_scope.break_result_info, rhs, else_result);
}
try checkUsed(parent_gz, &else_scope.base, else_sub_scope);
@ -5587,8 +5573,6 @@ fn ifExpr(
block_scope.setBreakResultInfo(ri);
defer block_scope.unstack();
const saved_err_trace_index = if (do_err_trace) try parent_gz.addNode(.save_err_ret_index, node) else .none;
const payload_is_ref = if (if_full.payload_token) |payload_token|
token_tags[payload_token] == .asterisk
else
@ -5705,11 +5689,10 @@ fn ifExpr(
var else_scope = parent_gz.makeSubBlock(scope);
defer else_scope.unstack();
// Any break (of a non-error value) that navigates out of this scope means
// that the error was handled successfully, so this index will be restored.
else_scope.saved_err_trace_index = saved_err_trace_index;
if (else_scope.outermost_err_trace_index == .none)
else_scope.outermost_err_trace_index = saved_err_trace_index;
// We know that the operand (almost certainly) modified the error return trace,
// so signal to Sema that it should save the new index for restoring later.
if (do_err_trace and nodeMayAppendToErrorTrace(tree, if_full.ast.cond_expr))
_ = try else_scope.addSaveErrRetIndex(.always);
const else_node = if_full.ast.else_expr;
const else_info: struct {
@ -5747,16 +5730,9 @@ fn ifExpr(
if (!else_scope.endsWithNoReturn()) {
block_scope.break_count += 1;
if (do_err_trace) {
try popErrorReturnTrace(
&else_scope,
sub_scope,
block_scope.break_result_info,
else_node,
e,
saved_err_trace_index,
);
}
// As our last action before the break, "pop" the error trace if needed
if (do_err_trace)
try restoreErrRetIndex(&else_scope, .{ .block = block }, block_scope.break_result_info, else_node, e);
}
try checkUsed(parent_gz, &else_scope.base, sub_scope);
try else_scope.addDbgBlockEnd();
@ -6886,6 +6862,10 @@ fn ret(gz: *GenZir, scope: *Scope, node: Ast.Node.Index) InnerError!Zir.Inst.Ref
if (operand_node == 0) {
// Returning a void value; skip error defers.
try genDefers(gz, defer_outer, scope, .normal_only);
// As our last action before the return, "pop" the error trace if needed
_ = try gz.addRestoreErrRetIndex(.ret, .always);
_ = try gz.addUnNode(.ret_node, .void_value, node);
return Zir.Inst.Ref.unreachable_value;
}
@ -6921,15 +6901,13 @@ fn ret(gz: *GenZir, scope: *Scope, node: Ast.Node.Index) InnerError!Zir.Inst.Ref
const operand = try reachableExpr(gz, scope, ri, operand_node, node);
gz.anon_name_strategy = prev_anon_name_strategy;
// TODO: This should be almost identical for every break/ret
switch (nodeMayEvalToError(tree, operand_node)) {
.never => {
// Returning a value that cannot be an error; skip error defers.
try genDefers(gz, defer_outer, scope, .normal_only);
// As our last action before the return, "pop" the error trace if needed
if (gz.outermost_err_trace_index != .none)
_ = try gz.addUnNode(.restore_err_ret_index, gz.outermost_err_trace_index, node);
_ = try gz.addRestoreErrRetIndex(.ret, .always);
try emitDbgStmt(gz, ret_line, ret_column);
try gz.addRet(ri, operand, node);
@ -6949,6 +6927,11 @@ fn ret(gz: *GenZir, scope: *Scope, node: Ast.Node.Index) InnerError!Zir.Inst.Ref
// Only regular defers; no branch needed.
try genDefers(gz, defer_outer, scope, .normal_only);
try emitDbgStmt(gz, ret_line, ret_column);
// As our last action before the return, "pop" the error trace if needed
const result = if (ri.rl == .ptr) try gz.addUnNode(.load, ri.rl.ptr.inst, node) else operand;
_ = try gz.addRestoreErrRetIndex(.ret, .{ .if_non_error = result });
try gz.addRet(ri, operand, node);
return Zir.Inst.Ref.unreachable_value;
}
@ -6964,8 +6947,7 @@ fn ret(gz: *GenZir, scope: *Scope, node: Ast.Node.Index) InnerError!Zir.Inst.Ref
try genDefers(&then_scope, defer_outer, scope, .normal_only);
// As our last action before the return, "pop" the error trace if needed
if (then_scope.outermost_err_trace_index != .none)
_ = try then_scope.addUnNode(.restore_err_ret_index, then_scope.outermost_err_trace_index, node);
_ = try then_scope.addRestoreErrRetIndex(.ret, .always);
try emitDbgStmt(&then_scope, ret_line, ret_column);
try then_scope.addRet(ri, operand, node);
@ -8561,10 +8543,11 @@ fn callExpr(
scratch_index += 1;
}
// If our result location is a try/catch/error-union-if/return, the error trace propagates.
// If our result location is a try/catch/error-union-if/return, a function argument,
// or an initializer for a `const` variable, the error trace propagates.
// Otherwise, it should always be popped (handled in Sema).
const propagate_error_trace = switch (ri.ctx) {
.error_handling_expr, .@"return", .fn_arg => true, // Propagate to try/catch/error-union-if, return, and other function calls
.error_handling_expr, .@"return", .fn_arg, .const_init => true,
else => false,
};
@ -8932,6 +8915,33 @@ fn nodeMayNeedMemoryLocation(tree: *const Ast, start_node: Ast.Node.Index, have_
}
}
/// Reports whether the expression rooted at `start_node` may add entries to
/// the error return trace at runtime.
fn nodeMayAppendToErrorTrace(tree: *const Ast, start_node: Ast.Node.Index) bool {
    const node_tags = tree.nodes.items(.tag);
    const node_datas = tree.nodes.items(.data);

    var cur = start_node;
    while (true) switch (node_tags[cur]) {
        // These don't have the opportunity to call any runtime functions,
        // so they can never append to the trace.
        .error_value, .identifier, .@"comptime" => return false,

        // Transparent wrappers: the answer is decided by the LHS sub-expression.
        .grouped_expression, .@"try", .@"nosuspend", .unwrap_optional => cur = node_datas[cur].lhs,

        // Anything that cannot evaluate to an error is guaranteed to pop any
        // additions to the error trace, so it effectively does not append.
        else => return nodeMayEvalToError(tree, start_node) != .never,
    };
}
fn nodeMayEvalToError(tree: *const Ast, start_node: Ast.Node.Index) BuiltinFn.EvalToError {
const node_tags = tree.nodes.items(.tag);
const node_datas = tree.nodes.items(.data);
@ -10494,13 +10504,6 @@ const GenZir = struct {
/// Keys are the raw instruction index, values are the closure_capture instruction.
captures: std.AutoHashMapUnmanaged(Zir.Inst.Index, Zir.Inst.Index) = .{},
/// If this GenZir corresponds to a `catch { ... }` or `else |err| { ... }` block,
/// this err_trace_index can be restored to "pop" the trace entries for the block.
saved_err_trace_index: Zir.Inst.Ref = .none,
/// When returning from a function with a non-error, we must pop all trace entries
/// from any containing `catch { ... }` or `else |err| { ... }` blocks.
outermost_err_trace_index: Zir.Inst.Ref = .none,
const unstacked_top = std.math.maxInt(usize);
/// Call unstack before adding any new instructions to containing GenZir.
fn unstack(self: *GenZir) void {
@ -10545,7 +10548,6 @@ const GenZir = struct {
.any_defer_node = gz.any_defer_node,
.instructions = gz.instructions,
.instructions_top = gz.instructions.items.len,
.outermost_err_trace_index = gz.outermost_err_trace_index,
};
}
@ -11359,6 +11361,46 @@ const GenZir = struct {
});
}
/// Emits a `save_err_ret_index` ZIR instruction. With `.always` the index is
/// saved unconditionally; with `.if_of_error_type`, Sema saves it based on the
/// type of the given operand.
fn addSaveErrRetIndex(
    gz: *GenZir,
    cond: union(enum) {
        always: void,
        if_of_error_type: Zir.Inst.Ref,
    },
) !Zir.Inst.Index {
    // `.none` encodes the unconditional case.
    const operand: Zir.Inst.Ref = switch (cond) {
        .always => .none,
        .if_of_error_type => |inst| inst,
    };
    return gz.addAsIndex(.{
        .tag = .save_err_ret_index,
        .data = .{ .save_err_ret_index = .{ .operand = operand } },
    });
}
/// The control-flow edge a `restore_err_ret_index` instruction is associated
/// with: either a function return or a break out of a specific block.
const BranchTarget = union(enum) {
    ret,
    block: Zir.Inst.Index,
};

/// Emits a `restore_err_ret_index` ZIR instruction targeting `bt`. With
/// `.always` the restore is unconditional; with `.if_non_error`, Sema restores
/// only when the given operand is a non-error.
fn addRestoreErrRetIndex(
    gz: *GenZir,
    bt: BranchTarget,
    cond: union(enum) {
        always: void,
        if_non_error: Zir.Inst.Ref,
    },
) !Zir.Inst.Index {
    // `.none` in the block field encodes the `.ret` target.
    const block_ref: Zir.Inst.Ref = switch (bt) {
        .ret => .none,
        .block => |b| Zir.indexToRef(b),
    };
    // `.none` in the operand field encodes the unconditional case.
    const operand: Zir.Inst.Ref = switch (cond) {
        .always => .none,
        .if_non_error => |inst| inst,
    };
    return gz.addAsIndex(.{
        .tag = .restore_err_ret_index,
        .data = .{ .restore_err_ret_index = .{
            .block = block_ref,
            .operand = operand,
        } },
    });
}
fn addBreak(
gz: *GenZir,
tag: Zir.Inst.Tag,

View File

@ -5633,6 +5633,13 @@ pub fn analyzeFnBody(mod: *Module, func: *Fn, arena: Allocator) SemaError!Air {
const last_arg_index = inner_block.instructions.items.len;
// Save the error trace as our first action in the function.
// If this is unnecessary after all, Liveness will clean it up for us.
const err_ret_trace_index = try sema.analyzeSaveErrRetIndex(&inner_block);
inner_block.error_return_trace_index = err_ret_trace_index;
inner_block.error_return_trace_index_on_block_entry = err_ret_trace_index;
inner_block.error_return_trace_index_on_function_entry = err_ret_trace_index;
sema.analyzeBody(&inner_block, fn_info.body) catch |err| switch (err) {
// TODO make these unreachable instead of @panic
error.NeededSourceLocation => @panic("zig compiler bug: NeededSourceLocation"),

View File

@ -153,6 +153,12 @@ pub const Block = struct {
is_typeof: bool = false,
is_coerce_result_ptr: bool = false,
/// Keep track of the active error return trace index around blocks so that we can correctly
/// pop the error trace upon block exit.
error_return_trace_index: Air.Inst.Ref = .none,
error_return_trace_index_on_block_entry: Air.Inst.Ref = .none,
error_return_trace_index_on_function_entry: Air.Inst.Ref = .none,
/// when null, it is determined by build mode, changed by @setRuntimeSafety
want_safety: ?bool = null,
@ -226,6 +232,9 @@ pub const Block = struct {
.float_mode = parent.float_mode,
.c_import_buf = parent.c_import_buf,
.switch_else_err_ty = parent.switch_else_err_ty,
.error_return_trace_index = parent.error_return_trace_index,
.error_return_trace_index_on_block_entry = parent.error_return_trace_index,
.error_return_trace_index_on_function_entry = parent.error_return_trace_index_on_function_entry,
};
}
@ -945,8 +954,6 @@ fn analyzeBodyInner(
.ret_ptr => try sema.zirRetPtr(block, inst),
.ret_type => try sema.addType(sema.fn_ret_ty),
.save_err_ret_index => try sema.zirSaveErrRetIndex(block, inst),
// Instructions that we know to *always* be noreturn based solely on their tag.
// These functions match the return type of analyzeBody so that we can
// tail call them here.
@ -1229,6 +1236,11 @@ fn analyzeBodyInner(
i += 1;
continue;
},
.save_err_ret_index => {
try sema.zirSaveErrRetIndex(block, inst);
i += 1;
continue;
},
.restore_err_ret_index => {
try sema.zirRestoreErrRetIndex(block, inst);
i += 1;
@ -1326,31 +1338,32 @@ fn analyzeBodyInner(
const extra = sema.code.extraData(Zir.Inst.Block, inst_data.payload_index);
const inline_body = sema.code.extra[extra.end..][0..extra.data.body_len];
const gpa = sema.gpa;
// If this block contains a function prototype, we need to reset the
// current list of parameters and restore it later.
// Note: this probably needs to be resolved in a more general manner.
const prev_params = block.params;
const need_sub_block = tags[inline_body[inline_body.len - 1]] == .repeat_inline;
var sub_block = block;
var block_space: Block = undefined;
// NOTE: this has to be done like this because branching in
// defers here breaks stage1.
block_space.instructions = .{};
if (need_sub_block) {
block_space = block.makeSubBlock();
block_space.inline_block = inline_body[0];
sub_block = &block_space;
}
block.params = .{};
defer {
block.params.deinit(gpa);
block.params = prev_params;
block_space.instructions.deinit(gpa);
}
const opt_break_data = try sema.analyzeBodyBreak(sub_block, inline_body);
if (need_sub_block) {
try block.instructions.appendSlice(gpa, block_space.instructions.items);
}
const opt_break_data = b: {
// Create a temporary child block so that this inline block is properly
// labeled for any .restore_err_ret_index instructions
var child_block = block.makeSubBlock();
// If this block contains a function prototype, we need to reset the
// current list of parameters and restore it later.
// Note: this probably needs to be resolved in a more general manner.
if (tags[inline_body[inline_body.len - 1]] == .repeat_inline) {
child_block.inline_block = inline_body[0];
} else child_block.inline_block = block.inline_block;
var label: Block.Label = .{
.zir_block = inst,
.merges = undefined,
};
child_block.label = &label;
defer child_block.params.deinit(gpa);
// Write these instructions directly into the parent block
child_block.instructions = block.instructions;
defer block.instructions = child_block.instructions;
break :b try sema.analyzeBodyBreak(&child_block, inline_body);
};
// A runtime conditional branch that needs a post-hoc block to be
// emitted communicates this by mapping the block index into the inst map.
@ -4994,7 +5007,7 @@ fn zirBlock(sema: *Sema, parent_block: *Block, inst: Zir.Inst.Index) CompileErro
// Reserve space for a Block instruction so that generated Break instructions can
// point to it, even if it doesn't end up getting used because the code ends up being
// comptime evaluated.
// comptime evaluated or is an unlabeled block.
const block_inst = @intCast(Air.Inst.Index, sema.air_instructions.len);
try sema.air_instructions.append(gpa, .{
.tag = .block,
@ -5025,6 +5038,9 @@ fn zirBlock(sema: *Sema, parent_block: *Block, inst: Zir.Inst.Index) CompileErro
.runtime_cond = parent_block.runtime_cond,
.runtime_loop = parent_block.runtime_loop,
.runtime_index = parent_block.runtime_index,
.error_return_trace_index = parent_block.error_return_trace_index,
.error_return_trace_index_on_block_entry = parent_block.error_return_trace_index,
.error_return_trace_index_on_function_entry = parent_block.error_return_trace_index_on_function_entry,
};
defer child_block.instructions.deinit(gpa);
@ -5667,19 +5683,51 @@ fn funcDeclSrc(sema: *Sema, block: *Block, src: LazySrcLoc, func_inst: Air.Inst.
return owner_decl.srcLoc();
}
/// Emits a `save_err_return_trace_index` AIR instruction capturing the current
/// error return trace index (the `index` field of `std.builtin.StackTrace`).
/// Returns `.none` when there is no trace to save: tracing disabled, a backend
/// without error-return-tracing support, or a comptime block.
pub fn analyzeSaveErrRetIndex(sema: *Sema, block: *Block) SemaError!Air.Inst.Ref {
    const src = sema.src;

    // NOTE(review): the `use_llvm` check implies only the LLVM backend
    // supports error return tracing at this point — confirm as other
    // backends mature.
    const backend_supports_error_return_tracing = sema.mod.comp.bin_file.options.use_llvm;
    if (!backend_supports_error_return_tracing or !sema.mod.comp.bin_file.options.error_return_tracing)
        return .none;

    if (block.is_comptime)
        return .none;

    // The helpers below return CompileError, but this function may only return
    // SemaError; the extra error cases cannot occur here, so they are unreachable.
    const unresolved_stack_trace_ty = sema.getBuiltinType(block, src, "StackTrace") catch |err| switch (err) {
        error.NeededSourceLocation, error.GenericPoison, error.ComptimeReturn, error.ComptimeBreak => unreachable,
        else => |e| return e,
    };
    const stack_trace_ty = sema.resolveTypeFields(block, src, unresolved_stack_trace_ty) catch |err| switch (err) {
        error.NeededSourceLocation, error.GenericPoison, error.ComptimeReturn, error.ComptimeBreak => unreachable,
        else => |e| return e,
    };
    const field_index = sema.structFieldIndex(block, stack_trace_ty, "index", src) catch |err| switch (err) {
        error.NeededSourceLocation, error.GenericPoison, error.ComptimeReturn, error.ComptimeBreak => unreachable,
        else => |e| return e,
    };

    return try block.addInst(.{
        .tag = .save_err_return_trace_index,
        .data = .{ .ty_pl = .{
            .ty = try sema.addType(stack_trace_ty),
            .payload = @intCast(u32, field_index),
        } },
    });
}
/// Add instructions to block to "pop" the error return trace.
/// If `operand` is provided, only pops if operand is non-error.
fn popErrorReturnTrace(
sema: *Sema,
block: *Block,
src: LazySrcLoc,
operand: ?Air.Inst.Ref,
operand: Air.Inst.Ref,
saved_error_trace_index: Air.Inst.Ref,
) CompileError!void {
var is_non_error: ?bool = null;
var is_non_error_inst: Air.Inst.Ref = undefined;
if (operand) |op| {
is_non_error_inst = try sema.analyzeIsNonErr(block, src, op);
if (operand != .none) {
is_non_error_inst = try sema.analyzeIsNonErr(block, src, operand);
if (try sema.resolveDefinedValue(block, src, is_non_error_inst)) |cond_val|
is_non_error = cond_val.toBool();
} else is_non_error = true; // no operand means pop unconditionally
@ -5906,7 +5954,7 @@ fn zirCall(
});
// Pop the error return trace, testing the result for non-error if necessary
const operand = if (pop_error_return_trace or modifier == .always_tail) null else call_inst;
const operand = if (pop_error_return_trace or modifier == .always_tail) .none else call_inst;
try sema.popErrorReturnTrace(block, call_src, operand, save_inst);
}
@ -6221,6 +6269,9 @@ fn analyzeCall(
.label = null,
.inlining = &inlining,
.is_comptime = is_comptime_call,
.error_return_trace_index = block.error_return_trace_index,
.error_return_trace_index_on_block_entry = block.error_return_trace_index,
.error_return_trace_index_on_function_entry = block.error_return_trace_index,
};
const merges = &child_block.inlining.?.merges;
@ -6966,6 +7017,14 @@ fn instantiateGenericCall(
}
arg_i += 1;
}
// Save the error trace as our first action in the function.
// If this is unnecessary after all, Liveness will clean it up for us.
const err_ret_trace_index = try sema.analyzeSaveErrRetIndex(&child_block);
child_block.error_return_trace_index = err_ret_trace_index;
child_block.error_return_trace_index_on_block_entry = err_ret_trace_index;
child_block.error_return_trace_index_on_function_entry = err_ret_trace_index;
const new_func_inst = child_sema.resolveBody(&child_block, fn_info.param_body, fn_info.param_body_inst) catch |err| {
// TODO look up the compile error that happened here and attach a note to it
// pointing here, at the generic instantiation callsite.
@ -9855,6 +9914,7 @@ fn zirSwitchBlock(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError
.defer_err_code,
.err_union_code,
.ret_err_value_code,
.restore_err_ret_index,
.is_non_err,
.condbr,
=> {},
@ -10157,6 +10217,9 @@ fn zirSwitchBlock(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError
.runtime_cond = block.runtime_cond,
.runtime_loop = block.runtime_loop,
.runtime_index = block.runtime_index,
.error_return_trace_index = block.error_return_trace_index,
.error_return_trace_index_on_block_entry = block.error_return_trace_index,
.error_return_trace_index_on_function_entry = block.error_return_trace_index_on_function_entry,
};
const merges = &child_block.label.?.merges;
defer child_block.instructions.deinit(gpa);
@ -11040,6 +11103,7 @@ fn maybeErrorUnwrap(sema: *Sema, block: *Block, body: []const Zir.Inst.Index, op
const tags = sema.code.instructions.items(.tag);
for (body) |inst| {
switch (tags[inst]) {
.save_err_ret_index,
.dbg_block_begin,
.dbg_block_end,
.dbg_stmt,
@ -11062,6 +11126,10 @@ fn maybeErrorUnwrap(sema: *Sema, block: *Block, body: []const Zir.Inst.Index, op
try sema.zirDbgStmt(block, inst);
continue;
},
.save_err_ret_index => {
try sema.zirSaveErrRetIndex(block, inst);
continue;
},
.str => try sema.zirStr(block, inst),
.as_node => try sema.zirAsNode(block, inst),
.field_val => try sema.zirFieldVal(block, inst),
@ -15672,6 +15740,9 @@ fn zirTypeofBuiltin(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileErr
.is_comptime = false,
.is_typeof = true,
.want_safety = false,
.error_return_trace_index = block.error_return_trace_index,
.error_return_trace_index_on_block_entry = block.error_return_trace_index,
.error_return_trace_index_on_function_entry = block.error_return_trace_index_on_function_entry,
};
defer child_block.instructions.deinit(sema.gpa);
@ -16329,61 +16400,67 @@ fn wantErrorReturnTracing(sema: *Sema, fn_ret_ty: Type) bool {
backend_supports_error_return_tracing;
}
fn zirSaveErrRetIndex(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!Air.Inst.Ref {
const inst_data = sema.code.instructions.items(.data)[inst].node;
const src = LazySrcLoc.nodeOffset(inst_data);
// This is only relevant at runtime.
if (block.is_comptime) return Air.Inst.Ref.zero_usize;
fn zirSaveErrRetIndex(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!void {
const inst_data = sema.code.instructions.items(.data)[inst].save_err_ret_index;
const backend_supports_error_return_tracing = sema.mod.comp.bin_file.options.use_llvm;
const ok = sema.mod.comp.bin_file.options.error_return_tracing and
backend_supports_error_return_tracing;
if (!ok) return Air.Inst.Ref.zero_usize;
// This is encoded as a primitive AIR instruction to resolve one corner case: A function
// may include a `catch { ... }` or `else |err| { ... }` block but not call any errorable
// fn. In that case, there is no error return trace to save the index of and codegen needs
// to avoid interacting with the non-existing error trace.
//
// By using a primitive AIR op, we can depend on Liveness to mark this unused in this corner case.
const unresolved_stack_trace_ty = try sema.getBuiltinType(block, src, "StackTrace");
const stack_trace_ty = try sema.resolveTypeFields(block, src, unresolved_stack_trace_ty);
const field_index = try sema.structFieldIndex(block, stack_trace_ty, "index", src);
return block.addInst(.{
.tag = .save_err_return_trace_index,
.data = .{ .ty_pl = .{
.ty = try sema.addType(stack_trace_ty),
.payload = @intCast(u32, field_index),
} },
});
}
fn zirRestoreErrRetIndex(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!void {
const inst_data = sema.code.instructions.items(.data)[inst].un_node;
const src = inst_data.src();
const ok = backend_supports_error_return_tracing and sema.mod.comp.bin_file.options.error_return_tracing;
if (!ok) return;
// This is only relevant at runtime.
if (block.is_comptime) return;
// This is only relevant within functions.
if (sema.func == null) return;
const save_index = inst_data.operand == .none or b: {
const operand = try sema.resolveInst(inst_data.operand);
const operand_ty = sema.typeOf(operand);
break :b operand_ty.isError();
};
if (save_index)
block.error_return_trace_index = try sema.analyzeSaveErrRetIndex(block);
}
fn zirRestoreErrRetIndex(sema: *Sema, start_block: *Block, inst: Zir.Inst.Index) CompileError!void {
const inst_data = sema.code.instructions.items(.data)[inst].restore_err_ret_index;
    const src = sema.src; // TODO: use the source location of the restore_err_ret_index instruction
// This is only relevant at runtime.
if (start_block.is_comptime) return;
const backend_supports_error_return_tracing = sema.mod.comp.bin_file.options.use_llvm;
const ok = sema.owner_func.?.calls_or_awaits_errorable_fn and
sema.mod.comp.bin_file.options.error_return_tracing and
backend_supports_error_return_tracing;
if (!ok) return;
const operand = if (inst_data.operand != .none)
try sema.resolveInst(inst_data.operand)
else
.zero_usize;
const tracy = trace(@src());
defer tracy.end();
const unresolved_stack_trace_ty = try sema.getBuiltinType(block, src, "StackTrace");
const stack_trace_ty = try sema.resolveTypeFields(block, src, unresolved_stack_trace_ty);
const ptr_stack_trace_ty = try Type.Tag.single_mut_pointer.create(sema.arena, stack_trace_ty);
const err_return_trace = try block.addTy(.err_return_trace, ptr_stack_trace_ty);
const field_ptr = try sema.structFieldPtr(block, src, err_return_trace, "index", src, stack_trace_ty, true);
try sema.storePtr2(block, src, field_ptr, src, operand, src, .store);
const saved_index = if (Zir.refToIndex(inst_data.block)) |zir_block| b: {
var block = start_block;
while (true) {
if (block.label) |label| {
if (label.zir_block == zir_block) {
if (start_block.error_return_trace_index != block.error_return_trace_index_on_block_entry)
break :b block.error_return_trace_index_on_block_entry;
return; // No need to restore
}
}
block = block.parent.?;
}
} else b: {
if (start_block.error_return_trace_index != start_block.error_return_trace_index_on_function_entry)
break :b start_block.error_return_trace_index_on_function_entry;
return; // No need to restore
};
    assert(saved_index != .none); // some child block failed to propagate its .error_return_trace_index
const operand = try sema.resolveInst(inst_data.operand);
return sema.popErrorReturnTrace(start_block, src, operand, saved_index);
}
fn addToInferredErrorSet(sema: *Sema, uncasted_operand: Air.Inst.Ref) !void {

View File

@ -988,13 +988,13 @@ pub const Inst = struct {
/// Uses the `err_defer_code` union field.
defer_err_code,
/// Saves the current error return case if it exists,
/// otherwise just returns zero.
/// Uses the `node` union field.
/// Requests that Sema update the saved error return trace index for the enclosing
/// block, if the operand is .none or of an error/error-union type.
/// Uses the `save_err_ret_index` field.
save_err_ret_index,
/// Sets error return trace to zero if no operand is given,
/// otherwise sets the value to the given amount.
/// Uses the `un_node` union field.
/// Uses the `restore_err_ret_index` union field.
restore_err_ret_index,
/// The ZIR instruction tag is one of the `Extended` ones.
@ -1317,6 +1317,7 @@ pub const Inst = struct {
.@"defer",
.defer_err_code,
.restore_err_ret_index,
.save_err_ret_index,
=> true,
.param,
@ -1542,7 +1543,6 @@ pub const Inst = struct {
.try_ptr,
//.try_inline,
//.try_ptr_inline,
.save_err_ret_index,
=> false,
.extended => switch (data.extended.opcode) {
@ -1823,8 +1823,8 @@ pub const Inst = struct {
.@"defer" = .@"defer",
.defer_err_code = .defer_err_code,
.save_err_ret_index = .node,
.restore_err_ret_index = .un_node,
.save_err_ret_index = .save_err_ret_index,
.restore_err_ret_index = .restore_err_ret_index,
.extended = .extended,
});
@ -2602,6 +2602,13 @@ pub const Inst = struct {
err_code: Ref,
payload_index: u32,
},
save_err_ret_index: struct {
operand: Ref, // If error type (or .none), save new trace index
},
restore_err_ret_index: struct {
block: Ref, // If restored, the index is from this block's entrypoint
operand: Ref, // If non-error (or .none), then restore the index
},
// Make sure we don't accidentally add a field to make this union
// bigger than expected. Note that in Debug builds, Zig is allowed
@ -2640,6 +2647,8 @@ pub const Inst = struct {
str_op,
@"defer",
defer_err_code,
save_err_ret_index,
restore_err_ret_index,
};
};

View File

@ -232,7 +232,6 @@ const Writer = struct {
.validate_deref,
.overflow_arithmetic_ptr,
.check_comptime_control_flow,
.restore_err_ret_index,
=> try self.writeUnNode(stream, inst),
.ref,
@ -255,6 +254,9 @@ const Writer = struct {
.str => try self.writeStr(stream, inst),
.int_type => try self.writeIntType(stream, inst),
.save_err_ret_index => try self.writeSaveErrRetIndex(stream, inst),
.restore_err_ret_index => try self.writeRestoreErrRetIndex(stream, inst),
.@"break",
.break_inline,
=> try self.writeBreak(stream, inst),
@ -406,7 +408,6 @@ const Writer = struct {
.alloc_inferred_comptime_mut,
.ret_ptr,
.ret_type,
.save_err_ret_index,
=> try self.writeNode(stream, inst),
.error_value,
@ -2274,6 +2275,22 @@ const Writer = struct {
try self.writeSrc(stream, int_type.src());
}
fn writeSaveErrRetIndex(self: *Writer, stream: anytype, inst: Zir.Inst.Index) !void {
const inst_data = self.code.instructions.items(.data)[inst].save_err_ret_index;
try self.writeInstRef(stream, inst_data.operand);
try stream.writeAll(")");
}
fn writeRestoreErrRetIndex(self: *Writer, stream: anytype, inst: Zir.Inst.Index) !void {
const inst_data = self.code.instructions.items(.data)[inst].restore_err_ret_index;
try self.writeInstRef(stream, inst_data.block);
try stream.writeAll(", ");
try self.writeInstRef(stream, inst_data.operand);
try stream.writeAll(")");
}
fn writeBreak(self: *Writer, stream: anytype, inst: Zir.Inst.Index) !void {
const inst_data = self.code.instructions.items(.data)[inst].@"break";

View File

@ -830,3 +830,16 @@ test "compare error union and error set" {
try expect(a != b);
try expect(b != a);
}
/// Regression check: `catch` must work even in a function that contains no
/// error return trace at runtime (it calls no errorable functions).
fn non_errorable() void {
    // Make sure catch works even in a function that does not call any errorable functions.
    //
    // This test is needed because stage 2's fix for #1923 means that catch blocks interact
    // with the error return trace index.
    // `var` (not `const`) keeps the error union runtime-known so the `catch`
    // is not folded away at comptime — NOTE(review): confirm this intent.
    var x: error{Foo}!void = {};
    return x catch {};
}
test "catch within a function that calls no errorable functions" {
    non_errorable();
}

View File

@ -97,6 +97,59 @@ pub fn addCases(cases: *tests.StackTracesContext) void {
,
},
});
cases.addCase(.{
.name = "non-error return pops error trace",
.source =
\\fn bar() !void {
\\ return error.UhOh;
\\}
\\
\\fn foo() !void {
\\ bar() catch {
\\ return; // non-error result: success
\\ };
\\}
\\
\\pub fn main() !void {
\\ try foo();
\\ return error.UnrelatedError;
\\}
,
.Debug = .{
.expect =
\\error: UnrelatedError
\\source.zig:13:5: [address] in main (test)
\\ return error.UnrelatedError;
\\ ^
\\
,
},
.ReleaseSafe = .{
.exclude_os = .{
.windows, // TODO
.linux, // defeated by aggressive inlining
},
.expect =
\\error: UnrelatedError
\\source.zig:13:5: [address] in [function]
\\ return error.UnrelatedError;
\\ ^
\\
,
},
.ReleaseFast = .{
.expect =
\\error: UnrelatedError
\\
,
},
.ReleaseSmall = .{
.expect =
\\error: UnrelatedError
\\
,
},
});
cases.addCase(.{
.name = "try return + handled catch/if-else",
@ -155,6 +208,59 @@ pub fn addCases(cases: *tests.StackTracesContext) void {
},
});
cases.addCase(.{
.name = "break from inline loop pops error return trace",
.source =
\\fn foo() !void { return error.FooBar; }
\\
\\pub fn main() !void {
\\ comptime var i: usize = 0;
\\ b: inline while (i < 5) : (i += 1) {
\\ foo() catch {
\\ break :b; // non-error break, success
\\ };
\\ }
\\ // foo() was successfully handled, should not appear in trace
\\
\\ return error.BadTime;
\\}
,
.Debug = .{
.expect =
\\error: BadTime
\\source.zig:12:5: [address] in main (test)
\\ return error.BadTime;
\\ ^
\\
,
},
.ReleaseSafe = .{
.exclude_os = .{
.windows, // TODO
.linux, // defeated by aggressive inlining
},
.expect =
\\error: BadTime
\\source.zig:12:5: [address] in [function]
\\ return error.BadTime;
\\ ^
\\
,
},
.ReleaseFast = .{
.expect =
\\error: BadTime
\\
,
},
.ReleaseSmall = .{
.expect =
\\error: BadTime
\\
,
},
});
cases.addCase(.{
.name = "catch and re-throw error",
.source =
@ -209,7 +315,7 @@ pub fn addCases(cases: *tests.StackTracesContext) void {
});
cases.addCase(.{
.name = "stored errors do not contribute to error trace",
.name = "errors stored in var do not contribute to error trace",
.source =
\\fn foo() !void {
\\ return error.TheSkyIsFalling;
@ -260,6 +366,82 @@ pub fn addCases(cases: *tests.StackTracesContext) void {
},
});
cases.addCase(.{
.name = "error stored in const has trace preserved for duration of block",
.source =
\\fn foo() !void { return error.TheSkyIsFalling; }
\\fn bar() !void { return error.InternalError; }
\\fn baz() !void { return error.UnexpectedReality; }
\\
\\pub fn main() !void {
\\ const x = foo();
\\ const y = b: {
\\ if (true)
\\ break :b bar();
\\
\\ break :b {};
\\ };
\\ x catch {};
\\ y catch {};
\\ // foo()/bar() error traces not popped until end of block
\\
\\ {
\\ const z = baz();
\\ z catch {};
\\ // baz() error trace still alive here
\\ }
\\ // baz() error trace popped, foo(), bar() still alive
\\ return error.StillUnresolved;
\\}
,
.Debug = .{
.expect =
\\error: StillUnresolved
\\source.zig:1:18: [address] in foo (test)
\\fn foo() !void { return error.TheSkyIsFalling; }
\\ ^
\\source.zig:2:18: [address] in bar (test)
\\fn bar() !void { return error.InternalError; }
\\ ^
\\source.zig:23:5: [address] in main (test)
\\ return error.StillUnresolved;
\\ ^
\\
,
},
.ReleaseSafe = .{
.exclude_os = .{
.windows, // TODO
.linux, // defeated by aggressive inlining
},
.expect =
\\error: StillUnresolved
\\source.zig:1:18: [address] in [function]
\\fn foo() !void { return error.TheSkyIsFalling; }
\\ ^
\\source.zig:2:18: [address] in [function]
\\fn bar() !void { return error.InternalError; }
\\ ^
\\source.zig:23:5: [address] in [function]
\\ return error.StillUnresolved;
\\ ^
\\
,
},
.ReleaseFast = .{
.expect =
\\error: StillUnresolved
\\
,
},
.ReleaseSmall = .{
.expect =
\\error: StillUnresolved
\\
,
},
});
cases.addCase(.{
.name = "error passed to function has its trace preserved for duration of the call",
.source =