async functions have error return traces where appropriate

However, the traces are not yet merged on `await` or on async function
calls.

When an async function has an error set or error union as its return
type, it has a `StackTrace` before the args in the frame, so that it is
accessible from `anyframe->T` awaiters. However when it does not have an
errorable return type, but it does call or await an errorable, it has a
stack trace just before the locals. This way when doing an `@asyncCall`
on an async function pointer, it can populate the args (which are after
the `StackTrace`) because it knows the offset of the args based only on
the return type.

This sort of matches normal functions, where a stack trace pointer could
be supplied by a parameter, or it could be supplied by the stack of the
function, depending on whether the function itself is errorable.
This commit is contained in:
Andrew Kelley 2019-08-05 03:10:14 -04:00
parent dfe8c5a2e9
commit 20f63e588e
No known key found for this signature in database
GPG Key ID: 7C5F548F728501A9
6 changed files with 91 additions and 70 deletions

View File

@ -1,5 +1,5 @@
* error return tracing - handle `await` and function calls
* go over the commented out tests
* error return tracing
* compile error for error: expected anyframe->T, found 'anyframe'
* compile error for error: expected anyframe->T, found 'i32'
* await of a non async function

View File

@ -5230,9 +5230,8 @@ static Error resolve_coro_frame(CodeGen *g, ZigType *frame_type) {
field_names.append("@result");
field_types.append(fn_type_id->return_type);
if (codegen_fn_has_err_ret_tracing(g, fn_type_id->return_type)) {
field_names.append("@ptr_stack_trace");
field_types.append(get_ptr_to_stack_trace_type(g));
if (codegen_fn_has_err_ret_tracing_arg(g, fn_type_id->return_type)) {
(void)get_ptr_to_stack_trace_type(g); // populate g->stack_trace_type
field_names.append("@stack_trace");
field_types.append(g->stack_trace_type);
@ -5256,6 +5255,16 @@ static Error resolve_coro_frame(CodeGen *g, ZigType *frame_type) {
field_types.append(param_type);
}
if (codegen_fn_has_err_ret_tracing_stack(g, fn)) {
(void)get_ptr_to_stack_trace_type(g); // populate g->stack_trace_type
field_names.append("@stack_trace");
field_types.append(g->stack_trace_type);
field_names.append("@instruction_addresses");
field_types.append(get_array_type(g, g->builtin_types.entry_usize, stack_trace_ptr_count));
}
for (size_t alloca_i = 0; alloca_i < fn->alloca_gen_list.length; alloca_i += 1) {
IrInstructionAllocaGen *instruction = fn->alloca_gen_list.at(alloca_i);
ZigType *ptr_type = instruction->base.value.type;
@ -7563,8 +7572,7 @@ static void resolve_llvm_types_any_frame(CodeGen *g, ZigType *any_frame_type, Re
if (have_result_type) {
field_types.append(get_llvm_type(g, ptr_result_type)); // ptr_result
field_types.append(get_llvm_type(g, result_type)); // result
if (codegen_fn_has_err_ret_tracing(g, result_type)) {
field_types.append(get_llvm_type(g, get_ptr_to_stack_trace_type(g))); // ptr_stack_trace
if (codegen_fn_has_err_ret_tracing_arg(g, result_type)) {
field_types.append(get_llvm_type(g, g->stack_trace_type)); // stack_trace
field_types.append(get_llvm_type(g, get_array_type(g, g->builtin_types.entry_usize, stack_trace_ptr_count))); // instruction_addresses
}
@ -7614,15 +7622,7 @@ static void resolve_llvm_types_any_frame(CodeGen *g, ZigType *any_frame_type, Re
8*LLVMOffsetOfElement(g->target_data_ref, frame_header_type, di_element_types.length),
ZigLLVM_DIFlags_Zero, get_llvm_di_type(g, result_type)));
if (codegen_fn_has_err_ret_tracing(g, result_type)) {
di_element_types.append(
ZigLLVMCreateDebugMemberType(g->dbuilder,
ZigLLVMTypeToScope(any_frame_type->llvm_di_type), "ptr_stack_trace",
di_file, line,
8*LLVMABISizeOfType(g->target_data_ref, field_types.at(di_element_types.length)),
8*LLVMABIAlignmentOfType(g->target_data_ref, field_types.at(di_element_types.length)),
8*LLVMOffsetOfElement(g->target_data_ref, frame_header_type, di_element_types.length),
ZigLLVM_DIFlags_Zero, get_llvm_di_type(g, get_ptr_to_stack_trace_type(g))));
if (codegen_fn_has_err_ret_tracing_arg(g, result_type)) {
di_element_types.append(
ZigLLVMCreateDebugMemberType(g->dbuilder,
ZigLLVMTypeToScope(any_frame_type->llvm_di_type), "stack_trace",

View File

@ -298,7 +298,7 @@ static LLVMLinkage to_llvm_linkage(GlobalLinkageId id) {
}
// label (grep this): [coro_frame_struct_layout]
static uint32_t frame_index_trace(CodeGen *g, FnTypeId *fn_type_id) {
static uint32_t frame_index_trace_arg(CodeGen *g, FnTypeId *fn_type_id) {
// [0] *ReturnType
// [1] ReturnType
uint32_t return_field_count = type_has_bits(fn_type_id->return_type) ? 2 : 0;
@ -307,14 +307,25 @@ static uint32_t frame_index_trace(CodeGen *g, FnTypeId *fn_type_id) {
// label (grep this): [coro_frame_struct_layout]
static uint32_t frame_index_arg(CodeGen *g, FnTypeId *fn_type_id) {
bool have_stack_trace = g->have_err_ret_tracing && codegen_fn_has_err_ret_tracing(g, fn_type_id->return_type);
// [0] *StackTrace
// [1] StackTrace
// [2] [stack_trace_ptr_count]usize
uint32_t trace_field_count = have_stack_trace ? 3 : 0;
return frame_index_trace(g, fn_type_id) + trace_field_count;
bool have_stack_trace = codegen_fn_has_err_ret_tracing_arg(g, fn_type_id->return_type);
// [0] StackTrace
// [1] [stack_trace_ptr_count]usize
uint32_t trace_field_count = have_stack_trace ? 2 : 0;
return frame_index_trace_arg(g, fn_type_id) + trace_field_count;
}
// label (grep this): [coro_frame_struct_layout]
// label (grep this): [coro_frame_struct_layout]
// Returns the frame-struct field index of the StackTrace that sits just
// before the locals. This layout is used when the function's own return
// type is not errorable but it calls or awaits an errorable function.
// The index is the args base plus one field per parameter that has bits.
static uint32_t frame_index_trace_stack(CodeGen *g, FnTypeId *fn_type_id) {
    uint32_t result = frame_index_arg(g, fn_type_id);
    for (size_t i = 0; i < fn_type_id->param_count; i += 1) {
        // Fix: index param_info by i. The previous code dereferenced
        // param_info without [i], so every iteration inspected only the
        // first parameter, yielding a wrong offset for mixed param lists.
        if (type_has_bits(fn_type_id->param_info[i].type)) {
            result += 1;
        }
    }
    return result;
}
static uint32_t get_err_ret_trace_arg_index(CodeGen *g, ZigFn *fn_table_entry) {
if (!g->have_err_ret_tracing) {
return UINT32_MAX;
@ -1287,9 +1298,6 @@ static LLVMValueRef get_cur_err_ret_trace_val(CodeGen *g, Scope *scope) {
if (!g->have_err_ret_tracing) {
return nullptr;
}
if (fn_is_async(g->cur_fn)) {
return LLVMBuildLoad(g->builder, g->cur_err_ret_trace_val_arg, "");
}
if (g->cur_err_ret_trace_val_stack != nullptr) {
return g->cur_err_ret_trace_val_stack;
}
@ -3441,6 +3449,10 @@ static void render_async_spills(CodeGen *g) {
gen_var_debug_decl(g, var);
}
}
// label (grep this): [coro_frame_struct_layout]
if (codegen_fn_has_err_ret_tracing_stack(g, g->cur_fn)) {
async_var_index += 2;
}
for (size_t alloca_i = 0; alloca_i < g->cur_fn->alloca_gen_list.length; alloca_i += 1) {
IrInstructionAllocaGen *instruction = g->cur_fn->alloca_gen_list.at(alloca_i);
ZigType *ptr_type = instruction->base.value.type;
@ -3525,7 +3537,7 @@ static LLVMValueRef ir_render_call(CodeGen *g, IrExecutable *executable, IrInstr
CallingConvention cc = fn_type->data.fn.fn_type_id.cc;
bool first_arg_ret = ret_has_bits && want_first_arg_sret(g, fn_type_id);
bool prefix_arg_err_ret_stack = codegen_fn_has_err_ret_tracing(g, fn_type_id->return_type);
bool prefix_arg_err_ret_stack = codegen_fn_has_err_ret_tracing_arg(g, fn_type_id->return_type);
bool is_var_args = fn_type_id->is_var_args;
ZigList<LLVMValueRef> gen_param_values = {};
LLVMValueRef result_loc = instruction->result_loc ? ir_llvm_value(g, instruction->result_loc) : nullptr;
@ -3572,28 +3584,8 @@ static LLVMValueRef ir_render_call(CodeGen *g, IrExecutable *executable, IrInstr
}
}
if (prefix_arg_err_ret_stack) {
uint32_t trace_field_index = frame_index_trace(g, fn_type_id);
LLVMValueRef trace_field_ptr_ptr = LLVMBuildStructGEP(g->builder, frame_result_loc,
trace_field_index, "");
LLVMValueRef trace_field_ptr = LLVMBuildStructGEP(g->builder, frame_result_loc,
trace_field_index + 1, "");
LLVMValueRef trace_field_addrs = LLVMBuildStructGEP(g->builder, frame_result_loc,
trace_field_index + 2, "");
LLVMBuildStore(g->builder, trace_field_ptr, trace_field_ptr_ptr);
LLVMValueRef index_ptr = LLVMBuildStructGEP(g->builder, trace_field_ptr, 0, "");
LLVMBuildStore(g->builder, zero, index_ptr);
LLVMValueRef addrs_slice_ptr = LLVMBuildStructGEP(g->builder, trace_field_ptr, 1, "");
LLVMValueRef addrs_ptr_ptr = LLVMBuildStructGEP(g->builder, addrs_slice_ptr, slice_ptr_index, "");
LLVMValueRef indices[] = { LLVMConstNull(usize_type_ref), LLVMConstNull(usize_type_ref) };
LLVMValueRef trace_field_addrs_as_ptr = LLVMBuildInBoundsGEP(g->builder, trace_field_addrs, indices, 2, "");
LLVMBuildStore(g->builder, trace_field_addrs_as_ptr, addrs_ptr_ptr);
LLVMValueRef addrs_len_ptr = LLVMBuildStructGEP(g->builder, addrs_slice_ptr, slice_len_index, "");
LLVMBuildStore(g->builder, LLVMConstInt(usize_type_ref, stack_trace_ptr_count, false), addrs_len_ptr);
}
// even if prefix_arg_err_ret_stack is true, let the async function do its own
// initialization.
} else if (callee_is_async) {
frame_result_loc = ir_llvm_value(g, instruction->frame_result_loc);
awaiter_init_val = LLVMBuildPtrToInt(g->builder, g->cur_ret_ptr, usize_type_ref, ""); // caller's own frame pointer
@ -3607,13 +3599,8 @@ static LLVMValueRef ir_render_call(CodeGen *g, IrExecutable *executable, IrInstr
}
}
if (prefix_arg_err_ret_stack) {
uint32_t trace_field_index = frame_index_trace(g, fn_type_id);
LLVMValueRef trace_field_ptr = LLVMBuildStructGEP(g->builder, frame_result_loc, trace_field_index, "");
LLVMValueRef err_trace_val = get_cur_err_ret_trace_val(g, instruction->base.scope);
LLVMBuildStore(g->builder, err_trace_val, trace_field_ptr);
}
// even if prefix_arg_err_ret_stack is true, let the async function do its
// error return tracing normally, and then we'll invoke merge_error_return_traces like normal.
}
if (instruction->is_async || callee_is_async) {
assert(frame_result_loc != nullptr);
@ -6790,9 +6777,16 @@ static void do_code_gen(CodeGen *g) {
g->cur_async_awaiter_ptr = LLVMBuildStructGEP(g->builder, g->cur_ret_ptr, coro_awaiter_index, "");
LLVMValueRef resume_index_ptr = LLVMBuildStructGEP(g->builder, g->cur_ret_ptr, coro_resume_index, "");
g->cur_async_resume_index_ptr = resume_index_ptr;
if (codegen_fn_has_err_ret_tracing(g, fn_type_id->return_type)) {
uint32_t field_index = frame_index_trace(g, fn_type_id);
g->cur_err_ret_trace_val_arg = LLVMBuildStructGEP(g->builder, g->cur_ret_ptr, field_index, "");
LLVMValueRef err_ret_trace_val = nullptr;
uint32_t trace_field_index;
if (codegen_fn_has_err_ret_tracing_arg(g, fn_type_id->return_type)) {
trace_field_index = frame_index_trace_arg(g, fn_type_id);
err_ret_trace_val = LLVMBuildStructGEP(g->builder, g->cur_ret_ptr, trace_field_index, "");
g->cur_err_ret_trace_val_arg = err_ret_trace_val;
} else if (codegen_fn_has_err_ret_tracing_stack(g, fn_table_entry)) {
trace_field_index = frame_index_trace_stack(g, fn_type_id);
err_ret_trace_val = LLVMBuildStructGEP(g->builder, g->cur_ret_ptr, trace_field_index, "");
g->cur_err_ret_trace_val_stack = err_ret_trace_val;
}
LLVMValueRef resume_index = LLVMBuildLoad(g->builder, resume_index_ptr, "");
@ -6804,6 +6798,24 @@ static void do_code_gen(CodeGen *g) {
LLVMAddCase(switch_instr, zero, entry_block->llvm_block);
g->cur_resume_block_count += 1;
LLVMPositionBuilderAtEnd(g->builder, entry_block->llvm_block);
if (err_ret_trace_val != nullptr) {
LLVMValueRef trace_field_ptr = LLVMBuildStructGEP(g->builder, g->cur_ret_ptr,
trace_field_index, "");
LLVMValueRef trace_field_addrs = LLVMBuildStructGEP(g->builder, g->cur_ret_ptr,
trace_field_index + 1, "");
LLVMValueRef index_ptr = LLVMBuildStructGEP(g->builder, trace_field_ptr, 0, "");
LLVMBuildStore(g->builder, zero, index_ptr);
LLVMValueRef addrs_slice_ptr = LLVMBuildStructGEP(g->builder, trace_field_ptr, 1, "");
LLVMValueRef addrs_ptr_ptr = LLVMBuildStructGEP(g->builder, addrs_slice_ptr, slice_ptr_index, "");
LLVMValueRef indices[] = { LLVMConstNull(usize_type_ref), LLVMConstNull(usize_type_ref) };
LLVMValueRef trace_field_addrs_as_ptr = LLVMBuildInBoundsGEP(g->builder, trace_field_addrs, indices, 2, "");
LLVMBuildStore(g->builder, trace_field_addrs_as_ptr, addrs_ptr_ptr);
LLVMValueRef addrs_len_ptr = LLVMBuildStructGEP(g->builder, addrs_slice_ptr, slice_len_index, "");
LLVMBuildStore(g->builder, LLVMConstInt(usize_type_ref, stack_trace_ptr_count, false), addrs_len_ptr);
}
render_async_var_decls(g, entry_block->instruction_list.at(0)->scope);
} else {
// create debug variable declarations for parameters
@ -9707,8 +9719,13 @@ CodeGen *codegen_create(Buf *main_pkg_path, Buf *root_src_path, const ZigTarget
return g;
}
bool codegen_fn_has_err_ret_tracing(CodeGen *g, ZigType *return_type) {
// True when the coroutine frame carries a StackTrace before the args:
// error return tracing must be enabled and the return type must be an
// error set or an error union (i.e. the function itself is errorable).
bool codegen_fn_has_err_ret_tracing_arg(CodeGen *g, ZigType *return_type) {
    if (!g->have_err_ret_tracing)
        return false;
    switch (return_type->id) {
        case ZigTypeIdErrorUnion:
        case ZigTypeIdErrorSet:
            return true;
        default:
            return false;
    }
}
// True when the coroutine frame needs a StackTrace just before the locals:
// tracing is enabled, the function calls or awaits something errorable,
// and its own return type is not errorable (otherwise the trace lives
// before the args instead — see codegen_fn_has_err_ret_tracing_arg).
bool codegen_fn_has_err_ret_tracing_stack(CodeGen *g, ZigFn *fn) {
    if (!g->have_err_ret_tracing)
        return false;
    if (!fn->calls_or_awaits_errorable_fn)
        return false;
    ZigType *return_type = fn->type_entry->data.fn.fn_type_id.return_type;
    return !codegen_fn_has_err_ret_tracing_arg(g, return_type);
}

View File

@ -61,6 +61,7 @@ Buf *codegen_generate_builtin_source(CodeGen *g);
TargetSubsystem detect_subsystem(CodeGen *g);
void codegen_release_caches(CodeGen *codegen);
bool codegen_fn_has_err_ret_tracing(CodeGen *g, ZigType *return_type);
bool codegen_fn_has_err_ret_tracing_arg(CodeGen *g, ZigType *return_type);
bool codegen_fn_has_err_ret_tracing_stack(CodeGen *g, ZigFn *fn);
#endif

View File

@ -15560,9 +15560,6 @@ static IrInstruction *ir_analyze_fn_call(IrAnalyze *ira, IrInstructionCallSrc *c
break;
}
}
if (call_instruction->is_async) {
zig_panic("TODO async call");
}
auto existing_entry = ira->codegen->generic_table.put_unique(generic_id, impl_fn);
if (existing_entry) {
@ -24483,6 +24480,10 @@ static IrInstruction *ir_analyze_instruction_await(IrAnalyze *ira, IrInstruction
fn_entry->inferred_async_node = instruction->base.source_node;
}
if (type_can_fail(result_type)) {
fn_entry->calls_or_awaits_errorable_fn = true;
}
IrInstruction *result = ir_build_await(&ira->new_irb,
instruction->base.scope, instruction->base.source_node, frame);
result->value.type = result_type;

View File

@ -337,19 +337,21 @@ test "async fn with inferred error set" {
//test "error return trace across suspend points - early return" {
// const p = nonFailing();
// resume p;
// const p2 = try async<allocator> printTrace(p);
// cancel p2;
// const p2 = async printTrace(p);
//}
//
//test "error return trace across suspend points - async return" {
// const p = nonFailing();
// const p2 = try async<std.debug.global_allocator> printTrace(p);
// const p2 = async printTrace(p);
// resume p;
// cancel p2;
//}
//
//fn nonFailing() (anyframe->anyerror!void) {
// return async<std.debug.global_allocator> suspendThenFail() catch unreachable;
// const Static = struct {
// var frame: @Frame(suspendThenFail) = undefined;
// };
// Static.frame = async suspendThenFail();
// return &Static.frame;
//}
//async fn suspendThenFail() anyerror!void {
// suspend;
@ -361,8 +363,8 @@ test "async fn with inferred error set" {
// if (@errorReturnTrace()) |trace| {
// expect(trace.index == 1);
// } else switch (builtin.mode) {
// builtin.Mode.Debug, builtin.Mode.ReleaseSafe => @panic("expected return trace"),
// builtin.Mode.ReleaseFast, builtin.Mode.ReleaseSmall => {},
// .Debug, .ReleaseSafe => @panic("expected return trace"),
// .ReleaseFast, .ReleaseSmall => {},
// }
// };
//}