mirror of
https://github.com/c3lang/c3c.git
synced 2026-02-27 12:01:16 +00:00
Refactored builtins. Added reduce operations and powi. Version bump.
This commit is contained in:
committed by Christoffer Lerno
parent
9b14340a57
commit
4fa4b2a631
@@ -4129,71 +4129,6 @@ void llvm_value_struct_gep(GenContext *c, BEValue *element, BEValue *struct_poin
|
||||
element->alignment = alignment;
|
||||
}
|
||||
|
||||
// Emit a call to an LLVM intrinsic for a builtin call expression.
// Evaluates the source-level arguments, patches in intrinsic-specific
// extra operands and overload types, emits the call, and stores the
// result in be_value.
static void llvm_emit_intrinsic_expr(GenContext *c, unsigned intrinsic, BEValue *be_value, Expr *expr)
{
	unsigned arguments = vec_size(expr->call_expr.arguments);
	assert(arguments < 10 && "Only has room for 10");
	LLVMValueRef arg_results[10];
	// memcpy carries two trailing constant alignment args and memset one;
	// they are not operands of the intrinsic (they become call-site align
	// attributes at the bottom), so drop them from the forwarded count.
	if (intrinsic == intrinsic_id.memcpy) arguments -= 2;
	if (intrinsic == intrinsic_id.memset) arguments--;

	Expr **args = expr->call_expr.arguments;
	LLVMTypeRef call_type[3];
	int call_args = 0;
	// Evaluate each forwarded argument down to an rvalue.
	for (unsigned i = 0; i < arguments; i++)
	{
		llvm_emit_expr(c, be_value, args[i]);
		llvm_value_rvalue(c, be_value);
		arg_results[i] = be_value->value;
	}
	// ctlz/cttz/abs take an extra trailing i1 flag (the "poison on
	// zero / int-min" flag in the LLVM intrinsics); always pass false.
	if (intrinsic == intrinsic_id.ctlz || intrinsic == intrinsic_id.cttz || intrinsic == intrinsic_id.abs)
	{
		arg_results[1] = llvm_get_zero_raw(c->bool_type);
		arguments++;
	}
	else if (intrinsic == intrinsic_id.prefetch)
	{
		// prefetch gets a trailing constant operand (value 1 here) and is
		// overloaded on the pointer type.
		arg_results[arguments++] = llvm_const_int(c, type_int, 1);
		call_args = 1;
		call_type[0] = llvm_get_type(c, type_voidptr);
	}
	if (expr->type != type_void)
	{
		// Value-returning intrinsics are overloaded on their return type,
		// except readcyclecounter which has a fixed signature.
		call_args = 1;
		call_type[0] = llvm_get_type(c, expr->type);
		if (intrinsic == intrinsic_id.readcyclecounter) call_args = 0;
	}
	else if (intrinsic == intrinsic_id.memcpy)
	{
		// memcpy is overloaded on <dst ptr, src ptr, length> types.
		call_type[0] = call_type[1] = llvm_get_type(c, type_voidptr);
		call_type[2] = llvm_get_type(c, type_usize);
		call_args = 3;
	}
	else if (intrinsic == intrinsic_id.memset)
	{
		// memset is overloaded on <dst ptr, length> types.
		call_type[0] = llvm_get_type(c, type_voidptr);
		call_type[1] = llvm_get_type(c, type_usize);
		call_args = 2;
	}
	LLVMValueRef result = llvm_emit_call_intrinsic(c, intrinsic, call_type, call_args, arg_results, arguments);
	llvm_value_set(be_value, result, expr->type);
	// Turn the constant alignment arguments (dropped from the operand list
	// above) into align attributes on the call's pointer parameters.
	// NOTE(review): args[4]/args[5] assume the builtin's fixed argument
	// layout (dst, src, len, volatile?, dst_align, src_align) — confirm
	// against the builtin declaration.
	if (intrinsic == intrinsic_id.memcpy)
	{
		assert(args[4]->const_expr.const_kind == CONST_INTEGER);
		assert(args[5]->const_expr.const_kind == CONST_INTEGER);
		uint64_t dst_align = int_to_u64(args[4]->const_expr.ixx);
		uint64_t src_align = int_to_u64(args[5]->const_expr.ixx);
		if (dst_align > 0) llvm_attribute_add_call(c, result, attribute_id.align, 1, dst_align);
		if (src_align > 0) llvm_attribute_add_call(c, result, attribute_id.align, 2, src_align);
	}
	else if (intrinsic == intrinsic_id.memset)
	{
		assert(args[4]->const_expr.const_kind == CONST_INTEGER);
		uint64_t dst_align = int_to_u64(args[4]->const_expr.ixx);
		if (dst_align > 0) llvm_attribute_add_call(c, result, attribute_id.align, 1, dst_align);
	}
}
|
||||
|
||||
|
||||
void llvm_emit_parameter(GenContext *c, LLVMValueRef **args, ABIArgInfo *info, BEValue *be_value, Type *type)
|
||||
{
|
||||
@@ -4343,99 +4278,6 @@ static void llvm_emit_splatted_variadic_arg(GenContext *c, Expr *expr, BEValue *
|
||||
}
|
||||
}
|
||||
|
||||
// Map a builtin function to its LLVM intrinsic id.
// Builtins that are lowered specially, or whose intrinsic depends on the
// argument type (abs, min/max, saturating ops, shufflevector, reverse,
// volatile load/store, syscall), are handled in llvm_emit_builtin_call
// and must never reach this switch — hence the UNREACHABLE arms.
unsigned llvm_get_intrinsic(BuiltinFunction func)
{
	switch (func)
	{
		// Handled elsewhere or type-dependent: never looked up here.
		case BUILTIN_NONE:
		case BUILTIN_UNREACHABLE:
		case BUILTIN_STACKTRACE:
		case BUILTIN_ABS:
		case BUILTIN_SHUFFLEVECTOR:
		case BUILTIN_REVERSE:
		case BUILTIN_SAT_ADD:
		case BUILTIN_SAT_SHL:
		case BUILTIN_SAT_SUB:
			UNREACHABLE
		case BUILTIN_SYSCLOCK:
			return intrinsic_id.readcyclecounter;
		case BUILTIN_TRAP:
			return intrinsic_id.trap;
		case BUILTIN_CEIL:
			return intrinsic_id.ceil;
		case BUILTIN_TRUNC:
			return intrinsic_id.trunc;
		case BUILTIN_SQRT:
			return intrinsic_id.sqrt;
		case BUILTIN_COS:
			return intrinsic_id.cos;
		case BUILTIN_SIN:
			return intrinsic_id.sin;
		case BUILTIN_LOG:
			return intrinsic_id.log;
		case BUILTIN_LOG10:
			return intrinsic_id.log10;
		// MAX/MIN only reach here for floats; integer variants are picked
		// by type in llvm_emit_builtin_call.
		case BUILTIN_MAX:
			return intrinsic_id.maxnum;
		case BUILTIN_MIN:
			return intrinsic_id.minnum;
		case BUILTIN_FMA:
			return intrinsic_id.fma;
		case BUILTIN_FSHL:
			return intrinsic_id.fshl;
		case BUILTIN_FSHR:
			return intrinsic_id.fshr;
		case BUILTIN_BITREVERSE:
			return intrinsic_id.bitreverse;
		case BUILTIN_BSWAP:
			return intrinsic_id.bswap;
		case BUILTIN_CTLZ:
			return intrinsic_id.ctlz;
		case BUILTIN_CTTZ:
			return intrinsic_id.cttz;
		case BUILTIN_POPCOUNT:
			return intrinsic_id.ctpop;
		case BUILTIN_LOG2:
			return intrinsic_id.log2;
		case BUILTIN_POW:
			return intrinsic_id.pow;
		case BUILTIN_PREFETCH:
			return intrinsic_id.prefetch;
		case BUILTIN_EXP:
			return intrinsic_id.exp;
		case BUILTIN_MEMCOPY:
			return intrinsic_id.memcpy;
		case BUILTIN_MEMSET:
			return intrinsic_id.memset;
		case BUILTIN_COPYSIGN:
			return intrinsic_id.copysign;
		case BUILTIN_EXP2:
			return intrinsic_id.exp2;
		case BUILTIN_FLOOR:
			return intrinsic_id.floor;
		case BUILTIN_LLRINT:
			return intrinsic_id.llrint;
		case BUILTIN_LLROUND:
			return intrinsic_id.llround;
		case BUILTIN_LRINT:
			return intrinsic_id.lrint;
		case BUILTIN_LROUND:
			return intrinsic_id.lround;
		case BUILTIN_NEARBYINT:
			return intrinsic_id.nearbyint;
		case BUILTIN_RINT:
			return intrinsic_id.rint;
		case BUILTIN_ROUND:
			return intrinsic_id.round;
		case BUILTIN_ROUNDEVEN:
			return intrinsic_id.roundeven;
		case BUILTIN_VOLATILE_STORE:
		case BUILTIN_VOLATILE_LOAD:
		case BUILTIN_SYSCALL:
			UNREACHABLE
	}
	UNREACHABLE
}
|
||||
|
||||
LLVMAtomicOrdering llvm_atomic_ordering(Atomicity atomicity)
|
||||
{
|
||||
@@ -4452,254 +4294,8 @@ LLVMAtomicOrdering llvm_atomic_ordering(Atomicity atomicity)
|
||||
UNREACHABLE
|
||||
}
|
||||
|
||||
// Append one ",{reg}" input constraint per syscall argument to the
// scratch buffer, using the platform register names in `registers`.
static inline void llvm_syscall_write_regs_to_scratch(const char** registers, unsigned args)
{
	unsigned idx = 0;
	while (idx < args)
	{
		scratch_buffer_append(",{");
		scratch_buffer_append(registers[idx]);
		scratch_buffer_append("}");
		idx++;
	}
}
|
||||
|
||||
// Build an inline-asm value for a syscall: `call` is the asm template,
// while the constraint string is whatever has been accumulated in the
// scratch buffer. The asm is marked side-effecting and stack-aligning.
static inline LLVMValueRef llvm_syscall_asm(GenContext *c, LLVMTypeRef func_type, char *call)
{
	size_t call_len = strlen(call);
	const char *constraints = scratch_buffer_to_string();
	return LLVMGetInlineAsm(func_type, call, call_len,
	                        constraints, scratch_buffer.len,
	                        /* has side effects */ true,
	                        /* align stack */ true,
	                        LLVMInlineAsmDialectATT, /* can throw */ false);
}
|
||||
|
||||
// Emit a raw syscall through inline assembly. The first argument is the
// syscall number; all arguments are passed as pointer-sized integers in
// the platform's syscall registers, and the result is returned as uptr.
static inline void llvm_emit_syscall(GenContext *c, BEValue *be_value, Expr *expr)
{
	unsigned arguments = vec_size(expr->call_expr.arguments);
	assert(arguments < 10 && "Only has room for 10");
	LLVMValueRef arg_results[10];
	LLVMTypeRef arg_types[10];
	Expr **args = expr->call_expr.arguments;
	// Every syscall argument uses the pointer-sized integer type.
	LLVMTypeRef type = llvm_get_type(c, type_uptr);
	for (unsigned i = 0; i < arguments; i++)
	{
		llvm_emit_expr(c, be_value, args[i]);
		llvm_value_rvalue(c, be_value);
		arg_results[i] = be_value->value;
		arg_types[i] = type;
	}
	LLVMTypeRef func_type = LLVMFunctionType(type, arg_types, arguments, false);
	// The scratch buffer accumulates the asm constraint string:
	// "={ret reg}" followed by one ",{reg}" per argument, plus clobbers.
	scratch_buffer_clear();
	LLVMValueRef inline_asm;
	switch (platform_target.arch)
	{
		case ARCH_TYPE_AARCH64:
		case ARCH_TYPE_AARCH64_BE:
			scratch_buffer_append("={x0}");
			assert(arguments < 8);
			if (os_is_apple(platform_target.os))
			{
				// Darwin puts the syscall number in x16.
				static char const *regs[] = { "x16", "x0", "x1", "x2", "x3", "x4", "x5" };
				llvm_syscall_write_regs_to_scratch(regs, arguments);
			}
			else
			{
				// Other aarch64 targets use x8 for the syscall number.
				static char const *regs[] = { "x8", "x0", "x1", "x2", "x3", "x4", "x5" };
				llvm_syscall_write_regs_to_scratch(regs, arguments);
			}
			inline_asm = llvm_syscall_asm(c, func_type, "svc #0x80");
			break;
		case ARCH_TYPE_X86:
		{
			scratch_buffer_append("={eax}");
			assert(arguments < 8);
			static char const *regs[] = { "eax", "ebx", "ecx", "edx", "esi", "edi" };
			// Only six register slots exist; a 7th argument is handled below.
			llvm_syscall_write_regs_to_scratch(regs, arguments < 6 ? arguments : 6);
			if (arguments == 7)
			{
				// The 7th argument travels via ebp, which must be saved and
				// restored around int $0x80 since it is the frame pointer.
				scratch_buffer_append(",rm");
				char *asm_str = "push %[arg6]\npush %%ebp\nmov 4(%%esp), %%ebp\nint $0x80\npop %%ebp\nadd $4, %%esp";
				inline_asm = llvm_syscall_asm(c, func_type, asm_str);
				break;
			}
			inline_asm = llvm_syscall_asm(c, func_type, "int $0x80");
			break;
		}
		case ARCH_TYPE_X86_64:
			scratch_buffer_append("={rax}");
			assert(arguments < 8);
			{
				static char const *regs[] = { "rax", "rdi", "rsi", "rdx", "r10", "r8", "r9" };
				llvm_syscall_write_regs_to_scratch(regs, arguments);
			}
			// Check clobbers on different OSes
			// The syscall instruction itself clobbers rcx and r11.
			scratch_buffer_append(",~{rcx},~{r11},~{memory}");
			inline_asm = llvm_syscall_asm(c, func_type, "syscall");
			break;
		case ARCH_UNSUPPORTED:
		default:
			UNREACHABLE
	}
	LLVMValueRef result = LLVMBuildCall2(c->builder, func_type, inline_asm, arg_results, arguments, "syscall");
	llvm_value_set(be_value, result, type_uptr);
}
|
||||
|
||||
// Emit a shufflevector builtin: first vector, an optional second vector
// (poison when only two arguments are given), and a constant mask which
// is always the last argument. The result type is the first operand's.
INLINE void llvm_emit_shufflevector(GenContext *c, BEValue *result_value, Expr *expr)
{
	Expr **args = expr->call_expr.arguments;
	unsigned arg_count = vec_size(args);

	// First operand; its type doubles as the result type.
	llvm_emit_expr(c, result_value, args[0]);
	llvm_value_rvalue(c, result_value);
	Type *result_type = result_value->type;
	LLVMValueRef first = result_value->value;

	// The mask is the trailing argument and must be a constant.
	llvm_emit_expr(c, result_value, args[arg_count - 1]);
	llvm_value_rvalue(c, result_value);
	LLVMValueRef shuffle_mask = result_value->value;
	assert(LLVMIsConstant(shuffle_mask));

	LLVMValueRef second;
	if (arg_count != 2)
	{
		llvm_emit_expr(c, result_value, args[1]);
		llvm_value_rvalue(c, result_value);
		second = result_value->value;
	}
	else
	{
		// Single-vector shuffle: second operand is never selected.
		second = LLVMGetPoison(LLVMTypeOf(first));
	}
	LLVMValueRef shuffled = LLVMBuildShuffleVector(c->builder, first, second, shuffle_mask, "shuffle");
	llvm_value_set(result_value, shuffled, result_type);
}
|
||||
|
||||
// Emit a vector reverse as a shufflevector with mask [n-1, n-2, ..., 0].
// The second shuffle operand is poison since the mask never selects it.
INLINE void llvm_emit_reverse(GenContext *c, BEValue *result_value, Expr *expr)
{
	Expr **args = expr->call_expr.arguments;
	llvm_emit_expr(c, result_value, args[0]);
	llvm_value_rvalue(c, result_value);
	Type *rtype = result_value->type;
	LLVMValueRef arg1 = result_value->value;
	LLVMValueRef arg2 = LLVMGetPoison(LLVMTypeOf(arg1));
	// Stack buffer for the common case; heap-allocate for huge vectors.
	LLVMValueRef buff[128];
	unsigned elements = rtype->array.len;
	// Fix: allocate room for all `elements` mask entries. Previously only
	// sizeof(LLVMValueRef) — a single entry — was allocated when
	// elements > 128, so the fill loop below overflowed the allocation.
	LLVMValueRef *mask_element = elements > 128 ? MALLOC(sizeof(LLVMValueRef) * elements) : buff;
	LLVMTypeRef mask_element_type = llvm_get_type(c, type_int);
	for (unsigned i = 0; i < elements; i++)
	{
		mask_element[i] = LLVMConstInt(mask_element_type, elements - i - 1, false);
	}
	LLVMValueRef mask = LLVMConstVector(mask_element, elements);
	llvm_value_set(result_value, LLVMBuildShuffleVector(c->builder, arg1, arg2, mask, "reverse"), rtype);
}
|
||||
|
||||
// Pick the signed-int, unsigned-int or float variant of an intrinsic
// based on the flattened type; vectors select by their element type.
INLINE unsigned llvm_intrinsic_by_type(Type *type, unsigned int_intrinsic, unsigned uint_intrinsic, unsigned float_intrinsic)
{
	type = type_flatten(type);
	// Drill through vector wrapping down to the scalar element type.
	while (type->type_kind == TYPE_VECTOR) type = type->array.base;
	switch (type->type_kind)
	{
		case ALL_SIGNED_INTS:
			return int_intrinsic;
		case TYPE_BOOL:
		case ALL_UNSIGNED_INTS:
			// bool is treated as an unsigned integer here.
			return uint_intrinsic;
		case ALL_FLOATS:
			return float_intrinsic;
		default:
			UNREACHABLE
	}
}
|
||||
// Emit a builtin call. Some builtins are lowered directly (unreachable,
// shufflevector, reverse, stacktrace, volatile load/store, syscall);
// the rest map to an LLVM intrinsic emitted via llvm_emit_intrinsic_expr.
// For min/max/abs and the saturating ops the intrinsic is chosen from
// the (flattened) type of the first argument.
void llvm_emit_builtin_call(GenContext *c, BEValue *result_value, Expr *expr)
{
	BuiltinFunction func = exprptr(expr->call_expr.function)->builtin_expr.builtin;
	unsigned intrinsic;
	switch (func)
	{
		case BUILTIN_UNREACHABLE:
			llvm_value_set(result_value, LLVMBuildUnreachable(c->builder), type_void);
			// The current block is terminated; open a fresh (dead) block so
			// any trailing emission has somewhere to land.
			c->current_block = NULL;
			c->current_block_is_target = false;
			LLVMBasicBlockRef after_unreachable = llvm_basic_block_new(c, "after.unreachable");
			llvm_emit_block(c, after_unreachable);
			return;
		case BUILTIN_SHUFFLEVECTOR:
			llvm_emit_shufflevector(c, result_value, expr);
			return;
		case BUILTIN_REVERSE:
			llvm_emit_reverse(c, result_value, expr);
			return;
		case BUILTIN_STACKTRACE:
			if (!c->debug.enable_stacktrace)
			{
				// No stacktrace support: yield a null pointer.
				llvm_value_set(result_value, llvm_get_zero(c, type_voidptr), type_voidptr);
				return;
			}
			llvm_value_set(result_value, llvm_emit_bitcast(c, c->debug.stack_slot, type_voidptr), type_voidptr);
			// Fix: previously fell through into BUILTIN_VOLATILE_STORE,
			// emitting a bogus volatile store after the stacktrace value.
			return;
		case BUILTIN_VOLATILE_STORE:
		{
			BEValue value;
			llvm_emit_expr(c, &value, expr->call_expr.arguments[0]);
			llvm_emit_expr(c, result_value, expr->call_expr.arguments[1]);
			llvm_value_rvalue(c, &value);
			// The pointer rvalue becomes the store destination address.
			value.kind = BE_ADDRESS;
			BEValue store_value = *result_value;
			LLVMValueRef store = llvm_store(c, &value, &store_value);
			if (store) LLVMSetVolatile(store, true);
			return;
		}
		case BUILTIN_VOLATILE_LOAD:
		{
			llvm_emit_expr(c, result_value, expr->call_expr.arguments[0]);
			llvm_value_rvalue(c, result_value);
			// Re-interpret the pointer rvalue as an address of the pointee,
			// then load from it and mark the load volatile.
			result_value->kind = BE_ADDRESS;
			result_value->type = type_lowering(result_value->type->pointer);
			llvm_value_rvalue(c, result_value);
			LLVMSetVolatile(result_value->value, true);
			return;
		}
		case BUILTIN_SYSCALL:
			llvm_emit_syscall(c, result_value, expr);
			return;
		case BUILTIN_MAX:
			intrinsic = llvm_intrinsic_by_type(expr->call_expr.arguments[0]->type,
			                                   intrinsic_id.smax,
			                                   intrinsic_id.umax,
			                                   intrinsic_id.maxnum);
			break;
		case BUILTIN_MIN:
			intrinsic = llvm_intrinsic_by_type(expr->call_expr.arguments[0]->type,
			                                   intrinsic_id.smin,
			                                   intrinsic_id.umin,
			                                   intrinsic_id.minnum);
			break;
		case BUILTIN_ABS:
			intrinsic = llvm_intrinsic_by_type(expr->call_expr.arguments[0]->type,
			                                   intrinsic_id.abs,
			                                   intrinsic_id.abs,
			                                   intrinsic_id.fabs);
			break;
		case BUILTIN_SAT_SHL:
			// Saturating ops have no float variant, hence the 0.
			intrinsic = llvm_intrinsic_by_type(expr->call_expr.arguments[0]->type,
			                                   intrinsic_id.sshl_sat,
			                                   intrinsic_id.ushl_sat, 0);
			break;
		case BUILTIN_SAT_ADD:
			intrinsic = llvm_intrinsic_by_type(expr->call_expr.arguments[0]->type,
			                                   intrinsic_id.sadd_sat,
			                                   intrinsic_id.uadd_sat, 0);
			break;
		case BUILTIN_SAT_SUB:
			intrinsic = llvm_intrinsic_by_type(expr->call_expr.arguments[0]->type,
			                                   intrinsic_id.ssub_sat,
			                                   intrinsic_id.usub_sat, 0);
			break;
		default:
			intrinsic = llvm_get_intrinsic(func);
			break;
	}
	llvm_emit_intrinsic_expr(c, intrinsic, result_value, expr);
}
|
||||
|
||||
void llvm_add_abi_call_attributes(GenContext *c, LLVMValueRef call_value, int count, ABIArgInfo **infos)
|
||||
{
|
||||
@@ -4774,6 +4370,7 @@ static inline void llvm_emit_vararg_parameter(GenContext *c, BEValue *value, Typ
|
||||
llvm_store_raw(c, &pointer_addr, llvm_emit_bitcast_ptr(c, array_ref, pointee_type));
|
||||
}
|
||||
|
||||
|
||||
void llvm_emit_call_expr(GenContext *c, BEValue *result_value, Expr *expr)
|
||||
{
|
||||
|
||||
|
||||
Reference in New Issue
Block a user