// This file is part of the Luau programming language and is licensed under MIT License; see LICENSE.txt for details
#pragma once

#include "Luau/Bytecode.h"
#include "Luau/Common.h"
#include "Luau/IrData.h"

namespace Luau
{
namespace CodeGen
{

struct IrBuilder;
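
// Returns true for opcodes that encode their jump offset in the signed D operand (see getJumpTarget)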
inline bool isJumpD(LuauOpcode op)
{
    switch (op)
    {
    case LOP_JUMP:
    case LOP_JUMPIF:
    case LOP_JUMPIFNOT:
    case LOP_JUMPIFEQ:
    case LOP_JUMPIFLE:
    case LOP_JUMPIFLT:
    case LOP_JUMPIFNOTEQ:
    case LOP_JUMPIFNOTLE:
    case LOP_JUMPIFNOTLT:
    case LOP_FORNPREP:
    case LOP_FORNLOOP:
    case LOP_FORGPREP:
    case LOP_FORGLOOP:
    case LOP_FORGPREP_INEXT:
    case LOP_FORGPREP_NEXT:
    case LOP_JUMPBACK:
    case LOP_JUMPXEQKNIL:
    case LOP_JUMPXEQKB:
    case LOP_JUMPXEQKN:
    case LOP_JUMPXEQKS:
        return true;

    default:
        return false;
    }
}
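
// Returns true for opcodes that use the C operand as a skip offset over the following instructions (currently only LOADB)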
inline bool isSkipC(LuauOpcode op)
{
    switch (op)
    {
    case LOP_LOADB:
        return true;

    default:
        return false;
    }
}
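
// Returns true for the FASTCALL family of opcodes; their C operand encodes a skip offset (see getJumpTarget)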
inline bool isFastCall(LuauOpcode op)
{
    switch (op)
    {
    case LOP_FASTCALL:
    case LOP_FASTCALL1:
    case LOP_FASTCALL2:
    case LOP_FASTCALL2K:
        return true;

    default:
        return false;
    }
}
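
// Returns the target instruction index for a branching instruction at 'pc', or -1 if the instruction doesn't branch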
inline int getJumpTarget(uint32_t insn, uint32_t pc)
{
    LuauOpcode op = LuauOpcode(LUAU_INSN_OP(insn));

    if (isJumpD(op))
        return int(pc + LUAU_INSN_D(insn) + 1);
    else if (isFastCall(op))
        return int(pc + LUAU_INSN_C(insn) + 2);
    else if (isSkipC(op) && LUAU_INSN_C(insn))
        return int(pc + LUAU_INSN_C(insn) + 1);
    else if (op == LOP_JUMPX)
        return int(pc + LUAU_INSN_E(insn) + 1);
    else
        return -1;
}
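
// Returns true for IR instructions that terminate a basic block; control is transferred and does not fall through to the next instruction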
inline bool isBlockTerminator(IrCmd cmd)
{
    switch (cmd)
    {
    case IrCmd::JUMP:
    case IrCmd::JUMP_IF_TRUTHY:
    case IrCmd::JUMP_IF_FALSY:
    case IrCmd::JUMP_EQ_TAG:
    case IrCmd::JUMP_CMP_INT:
    case IrCmd::JUMP_EQ_POINTER:
    case IrCmd::JUMP_CMP_NUM:
    case IrCmd::JUMP_FORN_LOOP_COND:
    case IrCmd::JUMP_SLOT_MATCH:
    case IrCmd::RETURN:
    case IrCmd::FORGLOOP:
    case IrCmd::FORGLOOP_FALLBACK:
    case IrCmd::FORGPREP_XNEXT_FALLBACK:
    case IrCmd::FALLBACK_FORGPREP:
        return true;
    default:
        break;
    }

    return false;
}
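
// Returns true for guard-style instructions that can jump to a fallback/exit target on failure but continue in the current block on success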
inline bool isNonTerminatingJump(IrCmd cmd)
{
    switch (cmd)
    {
    case IrCmd::TRY_NUM_TO_INDEX:
    case IrCmd::TRY_CALL_FASTGETTM:
    case IrCmd::CHECK_FASTCALL_RES:
    case IrCmd::CHECK_TAG:
    case IrCmd::CHECK_TRUTHY:
    case IrCmd::CHECK_READONLY:
    case IrCmd::CHECK_NO_METATABLE:
    case IrCmd::CHECK_SAFE_ENV:
    case IrCmd::CHECK_ARRAY_SIZE:
    case IrCmd::CHECK_SLOT_MATCH:
    case IrCmd::CHECK_NODE_NO_NEXT:
    case IrCmd::CHECK_NODE_VALUE:
    case IrCmd::CHECK_BUFFER_LEN:
        return true;
    default:
        break;
    }

    return false;
}
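
// Returns true for IR instructions that produce a value which other instructions can reference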
inline bool hasResult(IrCmd cmd)
{
    switch (cmd)
    {
    case IrCmd::LOAD_TAG:
    case IrCmd::LOAD_POINTER:
    case IrCmd::LOAD_DOUBLE:
    case IrCmd::LOAD_INT:
    case IrCmd::LOAD_FLOAT:
    case IrCmd::LOAD_TVALUE:
    case IrCmd::LOAD_ENV:
    case IrCmd::GET_ARR_ADDR:
    case IrCmd::GET_SLOT_NODE_ADDR:
    case IrCmd::GET_HASH_NODE_ADDR:
    case IrCmd::GET_CLOSURE_UPVAL_ADDR:
    case IrCmd::ADD_INT:
    case IrCmd::SUB_INT:
    case IrCmd::ADD_NUM:
    case IrCmd::SUB_NUM:
    case IrCmd::MUL_NUM:
    case IrCmd::DIV_NUM:
    case IrCmd::IDIV_NUM:
    case IrCmd::MOD_NUM:
    case IrCmd::MIN_NUM:
    case IrCmd::MAX_NUM:
    case IrCmd::UNM_NUM:
    case IrCmd::FLOOR_NUM:
    case IrCmd::CEIL_NUM:
    case IrCmd::ROUND_NUM:
    case IrCmd::SQRT_NUM:
    case IrCmd::ABS_NUM:
    case IrCmd::ADD_VEC:
    case IrCmd::SUB_VEC:
    case IrCmd::MUL_VEC:
    case IrCmd::DIV_VEC:
    case IrCmd::UNM_VEC:
    case IrCmd::NOT_ANY:
    case IrCmd::CMP_ANY:
    case IrCmd::TABLE_LEN:
    case IrCmd::TABLE_SETNUM:
    case IrCmd::STRING_LEN:
    case IrCmd::NEW_TABLE:
    case IrCmd::DUP_TABLE:
    case IrCmd::TRY_NUM_TO_INDEX:
    case IrCmd::TRY_CALL_FASTGETTM:
    case IrCmd::INT_TO_NUM:
    case IrCmd::UINT_TO_NUM:
    case IrCmd::NUM_TO_INT:
    case IrCmd::NUM_TO_UINT:
    case IrCmd::NUM_TO_VEC:
    case IrCmd::TAG_VECTOR:
    case IrCmd::SUBSTITUTE:
    case IrCmd::INVOKE_FASTCALL:
    case IrCmd::BITAND_UINT:
    case IrCmd::BITXOR_UINT:
    case IrCmd::BITOR_UINT:
    case IrCmd::BITNOT_UINT:
    case IrCmd::BITLSHIFT_UINT:
    case IrCmd::BITRSHIFT_UINT:
    case IrCmd::BITARSHIFT_UINT:
    case IrCmd::BITLROTATE_UINT:
    case IrCmd::BITRROTATE_UINT:
    case IrCmd::BITCOUNTLZ_UINT:
    case IrCmd::BITCOUNTRZ_UINT:
    case IrCmd::INVOKE_LIBM:
    case IrCmd::GET_TYPE:
    case IrCmd::GET_TYPEOF:
    case IrCmd::NEWCLOSURE:
    case IrCmd::FINDUPVAL:
    case IrCmd::BUFFER_READI8:
    case IrCmd::BUFFER_READU8:
    case IrCmd::BUFFER_READI16:
    case IrCmd::BUFFER_READU16:
    case IrCmd::BUFFER_READI32:
    case IrCmd::BUFFER_READF32:
    case IrCmd::BUFFER_READF64:
        return true;
    default:
        break;
    }

    return false;
}
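
// Returns true if the instruction has side effects and has to be preserved even when its result is unused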
inline bool hasSideEffects(IrCmd cmd)
{
    if (cmd == IrCmd::INVOKE_FASTCALL)
        return true;

    // Instructions that don't produce a result most likely have other side-effects to make them useful
    // Right now, a full switch would mirror the 'hasResult' function, so we use this simple condition
    return !hasResult(cmd);
}

inline bool isPseudo(IrCmd cmd)
{
    // Instructions that are used for internal needs and are not a part of final lowering
    return cmd == IrCmd::NOP || cmd == IrCmd::SUBSTITUTE;
}
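
// Returns the kind of value (IrValueKind) that the instruction produces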
IrValueKind getCmdValueKind(IrCmd cmd);
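
// Returns true if a value with this tag is a garbage-collectable object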
bool isGCO(uint8_t tag);

// Optional bit has to be cleared at the call site; otherwise this will return 'false' for 'userdata?'
bool isUserdataBytecodeType(uint8_t ty);
bool isCustomUserdataBytecodeType(uint8_t ty);

// Manually add or remove use of an operand
void addUse(IrFunction& function, IrOp op);
void removeUse(IrFunction& function, IrOp op);

// Remove a single instruction
void kill(IrFunction& function, IrInst& inst);

// Remove a range of instructions
void kill(IrFunction& function, uint32_t start, uint32_t end);

// Remove a block, including all instructions inside
void kill(IrFunction& function, IrBlock& block);

// Replace a single operand and update use counts (can cause chain removal of dead code)
void replace(IrFunction& function, IrOp& original, IrOp replacement);

// Replace a single instruction
// An instruction index is used instead of a reference to handle the introduction of a new block terminator
void replace(IrFunction& function, IrBlock& block, uint32_t instIdx, IrInst replacement);

// Replace instruction with a different value (using IrCmd::SUBSTITUTE)
void substitute(IrFunction& function, IrInst& inst, IrOp replacement);

// Replace instruction arguments that point to substitutions with target values
void applySubstitutions(IrFunction& function, IrOp& op);
void applySubstitutions(IrFunction& function, IrInst& inst);

// Compare numbers using IR condition value
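// For example, 'compare(1.0, 2.0, IrCondition::Less)' is expected to be true; this is an illustration only
// and assumes the IrCondition values declared in IrData.h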
bool compare(double a, double b, IrCondition cond);

// Perform constant folding on instruction at index
// For most instructions, successful folding results in an IrCmd::SUBSTITUTE
// But it can also be successful on conditional control-flow, replacing it with an unconditional IrCmd::JUMP
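//
// As an illustration (not part of this header), a pass might visit a block roughly like this, assuming
// IrBlock exposes 'start'/'finish' instruction indices and IrFunction exposes 'instructions' as in IrData.h:
//
//     for (uint32_t index = block.start; index <= block.finish; index++)
//     {
//         IrInst& inst = function.instructions[index];
//
//         applySubstitutions(function, inst);
//         foldConstants(build, function, block, index);
//     }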
void foldConstants(IrBuilder& build, IrFunction& function, IrBlock& block, uint32_t instIdx);
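
// Returns the offset into the native context structure for the entry that corresponds to the given builtin function id ('bfid')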
uint32_t getNativeContextOffset(int bfid);

// Cleans up blocks that were created with no users
void killUnusedBlocks(IrFunction& function);

// Returns blocks in an order that tries to maximize fallthrough between them during lowering
// We want to mostly preserve build order, with fallbacks outlined,
// but we also use hints from optimization passes that chain blocks together when there's only one out-in edge between them
std::vector<uint32_t> getSortedBlockOrder(IrFunction& function);

// Returns the first non-dead block that comes after the block at index 'i' in the sorted blocks array
// The 'dummy' block is returned if the end of the array was reached
IrBlock& getNextBlock(IrFunction& function, const std::vector<uint32_t>& sortedBlocks, IrBlock& dummy, size_t i);

} // namespace CodeGen
} // namespace Luau