Sync to upstream/release/633 (#1318)

# What's Changed?

- Mostly stability improvements and bug fixes for the new solver.

## New Solver

- Typechecking with the new solver now respects the `--!nocheck` hot
comment (see the sketch after this list).
- Record type alias locations and the property locations of table
assignments.
- Maintain location information for exported table types.
- Stability fixes for normalization.
- Report internal constraint solver errors.
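
As a rough illustration of the first item, the sketch below condenses the mode dispatch this sync introduces in `Frontend.cpp`: the selected mode (which a `--!nocheck` hot comment forces to `NoCheck`) now chooses between the non-strict checker, the strict checker, and running no checker at all. The `Mode` values and the shape of the switch come from the diff; the free function and the stubbed checker calls are simplified scaffolding, not the real Frontend API.

```cpp
#include <cstdio>

// Mode values as used by the Luau frontend; Definition shares the strict path.
enum class Mode { NoCheck, Nonstrict, Strict, Definition };

// Stand-ins for Luau::checkNonStrict / Luau::check so the sketch is self-contained.
static void checkNonStrictStub() { std::puts("non-strict checker"); }
static void checkStrictStub()    { std::puts("strict checker"); }

// Mirrors the switch added to Frontend.cpp: NoCheck runs no checker at all,
// which is how the --!nocheck hot comment is respected under the new solver.
void runSelectedChecker(Mode mode)
{
    switch (mode)
    {
    case Mode::Nonstrict:
        checkNonStrictStub();
        break;
    case Mode::Definition:
        // fallthrough intentional
    case Mode::Strict:
        checkStrictStub();
        break;
    case Mode::NoCheck:
        break;
    }
}
```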

---
### Internal Contributors

Co-authored-by: Aaron Weiss <aaronweiss@roblox.com>
Co-authored-by: Alexander McCord <amccord@roblox.com>
Co-authored-by: Andy Friesen <afriesen@roblox.com>
Co-authored-by: Vighnesh Vijay <vvijay@roblox.com>
Co-authored-by: Vyacheslav Egorov <vegorov@roblox.com>

---------

Co-authored-by: Aaron Weiss <aaronweiss@roblox.com>
Co-authored-by: Alexander McCord <amccord@roblox.com>
Co-authored-by: Andy Friesen <afriesen@roblox.com>
Co-authored-by: Aviral Goel <agoel@roblox.com>
Co-authored-by: David Cope <dcope@roblox.com>
Co-authored-by: Lily Brown <lbrown@roblox.com>
Co-authored-by: Vyacheslav Egorov <vegorov@roblox.com>
Committed by Vighnesh-V on 2024-07-08 14:57:06 -07:00 via GitHub (commit a7be4fcc54, parent 0d2688844a).
25 changed files with 212 additions and 376 deletions

View File

@ -202,6 +202,10 @@ struct AssignPropConstraint
std::string propName;
TypeId rhsType;
/// If a new property is to be inserted into a table type, it will be
/// ascribed this location.
std::optional<Location> propLocation;
/// The canonical write type of the property. It is _solely_ used to
/// populate astTypes during constraint resolution. Nothing should ever
/// block on it.

View File

@ -1246,6 +1246,9 @@ ControlFlow ConstraintGenerator::visit(const ScopePtr& scope, AstStatTypeAlias*
return ControlFlow::None;
}
scope->typeAliasLocations[alias->name.value] = alias->location;
scope->typeAliasNameLocations[alias->name.value] = alias->nameLocation;
ScopePtr* defnScope = astTypeAliasDefiningScopes.find(alias);
std::unordered_map<Name, TypeFun>* typeBindings;
@ -2441,7 +2444,7 @@ void ConstraintGenerator::visitLValue(const ScopePtr& scope, AstExprIndexName* e
bool incremented = recordPropertyAssignment(lhsTy);
auto apc = addConstraint(scope, expr->location, AssignPropConstraint{lhsTy, expr->index.value, rhsType, propTy, incremented});
auto apc = addConstraint(scope, expr->location, AssignPropConstraint{lhsTy, expr->index.value, rhsType, expr->indexLocation, propTy, incremented});
getMutable<BlockedType>(propTy)->setOwner(apc);
}
@ -2457,7 +2460,7 @@ void ConstraintGenerator::visitLValue(const ScopePtr& scope, AstExprIndexExpr* e
bool incremented = recordPropertyAssignment(lhsTy);
auto apc = addConstraint(scope, expr->location, AssignPropConstraint{lhsTy, std::move(propName), rhsType, propTy, incremented});
auto apc = addConstraint(scope, expr->location, AssignPropConstraint{lhsTy, std::move(propName), rhsType, expr->index->location, propTy, incremented});
getMutable<BlockedType>(propTy)->setOwner(apc);
return;
@ -2478,6 +2481,7 @@ Inference ConstraintGenerator::check(const ScopePtr& scope, AstExprTable* expr,
LUAU_ASSERT(ttv);
ttv->state = TableState::Unsealed;
ttv->definitionModuleName = module->name;
ttv->scope = scope.get();
interiorTypes.back().push_back(ty);
@ -2509,7 +2513,7 @@ Inference ConstraintGenerator::check(const ScopePtr& scope, AstExprTable* expr,
if (AstExprConstantString* key = item.key->as<AstExprConstantString>())
{
std::string propName{key->value.data, key->value.size};
ttv->props[propName] = {itemTy};
ttv->props[propName] = {itemTy, /*deprecated*/ false, {}, key->location};
}
else
{

View File

@ -486,6 +486,9 @@ void ConstraintSolver::run()
progress |= runSolverPass(true);
} while (progress);
if (!unsolvedConstraints.empty())
reportError(InternalError{"Type inference failed to complete, you may see some confusing types and type errors."}, Location{});
// After we have run all the constraints, type families should be generalized
// At this point, we can try to perform one final simplification to suss out
// whether type families are truly uninhabited or if they can reduce
@ -1729,7 +1732,10 @@ bool ConstraintSolver::tryDispatch(const AssignPropConstraint& c, NotNull<const
if (lhsTable->state == TableState::Unsealed || lhsTable->state == TableState::Free)
{
bind(constraint, c.propType, rhsType);
lhsTable->props[propName] = Property::rw(rhsType);
Property& newProp = lhsTable->props[propName];
newProp.readTy = rhsType;
newProp.writeTy = rhsType;
newProp.location = c.propLocation;
if (lhsTable->state == TableState::Unsealed && c.decrementPropCount)
{
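
To make the property-location plumbing above easier to follow, here is a compressed, self-contained sketch of what `AssignPropConstraint` dispatch now does with the location captured at constraint-generation time. `Location`, `Property`, and `TableType` are heavily simplified stand-ins for the real Luau types; only the shape of the assignment mirrors the hunk above.

```cpp
#include <map>
#include <optional>
#include <string>

// Greatly simplified stand-ins for Luau's Location / Property / TableType.
struct Location { unsigned line = 0, beginColumn = 0, endColumn = 0; };
using TypeId = int; // stand-in for Luau's TypeId

struct Property
{
    TypeId readTy = 0;
    TypeId writeTy = 0;
    std::optional<Location> location; // newly recorded in this sync
};

struct TableType
{
    std::map<std::string, Property> props;
};

// Mirrors the unsealed/free-table branch of AssignPropConstraint dispatch:
// bind the rhs type to both the read and write positions of the property and
// stamp it with the location of the assignment's key/index expression.
void assignProp(TableType& lhsTable, const std::string& propName, TypeId rhsType, std::optional<Location> propLocation)
{
    Property& newProp = lhsTable.props[propName];
    newProp.readTy = rhsType;
    newProp.writeTy = rhsType;
    newProp.location = propLocation;
}
```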

View File

@ -41,6 +41,7 @@ LUAU_FASTFLAGVARIABLE(DebugLuauLogSolverToJsonFile, false)
LUAU_FASTFLAGVARIABLE(DebugLuauForbidInternalTypes, false)
LUAU_FASTFLAGVARIABLE(DebugLuauForceStrictMode, false)
LUAU_FASTFLAGVARIABLE(DebugLuauForceNonStrictMode, false)
LUAU_FASTFLAGVARIABLE(LuauSourceModuleUpdatedWithSelectedMode, false)
namespace Luau
{
@ -918,6 +919,9 @@ void Frontend::checkBuildQueueItem(BuildQueueItem& item)
mode = Mode::Nonstrict;
else
mode = sourceModule.mode.value_or(config.mode);
if (FFlag::LuauSourceModuleUpdatedWithSelectedMode)
item.sourceModule->mode = {mode};
ScopePtr environmentScope = item.environmentScope;
double timestamp = getTimestamp();
const std::vector<RequireCycle>& requireCycles = item.requireCycles;
@ -1240,9 +1244,7 @@ ModulePtr check(const SourceModule& sourceModule, Mode mode, const std::vector<R
ModulePtr result = std::make_shared<Module>();
result->name = sourceModule.name;
result->humanReadableName = sourceModule.humanReadableName;
result->mode = sourceModule.mode.value_or(Mode::NoCheck);
result->mode = mode;
result->internalTypes.owningModule = result.get();
result->interfaceTypes.owningModule = result.get();
@ -1323,10 +1325,19 @@ ModulePtr check(const SourceModule& sourceModule, Mode mode, const std::vector<R
}
else
{
if (mode == Mode::Nonstrict)
switch (mode)
{
case Mode::Nonstrict:
Luau::checkNonStrict(builtinTypes, iceHandler, NotNull{&unifierState}, NotNull{&dfg}, NotNull{&limits}, sourceModule, result.get());
else
break;
case Mode::Definition:
// fallthrough intentional
case Mode::Strict:
Luau::check(builtinTypes, NotNull{&unifierState}, NotNull{&limits}, logger.get(), sourceModule, result.get());
break;
case Mode::NoCheck:
break;
};
}
unfreeze(result->interfaceTypes);

View File

@ -255,8 +255,10 @@ bool isSubtype(const NormalizedStringType& subStr, const NormalizedStringType& s
void NormalizedClassType::pushPair(TypeId ty, TypeIds negations)
{
ordering.push_back(ty);
classes.insert(std::make_pair(ty, std::move(negations)));
auto result = classes.insert(std::make_pair(ty, std::move(negations)));
if (result.second)
ordering.push_back(ty);
LUAU_ASSERT(ordering.size() == classes.size());
}
void NormalizedClassType::resetToNever()
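
The normalization fix above is easiest to see in isolation: `pushPair` previously appended to `ordering` unconditionally, so pushing a class that was already present could leave `ordering` and `classes` with different sizes. A standalone sketch of the corrected pattern, with stand-in types in place of Luau's `TypeId`/`TypeIds`:

```cpp
#include <cassert>
#include <map>
#include <set>
#include <utility>
#include <vector>

using TypeId = int;               // stand-in for Luau's TypeId
using TypeIds = std::set<TypeId>; // stand-in for Luau's TypeIds

struct NormalizedClassTypeSketch
{
    std::vector<TypeId> ordering;
    std::map<TypeId, TypeIds> classes;

    void pushPair(TypeId ty, TypeIds negations)
    {
        auto result = classes.insert(std::make_pair(ty, std::move(negations)));
        if (result.second)          // only a genuinely new key...
            ordering.push_back(ty); // ...gets an ordering slot
        assert(ordering.size() == classes.size());
    }
};
```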

View File

@ -753,7 +753,12 @@ void Substitution::replaceChildren(TypeId ty)
for (auto& [name, prop] : ttv->props)
{
if (FFlag::DebugLuauDeferredConstraintResolution)
prop = Property::create(replace(prop.readTy), replace(prop.writeTy));
{
if (prop.readTy)
prop.readTy = replace(prop.readTy);
if (prop.writeTy)
prop.writeTy = replace(prop.writeTy);
}
else
prop.setType(replace(prop.type()));
}
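
Under the new solver, the substitution change above stops rebuilding properties with `Property::create`, which discarded metadata such as the freshly recorded location; instead, only the read and write types that actually exist are replaced in place. A minimal sketch, assuming an identity `replace` and simplified types:

```cpp
#include <optional>

using TypeId = int; // stand-in for Luau's TypeId

struct Property
{
    std::optional<TypeId> readTy;
    std::optional<TypeId> writeTy;
    // ...location and other metadata survive because the property itself is kept...
};

// Identity stand-in for Substitution::replace.
static TypeId replace(TypeId ty)
{
    return ty;
}

// Mirrors the new-solver branch of Substitution::replaceChildren for table props.
void replaceChildren(Property& prop)
{
    if (prop.readTy)
        prop.readTy = replace(*prop.readTy);
    if (prop.writeTy)
        prop.writeTy = replace(*prop.writeTy);
}
```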

View File

@ -31,10 +31,8 @@ void toString(IrToStringContext& ctx, IrOp op);
void toString(std::string& result, IrConst constant);
const char* getBytecodeTypeName_DEPRECATED(uint8_t type);
const char* getBytecodeTypeName(uint8_t type, const char* const* userdataTypes);
void toString_DEPRECATED(std::string& result, const BytecodeTypes& bcTypes);
void toString(std::string& result, const BytecodeTypes& bcTypes, const char* const* userdataTypes);
void toStringDetailed(

View File

@ -11,7 +11,6 @@
#include <algorithm>
LUAU_FASTFLAGVARIABLE(LuauCodegenUserdataOps, false)
LUAU_FASTFLAGVARIABLE(LuauCodegenFastcall3, false)
namespace Luau
@ -749,50 +748,27 @@ void analyzeBytecodeTypes(IrFunction& function, const HostIrHooks& hostHooks)
regTags[ra] = LBC_TYPE_ANY;
if (FFlag::LuauCodegenUserdataOps)
TString* str = gco2ts(function.proto->k[kc].value.gc);
const char* field = getstr(str);
if (bcType.a == LBC_TYPE_VECTOR)
{
TString* str = gco2ts(function.proto->k[kc].value.gc);
const char* field = getstr(str);
if (bcType.a == LBC_TYPE_VECTOR)
if (str->len == 1)
{
if (str->len == 1)
{
// Same handling as LOP_GETTABLEKS block in lvmexecute.cpp - case-insensitive comparison with "X" / "Y" / "Z"
char ch = field[0] | ' ';
// Same handling as LOP_GETTABLEKS block in lvmexecute.cpp - case-insensitive comparison with "X" / "Y" / "Z"
char ch = field[0] | ' ';
if (ch == 'x' || ch == 'y' || ch == 'z')
regTags[ra] = LBC_TYPE_NUMBER;
}
if (ch == 'x' || ch == 'y' || ch == 'z')
regTags[ra] = LBC_TYPE_NUMBER;
}
if (regTags[ra] == LBC_TYPE_ANY && hostHooks.vectorAccessBytecodeType)
regTags[ra] = hostHooks.vectorAccessBytecodeType(field, str->len);
}
else if (isCustomUserdataBytecodeType(bcType.a))
{
if (regTags[ra] == LBC_TYPE_ANY && hostHooks.userdataAccessBytecodeType)
regTags[ra] = hostHooks.userdataAccessBytecodeType(bcType.a, field, str->len);
}
if (regTags[ra] == LBC_TYPE_ANY && hostHooks.vectorAccessBytecodeType)
regTags[ra] = hostHooks.vectorAccessBytecodeType(field, str->len);
}
else
else if (isCustomUserdataBytecodeType(bcType.a))
{
if (bcType.a == LBC_TYPE_VECTOR)
{
TString* str = gco2ts(function.proto->k[kc].value.gc);
const char* field = getstr(str);
if (str->len == 1)
{
// Same handling as LOP_GETTABLEKS block in lvmexecute.cpp - case-insensitive comparison with "X" / "Y" / "Z"
char ch = field[0] | ' ';
if (ch == 'x' || ch == 'y' || ch == 'z')
regTags[ra] = LBC_TYPE_NUMBER;
}
if (regTags[ra] == LBC_TYPE_ANY && hostHooks.vectorAccessBytecodeType)
regTags[ra] = hostHooks.vectorAccessBytecodeType(field, str->len);
}
if (regTags[ra] == LBC_TYPE_ANY && hostHooks.userdataAccessBytecodeType)
regTags[ra] = hostHooks.userdataAccessBytecodeType(bcType.a, field, str->len);
}
bcType.result = regTags[ra];
@ -829,7 +805,7 @@ void analyzeBytecodeTypes(IrFunction& function, const HostIrHooks& hostHooks)
regTags[ra] = LBC_TYPE_NUMBER;
else if (bcType.a == LBC_TYPE_VECTOR && bcType.b == LBC_TYPE_VECTOR)
regTags[ra] = LBC_TYPE_VECTOR;
else if (FFlag::LuauCodegenUserdataOps && hostHooks.userdataMetamethodBytecodeType &&
else if (hostHooks.userdataMetamethodBytecodeType &&
(isCustomUserdataBytecodeType(bcType.a) || isCustomUserdataBytecodeType(bcType.b)))
regTags[ra] = hostHooks.userdataMetamethodBytecodeType(bcType.a, bcType.b, opcodeToHostMetamethod(op));
@ -861,7 +837,7 @@ void analyzeBytecodeTypes(IrFunction& function, const HostIrHooks& hostHooks)
if (bcType.b == LBC_TYPE_NUMBER || bcType.b == LBC_TYPE_VECTOR)
regTags[ra] = LBC_TYPE_VECTOR;
}
else if (FFlag::LuauCodegenUserdataOps && hostHooks.userdataMetamethodBytecodeType &&
else if (hostHooks.userdataMetamethodBytecodeType &&
(isCustomUserdataBytecodeType(bcType.a) || isCustomUserdataBytecodeType(bcType.b)))
{
regTags[ra] = hostHooks.userdataMetamethodBytecodeType(bcType.a, bcType.b, opcodeToHostMetamethod(op));
@ -884,7 +860,7 @@ void analyzeBytecodeTypes(IrFunction& function, const HostIrHooks& hostHooks)
if (bcType.a == LBC_TYPE_NUMBER && bcType.b == LBC_TYPE_NUMBER)
regTags[ra] = LBC_TYPE_NUMBER;
else if (FFlag::LuauCodegenUserdataOps && hostHooks.userdataMetamethodBytecodeType &&
else if (hostHooks.userdataMetamethodBytecodeType &&
(isCustomUserdataBytecodeType(bcType.a) || isCustomUserdataBytecodeType(bcType.b)))
regTags[ra] = hostHooks.userdataMetamethodBytecodeType(bcType.a, bcType.b, opcodeToHostMetamethod(op));
@ -907,7 +883,7 @@ void analyzeBytecodeTypes(IrFunction& function, const HostIrHooks& hostHooks)
regTags[ra] = LBC_TYPE_NUMBER;
else if (bcType.a == LBC_TYPE_VECTOR && bcType.b == LBC_TYPE_VECTOR)
regTags[ra] = LBC_TYPE_VECTOR;
else if (FFlag::LuauCodegenUserdataOps && hostHooks.userdataMetamethodBytecodeType &&
else if (hostHooks.userdataMetamethodBytecodeType &&
(isCustomUserdataBytecodeType(bcType.a) || isCustomUserdataBytecodeType(bcType.b)))
regTags[ra] = hostHooks.userdataMetamethodBytecodeType(bcType.a, bcType.b, opcodeToHostMetamethod(op));
@ -939,7 +915,7 @@ void analyzeBytecodeTypes(IrFunction& function, const HostIrHooks& hostHooks)
if (bcType.b == LBC_TYPE_NUMBER || bcType.b == LBC_TYPE_VECTOR)
regTags[ra] = LBC_TYPE_VECTOR;
}
else if (FFlag::LuauCodegenUserdataOps && hostHooks.userdataMetamethodBytecodeType &&
else if (hostHooks.userdataMetamethodBytecodeType &&
(isCustomUserdataBytecodeType(bcType.a) || isCustomUserdataBytecodeType(bcType.b)))
{
regTags[ra] = hostHooks.userdataMetamethodBytecodeType(bcType.a, bcType.b, opcodeToHostMetamethod(op));
@ -962,7 +938,7 @@ void analyzeBytecodeTypes(IrFunction& function, const HostIrHooks& hostHooks)
if (bcType.a == LBC_TYPE_NUMBER && bcType.b == LBC_TYPE_NUMBER)
regTags[ra] = LBC_TYPE_NUMBER;
else if (FFlag::LuauCodegenUserdataOps && hostHooks.userdataMetamethodBytecodeType &&
else if (hostHooks.userdataMetamethodBytecodeType &&
(isCustomUserdataBytecodeType(bcType.a) || isCustomUserdataBytecodeType(bcType.b)))
regTags[ra] = hostHooks.userdataMetamethodBytecodeType(bcType.a, bcType.b, opcodeToHostMetamethod(op));
@ -984,7 +960,7 @@ void analyzeBytecodeTypes(IrFunction& function, const HostIrHooks& hostHooks)
regTags[ra] = LBC_TYPE_NUMBER;
else if (bcType.a == LBC_TYPE_VECTOR && bcType.b == LBC_TYPE_VECTOR)
regTags[ra] = LBC_TYPE_VECTOR;
else if (FFlag::LuauCodegenUserdataOps && hostHooks.userdataMetamethodBytecodeType &&
else if (hostHooks.userdataMetamethodBytecodeType &&
(isCustomUserdataBytecodeType(bcType.a) || isCustomUserdataBytecodeType(bcType.b)))
regTags[ra] = hostHooks.userdataMetamethodBytecodeType(bcType.a, bcType.b, opcodeToHostMetamethod(op));
@ -1014,7 +990,7 @@ void analyzeBytecodeTypes(IrFunction& function, const HostIrHooks& hostHooks)
if (bcType.b == LBC_TYPE_NUMBER || bcType.b == LBC_TYPE_VECTOR)
regTags[ra] = LBC_TYPE_VECTOR;
}
else if (FFlag::LuauCodegenUserdataOps && hostHooks.userdataMetamethodBytecodeType &&
else if (hostHooks.userdataMetamethodBytecodeType &&
(isCustomUserdataBytecodeType(bcType.a) || isCustomUserdataBytecodeType(bcType.b)))
{
regTags[ra] = hostHooks.userdataMetamethodBytecodeType(bcType.a, bcType.b, opcodeToHostMetamethod(op));
@ -1047,7 +1023,7 @@ void analyzeBytecodeTypes(IrFunction& function, const HostIrHooks& hostHooks)
regTags[ra] = LBC_TYPE_NUMBER;
else if (bcType.a == LBC_TYPE_VECTOR)
regTags[ra] = LBC_TYPE_VECTOR;
else if (FFlag::LuauCodegenUserdataOps && hostHooks.userdataMetamethodBytecodeType && isCustomUserdataBytecodeType(bcType.a))
else if (hostHooks.userdataMetamethodBytecodeType && isCustomUserdataBytecodeType(bcType.a))
regTags[ra] = hostHooks.userdataMetamethodBytecodeType(bcType.a, LBC_TYPE_ANY, HostMetamethod::Minus);
bcType.result = regTags[ra];
@ -1203,26 +1179,13 @@ void analyzeBytecodeTypes(IrFunction& function, const HostIrHooks& hostHooks)
bcType.result = LBC_TYPE_FUNCTION;
if (FFlag::LuauCodegenUserdataOps)
{
TString* str = gco2ts(function.proto->k[kc].value.gc);
const char* field = getstr(str);
TString* str = gco2ts(function.proto->k[kc].value.gc);
const char* field = getstr(str);
if (bcType.a == LBC_TYPE_VECTOR && hostHooks.vectorNamecallBytecodeType)
knownNextCallResult = LuauBytecodeType(hostHooks.vectorNamecallBytecodeType(field, str->len));
else if (isCustomUserdataBytecodeType(bcType.a) && hostHooks.userdataNamecallBytecodeType)
knownNextCallResult = LuauBytecodeType(hostHooks.userdataNamecallBytecodeType(bcType.a, field, str->len));
}
else
{
if (bcType.a == LBC_TYPE_VECTOR && hostHooks.vectorNamecallBytecodeType)
{
TString* str = gco2ts(function.proto->k[kc].value.gc);
const char* field = getstr(str);
knownNextCallResult = LuauBytecodeType(hostHooks.vectorNamecallBytecodeType(field, str->len));
}
}
if (bcType.a == LBC_TYPE_VECTOR && hostHooks.vectorNamecallBytecodeType)
knownNextCallResult = LuauBytecodeType(hostHooks.vectorNamecallBytecodeType(field, str->len));
else if (isCustomUserdataBytecodeType(bcType.a) && hostHooks.userdataNamecallBytecodeType)
knownNextCallResult = LuauBytecodeType(hostHooks.userdataNamecallBytecodeType(bcType.a, field, str->len));
break;
}
case LOP_CALL:

View File

@ -12,7 +12,6 @@
#include "lapi.h"
LUAU_FASTFLAG(LuauLoadUserdataInfo)
LUAU_FASTFLAG(LuauNativeAttribute)
namespace Luau
@ -83,54 +82,9 @@ static void logFunctionHeader(AssemblyBuilder& build, Proto* proto)
build.logAppend("\n");
}
template<typename AssemblyBuilder>
static void logFunctionTypes_DEPRECATED(AssemblyBuilder& build, const IrFunction& function)
{
CODEGEN_ASSERT(!FFlag::LuauLoadUserdataInfo);
const BytecodeTypeInfo& typeInfo = function.bcTypeInfo;
for (size_t i = 0; i < typeInfo.argumentTypes.size(); i++)
{
uint8_t ty = typeInfo.argumentTypes[i];
if (ty != LBC_TYPE_ANY)
{
if (const char* name = tryFindLocalName(function.proto, int(i), 0))
build.logAppend("; R%d: %s [argument '%s']\n", int(i), getBytecodeTypeName_DEPRECATED(ty), name);
else
build.logAppend("; R%d: %s [argument]\n", int(i), getBytecodeTypeName_DEPRECATED(ty));
}
}
for (size_t i = 0; i < typeInfo.upvalueTypes.size(); i++)
{
uint8_t ty = typeInfo.upvalueTypes[i];
if (ty != LBC_TYPE_ANY)
{
if (const char* name = tryFindUpvalueName(function.proto, int(i)))
build.logAppend("; U%d: %s ['%s']\n", int(i), getBytecodeTypeName_DEPRECATED(ty), name);
else
build.logAppend("; U%d: %s\n", int(i), getBytecodeTypeName_DEPRECATED(ty));
}
}
for (const BytecodeRegTypeInfo& el : typeInfo.regTypes)
{
// Using last active position as the PC because 'startpc' for type info is before local is initialized
if (const char* name = tryFindLocalName(function.proto, el.reg, el.endpc - 1))
build.logAppend("; R%d: %s from %d to %d [local '%s']\n", el.reg, getBytecodeTypeName_DEPRECATED(el.type), el.startpc, el.endpc, name);
else
build.logAppend("; R%d: %s from %d to %d\n", el.reg, getBytecodeTypeName_DEPRECATED(el.type), el.startpc, el.endpc);
}
}
template<typename AssemblyBuilder>
static void logFunctionTypes(AssemblyBuilder& build, const IrFunction& function, const char* const* userdataTypes)
{
CODEGEN_ASSERT(FFlag::LuauLoadUserdataInfo);
const BytecodeTypeInfo& typeInfo = function.bcTypeInfo;
for (size_t i = 0; i < typeInfo.argumentTypes.size(); i++)
@ -238,12 +192,7 @@ static std::string getAssemblyImpl(AssemblyBuilder& build, const TValue* func, A
logFunctionHeader(build, p);
if (options.includeIrTypes)
{
if (FFlag::LuauLoadUserdataInfo)
logFunctionTypes(build, ir.function, options.compilationOptions.userdataTypes);
else
logFunctionTypes_DEPRECATED(build, ir.function);
}
logFunctionTypes(build, ir.function, options.compilationOptions.userdataTypes);
CodeGenCompilationResult result = CodeGenCompilationResult::Success;

View File

@ -27,7 +27,6 @@ LUAU_FASTFLAG(DebugCodegenSkipNumbering)
LUAU_FASTINT(CodegenHeuristicsInstructionLimit)
LUAU_FASTINT(CodegenHeuristicsBlockLimit)
LUAU_FASTINT(CodegenHeuristicsBlockInstructionLimit)
LUAU_FASTFLAG(LuauLoadUserdataInfo)
LUAU_FASTFLAG(LuauNativeAttribute)
namespace Luau
@ -179,10 +178,7 @@ inline bool lowerImpl(AssemblyBuilder& build, IrLowering& lowering, IrFunction&
if (bcTypes.result != LBC_TYPE_ANY || bcTypes.a != LBC_TYPE_ANY || bcTypes.b != LBC_TYPE_ANY || bcTypes.c != LBC_TYPE_ANY)
{
if (FFlag::LuauLoadUserdataInfo)
toString(ctx.result, bcTypes, options.compilationOptions.userdataTypes);
else
toString_DEPRECATED(ctx.result, bcTypes);
toString(ctx.result, bcTypes, options.compilationOptions.userdataTypes);
build.logAppend("\n");
}

View File

@ -13,7 +13,6 @@
#include <string.h>
LUAU_FASTFLAG(LuauLoadUserdataInfo)
LUAU_FASTFLAG(LuauCodegenInstG)
LUAU_FASTFLAG(LuauCodegenFastcall3)
@ -102,16 +101,13 @@ static void buildArgumentTypeChecks(IrBuilder& build)
build.inst(IrCmd::CHECK_TAG, load, build.constTag(LUA_TBUFFER), build.vmExit(kVmExitEntryGuardPc));
break;
default:
if (FFlag::LuauLoadUserdataInfo)
if (tag >= LBC_TYPE_TAGGED_USERDATA_BASE && tag < LBC_TYPE_TAGGED_USERDATA_END)
{
if (tag >= LBC_TYPE_TAGGED_USERDATA_BASE && tag < LBC_TYPE_TAGGED_USERDATA_END)
{
build.inst(IrCmd::CHECK_TAG, load, build.constTag(LUA_TUSERDATA), build.vmExit(kVmExitEntryGuardPc));
}
else
{
CODEGEN_ASSERT(!"unknown argument type tag");
}
build.inst(IrCmd::CHECK_TAG, load, build.constTag(LUA_TUSERDATA), build.vmExit(kVmExitEntryGuardPc));
}
else
{
CODEGEN_ASSERT(!"unknown argument type tag");
}
break;
}

View File

@ -7,7 +7,6 @@
#include <stdarg.h>
LUAU_FASTFLAG(LuauLoadUserdataInfo)
LUAU_FASTFLAG(LuauCodegenInstG)
namespace Luau
@ -492,44 +491,8 @@ void toString(std::string& result, IrConst constant)
}
}
const char* getBytecodeTypeName_DEPRECATED(uint8_t type)
{
CODEGEN_ASSERT(!FFlag::LuauLoadUserdataInfo);
switch (type & ~LBC_TYPE_OPTIONAL_BIT)
{
case LBC_TYPE_NIL:
return (type & LBC_TYPE_OPTIONAL_BIT) != 0 ? "nil?" : "nil";
case LBC_TYPE_BOOLEAN:
return (type & LBC_TYPE_OPTIONAL_BIT) != 0 ? "boolean?" : "boolean";
case LBC_TYPE_NUMBER:
return (type & LBC_TYPE_OPTIONAL_BIT) != 0 ? "number?" : "number";
case LBC_TYPE_STRING:
return (type & LBC_TYPE_OPTIONAL_BIT) != 0 ? "string?" : "string";
case LBC_TYPE_TABLE:
return (type & LBC_TYPE_OPTIONAL_BIT) != 0 ? "table?" : "table";
case LBC_TYPE_FUNCTION:
return (type & LBC_TYPE_OPTIONAL_BIT) != 0 ? "function?" : "function";
case LBC_TYPE_THREAD:
return (type & LBC_TYPE_OPTIONAL_BIT) != 0 ? "thread?" : "thread";
case LBC_TYPE_USERDATA:
return (type & LBC_TYPE_OPTIONAL_BIT) != 0 ? "userdata?" : "userdata";
case LBC_TYPE_VECTOR:
return (type & LBC_TYPE_OPTIONAL_BIT) != 0 ? "vector?" : "vector";
case LBC_TYPE_BUFFER:
return (type & LBC_TYPE_OPTIONAL_BIT) != 0 ? "buffer?" : "buffer";
case LBC_TYPE_ANY:
return (type & LBC_TYPE_OPTIONAL_BIT) != 0 ? "any?" : "any";
}
CODEGEN_ASSERT(!"Unhandled type in getBytecodeTypeName");
return nullptr;
}
const char* getBytecodeTypeName(uint8_t type, const char* const* userdataTypes)
{
CODEGEN_ASSERT(FFlag::LuauLoadUserdataInfo);
// Optional bit should be handled externally
type = type & ~LBC_TYPE_OPTIONAL_BIT;
@ -571,22 +534,8 @@ const char* getBytecodeTypeName(uint8_t type, const char* const* userdataTypes)
return nullptr;
}
void toString_DEPRECATED(std::string& result, const BytecodeTypes& bcTypes)
{
CODEGEN_ASSERT(!FFlag::LuauLoadUserdataInfo);
if (bcTypes.c != LBC_TYPE_ANY)
append(result, "%s <- %s, %s, %s", getBytecodeTypeName_DEPRECATED(bcTypes.result), getBytecodeTypeName_DEPRECATED(bcTypes.a),
getBytecodeTypeName_DEPRECATED(bcTypes.b), getBytecodeTypeName_DEPRECATED(bcTypes.c));
else
append(result, "%s <- %s, %s", getBytecodeTypeName_DEPRECATED(bcTypes.result), getBytecodeTypeName_DEPRECATED(bcTypes.a),
getBytecodeTypeName_DEPRECATED(bcTypes.b));
}
void toString(std::string& result, const BytecodeTypes& bcTypes, const char* const* userdataTypes)
{
CODEGEN_ASSERT(FFlag::LuauLoadUserdataInfo);
append(result, "%s%s", getBytecodeTypeName(bcTypes.result, userdataTypes), (bcTypes.result & LBC_TYPE_OPTIONAL_BIT) != 0 ? "?" : "");
append(result, " <- ");
append(result, "%s%s", getBytecodeTypeName(bcTypes.a, userdataTypes), (bcTypes.a & LBC_TYPE_OPTIONAL_BIT) != 0 ? "?" : "");

View File

@ -11,9 +11,6 @@
#include "lstate.h"
#include "lgc.h"
LUAU_FASTFLAG(LuauCodegenUserdataOps)
LUAU_FASTFLAGVARIABLE(LuauCodegenUserdataAlloc, false)
LUAU_FASTFLAGVARIABLE(LuauCodegenUserdataOpsFixA64, false)
LUAU_FASTFLAG(LuauCodegenFastcall3)
LUAU_FASTFLAG(LuauCodegenMathSign)
@ -1077,8 +1074,6 @@ void IrLoweringA64::lowerInst(IrInst& inst, uint32_t index, const IrBlock& next)
}
case IrCmd::NEW_USERDATA:
{
CODEGEN_ASSERT(FFlag::LuauCodegenUserdataAlloc);
regs.spill(build, index);
build.mov(x0, rState);
build.mov(x1, intOp(inst.a));
@ -1708,18 +1703,11 @@ void IrLoweringA64::lowerInst(IrInst& inst, uint32_t index, const IrBlock& next)
}
case IrCmd::CHECK_USERDATA_TAG:
{
CODEGEN_ASSERT(FFlag::LuauCodegenUserdataOps);
Label fresh; // used when guard aborts execution or jumps to a VM exit
Label& fail = getTargetLabel(inst.c, fresh);
RegisterA64 temp = regs.allocTemp(KindA64::w);
build.ldrb(temp, mem(regOp(inst.a), offsetof(Udata, tag)));
if (FFlag::LuauCodegenUserdataOpsFixA64)
build.cmp(temp, intOp(inst.b));
else
build.cmp(temp, tagOp(inst.b));
build.cmp(temp, intOp(inst.b));
build.b(ConditionA64::NotEqual, fail);
finalizeTargetLabel(inst.c, fresh);
break;
@ -2688,66 +2676,34 @@ AddressA64 IrLoweringA64::tempAddr(IrOp op, int offset)
AddressA64 IrLoweringA64::tempAddrBuffer(IrOp bufferOp, IrOp indexOp, uint8_t tag)
{
if (FFlag::LuauCodegenUserdataOps)
CODEGEN_ASSERT(tag == LUA_TUSERDATA || tag == LUA_TBUFFER);
int dataOffset = tag == LUA_TBUFFER ? offsetof(Buffer, data) : offsetof(Udata, data);
if (indexOp.kind == IrOpKind::Inst)
{
CODEGEN_ASSERT(tag == LUA_TUSERDATA || tag == LUA_TBUFFER);
int dataOffset = tag == LUA_TBUFFER ? offsetof(Buffer, data) : offsetof(Udata, data);
RegisterA64 temp = regs.allocTemp(KindA64::x);
build.add(temp, regOp(bufferOp), regOp(indexOp)); // implicit uxtw
return mem(temp, dataOffset);
}
else if (indexOp.kind == IrOpKind::Constant)
{
// Since the resulting address may be used to load any size, including 1 byte, from an unaligned offset, we are limited by unscaled
// encoding
if (unsigned(intOp(indexOp)) + dataOffset <= 255)
return mem(regOp(bufferOp), int(intOp(indexOp) + dataOffset));
if (indexOp.kind == IrOpKind::Inst)
{
RegisterA64 temp = regs.allocTemp(KindA64::x);
build.add(temp, regOp(bufferOp), regOp(indexOp)); // implicit uxtw
return mem(temp, dataOffset);
}
else if (indexOp.kind == IrOpKind::Constant)
{
// Since the resulting address may be used to load any size, including 1 byte, from an unaligned offset, we are limited by unscaled
// encoding
if (unsigned(intOp(indexOp)) + dataOffset <= 255)
return mem(regOp(bufferOp), int(intOp(indexOp) + dataOffset));
// indexOp can only be negative in dead code (since offsets are checked); this avoids assertion in emitAddOffset
if (intOp(indexOp) < 0)
return mem(regOp(bufferOp), dataOffset);
// indexOp can only be negative in dead code (since offsets are checked); this avoids assertion in emitAddOffset
if (intOp(indexOp) < 0)
return mem(regOp(bufferOp), dataOffset);
RegisterA64 temp = regs.allocTemp(KindA64::x);
emitAddOffset(build, temp, regOp(bufferOp), size_t(intOp(indexOp)));
return mem(temp, dataOffset);
}
else
{
CODEGEN_ASSERT(!"Unsupported instruction form");
return noreg;
}
RegisterA64 temp = regs.allocTemp(KindA64::x);
emitAddOffset(build, temp, regOp(bufferOp), size_t(intOp(indexOp)));
return mem(temp, dataOffset);
}
else
{
if (indexOp.kind == IrOpKind::Inst)
{
RegisterA64 temp = regs.allocTemp(KindA64::x);
build.add(temp, regOp(bufferOp), regOp(indexOp)); // implicit uxtw
return mem(temp, offsetof(Buffer, data));
}
else if (indexOp.kind == IrOpKind::Constant)
{
// Since the resulting address may be used to load any size, including 1 byte, from an unaligned offset, we are limited by unscaled
// encoding
if (unsigned(intOp(indexOp)) + offsetof(Buffer, data) <= 255)
return mem(regOp(bufferOp), int(intOp(indexOp) + offsetof(Buffer, data)));
// indexOp can only be negative in dead code (since offsets are checked); this avoids assertion in emitAddOffset
if (intOp(indexOp) < 0)
return mem(regOp(bufferOp), offsetof(Buffer, data));
RegisterA64 temp = regs.allocTemp(KindA64::x);
emitAddOffset(build, temp, regOp(bufferOp), size_t(intOp(indexOp)));
return mem(temp, offsetof(Buffer, data));
}
else
{
CODEGEN_ASSERT(!"Unsupported instruction form");
return noreg;
}
CODEGEN_ASSERT(!"Unsupported instruction form");
return noreg;
}
}

View File

@ -15,8 +15,6 @@
#include "lstate.h"
#include "lgc.h"
LUAU_FASTFLAG(LuauCodegenUserdataOps)
LUAU_FASTFLAG(LuauCodegenUserdataAlloc)
LUAU_FASTFLAG(LuauCodegenFastcall3)
LUAU_FASTFLAG(LuauCodegenMathSign)
@ -939,8 +937,6 @@ void IrLoweringX64::lowerInst(IrInst& inst, uint32_t index, const IrBlock& next)
}
case IrCmd::NEW_USERDATA:
{
CODEGEN_ASSERT(FFlag::LuauCodegenUserdataAlloc);
IrCallWrapperX64 callWrap(regs, build, index);
callWrap.addArgument(SizeX64::qword, rState);
callWrap.addArgument(SizeX64::qword, intOp(inst.a));
@ -1421,8 +1417,6 @@ void IrLoweringX64::lowerInst(IrInst& inst, uint32_t index, const IrBlock& next)
}
case IrCmd::CHECK_USERDATA_TAG:
{
CODEGEN_ASSERT(FFlag::LuauCodegenUserdataOps);
build.cmp(byte[regOp(inst.a) + offsetof(Udata, tag)], intOp(inst.b));
jumpOrAbortOnUndef(ConditionX64::NotEqual, inst.c, next);
break;
@ -2269,23 +2263,13 @@ RegisterX64 IrLoweringX64::regOp(IrOp op)
OperandX64 IrLoweringX64::bufferAddrOp(IrOp bufferOp, IrOp indexOp, uint8_t tag)
{
if (FFlag::LuauCodegenUserdataOps)
{
CODEGEN_ASSERT(tag == LUA_TUSERDATA || tag == LUA_TBUFFER);
int dataOffset = tag == LUA_TBUFFER ? offsetof(Buffer, data) : offsetof(Udata, data);
CODEGEN_ASSERT(tag == LUA_TUSERDATA || tag == LUA_TBUFFER);
int dataOffset = tag == LUA_TBUFFER ? offsetof(Buffer, data) : offsetof(Udata, data);
if (indexOp.kind == IrOpKind::Inst)
return regOp(bufferOp) + qwordReg(regOp(indexOp)) + dataOffset;
else if (indexOp.kind == IrOpKind::Constant)
return regOp(bufferOp) + intOp(indexOp) + dataOffset;
}
else
{
if (indexOp.kind == IrOpKind::Inst)
return regOp(bufferOp) + qwordReg(regOp(indexOp)) + offsetof(Buffer, data);
else if (indexOp.kind == IrOpKind::Constant)
return regOp(bufferOp) + intOp(indexOp) + offsetof(Buffer, data);
}
if (indexOp.kind == IrOpKind::Inst)
return regOp(bufferOp) + qwordReg(regOp(indexOp)) + dataOffset;
else if (indexOp.kind == IrOpKind::Constant)
return regOp(bufferOp) + intOp(indexOp) + dataOffset;
CODEGEN_ASSERT(!"Unsupported instruction form");
return noreg;
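
With `LuauCodegenUserdataOps` removed in both the A64 and X64 lowerings above, buffer addressing no longer hardcodes `Buffer::data`; the payload offset is always chosen from the tag. A self-contained sketch of that selection, using stand-in layouts and tag constants rather than the real `lobject.h`/`lua.h` definitions:

```cpp
#include <cassert>
#include <cstddef>
#include <cstdint>

// Stand-in layouts; the real code uses Buffer and Udata from Luau's lobject.h.
struct Buffer { uint32_t header; char data[1]; };
struct Udata  { uint32_t header; uint8_t tag; char data[1]; };

// Stand-in tag constants (the real values come from lua.h).
constexpr uint8_t LUA_TBUFFER = 1;
constexpr uint8_t LUA_TUSERDATA = 2;

// Both bufferAddrOp (X64) and tempAddrBuffer (A64) now pick the data offset
// from the tag, so userdata payloads and buffer payloads share one code path.
size_t dataOffsetFor(uint8_t tag)
{
    assert(tag == LUA_TUSERDATA || tag == LUA_TBUFFER);
    return tag == LUA_TBUFFER ? offsetof(Buffer, data) : offsetof(Udata, data);
}
```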

View File

@ -13,7 +13,6 @@
#include "lstate.h"
#include "ltm.h"
LUAU_FASTFLAG(LuauCodegenUserdataOps)
LUAU_FASTFLAG(LuauCodegenFastcall3)
namespace Luau
@ -444,7 +443,7 @@ static void translateInstBinaryNumeric(IrBuilder& build, int ra, int rb, int rc,
return;
}
if (FFlag::LuauCodegenUserdataOps && (isUserdataBytecodeType(bcTypes.a) || isUserdataBytecodeType(bcTypes.b)))
if (isUserdataBytecodeType(bcTypes.a) || isUserdataBytecodeType(bcTypes.b))
{
if (build.hostHooks.userdataMetamethod &&
build.hostHooks.userdataMetamethod(build, bcTypes.a, bcTypes.b, ra, opb, opc, tmToHostMetamethod(tm), pcpos))
@ -596,7 +595,7 @@ void translateInstMinus(IrBuilder& build, const Instruction* pc, int pcpos)
return;
}
if (FFlag::LuauCodegenUserdataOps && isUserdataBytecodeType(bcTypes.a))
if (isUserdataBytecodeType(bcTypes.a))
{
if (build.hostHooks.userdataMetamethod &&
build.hostHooks.userdataMetamethod(build, bcTypes.a, bcTypes.b, ra, build.vmReg(rb), {}, tmToHostMetamethod(TM_UNM), pcpos))
@ -628,17 +627,7 @@ void translateInstMinus(IrBuilder& build, const Instruction* pc, int pcpos)
FallbackStreamScope scope(build, fallback, next);
build.inst(IrCmd::SET_SAVEDPC, build.constUint(pcpos + 1));
if (FFlag::LuauCodegenUserdataOps)
{
build.inst(IrCmd::DO_ARITH, build.vmReg(ra), build.vmReg(rb), build.vmReg(rb), build.constInt(TM_UNM));
}
else
{
build.inst(
IrCmd::DO_ARITH, build.vmReg(LUAU_INSN_A(*pc)), build.vmReg(LUAU_INSN_B(*pc)), build.vmReg(LUAU_INSN_B(*pc)), build.constInt(TM_UNM));
}
build.inst(IrCmd::DO_ARITH, build.vmReg(ra), build.vmReg(rb), build.vmReg(rb), build.constInt(TM_UNM));
build.inst(IrCmd::JUMP, next);
}
}
@ -650,7 +639,7 @@ void translateInstLength(IrBuilder& build, const Instruction* pc, int pcpos)
int ra = LUAU_INSN_A(*pc);
int rb = LUAU_INSN_B(*pc);
if (FFlag::LuauCodegenUserdataOps && isUserdataBytecodeType(bcTypes.a))
if (isUserdataBytecodeType(bcTypes.a))
{
if (build.hostHooks.userdataMetamethod &&
build.hostHooks.userdataMetamethod(build, bcTypes.a, bcTypes.b, ra, build.vmReg(rb), {}, tmToHostMetamethod(TM_LEN), pcpos))
@ -680,12 +669,7 @@ void translateInstLength(IrBuilder& build, const Instruction* pc, int pcpos)
FallbackStreamScope scope(build, fallback, next);
build.inst(IrCmd::SET_SAVEDPC, build.constUint(pcpos + 1));
if (FFlag::LuauCodegenUserdataOps)
build.inst(IrCmd::DO_LEN, build.vmReg(ra), build.vmReg(rb));
else
build.inst(IrCmd::DO_LEN, build.vmReg(LUAU_INSN_A(*pc)), build.vmReg(LUAU_INSN_B(*pc)));
build.inst(IrCmd::DO_LEN, build.vmReg(ra), build.vmReg(rb));
build.inst(IrCmd::JUMP, next);
}
@ -1293,11 +1277,11 @@ void translateInstGetTableKS(IrBuilder& build, const Instruction* pc, int pcpos)
return;
}
if (FFlag::LuauCodegenUserdataOps ? isUserdataBytecodeType(bcTypes.a) : bcTypes.a == LBC_TYPE_USERDATA)
if (isUserdataBytecodeType(bcTypes.a))
{
build.inst(IrCmd::CHECK_TAG, tb, build.constTag(LUA_TUSERDATA), build.vmExit(pcpos));
if (FFlag::LuauCodegenUserdataOps && build.hostHooks.userdataAccess)
if (build.hostHooks.userdataAccess)
{
TString* str = gco2ts(build.function.proto->k[aux].value.gc);
const char* field = getstr(str);
@ -1340,7 +1324,7 @@ void translateInstSetTableKS(IrBuilder& build, const Instruction* pc, int pcpos)
IrOp tb = build.inst(IrCmd::LOAD_TAG, build.vmReg(rb));
if (FFlag::LuauCodegenUserdataOps ? isUserdataBytecodeType(bcTypes.a) : bcTypes.a == LBC_TYPE_USERDATA)
if (isUserdataBytecodeType(bcTypes.a))
{
build.inst(IrCmd::CHECK_TAG, tb, build.constTag(LUA_TUSERDATA), build.vmExit(pcpos));
@ -1486,11 +1470,11 @@ bool translateInstNamecall(IrBuilder& build, const Instruction* pc, int pcpos)
return false;
}
if (FFlag::LuauCodegenUserdataOps ? isUserdataBytecodeType(bcTypes.a) : bcTypes.a == LBC_TYPE_USERDATA)
if (isUserdataBytecodeType(bcTypes.a))
{
build.loadAndCheckTag(build.vmReg(rb), LUA_TUSERDATA, build.vmExit(pcpos));
if (FFlag::LuauCodegenUserdataOps && build.hostHooks.userdataNamecall)
if (build.hostHooks.userdataNamecall)
{
Instruction call = pc[2];
CODEGEN_ASSERT(LUAU_INSN_OP(call) == LOP_CALL);

View File

@ -14,8 +14,6 @@
#include <math.h>
#include <string.h>
LUAU_FASTFLAG(LuauCodegenUserdataAlloc)
namespace Luau
{
namespace CodeGen
@ -91,9 +89,7 @@ void initFunctions(NativeContext& context)
context.forgPrepXnextFallback = forgPrepXnextFallback;
context.callProlog = callProlog;
context.callEpilogC = callEpilogC;
if (FFlag::LuauCodegenUserdataAlloc)
context.newUserdata = newUserdata;
context.newUserdata = newUserdata;
context.callFallback = callFallback;

View File

@ -18,8 +18,6 @@ LUAU_FASTINTVARIABLE(LuauCodeGenMinLinearBlockPath, 3)
LUAU_FASTINTVARIABLE(LuauCodeGenReuseSlotLimit, 64)
LUAU_FASTINTVARIABLE(LuauCodeGenReuseUdataTagLimit, 64)
LUAU_FASTFLAGVARIABLE(DebugLuauAbortingChecks, false)
LUAU_FASTFLAG(LuauCodegenUserdataOps)
LUAU_FASTFLAG(LuauCodegenUserdataAlloc)
LUAU_FASTFLAG(LuauCodegenFastcall3)
LUAU_FASTFLAG(LuauCodegenMathSign)
@ -425,9 +423,7 @@ struct ConstPropState
invalidateValuePropagation();
invalidateHeapTableData();
invalidateHeapBufferData();
if (FFlag::LuauCodegenUserdataOps)
invalidateUserdataData();
invalidateUserdataData();
}
IrFunction& function;
@ -1051,8 +1047,6 @@ static void constPropInInst(ConstPropState& state, IrBuilder& build, IrFunction&
}
case IrCmd::CHECK_USERDATA_TAG:
{
CODEGEN_ASSERT(FFlag::LuauCodegenUserdataOps);
for (uint32_t prevIdx : state.useradataTagCache)
{
IrInst& prev = function.instructions[prevIdx];
@ -1062,7 +1056,7 @@ static void constPropInInst(ConstPropState& state, IrBuilder& build, IrFunction&
if (prev.a != inst.a || prev.b != inst.b)
continue;
}
else if (FFlag::LuauCodegenUserdataAlloc && prev.cmd == IrCmd::NEW_USERDATA)
else if (prev.cmd == IrCmd::NEW_USERDATA)
{
if (inst.a.kind != IrOpKind::Inst || prevIdx != inst.a.index || prev.b != inst.b)
continue;
@ -1244,8 +1238,6 @@ static void constPropInInst(ConstPropState& state, IrBuilder& build, IrFunction&
case IrCmd::TRY_CALL_FASTGETTM:
break;
case IrCmd::NEW_USERDATA:
CODEGEN_ASSERT(FFlag::LuauCodegenUserdataAlloc);
if (int(state.useradataTagCache.size()) < FInt::LuauCodeGenReuseUdataTagLimit)
state.useradataTagCache.push_back(index);
break;
@ -1532,9 +1524,7 @@ static void constPropInBlockChain(IrBuilder& build, std::vector<uint8_t>& visite
// Same for table and buffer data propagation
state.invalidateHeapTableData();
state.invalidateHeapBufferData();
if (FFlag::LuauCodegenUserdataOps)
state.invalidateUserdataData();
state.invalidateUserdataData();
// Blocks in a chain are guaranteed to follow each other
// We force that by giving all blocks the same sorting key, but consecutive chain keys

View File

@ -9,8 +9,6 @@
#include "lobject.h"
LUAU_FASTFLAG(LuauCodegenUserdataOps)
// TODO: optimization can be improved by knowing which registers are live in at each VM exit
namespace Luau
@ -596,8 +594,6 @@ static void markDeadStoresInInst(RemoveDeadStoreState& state, IrBuilder& build,
state.checkLiveIns(inst.d);
break;
case IrCmd::CHECK_USERDATA_TAG:
CODEGEN_ASSERT(FFlag::LuauCodegenUserdataOps);
state.checkLiveIns(inst.c);
break;

View File

@ -13,8 +13,6 @@
#include <string.h>
LUAU_FASTFLAGVARIABLE(LuauLoadUserdataInfo, false)
// TODO: RAII deallocation doesn't work for longjmp builds if a memory error happens
template<typename T>
struct TempBuffer
@ -189,8 +187,6 @@ static void resolveImportSafe(lua_State* L, Table* env, TValue* k, uint32_t id)
static void remapUserdataTypes(char* data, size_t size, uint8_t* userdataRemapping, uint32_t count)
{
LUAU_ASSERT(FFlag::LuauLoadUserdataInfo);
size_t offset = 0;
uint32_t typeSize = readVarInt(data, size, offset);
@ -287,16 +283,13 @@ int luau_load(lua_State* L, const char* chunkname, const char* data, size_t size
{
typesversion = read<uint8_t>(data, size, offset);
if (FFlag::LuauLoadUserdataInfo)
if (typesversion < LBC_TYPE_VERSION_MIN || typesversion > LBC_TYPE_VERSION_MAX)
{
if (typesversion < LBC_TYPE_VERSION_MIN || typesversion > LBC_TYPE_VERSION_MAX)
{
char chunkbuf[LUA_IDSIZE];
const char* chunkid = luaO_chunkid(chunkbuf, sizeof(chunkbuf), chunkname, strlen(chunkname));
lua_pushfstring(L, "%s: bytecode type version mismatch (expected [%d..%d], got %d)", chunkid, LBC_TYPE_VERSION_MIN,
LBC_TYPE_VERSION_MAX, typesversion);
return 1;
}
char chunkbuf[LUA_IDSIZE];
const char* chunkid = luaO_chunkid(chunkbuf, sizeof(chunkbuf), chunkname, strlen(chunkname));
lua_pushfstring(L, "%s: bytecode type version mismatch (expected [%d..%d], got %d)", chunkid, LBC_TYPE_VERSION_MIN, LBC_TYPE_VERSION_MAX,
typesversion);
return 1;
}
}
@ -317,7 +310,7 @@ int luau_load(lua_State* L, const char* chunkname, const char* data, size_t size
const uint32_t userdataTypeLimit = LBC_TYPE_TAGGED_USERDATA_END - LBC_TYPE_TAGGED_USERDATA_BASE;
uint8_t userdataRemapping[userdataTypeLimit];
if (FFlag::LuauLoadUserdataInfo && typesversion == 3)
if (typesversion == 3)
{
memset(userdataRemapping, LBC_TYPE_USERDATA, userdataTypeLimit);
@ -393,7 +386,7 @@ int luau_load(lua_State* L, const char* chunkname, const char* data, size_t size
offset += typesize;
}
else if (typesversion == 2 || (FFlag::LuauLoadUserdataInfo && typesversion == 3))
else if (typesversion == 2 || typesversion == 3)
{
uint32_t typesize = readVarInt(data, size, offset);
@ -406,7 +399,7 @@ int luau_load(lua_State* L, const char* chunkname, const char* data, size_t size
memcpy(p->typeinfo, types, typesize);
offset += typesize;
if (FFlag::LuauLoadUserdataInfo && typesversion == 3)
if (typesversion == 3)
{
remapUserdataTypes((char*)(uint8_t*)p->typeinfo, p->sizetypeinfo, userdataRemapping, userdataTypeLimit);
}

View File

@ -1387,4 +1387,31 @@ TEST_CASE_FIXTURE(FrontendFixture, "no_separate_caches_with_the_new_solver")
CHECK(module->mode == Mode::Nonstrict);
}
TEST_CASE_FIXTURE(Fixture, "exported_tables_have_position_metadata")
{
CheckResult result = check(R"(
return { abc = 22 }
)");
LUAU_REQUIRE_NO_ERRORS(result);
ModulePtr mm = getMainModule();
TypePackId retTp = mm->getModuleScope()->returnType;
auto retHead = flatten(retTp).first;
REQUIRE(1 == retHead.size());
const TableType* tt = get<TableType>(retHead[0]);
REQUIRE(tt);
CHECK("MainModule" == tt->definitionModuleName);
CHECK(1 == tt->props.size());
CHECK(tt->props.count("abc"));
const Property& prop = tt->props.find("abc")->second;
CHECK(Location{Position{1, 17}, Position{1, 20}} == prop.location);
}
TEST_SUITE_END();

View File

@ -16,9 +16,6 @@
#include <string_view>
LUAU_FASTFLAG(LuauCompileUserdataInfo)
LUAU_FASTFLAG(LuauLoadUserdataInfo)
LUAU_FASTFLAG(LuauCodegenUserdataOps)
LUAU_FASTFLAG(LuauCodegenUserdataAlloc)
LUAU_FASTFLAG(LuauCompileFastcall3)
LUAU_FASTFLAG(LuauCodegenFastcall3)
@ -1548,7 +1545,7 @@ TEST_CASE("CustomUserdataTypesTemp")
if (!Luau::CodeGen::isSupported())
return;
ScopedFastFlag sffs[]{{FFlag::LuauCompileUserdataInfo, false}, {FFlag::LuauLoadUserdataInfo, true}};
ScopedFastFlag sffs[]{{FFlag::LuauCompileUserdataInfo, false}};
CHECK_EQ("\n" + getCodegenHeader(R"(
local function foo(v: vec2, x: mat3)
@ -1568,7 +1565,7 @@ TEST_CASE("CustomUserdataTypes")
if (!Luau::CodeGen::isSupported())
return;
ScopedFastFlag sffs[]{{FFlag::LuauCompileUserdataInfo, true}, {FFlag::LuauLoadUserdataInfo, true}};
ScopedFastFlag sffs[]{{FFlag::LuauCompileUserdataInfo, true}};
CHECK_EQ("\n" + getCodegenHeader(R"(
local function foo(v: vec2, x: mat3)
@ -1588,7 +1585,7 @@ TEST_CASE("CustomUserdataPropertyAccess")
if (!Luau::CodeGen::isSupported())
return;
ScopedFastFlag sffs[]{{FFlag::LuauCompileUserdataInfo, true}, {FFlag::LuauLoadUserdataInfo, true}, {FFlag::LuauCodegenUserdataOps, true}};
ScopedFastFlag sffs[]{{FFlag::LuauCompileUserdataInfo, true}};
CHECK_EQ("\n" + getCodegenAssembly(R"(
local function foo(v: vec2)
@ -1623,7 +1620,7 @@ TEST_CASE("CustomUserdataPropertyAccess2")
if (!Luau::CodeGen::isSupported())
return;
ScopedFastFlag sffs[]{{FFlag::LuauCompileUserdataInfo, true}, {FFlag::LuauLoadUserdataInfo, true}, {FFlag::LuauCodegenUserdataOps, true}};
ScopedFastFlag sffs[]{{FFlag::LuauCompileUserdataInfo, true}};
CHECK_EQ("\n" + getCodegenAssembly(R"(
local function foo(a: mat3)
@ -1660,7 +1657,7 @@ TEST_CASE("CustomUserdataNamecall1")
if (!Luau::CodeGen::isSupported())
return;
ScopedFastFlag sffs[]{{FFlag::LuauCompileUserdataInfo, true}, {FFlag::LuauLoadUserdataInfo, true}, {FFlag::LuauCodegenUserdataOps, true}};
ScopedFastFlag sffs[]{{FFlag::LuauCompileUserdataInfo, true}};
CHECK_EQ("\n" + getCodegenAssembly(R"(
local function foo(a: vec2, b: vec2)
@ -1706,8 +1703,7 @@ TEST_CASE("CustomUserdataNamecall2")
if (!Luau::CodeGen::isSupported())
return;
ScopedFastFlag sffs[]{{FFlag::LuauCompileUserdataInfo, true}, {FFlag::LuauLoadUserdataInfo, true}, {FFlag::LuauCodegenUserdataOps, true},
{FFlag::LuauCodegenUserdataAlloc, true}};
ScopedFastFlag sffs[]{{FFlag::LuauCompileUserdataInfo, true}};
CHECK_EQ("\n" + getCodegenAssembly(R"(
local function foo(a: vec2, b: vec2)
@ -1756,7 +1752,7 @@ TEST_CASE("CustomUserdataMetamethodDirectFlow")
if (!Luau::CodeGen::isSupported())
return;
ScopedFastFlag sffs[]{{FFlag::LuauCompileUserdataInfo, true}, {FFlag::LuauLoadUserdataInfo, true}, {FFlag::LuauCodegenUserdataOps, true}};
ScopedFastFlag sffs[]{{FFlag::LuauCompileUserdataInfo, true}};
CHECK_EQ("\n" + getCodegenAssembly(R"(
local function foo(a: mat3, b: mat3)
@ -1788,7 +1784,7 @@ TEST_CASE("CustomUserdataMetamethodDirectFlow2")
if (!Luau::CodeGen::isSupported())
return;
ScopedFastFlag sffs[]{{FFlag::LuauCompileUserdataInfo, true}, {FFlag::LuauLoadUserdataInfo, true}, {FFlag::LuauCodegenUserdataOps, true}};
ScopedFastFlag sffs[]{{FFlag::LuauCompileUserdataInfo, true}};
CHECK_EQ("\n" + getCodegenAssembly(R"(
local function foo(a: mat3)
@ -1818,7 +1814,7 @@ TEST_CASE("CustomUserdataMetamethodDirectFlow3")
if (!Luau::CodeGen::isSupported())
return;
ScopedFastFlag sffs[]{{FFlag::LuauCompileUserdataInfo, true}, {FFlag::LuauLoadUserdataInfo, true}, {FFlag::LuauCodegenUserdataOps, true}};
ScopedFastFlag sffs[]{{FFlag::LuauCompileUserdataInfo, true}};
CHECK_EQ("\n" + getCodegenAssembly(R"(
local function foo(a: sequence)
@ -1848,8 +1844,7 @@ TEST_CASE("CustomUserdataMetamethod")
if (!Luau::CodeGen::isSupported())
return;
ScopedFastFlag sffs[]{{FFlag::LuauCompileUserdataInfo, true}, {FFlag::LuauLoadUserdataInfo, true}, {FFlag::LuauCodegenUserdataOps, true},
{FFlag::LuauCodegenUserdataAlloc, true}};
ScopedFastFlag sffs[]{{FFlag::LuauCompileUserdataInfo, true}};
CHECK_EQ("\n" + getCodegenAssembly(R"(
local function foo(a: vec2, b: vec2, c: vec2)

View File

@ -4,6 +4,7 @@
#include "doctest.h"
#include "Luau/BuiltinDefinitions.h"
#include "Luau/AstQuery.h"
using namespace Luau;
@ -929,17 +930,19 @@ TEST_CASE_FIXTURE(Fixture, "type_alias_locations")
)");
ModulePtr mod = getMainModule();
REQUIRE(mod);
REQUIRE(mod->scopes.size() == 8);
REQUIRE(!mod->scopes.empty());
REQUIRE(mod->scopes[0].second->typeAliasNameLocations.count("T") > 0);
CHECK(mod->scopes[0].second->typeAliasNameLocations["T"] == Location(Position(1, 13), 1));
REQUIRE(mod->scopes[3].second->typeAliasNameLocations.count("T") > 0);
CHECK(mod->scopes[3].second->typeAliasNameLocations["T"] == Location(Position(4, 17), 1));
ScopePtr doScope = findScopeAtPosition(*mod, Position{4, 0});
REQUIRE(doScope);
REQUIRE(mod->scopes[3].second->typeAliasNameLocations.count("X") > 0);
CHECK(mod->scopes[3].second->typeAliasNameLocations["X"] == Location(Position(5, 17), 1));
REQUIRE(doScope->typeAliasNameLocations.count("T") > 0);
CHECK(doScope->typeAliasNameLocations["T"] == Location(Position(4, 17), 1));
REQUIRE(doScope->typeAliasNameLocations.count("X") > 0);
CHECK(doScope->typeAliasNameLocations["X"] == Location(Position(5, 17), 1));
}
/*

View File

@ -202,7 +202,7 @@ TEST_CASE_FIXTURE(BuiltinsFixture, "for_in_loop_with_zero_iterators_dcr")
for key in no_iter() do end
)");
LUAU_REQUIRE_ERROR_COUNT(1, result);
LUAU_REQUIRE_ERROR_COUNT(2, result);
}
TEST_CASE_FIXTURE(BuiltinsFixture, "for_in_with_a_custom_iterator_should_type_check")

View File

@ -2990,6 +2990,24 @@ c = b
CHECK(0 == ttv->instantiatedTypeParams.size());
}
TEST_CASE_FIXTURE(Fixture, "record_location_of_inserted_table_properties")
{
CheckResult result = check(R"(
local a = {}
a.foo = 1234
)");
LUAU_REQUIRE_NO_ERRORS(result);
const TableType* tt = get<TableType>(requireType("a"));
REQUIRE(tt);
REQUIRE(tt->props.count("foo"));
const Property& prop = tt->props.find("foo")->second;
CHECK(Location{{2, 10}, {2, 13}} == prop.location);
}
TEST_CASE_FIXTURE(Fixture, "table_indexing_error_location")
{
CheckResult result = check(R"(

View File

@ -245,11 +245,18 @@ TypeAliases.recursive_types_restriction_not_ok
TypeAliases.report_shadowed_aliases
TypeAliases.type_alias_local_mutation
TypeAliases.type_alias_local_rename
TypeAliases.type_alias_locations
TypeAliases.type_alias_of_an_imported_recursive_generic_type
TypeFamilyTests.add_family_at_work
TypeFamilyTests.cyclic_add_family_at_work
TypeFamilyTests.cyclic_concat_family_at_work
TypeFamilyTests.didnt_quite_exceed_distributivity_limits
TypeFamilyTests.ensure_equivalence_with_distributivity
TypeFamilyTests.family_as_fn_arg
TypeFamilyTests.index_type_family_works_w_generic_types
TypeFamilyTests.internal_families_raise_errors
TypeFamilyTests.keyof_oss_crash_gh1161
TypeFamilyTests.mul_family_with_union_of_multiplicatives
TypeFamilyTests.mul_family_with_union_of_multiplicatives_2
TypeFamilyTests.unsolvable_family
TypeInfer.be_sure_to_use_active_txnlog_when_evaluating_a_variadic_overload
TypeInfer.check_type_infer_recursion_count
@ -266,6 +273,7 @@ TypeInfer.no_stack_overflow_from_isoptional
TypeInfer.recursive_function_that_invokes_itself_with_a_refinement_of_its_parameter
TypeInfer.recursive_function_that_invokes_itself_with_a_refinement_of_its_parameter_2
TypeInfer.tc_after_error_recovery_no_replacement_name_in_error
TypeInfer.tc_if_else_expressions_expected_type_3
TypeInfer.type_infer_recursion_limit_no_ice
TypeInfer.type_infer_recursion_limit_normalizer
TypeInfer.unify_nearly_identical_recursive_types
@ -335,6 +343,7 @@ TypeInferFunctions.too_many_return_values_in_parentheses
TypeInferFunctions.too_many_return_values_no_function
TypeInferFunctions.unifier_should_not_bind_free_types
TypeInferLoops.cli_68448_iterators_need_not_accept_nil
TypeInferLoops.dcr_iteration_fragmented_keys
TypeInferLoops.dcr_iteration_on_never_gives_never
TypeInferLoops.dcr_xpath_candidates
TypeInferLoops.for_in_loop
@ -343,11 +352,13 @@ TypeInferLoops.for_in_loop_error_on_iterator_requiring_args_but_none_given
TypeInferLoops.for_in_loop_on_error
TypeInferLoops.for_in_loop_on_non_function
TypeInferLoops.for_in_loop_with_next
TypeInferLoops.for_in_with_just_one_iterator_is_ok
TypeInferLoops.for_loop
TypeInferLoops.ipairs_produces_integral_indices
TypeInferLoops.iterate_over_properties
TypeInferLoops.iteration_regression_issue_69967_alt
TypeInferLoops.loop_iter_metamethod_nil
TypeInferLoops.loop_iter_metamethod_not_enough_returns
TypeInferLoops.loop_iter_metamethod_ok
TypeInferLoops.loop_iter_metamethod_ok_with_inference
TypeInferLoops.loop_iter_no_indexer_strict