Sync to upstream/release/598 (#1063)

* Include `windows.h` rather than `Windows.h` to make things compile on
MinGW.
* Custom implementation of timegm/os.time for all platforms (a portable-conversion sketch follows this list)
* Disable builtin constant folding when getfenv/setfenv are used
* Fixes https://github.com/Roblox/luau/issues/1042
* Fixes https://github.com/Roblox/luau/issues/1043
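
On the timegm/os.time item: below is a minimal sketch of the usual portable approach, assuming the days-from-civil technique; the implementation that actually landed upstream may differ in details.

```cpp
// Hedged sketch: convert a struct tm expressed in UTC to a Unix timestamp
// without relying on the platform's timegm/_mkgmtime. Uses the standard
// days-from-civil algorithm; not necessarily the exact code Luau adopted.
#include <cstdint>
#include <ctime>

static int64_t daysFromCivil(int y, unsigned m, unsigned d)
{
    y -= m <= 2;
    const int64_t era = (y >= 0 ? y : y - 399) / 400;
    const unsigned yoe = static_cast<unsigned>(y - era * 400);           // [0, 399]
    const unsigned doy = (153 * (m + (m > 2 ? -3 : 9)) + 2) / 5 + d - 1; // [0, 365]
    const unsigned doe = yoe * 365 + yoe / 4 - yoe / 100 + doy;          // [0, 146096]
    return era * 146097 + static_cast<int64_t>(doe) - 719468;            // days since 1970-01-01
}

static int64_t portableTimegm(const std::tm& t)
{
    const int64_t days = daysFromCivil(t.tm_year + 1900, unsigned(t.tm_mon + 1), unsigned(t.tm_mday));
    return days * 86400 + t.tm_hour * 3600 + t.tm_min * 60 + t.tm_sec;
}
```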

New Type Checker

* Initial work toward type states.
* Rework most overloadable operators to use type families (see the sketch after this list).
* Initial work toward our new nonstrict mode.
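
On the overloadable-operators item, this is roughly how the lowering now looks in ConstraintGraphBuilder (the same code appears in the AstExprBinary hunk further down): each arithmetic/logical operator builds an instance of the corresponding builtin type family and emits a ReduceConstraint instead of a BinaryConstraint.

```cpp
// Excerpt-style sketch from ConstraintGraphBuilder::check(..., AstExprBinary*, ...):
// the Add case; Sub/Mul/Div/FloorDiv/Pow/Mod/And/Or follow the same shape with
// their corresponding families. Operators without a family fall back to the
// old BinaryConstraint path.
case AstExprBinary::Op::Add:
{
    TypeId resultType = arena->addType(TypeFamilyInstanceType{
        NotNull{&kBuiltinTypeFamilies.addFamily},
        {leftType, rightType},
        {},
    });
    addConstraint(scope, binary->location, ReduceConstraint{resultType});
    return Inference{resultType, std::move(refinement)};
}
```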


Native Codegen

* Fix native code generation for dead loops
* Annotate top-level functions as cold
* Slightly smaller/faster x64 Luau calls
* emitInstCall now sets savedpc itself, for consistency with all other implementations
* Implement cmov support for X64
* Fix assertion in luau-compile when module is empty
* Optimize A64 calls at some code size cost
* Inline constant array index offset into the load/store instruction
* Increase x64 spill slots from 5 to 13

---------

Co-authored-by: Arseny Kapoulkine <arseny.kapoulkine@gmail.com>
Co-authored-by: Vyacheslav Egorov <vegorov@roblox.com>
Co-authored-by: Lily Brown <lbrown@roblox.com>
Co-authored-by: Aaron Weiss <aaronweiss@roblox.com>
Co-authored-by: Alexander McCord <amccord@roblox.com>
Andy Friesen authored on 2023-10-06 12:02:32 -07:00; committed by GitHub
parent 225a4a0870
commit 36e0e64715
67 changed files with 1940 additions and 726 deletions


@ -187,12 +187,12 @@ private:
* @param generalize If true, generalize any lambdas that are encountered.
* @return the type of the expression.
*/
Inference check(const ScopePtr& scope, AstExpr* expr, ValueContext context = ValueContext::RValue, std::optional<TypeId> expectedType = {},
bool forceSingleton = false, bool generalize = true);
Inference check(
const ScopePtr& scope, AstExpr* expr, std::optional<TypeId> expectedType = {}, bool forceSingleton = false, bool generalize = true);
Inference check(const ScopePtr& scope, AstExprConstantString* string, std::optional<TypeId> expectedType, bool forceSingleton);
Inference check(const ScopePtr& scope, AstExprConstantBool* bool_, std::optional<TypeId> expectedType, bool forceSingleton);
Inference check(const ScopePtr& scope, AstExprLocal* local, ValueContext context);
Inference check(const ScopePtr& scope, AstExprLocal* local);
Inference check(const ScopePtr& scope, AstExprGlobal* global);
Inference check(const ScopePtr& scope, AstExprIndexName* indexName);
Inference check(const ScopePtr& scope, AstExprIndexExpr* indexExpr);
@ -208,6 +208,11 @@ private:
std::vector<TypeId> checkLValues(const ScopePtr& scope, AstArray<AstExpr*> exprs);
TypeId checkLValue(const ScopePtr& scope, AstExpr* expr);
TypeId checkLValue(const ScopePtr& scope, AstExprLocal* local);
TypeId checkLValue(const ScopePtr& scope, AstExprGlobal* global);
TypeId checkLValue(const ScopePtr& scope, AstExprIndexName* indexName);
TypeId checkLValue(const ScopePtr& scope, AstExprIndexExpr* indexExpr);
TypeId updateProperty(const ScopePtr& scope, AstExpr* expr);
struct FunctionSignature
{


@ -52,6 +52,7 @@ struct Scope
void addBuiltinTypeBinding(const Name& name, const TypeFun& tyFun);
std::optional<TypeId> lookup(Symbol sym) const;
std::optional<TypeId> lookupLValue(DefId def) const;
std::optional<TypeId> lookup(DefId def) const;
std::optional<std::pair<Binding*, Scope*>> lookupEx(Symbol sym);
@ -65,7 +66,15 @@ struct Scope
std::optional<Binding> linearSearchForBinding(const std::string& name, bool traverseScopeChain = true) const;
RefinementMap refinements;
DenseHashMap<const Def*, TypeId> dcrRefinements{nullptr};
// This can be viewed as the "unrefined" type of each binding.
DenseHashMap<const Def*, TypeId> lvalueTypes{nullptr};
// Luau values are routinely refined more narrowly than their actual
// inferred type through control flow statements. We retain those refined
// types here.
DenseHashMap<const Def*, TypeId> rvalueRefinements{nullptr};
void inheritRefinements(const ScopePtr& childScope);
// For mutually recursive type aliases, it's important that


@ -355,6 +355,7 @@ struct FunctionType
// `hasNoFreeOrGenericTypes` should be true if and only if the type does not have any free or generic types present inside it.
// this flag is used as an optimization to exit early from procedures that manipulate free or generic types.
bool hasNoFreeOrGenericTypes = false;
bool isCheckedFunction = false;
};
enum class TableState


@ -1,9 +1,13 @@
// This file is part of the Luau programming language and is licensed under MIT License; see LICENSE.txt for details
#pragma once
#include "ConstraintSolver.h"
#include "Error.h"
#include "Luau/Error.h"
#include "Luau/NotNull.h"
#include "Luau/Variant.h"
#include "NotNull.h"
#include "TypeCheckLimits.h"
#include <functional>
#include <string>
@ -23,6 +27,42 @@ struct BuiltinTypes;
struct TxnLog;
class Normalizer;
struct TypeFamilyContext
{
NotNull<TypeArena> arena;
NotNull<BuiltinTypes> builtins;
NotNull<Scope> scope;
NotNull<Normalizer> normalizer;
NotNull<InternalErrorReporter> ice;
NotNull<TypeCheckLimits> limits;
// nullptr if the type family is being reduced outside of the constraint solver.
ConstraintSolver* solver;
TypeFamilyContext(NotNull<ConstraintSolver> cs, NotNull<Scope> scope)
: arena(cs->arena)
, builtins(cs->builtinTypes)
, scope(scope)
, normalizer(cs->normalizer)
, ice(NotNull{&cs->iceReporter})
, limits(NotNull{&cs->limits})
, solver(cs.get())
{
}
TypeFamilyContext(NotNull<TypeArena> arena, NotNull<BuiltinTypes> builtins, NotNull<Scope> scope, NotNull<Normalizer> normalizer,
NotNull<InternalErrorReporter> ice, NotNull<TypeCheckLimits> limits)
: arena(arena)
, builtins(builtins)
, scope(scope)
, normalizer(normalizer)
, ice(ice)
, limits(limits)
, solver(nullptr)
{
}
};
/// Represents a reduction result, which may have successfully reduced the type,
/// may have concretely failed to reduce the type, or may simply be stuck
/// without more information.
@ -53,9 +93,7 @@ struct TypeFamily
std::string name;
/// The reducer function for the type family.
std::function<TypeFamilyReductionResult<TypeId>(std::vector<TypeId>, std::vector<TypePackId>, NotNull<TypeArena>, NotNull<BuiltinTypes>,
NotNull<TxnLog>, NotNull<Scope>, NotNull<Normalizer>, ConstraintSolver*)>
reducer;
std::function<TypeFamilyReductionResult<TypeId>(std::vector<TypeId>, std::vector<TypePackId>, NotNull<TypeFamilyContext>)> reducer;
};
/// Represents a type function that may be applied to map a series of types and
@ -67,9 +105,7 @@ struct TypePackFamily
std::string name;
/// The reducer function for the type pack family.
std::function<TypeFamilyReductionResult<TypePackId>(std::vector<TypeId>, std::vector<TypePackId>, NotNull<TypeArena>, NotNull<BuiltinTypes>,
NotNull<TxnLog>, NotNull<Scope>, NotNull<Normalizer>, ConstraintSolver*)>
reducer;
std::function<TypeFamilyReductionResult<TypePackId>(std::vector<TypeId>, std::vector<TypePackId>, NotNull<TypeFamilyContext>)> reducer;
};
struct FamilyGraphReductionResult
@ -82,76 +118,53 @@ struct FamilyGraphReductionResult
};
/**
* Attempt to reduce all instances of any type or type pack family in the type
* graph provided.
*
* @param entrypoint the entry point to the type graph.
* @param location the location the reduction is occurring at; used to populate
* type errors.
* @param arena an arena to allocate types into.
* @param builtins the built-in types.
* @param normalizer the normalizer to use when normalizing types
* @param log a TxnLog to use. If one is provided, substitution will take place
* against the TxnLog, otherwise substitutions will directly mutate the type
* graph. Do not provide the empty TxnLog, as a result.
*/
FamilyGraphReductionResult reduceFamilies(TypeId entrypoint, Location location, NotNull<TypeArena> arena, NotNull<BuiltinTypes> builtins,
NotNull<Scope> scope, NotNull<Normalizer> normalizer, TxnLog* log = nullptr, bool force = false);
/**
* Attempt to reduce all instances of any type or type pack family in the type
* graph provided.
*
* @param entrypoint the entry point to the type graph.
* @param location the location the reduction is occurring at; used to populate
* type errors.
* @param arena an arena to allocate types into.
* @param builtins the built-in types.
* @param normalizer the normalizer to use when normalizing types
* @param log a TxnLog to use. If one is provided, substitution will take place
* against the TxnLog, otherwise substitutions will directly mutate the type
* graph. Do not provide the empty TxnLog, as a result.
*/
FamilyGraphReductionResult reduceFamilies(TypePackId entrypoint, Location location, NotNull<TypeArena> arena, NotNull<BuiltinTypes> builtins,
NotNull<Scope> scope, NotNull<Normalizer> normalizer, TxnLog* log = nullptr, bool force = false);
* Attempt to reduce all instances of any type or type pack family in the type
* graph provided.
*
* @param entrypoint the entry point to the type graph.
* @param location the location the reduction is occurring at; used to populate
* type errors.
* @param arena an arena to allocate types into.
* @param builtins the built-in types.
* @param normalizer the normalizer to use when normalizing types
* @param ice the internal error reporter to use for ICEs
*/
FamilyGraphReductionResult reduceFamilies(TypeId entrypoint, Location location, TypeFamilyContext, bool force = false);
/**
* Attempt to reduce all instances of any type or type pack family in the type
* graph provided.
*
* @param solver the constraint solver this reduction is being performed in.
* @param entrypoint the entry point to the type graph.
* @param location the location the reduction is occurring at; used to populate
* type errors.
* @param log a TxnLog to use. If one is provided, substitution will take place
* against the TxnLog, otherwise substitutions will directly mutate the type
* graph. Do not provide the empty TxnLog, as a result.
* @param arena an arena to allocate types into.
* @param builtins the built-in types.
* @param normalizer the normalizer to use when normalizing types
* @param ice the internal error reporter to use for ICEs
*/
FamilyGraphReductionResult reduceFamilies(
NotNull<ConstraintSolver> solver, TypeId entrypoint, Location location, NotNull<Scope> scope, TxnLog* log = nullptr, bool force = false);
/**
* Attempt to reduce all instances of any type or type pack family in the type
* graph provided.
*
* @param solver the constraint solver this reduction is being performed in.
* @param entrypoint the entry point to the type graph.
* @param location the location the reduction is occurring at; used to populate
* type errors.
* @param log a TxnLog to use. If one is provided, substitution will take place
* against the TxnLog, otherwise substitutions will directly mutate the type
* graph. Do not provide the empty TxnLog, as a result.
*/
FamilyGraphReductionResult reduceFamilies(
NotNull<ConstraintSolver> solver, TypePackId entrypoint, Location location, NotNull<Scope> scope, TxnLog* log = nullptr, bool force = false);
FamilyGraphReductionResult reduceFamilies(TypePackId entrypoint, Location location, TypeFamilyContext, bool force = false);
struct BuiltinTypeFamilies
{
BuiltinTypeFamilies();
TypeFamily addFamily;
TypeFamily subFamily;
TypeFamily mulFamily;
TypeFamily divFamily;
TypeFamily idivFamily;
TypeFamily powFamily;
TypeFamily modFamily;
TypeFamily andFamily;
TypeFamily orFamily;
void addToScope(NotNull<TypeArena> arena, NotNull<Scope> scope) const;
};
const BuiltinTypeFamilies kBuiltinTypeFamilies{};
} // namespace Luau
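
A hedged usage sketch of the new signatures above: callers now bundle the reduction environment into a TypeFamilyContext rather than passing each dependency to reduceFamilies separately. Outside the constraint solver (so solver == nullptr) that looks roughly as follows; the surrounding locals (someType, location, reportErrors, ...) are assumed for illustration.

```cpp
// Sketch only, mirroring the declarations above; not an exact call site.
FamilyGraphReductionResult result = reduceFamilies(
    someType, location, TypeFamilyContext{arena, builtinTypes, scope, normalizer, ice, limits}, /*force*/ false);

if (!result.errors.empty())
    reportErrors(result.errors); // surface any reduction failures (assumed error sink)

for (TypeId ty : result.reducedTypes)
{
    // each successfully reduced type can now be unblocked or inspected
}
```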


@ -8,7 +8,9 @@
#include "Luau/ToString.h"
#include "Luau/ConstraintSolver.h"
#include "Luau/ConstraintGraphBuilder.h"
#include "Luau/NotNull.h"
#include "Luau/TypeInfer.h"
#include "Luau/TypeFamily.h"
#include "Luau/TypePack.h"
#include "Luau/Type.h"
#include "Luau/TypeUtils.h"
@ -21,6 +23,8 @@
* about a function that takes any number of values, but where each value must have some specific type.
*/
LUAU_FASTFLAG(DebugLuauDeferredConstraintResolution)
namespace Luau
{
@ -209,6 +213,9 @@ void registerBuiltinGlobals(Frontend& frontend, GlobalTypes& globals, bool typeC
TypeArena& arena = globals.globalTypes;
NotNull<BuiltinTypes> builtinTypes = globals.builtinTypes;
if (FFlag::DebugLuauDeferredConstraintResolution)
kBuiltinTypeFamilies.addToScope(NotNull{&arena}, NotNull{globals.globalScope.get()});
LoadDefinitionFileResult loadResult = frontend.loadDefinitionFile(
globals, globals.globalScope, getBuiltinDefinitionSource(), "@luau", /* captureComments */ false, typeCheckForAutocomplete);
LUAU_ASSERT(loadResult.success);


@ -455,7 +455,7 @@ void ConstraintGraphBuilder::applyRefinements(const ScopePtr& scope, Location lo
}
}
scope->dcrRefinements[def] = ty;
scope->rvalueRefinements[def] = ty;
}
}
@ -627,7 +627,7 @@ ControlFlow ConstraintGraphBuilder::visit(const ScopePtr& scope, AstStatLocal* l
if (hasAnnotation)
expectedType = varTypes.at(i);
TypeId exprType = check(scope, value, ValueContext::RValue, expectedType, /*forceSingleton*/ false, /*generalize*/ true).ty;
TypeId exprType = check(scope, value, expectedType, /*forceSingleton*/ false, /*generalize*/ true).ty;
if (i < varTypes.size())
{
if (varTypes[i])
@ -699,7 +699,8 @@ ControlFlow ConstraintGraphBuilder::visit(const ScopePtr& scope, AstStatLocal* l
// HACK: In the greedy solver, we say the type state of a variable is the type annotation itself, but
// the actual type state is the corresponding initializer expression (if it exists) or nil otherwise.
BreadcrumbId bc = dfg->getBreadcrumb(l);
scope->dcrRefinements[bc->def] = varTypes[i];
scope->lvalueTypes[bc->def] = varTypes[i];
scope->rvalueRefinements[bc->def] = varTypes[i];
}
if (local->values.size > 0)
@ -764,7 +765,8 @@ ControlFlow ConstraintGraphBuilder::visit(const ScopePtr& scope, AstStatFor* for
forScope->bindings[for_->var] = Binding{annotationTy, for_->var->location};
BreadcrumbId bc = dfg->getBreadcrumb(for_->var);
forScope->dcrRefinements[bc->def] = annotationTy;
forScope->lvalueTypes[bc->def] = annotationTy;
forScope->rvalueRefinements[bc->def] = annotationTy;
visit(forScope, for_->body);
@ -791,7 +793,8 @@ ControlFlow ConstraintGraphBuilder::visit(const ScopePtr& scope, AstStatForIn* f
variableTypes.push_back(ty);
BreadcrumbId bc = dfg->getBreadcrumb(var);
loopScope->dcrRefinements[bc->def] = ty;
loopScope->lvalueTypes[bc->def] = ty;
loopScope->rvalueRefinements[bc->def] = ty;
}
// It is always ok to provide too few variables, so we give this pack a free tail.
@ -845,8 +848,10 @@ ControlFlow ConstraintGraphBuilder::visit(const ScopePtr& scope, AstStatLocalFun
sig.bodyScope->bindings[function->name] = Binding{sig.signature, function->func->location};
BreadcrumbId bc = dfg->getBreadcrumb(function->name);
scope->dcrRefinements[bc->def] = functionType;
sig.bodyScope->dcrRefinements[bc->def] = sig.signature;
scope->lvalueTypes[bc->def] = functionType;
scope->rvalueRefinements[bc->def] = functionType;
sig.bodyScope->lvalueTypes[bc->def] = sig.signature;
sig.bodyScope->rvalueRefinements[bc->def] = sig.signature;
Checkpoint start = checkpoint(this);
checkFunctionBody(sig.bodyScope, function->func);
@ -889,9 +894,12 @@ ControlFlow ConstraintGraphBuilder::visit(const ScopePtr& scope, AstStatFunction
const NullableBreadcrumbId functionBreadcrumb = dfg->getBreadcrumb(function->name);
std::optional<TypeId> existingFunctionTy;
if (functionBreadcrumb)
existingFunctionTy = scope->lookupLValue(functionBreadcrumb->def);
if (AstExprLocal* localName = function->name->as<AstExprLocal>())
{
std::optional<TypeId> existingFunctionTy = scope->lookup(localName->local);
if (existingFunctionTy)
{
addConstraint(scope, function->name->location, SubtypeConstraint{generalizedType, *existingFunctionTy});
@ -905,11 +913,13 @@ ControlFlow ConstraintGraphBuilder::visit(const ScopePtr& scope, AstStatFunction
sig.bodyScope->bindings[localName->local] = Binding{sig.signature, localName->location};
if (functionBreadcrumb)
sig.bodyScope->dcrRefinements[functionBreadcrumb->def] = sig.signature;
{
sig.bodyScope->lvalueTypes[functionBreadcrumb->def] = sig.signature;
sig.bodyScope->rvalueRefinements[functionBreadcrumb->def] = sig.signature;
}
}
else if (AstExprGlobal* globalName = function->name->as<AstExprGlobal>())
{
std::optional<TypeId> existingFunctionTy = scope->lookup(globalName->name);
if (!existingFunctionTy)
ice->ice("prepopulateGlobalScope did not populate a global name", globalName->location);
@ -918,7 +928,10 @@ ControlFlow ConstraintGraphBuilder::visit(const ScopePtr& scope, AstStatFunction
sig.bodyScope->bindings[globalName->name] = Binding{sig.signature, globalName->location};
if (functionBreadcrumb)
sig.bodyScope->dcrRefinements[functionBreadcrumb->def] = sig.signature;
{
sig.bodyScope->lvalueTypes[functionBreadcrumb->def] = sig.signature;
sig.bodyScope->rvalueRefinements[functionBreadcrumb->def] = sig.signature;
}
}
else if (AstExprIndexName* indexName = function->name->as<AstExprIndexName>())
{
@ -946,7 +959,7 @@ ControlFlow ConstraintGraphBuilder::visit(const ScopePtr& scope, AstStatFunction
ice->ice("generalizedType == nullptr", function->location);
if (functionBreadcrumb)
scope->dcrRefinements[functionBreadcrumb->def] = generalizedType;
scope->rvalueRefinements[functionBreadcrumb->def] = generalizedType;
checkFunctionBody(sig.bodyScope, function->func);
Checkpoint end = checkpoint(this);
@ -1059,7 +1072,7 @@ ControlFlow ConstraintGraphBuilder::visit(const ScopePtr& scope, AstStatCompound
ControlFlow ConstraintGraphBuilder::visit(const ScopePtr& scope, AstStatIf* ifStatement)
{
RefinementId refinement = check(scope, ifStatement->condition, ValueContext::RValue, std::nullopt).refinement;
RefinementId refinement = check(scope, ifStatement->condition, std::nullopt).refinement;
ScopePtr thenScope = childScope(ifStatement->thenbody, scope);
applyRefinements(thenScope, ifStatement->condition->location, refinement);
@ -1164,7 +1177,8 @@ ControlFlow ConstraintGraphBuilder::visit(const ScopePtr& scope, AstStatDeclareG
rootScope->bindings[global->name] = Binding{globalTy, global->location};
BreadcrumbId bc = dfg->getBreadcrumb(global);
rootScope->dcrRefinements[bc->def] = globalTy;
rootScope->lvalueTypes[bc->def] = globalTy;
rootScope->rvalueRefinements[bc->def] = globalTy;
return ControlFlow::None;
}
@ -1323,6 +1337,7 @@ ControlFlow ConstraintGraphBuilder::visit(const ScopePtr& scope, AstStatDeclareF
TypePackId retPack = resolveTypePack(funScope, global->retTypes, /* inTypeArguments */ false);
TypeId fnType = arena->addType(FunctionType{TypeLevel{}, funScope.get(), std::move(genericTys), std::move(genericTps), paramPack, retPack});
FunctionType* ftv = getMutable<FunctionType>(fnType);
ftv->isCheckedFunction = global->checkedFunction;
ftv->argNames.reserve(global->paramNames.size);
for (const auto& el : global->paramNames)
@ -1334,7 +1349,8 @@ ControlFlow ConstraintGraphBuilder::visit(const ScopePtr& scope, AstStatDeclareF
scope->bindings[global->name] = Binding{fnType, global->location};
BreadcrumbId bc = dfg->getBreadcrumb(global);
rootScope->dcrRefinements[bc->def] = fnType;
rootScope->lvalueTypes[bc->def] = fnType;
rootScope->rvalueRefinements[bc->def] = fnType;
return ControlFlow::None;
}
@ -1363,7 +1379,7 @@ InferencePack ConstraintGraphBuilder::checkPack(
std::optional<TypeId> expectedType;
if (i < expectedTypes.size())
expectedType = expectedTypes[i];
head.push_back(check(scope, expr, ValueContext::RValue, expectedType).ty);
head.push_back(check(scope, expr, expectedType).ty);
}
else
{
@ -1407,7 +1423,7 @@ InferencePack ConstraintGraphBuilder::checkPack(
std::optional<TypeId> expectedType;
if (!expectedTypes.empty())
expectedType = expectedTypes[0];
TypeId t = check(scope, expr, ValueContext::RValue, expectedType, /*forceSingletons*/ false, generalize).ty;
TypeId t = check(scope, expr, expectedType, /*forceSingletons*/ false, generalize).ty;
result = InferencePack{arena->addTypePack({t})};
}
@ -1489,8 +1505,7 @@ InferencePack ConstraintGraphBuilder::checkPack(const ScopePtr& scope, AstExprCa
}
else if (i < exprArgs.size() - 1 || !(arg->is<AstExprCall>() || arg->is<AstExprVarargs>()))
{
auto [ty, refinement] =
check(scope, arg, ValueContext::RValue, /*expectedType*/ std::nullopt, /*forceSingleton*/ false, /*generalize*/ false);
auto [ty, refinement] = check(scope, arg, /*expectedType*/ std::nullopt, /*forceSingleton*/ false, /*generalize*/ false);
args.push_back(ty);
argumentRefinements.push_back(refinement);
}
@ -1549,7 +1564,8 @@ InferencePack ConstraintGraphBuilder::checkPack(const ScopePtr& scope, AstExprCa
scope->bindings[targetLocal->local].typeId = resultTy;
BreadcrumbId bc = dfg->getBreadcrumb(targetLocal);
scope->dcrRefinements[bc->def] = resultTy; // TODO: typestates: track this as an assignment
scope->lvalueTypes[bc->def] = resultTy; // TODO: typestates: track this as an assignment
scope->rvalueRefinements[bc->def] = resultTy; // TODO: typestates: track this as an assignment
}
return InferencePack{arena->addTypePack({resultTy}), {refinementArena.variadic(returnRefinements)}};
@ -1588,7 +1604,7 @@ InferencePack ConstraintGraphBuilder::checkPack(const ScopePtr& scope, AstExprCa
}
Inference ConstraintGraphBuilder::check(
const ScopePtr& scope, AstExpr* expr, ValueContext context, std::optional<TypeId> expectedType, bool forceSingleton, bool generalize)
const ScopePtr& scope, AstExpr* expr, std::optional<TypeId> expectedType, bool forceSingleton, bool generalize)
{
RecursionCounter counter{&recursionCount};
@ -1601,7 +1617,7 @@ Inference ConstraintGraphBuilder::check(
Inference result;
if (auto group = expr->as<AstExprGroup>())
result = check(scope, group->expr, ValueContext::RValue, expectedType, forceSingleton);
result = check(scope, group->expr, expectedType, forceSingleton);
else if (auto stringExpr = expr->as<AstExprConstantString>())
result = check(scope, stringExpr, expectedType, forceSingleton);
else if (expr->is<AstExprConstantNumber>())
@ -1611,7 +1627,7 @@ Inference ConstraintGraphBuilder::check(
else if (expr->is<AstExprConstantNil>())
result = Inference{builtinTypes->nilType};
else if (auto local = expr->as<AstExprLocal>())
result = check(scope, local, context);
result = check(scope, local);
else if (auto global = expr->as<AstExprGlobal>())
result = check(scope, global);
else if (expr->is<AstExprVarargs>())
@ -1706,13 +1722,11 @@ Inference ConstraintGraphBuilder::check(const ScopePtr& scope, AstExprConstantBo
return Inference{builtinTypes->booleanType};
}
Inference ConstraintGraphBuilder::check(const ScopePtr& scope, AstExprLocal* local, ValueContext context)
Inference ConstraintGraphBuilder::check(const ScopePtr& scope, AstExprLocal* local)
{
BreadcrumbId bc = dfg->getBreadcrumb(local);
if (auto ty = scope->lookup(bc->def); ty && context == ValueContext::RValue)
return Inference{*ty, refinementArena.proposition(bc, builtinTypes->truthyType)};
else if (auto ty = scope->lookup(local->local))
if (auto ty = scope->lookup(bc->def))
return Inference{*ty, refinementArena.proposition(bc, builtinTypes->truthyType)};
else
ice->ice("AstExprLocal came before its declaration?");
@ -1729,7 +1743,7 @@ Inference ConstraintGraphBuilder::check(const ScopePtr& scope, AstExprGlobal* gl
return Inference{*ty, refinementArena.proposition(bc, builtinTypes->truthyType)};
else if (auto ty = scope->lookup(global->name))
{
rootScope->dcrRefinements[bc->def] = *ty;
rootScope->rvalueRefinements[bc->def] = *ty;
return Inference{*ty, refinementArena.proposition(bc, builtinTypes->truthyType)};
}
else
@ -1750,7 +1764,7 @@ Inference ConstraintGraphBuilder::check(const ScopePtr& scope, AstExprIndexName*
if (auto ty = scope->lookup(bc->def))
return Inference{*ty, refinementArena.proposition(NotNull{bc}, builtinTypes->truthyType)};
scope->dcrRefinements[bc->def] = result;
scope->rvalueRefinements[bc->def] = result;
}
addConstraint(scope, indexName->expr->location, HasPropConstraint{result, obj, indexName->index.value});
@ -1773,7 +1787,7 @@ Inference ConstraintGraphBuilder::check(const ScopePtr& scope, AstExprIndexExpr*
if (auto ty = scope->lookup(bc->def))
return Inference{*ty, refinementArena.proposition(NotNull{bc}, builtinTypes->truthyType)};
scope->dcrRefinements[bc->def] = result;
scope->rvalueRefinements[bc->def] = result;
}
TableIndexer indexer{indexType, result};
@ -1836,7 +1850,9 @@ Inference ConstraintGraphBuilder::check(const ScopePtr& scope, AstExprBinary* bi
{
auto [leftType, rightType, refinement] = checkBinary(scope, binary, expectedType);
if (binary->op == AstExprBinary::Op::Add)
switch (binary->op)
{
case AstExprBinary::Op::Add:
{
TypeId resultType = arena->addType(TypeFamilyInstanceType{
NotNull{&kBuiltinTypeFamilies.addFamily},
@ -1846,11 +1862,94 @@ Inference ConstraintGraphBuilder::check(const ScopePtr& scope, AstExprBinary* bi
addConstraint(scope, binary->location, ReduceConstraint{resultType});
return Inference{resultType, std::move(refinement)};
}
TypeId resultType = arena->addType(BlockedType{});
addConstraint(scope, binary->location,
BinaryConstraint{binary->op, leftType, rightType, resultType, binary, &module->astOriginalCallTypes, &module->astOverloadResolvedTypes});
return Inference{resultType, std::move(refinement)};
case AstExprBinary::Op::Sub:
{
TypeId resultType = arena->addType(TypeFamilyInstanceType{
NotNull{&kBuiltinTypeFamilies.subFamily},
{leftType, rightType},
{},
});
addConstraint(scope, binary->location, ReduceConstraint{resultType});
return Inference{resultType, std::move(refinement)};
}
case AstExprBinary::Op::Mul:
{
TypeId resultType = arena->addType(TypeFamilyInstanceType{
NotNull{&kBuiltinTypeFamilies.mulFamily},
{leftType, rightType},
{},
});
addConstraint(scope, binary->location, ReduceConstraint{resultType});
return Inference{resultType, std::move(refinement)};
}
case AstExprBinary::Op::Div:
{
TypeId resultType = arena->addType(TypeFamilyInstanceType{
NotNull{&kBuiltinTypeFamilies.divFamily},
{leftType, rightType},
{},
});
addConstraint(scope, binary->location, ReduceConstraint{resultType});
return Inference{resultType, std::move(refinement)};
}
case AstExprBinary::Op::FloorDiv:
{
TypeId resultType = arena->addType(TypeFamilyInstanceType{
NotNull{&kBuiltinTypeFamilies.idivFamily},
{leftType, rightType},
{},
});
addConstraint(scope, binary->location, ReduceConstraint{resultType});
return Inference{resultType, std::move(refinement)};
}
case AstExprBinary::Op::Pow:
{
TypeId resultType = arena->addType(TypeFamilyInstanceType{
NotNull{&kBuiltinTypeFamilies.powFamily},
{leftType, rightType},
{},
});
addConstraint(scope, binary->location, ReduceConstraint{resultType});
return Inference{resultType, std::move(refinement)};
}
case AstExprBinary::Op::Mod:
{
TypeId resultType = arena->addType(TypeFamilyInstanceType{
NotNull{&kBuiltinTypeFamilies.modFamily},
{leftType, rightType},
{},
});
addConstraint(scope, binary->location, ReduceConstraint{resultType});
return Inference{resultType, std::move(refinement)};
}
case AstExprBinary::Op::And:
{
TypeId resultType = arena->addType(TypeFamilyInstanceType{
NotNull{&kBuiltinTypeFamilies.andFamily},
{leftType, rightType},
{},
});
addConstraint(scope, binary->location, ReduceConstraint{resultType});
return Inference{resultType, std::move(refinement)};
}
case AstExprBinary::Op::Or:
{
TypeId resultType = arena->addType(TypeFamilyInstanceType{
NotNull{&kBuiltinTypeFamilies.orFamily},
{leftType, rightType},
{},
});
addConstraint(scope, binary->location, ReduceConstraint{resultType});
return Inference{resultType, std::move(refinement)};
}
default:
{
TypeId resultType = arena->addType(BlockedType{});
addConstraint(scope, binary->location,
BinaryConstraint{binary->op, leftType, rightType, resultType, binary, &module->astOriginalCallTypes, &module->astOverloadResolvedTypes});
return Inference{resultType, std::move(refinement)};
}
}
}
Inference ConstraintGraphBuilder::check(const ScopePtr& scope, AstExprIfElse* ifElse, std::optional<TypeId> expectedType)
@ -1860,18 +1959,18 @@ Inference ConstraintGraphBuilder::check(const ScopePtr& scope, AstExprIfElse* if
ScopePtr thenScope = childScope(ifElse->trueExpr, scope);
applyRefinements(thenScope, ifElse->trueExpr->location, refinement);
TypeId thenType = check(thenScope, ifElse->trueExpr, ValueContext::RValue, expectedType).ty;
TypeId thenType = check(thenScope, ifElse->trueExpr, expectedType).ty;
ScopePtr elseScope = childScope(ifElse->falseExpr, scope);
applyRefinements(elseScope, ifElse->falseExpr->location, refinementArena.negation(refinement));
TypeId elseType = check(elseScope, ifElse->falseExpr, ValueContext::RValue, expectedType).ty;
TypeId elseType = check(elseScope, ifElse->falseExpr, expectedType).ty;
return Inference{expectedType ? *expectedType : simplifyUnion(builtinTypes, arena, thenType, elseType).result};
}
Inference ConstraintGraphBuilder::check(const ScopePtr& scope, AstExprTypeAssertion* typeAssert)
{
check(scope, typeAssert->expr, ValueContext::RValue, std::nullopt);
check(scope, typeAssert->expr, std::nullopt);
return Inference{resolveType(scope, typeAssert->annotation, /* inTypeArguments */ false)};
}
@ -1893,11 +1992,11 @@ std::tuple<TypeId, TypeId, RefinementId> ConstraintGraphBuilder::checkBinary(
if (expectedType)
relaxedExpectedLhs = arena->addType(UnionType{{builtinTypes->falsyType, *expectedType}});
auto [leftType, leftRefinement] = check(scope, binary->left, ValueContext::RValue, relaxedExpectedLhs);
auto [leftType, leftRefinement] = check(scope, binary->left, relaxedExpectedLhs);
ScopePtr rightScope = childScope(binary->right, scope);
applyRefinements(rightScope, binary->right->location, leftRefinement);
auto [rightType, rightRefinement] = check(rightScope, binary->right, ValueContext::RValue, expectedType);
auto [rightType, rightRefinement] = check(rightScope, binary->right, expectedType);
return {leftType, rightType, refinementArena.conjunction(leftRefinement, rightRefinement)};
}
@ -1908,11 +2007,11 @@ std::tuple<TypeId, TypeId, RefinementId> ConstraintGraphBuilder::checkBinary(
if (expectedType)
relaxedExpectedLhs = arena->addType(UnionType{{builtinTypes->falsyType, *expectedType}});
auto [leftType, leftRefinement] = check(scope, binary->left, ValueContext::RValue, relaxedExpectedLhs);
auto [leftType, leftRefinement] = check(scope, binary->left, relaxedExpectedLhs);
ScopePtr rightScope = childScope(binary->right, scope);
applyRefinements(rightScope, binary->right->location, refinementArena.negation(leftRefinement));
auto [rightType, rightRefinement] = check(rightScope, binary->right, ValueContext::RValue, expectedType);
auto [rightType, rightRefinement] = check(rightScope, binary->right, expectedType);
return {leftType, rightType, refinementArena.disjunction(leftRefinement, rightRefinement)};
}
@ -1973,8 +2072,8 @@ std::tuple<TypeId, TypeId, RefinementId> ConstraintGraphBuilder::checkBinary(
{
// We are checking a binary expression of the form a op b
// Just because a op b is expected to return a bool, doesn't mean a, b are expected to be bools too
TypeId leftType = check(scope, binary->left, ValueContext::RValue, {}, true).ty;
TypeId rightType = check(scope, binary->right, ValueContext::RValue, {}, true).ty;
TypeId leftType = check(scope, binary->left, {}, true).ty;
TypeId rightType = check(scope, binary->right, {}, true).ty;
RefinementId leftRefinement = nullptr;
if (auto bc = dfg->getBreadcrumb(binary->left))
@ -1994,8 +2093,8 @@ std::tuple<TypeId, TypeId, RefinementId> ConstraintGraphBuilder::checkBinary(
}
else
{
TypeId leftType = check(scope, binary->left, ValueContext::RValue).ty;
TypeId rightType = check(scope, binary->right, ValueContext::RValue).ty;
TypeId leftType = check(scope, binary->left).ty;
TypeId rightType = check(scope, binary->right).ty;
return {leftType, rightType, nullptr};
}
}
@ -2011,6 +2110,47 @@ std::vector<TypeId> ConstraintGraphBuilder::checkLValues(const ScopePtr& scope,
return types;
}
TypeId ConstraintGraphBuilder::checkLValue(const ScopePtr& scope, AstExpr* expr)
{
if (auto local = expr->as<AstExprLocal>())
return checkLValue(scope, local);
else if (auto global = expr->as<AstExprGlobal>())
return checkLValue(scope, global);
else if (auto indexName = expr->as<AstExprIndexName>())
return checkLValue(scope, indexName);
else if (auto indexExpr = expr->as<AstExprIndexExpr>())
return checkLValue(scope, indexExpr);
else if (auto error = expr->as<AstExprError>())
{
check(scope, error);
return builtinTypes->errorRecoveryType();
}
else
ice->ice("checkLValue is inexhaustive");
}
TypeId ConstraintGraphBuilder::checkLValue(const ScopePtr& scope, AstExprLocal* local)
{
std::optional<TypeId> upperBound = scope->lookup(Symbol{local->local});
LUAU_ASSERT(upperBound);
return *upperBound;
}
TypeId ConstraintGraphBuilder::checkLValue(const ScopePtr& scope, AstExprGlobal* global)
{
return scope->lookup(Symbol{global->name}).value_or(builtinTypes->errorRecoveryType());
}
TypeId ConstraintGraphBuilder::checkLValue(const ScopePtr& scope, AstExprIndexName* indexName)
{
return updateProperty(scope, indexName);
}
TypeId ConstraintGraphBuilder::checkLValue(const ScopePtr& scope, AstExprIndexExpr* indexExpr)
{
return updateProperty(scope, indexExpr);
}
static bool isIndexNameEquivalent(AstExpr* expr)
{
if (expr->is<AstExprIndexName>())
@ -2031,7 +2171,7 @@ static bool isIndexNameEquivalent(AstExpr* expr)
*
* If expr has the form name.a.b.c
*/
TypeId ConstraintGraphBuilder::checkLValue(const ScopePtr& scope, AstExpr* expr)
TypeId ConstraintGraphBuilder::updateProperty(const ScopePtr& scope, AstExpr* expr)
{
if (auto indexExpr = expr->as<AstExprIndexExpr>(); indexExpr && !indexExpr->index->is<AstExprConstantString>())
{
@ -2058,7 +2198,7 @@ TypeId ConstraintGraphBuilder::checkLValue(const ScopePtr& scope, AstExpr* expr)
return propType;
}
else if (!isIndexNameEquivalent(expr))
return check(scope, expr, ValueContext::LValue).ty;
return check(scope, expr).ty;
Symbol sym;
std::vector<std::string> segments;
@ -2086,7 +2226,7 @@ TypeId ConstraintGraphBuilder::checkLValue(const ScopePtr& scope, AstExpr* expr)
else if (auto indexExpr = e->as<AstExprIndexExpr>())
{
// We need to populate the type for the index value
check(scope, indexExpr->index, ValueContext::RValue);
check(scope, indexExpr->index);
if (auto strIndex = indexExpr->index->as<AstExprConstantString>())
{
segments.push_back(std::string(strIndex->value.data, strIndex->value.size));
@ -2095,11 +2235,11 @@ TypeId ConstraintGraphBuilder::checkLValue(const ScopePtr& scope, AstExpr* expr)
}
else
{
return check(scope, expr, ValueContext::LValue).ty;
return check(scope, expr).ty;
}
}
else
return check(scope, expr, ValueContext::LValue).ty;
return check(scope, expr).ty;
}
LUAU_ASSERT(!segments.empty());
@ -2109,7 +2249,7 @@ TypeId ConstraintGraphBuilder::checkLValue(const ScopePtr& scope, AstExpr* expr)
auto lookupResult = scope->lookupEx(sym);
if (!lookupResult)
return check(scope, expr, ValueContext::LValue).ty;
return check(scope, expr).ty;
const auto [subjectBinding, symbolScope] = std::move(*lookupResult);
TypeId subjectType = subjectBinding->typeId;
@ -2139,7 +2279,10 @@ TypeId ConstraintGraphBuilder::checkLValue(const ScopePtr& scope, AstExpr* expr)
// This can fail if the user is erroneously trying to augment a builtin
// table like os or string.
if (auto bc = dfg->getBreadcrumb(e))
symbolScope->dcrRefinements[bc->def] = updatedType;
{
symbolScope->lvalueTypes[bc->def] = updatedType;
symbolScope->rvalueRefinements[bc->def] = updatedType;
}
}
return propTy;
@ -2232,7 +2375,7 @@ Inference ConstraintGraphBuilder::check(const ScopePtr& scope, AstExprTable* exp
checkExpectedIndexResultType = pinnedIndexResultType;
}
TypeId itemTy = check(scope, item.value, ValueContext::RValue, checkExpectedIndexResultType).ty;
TypeId itemTy = check(scope, item.value, checkExpectedIndexResultType).ty;
if (isIndexedResultType && !pinnedIndexResultType)
pinnedIndexResultType = itemTy;
@ -2242,7 +2385,7 @@ Inference ConstraintGraphBuilder::check(const ScopePtr& scope, AstExprTable* exp
// Even though we don't need to use the type of the item's key if
// it's a string constant, we still want to check it to populate
// astTypes.
TypeId keyTy = check(scope, item.key, ValueContext::RValue, annotatedKeyType).ty;
TypeId keyTy = check(scope, item.key, annotatedKeyType).ty;
if (AstExprConstantString* key = item.key->as<AstExprConstantString>())
{
@ -2346,7 +2489,8 @@ ConstraintGraphBuilder::FunctionSignature ConstraintGraphBuilder::checkFunctionS
signatureScope->bindings[fn->self] = Binding{selfType, fn->self->location};
BreadcrumbId bc = dfg->getBreadcrumb(fn->self);
signatureScope->dcrRefinements[bc->def] = selfType;
signatureScope->lvalueTypes[bc->def] = selfType;
signatureScope->rvalueRefinements[bc->def] = selfType;
}
for (size_t i = 0; i < fn->args.size; ++i)
@ -2370,7 +2514,8 @@ ConstraintGraphBuilder::FunctionSignature ConstraintGraphBuilder::checkFunctionS
signatureScope->bindings[local] = Binding{argTy, local->location};
BreadcrumbId bc = dfg->getBreadcrumb(local);
signatureScope->dcrRefinements[bc->def] = argTy;
signatureScope->lvalueTypes[bc->def] = argTy;
signatureScope->rvalueRefinements[bc->def] = argTy;
}
TypePackId varargPack = nullptr;
@ -2612,6 +2757,7 @@ TypeId ConstraintGraphBuilder::resolveType(const ScopePtr& scope, AstType* ty, b
// TODO: FunctionType needs a pointer to the scope so that we know
// how to quantify/instantiate it.
FunctionType ftv{TypeLevel{}, scope.get(), {}, {}, argTypes, returnTypes};
ftv.isCheckedFunction = fn->checkedFunction;
// This replicates the behavior of the appropriate FunctionType
// constructors.
@ -2836,17 +2982,27 @@ struct GlobalPrepopulator : AstVisitor
{
const NotNull<Scope> globalScope;
const NotNull<TypeArena> arena;
const NotNull<const DataFlowGraph> dfg;
GlobalPrepopulator(NotNull<Scope> globalScope, NotNull<TypeArena> arena)
GlobalPrepopulator(NotNull<Scope> globalScope, NotNull<TypeArena> arena, NotNull<const DataFlowGraph> dfg)
: globalScope(globalScope)
, arena(arena)
, dfg(dfg)
{
}
bool visit(AstStatFunction* function) override
{
if (AstExprGlobal* g = function->name->as<AstExprGlobal>())
globalScope->bindings[g->name] = Binding{arena->addType(BlockedType{})};
{
TypeId bt = arena->addType(BlockedType{});
globalScope->bindings[g->name] = Binding{bt};
NullableBreadcrumbId bc = dfg->getBreadcrumb(function->name);
LUAU_ASSERT(bc);
globalScope->lvalueTypes[bc->def] = bt;
}
return true;
}
@ -2854,7 +3010,7 @@ struct GlobalPrepopulator : AstVisitor
void ConstraintGraphBuilder::prepopulateGlobalScope(const ScopePtr& globalScope, AstStatBlock* program)
{
GlobalPrepopulator gp{NotNull{globalScope.get()}, arena};
GlobalPrepopulator gp{NotNull{globalScope.get()}, arena, dfg};
if (prepareModuleScope)
prepareModuleScope(module->name, globalScope);


@ -1344,7 +1344,7 @@ bool ConstraintSolver::tryDispatch(const FunctionCallConstraint& c, NotNull<cons
head.insert(head.begin(), fn);
argsPack = arena->addTypePack(TypePack{std::move(head), tail});
fn = *callMm;
fn = follow(*callMm);
asMutable(c.result)->ty.emplace<FreeTypePack>(constraint->scope);
}
else
@ -1887,6 +1887,9 @@ bool ConstraintSolver::tryDispatch(const RefineConstraint& c, NotNull<const Cons
const TypeId type = follow(c.type);
if (hasUnresolvedConstraints(type))
return block(type, constraint);
LUAU_ASSERT(get<BlockedType>(c.resultType));
if (type == c.resultType)
@ -1946,8 +1949,7 @@ bool ConstraintSolver::tryDispatch(const RefineConstraint& c, NotNull<const Cons
bool ConstraintSolver::tryDispatch(const ReduceConstraint& c, NotNull<const Constraint> constraint, bool force)
{
TypeId ty = follow(c.ty);
FamilyGraphReductionResult result =
reduceFamilies(NotNull{this}, ty, constraint->location, constraint->scope, nullptr, force);
FamilyGraphReductionResult result = reduceFamilies(ty, constraint->location, TypeFamilyContext{NotNull{this}, constraint->scope}, force);
for (TypeId r : result.reducedTypes)
unblock(r, constraint->location);
@ -1970,8 +1972,7 @@ bool ConstraintSolver::tryDispatch(const ReduceConstraint& c, NotNull<const Cons
bool ConstraintSolver::tryDispatch(const ReducePackConstraint& c, NotNull<const Constraint> constraint, bool force)
{
TypePackId tp = follow(c.tp);
FamilyGraphReductionResult result =
reduceFamilies(NotNull{this}, tp, constraint->location, constraint->scope, nullptr, force);
FamilyGraphReductionResult result = reduceFamilies(tp, constraint->location, TypeFamilyContext{NotNull{this}, constraint->scope}, force);
for (TypeId r : result.reducedTypes)
unblock(r, constraint->location);


@ -55,12 +55,25 @@ std::optional<std::pair<Binding*, Scope*>> Scope::lookupEx(Symbol sym)
}
}
std::optional<TypeId> Scope::lookupLValue(DefId def) const
{
for (const Scope* current = this; current; current = current->parent.get())
{
if (auto ty = current->lvalueTypes.find(def))
return *ty;
}
return std::nullopt;
}
// TODO: We might kill Scope::lookup(Symbol) once data flow is fully fleshed out with type states and control flow analysis.
std::optional<TypeId> Scope::lookup(DefId def) const
{
for (const Scope* current = this; current; current = current->parent.get())
{
if (auto ty = current->dcrRefinements.find(def))
if (auto ty = current->rvalueRefinements.find(def))
return *ty;
if (auto ty = current->lvalueTypes.find(def))
return *ty;
}
@ -156,10 +169,10 @@ void Scope::inheritRefinements(const ScopePtr& childScope)
{
if (FFlag::DebugLuauDeferredConstraintResolution)
{
for (const auto& [k, a] : childScope->dcrRefinements)
for (const auto& [k, a] : childScope->rvalueRefinements)
{
if (lookup(NotNull{k}))
dcrRefinements[k] = a;
rvalueRefinements[k] = a;
}
}


@ -101,6 +101,27 @@ SubtypingResult Subtyping::isSubtype(TypeId subTy, TypeId superTy)
TypeId lowerBound = makeAggregateType<UnionType>(lb, builtinTypes->neverType);
TypeId upperBound = makeAggregateType<IntersectionType>(ub, builtinTypes->unknownType);
const NormalizedType* nt = normalizer->normalize(upperBound);
if (!nt)
result.normalizationTooComplex = true;
else if (!normalizer->isInhabited(nt))
{
/* If the normalized upper bound we're mapping to a generic is
* uninhabited, then we must consider the subtyping relation not to
* hold.
*
* This happens eg in <T>() -> (T, T) <: () -> (string, number)
*
* T appears in covariant position and would have to be both string
* and number at once.
*
* No actual value is both a string and a number, so the test fails.
*
* TODO: We'll need to add explanatory context here.
*/
result.isSubtype = false;
}
result.andAlso(isCovariantWith(lowerBound, upperBound));
}


@ -1044,6 +1044,14 @@ void persist(TypeId ty)
else if (get<GenericType>(t) || get<AnyType>(t) || get<FreeType>(t) || get<SingletonType>(t) || get<PrimitiveType>(t) || get<NegationType>(t))
{
}
else if (auto tfit = get<TypeFamilyInstanceType>(t))
{
for (auto ty : tfit->typeArguments)
queue.push_back(ty);
for (auto tp : tfit->packArguments)
persist(tp);
}
else
{
LUAU_ASSERT(!"TypeId is not supported in a persist call");
@ -1072,6 +1080,14 @@ void persist(TypePackId tp)
else if (get<GenericTypePack>(tp))
{
}
else if (auto tfitp = get<TypeFamilyInstanceTypePack>(tp))
{
for (auto ty : tfitp->typeArguments)
persist(ty);
for (auto tp : tfitp->packArguments)
persist(tp);
}
else
{
LUAU_ASSERT(!"TypePackId is not supported in a persist call");


@ -17,7 +17,6 @@
#include "Luau/Type.h"
#include "Luau/TypePack.h"
#include "Luau/TypeUtils.h"
#include "Luau/Unifier.h"
#include "Luau/TypeFamily.h"
#include "Luau/VisitType.h"
@ -242,7 +241,8 @@ struct TypeChecker2
DenseHashSet<TypeId> noTypeFamilyErrors{nullptr};
Normalizer normalizer;
Subtyping subtyping;
Subtyping _subtyping;
NotNull<Subtyping> subtyping;
TypeChecker2(NotNull<BuiltinTypes> builtinTypes, NotNull<UnifierSharedState> unifierState, NotNull<TypeCheckLimits> limits, DcrLogger* logger,
const SourceModule* sourceModule, Module* module)
@ -253,7 +253,9 @@ struct TypeChecker2
, sourceModule(sourceModule)
, module(module)
, normalizer{&testArena, builtinTypes, unifierState, /* cacheInhabitance */ true}
, subtyping{builtinTypes, NotNull{&testArena}, NotNull{&normalizer}, NotNull{unifierState->iceHandler}, NotNull{module->getModuleScope().get()}}
, _subtyping{builtinTypes, NotNull{&testArena}, NotNull{&normalizer}, NotNull{unifierState->iceHandler},
NotNull{module->getModuleScope().get()}}
, subtyping(&_subtyping)
{
}
@ -282,9 +284,9 @@ struct TypeChecker2
if (noTypeFamilyErrors.find(instance))
return instance;
TxnLog fake{};
ErrorVec errors =
reduceFamilies(instance, location, NotNull{&testArena}, builtinTypes, stack.back(), NotNull{&normalizer}, &fake, true).errors;
ErrorVec errors = reduceFamilies(
instance, location, TypeFamilyContext{NotNull{&testArena}, builtinTypes, stack.back(), NotNull{&normalizer}, ice, limits}, true)
.errors;
if (errors.empty())
noTypeFamilyErrors.insert(instance);
@ -491,17 +493,7 @@ struct TypeChecker2
TypeArena* arena = &testArena;
TypePackId actualRetType = reconstructPack(ret->list, *arena);
Unifier u{NotNull{&normalizer}, stack.back(), ret->location, Covariant};
u.hideousFixMeGenericsAreActuallyFree = true;
u.tryUnify(actualRetType, expectedRetType);
const bool ok = (u.errors.empty() && u.log.empty()) || isErrorSuppressing(ret->location, actualRetType, ret->location, expectedRetType);
if (!ok)
{
for (const TypeError& e : u.errors)
reportError(e);
}
testIsSubtype(actualRetType, expectedRetType, ret->location);
for (AstExpr* expr : ret->list)
visit(expr, ValueContext::RValue);
@ -532,9 +524,7 @@ struct TypeChecker2
TypeId annotationType = lookupAnnotation(var->annotation);
TypeId valueType = value ? lookupType(value) : nullptr;
if (valueType)
{
reportErrors(tryUnify(stack.back(), value->location, valueType, annotationType));
}
testIsSubtype(valueType, annotationType, value->location);
visit(var->annotation);
}
@ -559,7 +549,7 @@ struct TypeChecker2
if (var->annotation)
{
TypeId varType = lookupAnnotation(var->annotation);
reportErrors(tryUnify(stack.back(), value->location, valueTypes.head[j - i], varType));
testIsSubtype(valueTypes.head[j - i], varType, value->location);
visit(var->annotation);
}
@ -586,20 +576,20 @@ struct TypeChecker2
void visit(AstStatFor* forStatement)
{
NotNull<Scope> scope = stack.back();
if (forStatement->var->annotation)
{
visit(forStatement->var->annotation);
reportErrors(tryUnify(scope, forStatement->var->location, builtinTypes->numberType, lookupAnnotation(forStatement->var->annotation)));
TypeId annotatedType = lookupAnnotation(forStatement->var->annotation);
testIsSubtype(builtinTypes->numberType, annotatedType, forStatement->var->location);
}
auto checkNumber = [this, scope](AstExpr* expr) {
auto checkNumber = [this](AstExpr* expr) {
if (!expr)
return;
visit(expr, ValueContext::RValue);
reportErrors(tryUnify(scope, expr->location, lookupType(expr), builtinTypes->numberType));
testIsSubtype(lookupType(expr), builtinTypes->numberType, expr->location);
};
checkNumber(forStatement->from);
@ -689,8 +679,7 @@ struct TypeChecker2
}
TypeId iteratorTy = follow(iteratorTypes.head[0]);
auto checkFunction = [this, &arena, &scope, &forInStatement, &variableTypes](
const FunctionType* iterFtv, std::vector<TypeId> iterTys, bool isMm) {
auto checkFunction = [this, &arena, &forInStatement, &variableTypes](const FunctionType* iterFtv, std::vector<TypeId> iterTys, bool isMm) {
if (iterTys.size() < 1 || iterTys.size() > 3)
{
if (isMm)
@ -713,7 +702,7 @@ struct TypeChecker2
}
for (size_t i = 0; i < std::min(expectedVariableTypes.head.size(), variableTypes.size()); ++i)
reportErrors(tryUnify(scope, forInStatement->vars.data[i]->location, variableTypes[i], expectedVariableTypes.head[i]));
testIsSubtype(variableTypes[i], expectedVariableTypes.head[i], forInStatement->vars.data[i]->location);
// nextFn is going to be invoked with (arrayTy, startIndexTy)
@ -757,13 +746,13 @@ struct TypeChecker2
if (iterTys.size() >= 2 && flattenedArgTypes.head.size() > 0)
{
size_t valueIndex = forInStatement->values.size > 1 ? 1 : 0;
reportErrors(tryUnify(scope, forInStatement->values.data[valueIndex]->location, iterTys[1], flattenedArgTypes.head[0]));
testIsSubtype(iterTys[1], flattenedArgTypes.head[0], forInStatement->values.data[valueIndex]->location);
}
if (iterTys.size() == 3 && flattenedArgTypes.head.size() > 1)
{
size_t valueIndex = forInStatement->values.size > 2 ? 2 : 0;
reportErrors(tryUnify(scope, forInStatement->values.data[valueIndex]->location, iterTys[2], flattenedArgTypes.head[1]));
testIsSubtype(iterTys[2], flattenedArgTypes.head[1], forInStatement->values.data[valueIndex]->location);
}
};
@ -795,9 +784,9 @@ struct TypeChecker2
{
if ((forInStatement->vars.size == 1 || forInStatement->vars.size == 2) && ttv->indexer)
{
reportErrors(tryUnify(scope, forInStatement->vars.data[0]->location, variableTypes[0], ttv->indexer->indexType));
testIsSubtype(variableTypes[0], ttv->indexer->indexType, forInStatement->vars.data[0]->location);
if (variableTypes.size() == 2)
reportErrors(tryUnify(scope, forInStatement->vars.data[1]->location, variableTypes[1], ttv->indexer->indexResultType));
testIsSubtype(variableTypes[1], ttv->indexer->indexResultType, forInStatement->vars.data[1]->location);
}
else
reportError(GenericError{"Cannot iterate over a table without indexer"}, forInStatement->values.data[0]->location);
@ -820,7 +809,7 @@ struct TypeChecker2
if (const FunctionType* iterMmFtv = get<FunctionType>(*instantiatedIterMmTy))
{
TypePackId argPack = arena.addTypePack({iteratorTy});
reportErrors(tryUnify(scope, forInStatement->values.data[0]->location, argPack, iterMmFtv->argTypes));
testIsSubtype(argPack, iterMmFtv->argTypes, forInStatement->values.data[0]->location);
TypePack mmIteratorTypes = extendTypePack(arena, builtinTypes, iterMmFtv->retTypes, 3);
@ -894,12 +883,7 @@ struct TypeChecker2
if (get<NeverType>(lhsType))
continue;
if (!isSubtype(rhsType, lhsType, stack.back()) &&
!isErrorSuppressing(assign->vars.data[i]->location, lhsType, assign->values.data[i]->location, rhsType))
{
reportError(TypeMismatch{lhsType, rhsType}, rhs->location);
}
testIsSubtype(rhsType, lhsType, rhs->location);
}
}
@ -909,7 +893,7 @@ struct TypeChecker2
TypeId resultTy = visit(&fake, stat);
TypeId varTy = lookupType(stat->var);
reportErrors(tryUnify(stack.back(), stat->location, resultTy, varTy));
testIsSubtype(resultTy, varTy, stat->location);
}
void visit(AstStatFunction* stat)
@ -1029,34 +1013,38 @@ struct TypeChecker2
void visit(AstExprConstantNil* expr)
{
NotNull<Scope> scope = stack.back();
TypeId actualType = lookupType(expr);
TypeId expectedType = builtinTypes->nilType;
LUAU_ASSERT(isSubtype(actualType, expectedType, scope));
SubtypingResult r = subtyping->isSubtype(actualType, expectedType);
LUAU_ASSERT(r.isSubtype || r.isErrorSuppressing);
}
void visit(AstExprConstantBool* expr)
{
NotNull<Scope> scope = stack.back();
TypeId actualType = lookupType(expr);
TypeId expectedType = builtinTypes->booleanType;
LUAU_ASSERT(isSubtype(actualType, expectedType, scope));
SubtypingResult r = subtyping->isSubtype(actualType, expectedType);
LUAU_ASSERT(r.isSubtype || r.isErrorSuppressing);
}
void visit(AstExprConstantNumber* expr)
{
NotNull<Scope> scope = stack.back();
TypeId actualType = lookupType(expr);
TypeId expectedType = builtinTypes->numberType;
LUAU_ASSERT(isSubtype(actualType, expectedType, scope));
SubtypingResult r = subtyping->isSubtype(actualType, expectedType);
LUAU_ASSERT(r.isSubtype || r.isErrorSuppressing);
}
void visit(AstExprConstantString* expr)
{
NotNull<Scope> scope = stack.back();
TypeId actualType = lookupType(expr);
TypeId expectedType = builtinTypes->stringType;
LUAU_ASSERT(isSubtype(actualType, expectedType, scope));
SubtypingResult r = subtyping->isSubtype(actualType, expectedType);
LUAU_ASSERT(r.isSubtype || r.isErrorSuppressing);
}
void visit(AstExprLocal* expr)
@ -1089,7 +1077,7 @@ struct TypeChecker2
TypeId fnTy = *originalCallTy;
if (selectedOverloadTy)
{
SubtypingResult result = subtyping.isSubtype(*originalCallTy, *selectedOverloadTy);
SubtypingResult result = subtyping->isSubtype(*originalCallTy, *selectedOverloadTy);
if (result.isSubtype)
fnTy = *selectedOverloadTy;
@ -1152,6 +1140,8 @@ struct TypeChecker2
NotNull{&normalizer},
NotNull{stack.back()},
ice,
limits,
subtyping,
call->location,
};
@ -1243,6 +1233,8 @@ struct TypeChecker2
NotNull<Normalizer> normalizer;
NotNull<Scope> scope;
NotNull<InternalErrorReporter> ice;
NotNull<TypeCheckLimits> limits;
NotNull<Subtyping> subtyping;
Location callLoc;
std::vector<TypeId> ok;
@ -1252,32 +1244,38 @@ struct TypeChecker2
InsertionOrderedMap<TypeId, std::pair<Analysis, size_t>> resolution;
private:
std::optional<ErrorVec> tryUnify(const Location& location, TypeId subTy, TypeId superTy, const LiteralProperties* literalProperties = nullptr)
std::optional<ErrorVec> testIsSubtype(const Location& location, TypeId subTy, TypeId superTy)
{
Unifier u{normalizer, scope, location, Covariant};
u.ctx = CountMismatch::Arg;
u.hideousFixMeGenericsAreActuallyFree = true;
u.enableNewSolver();
u.tryUnify(subTy, superTy, /*isFunctionCall*/ false, /*isIntersection*/ false, literalProperties);
auto r = subtyping->isSubtype(subTy, superTy);
ErrorVec errors;
if (u.errors.empty())
if (r.normalizationTooComplex)
errors.push_back(TypeError{location, NormalizationTooComplex{}});
if (!r.isSubtype && !r.isErrorSuppressing)
errors.push_back(TypeError{location, TypeMismatch{superTy, subTy}});
if (errors.empty())
return std::nullopt;
return std::move(u.errors);
return errors;
}
std::optional<ErrorVec> tryUnify(const Location& location, TypePackId subTy, TypePackId superTy)
std::optional<ErrorVec> testIsSubtype(const Location& location, TypePackId subTy, TypePackId superTy)
{
Unifier u{normalizer, scope, location, Covariant};
u.ctx = CountMismatch::Arg;
u.hideousFixMeGenericsAreActuallyFree = true;
u.enableNewSolver();
u.tryUnify(subTy, superTy);
auto r = subtyping->isSubtype(subTy, superTy);
ErrorVec errors;
if (u.errors.empty())
if (r.normalizationTooComplex)
errors.push_back(TypeError{location, NormalizationTooComplex{}});
if (!r.isSubtype && !r.isErrorSuppressing)
errors.push_back(TypeError{location, TypePackMismatch{superTy, subTy}});
if (errors.empty())
return std::nullopt;
return std::move(u.errors);
return errors;
}
std::pair<Analysis, ErrorVec> checkOverload(
@ -1316,35 +1314,12 @@ struct TypeChecker2
expr->is<AstExprConstantString>() || expr->is<AstExprFunction>() || expr->is<AstExprTable>();
}
static std::unique_ptr<LiteralProperties> buildLiteralPropertiesSet(AstExpr* expr)
{
const AstExprTable* table = expr->as<AstExprTable>();
if (!table)
return nullptr;
std::unique_ptr<LiteralProperties> result = std::make_unique<LiteralProperties>(Name{});
for (const AstExprTable::Item& item : table->items)
{
if (item.kind != AstExprTable::Item::Record)
continue;
AstExprConstantString* keyExpr = item.key->as<AstExprConstantString>();
LUAU_ASSERT(keyExpr);
if (isLiteral(item.value))
result->insert(Name{keyExpr->value.begin(), keyExpr->value.end()});
}
return result;
}
LUAU_NOINLINE
std::pair<Analysis, ErrorVec> checkOverload_(
TypeId fnTy, const FunctionType* fn, const TypePack* args, AstExpr* fnExpr, const std::vector<AstExpr*>* argExprs)
{
TxnLog fake;
FamilyGraphReductionResult result = reduceFamilies(fnTy, callLoc, arena, builtinTypes, scope, normalizer, &fake, /*force=*/true);
FamilyGraphReductionResult result =
reduceFamilies(fnTy, callLoc, TypeFamilyContext{arena, builtinTypes, scope, normalizer, ice, limits}, /*force=*/true);
if (!result.errors.empty())
return {OverloadIsNonviable, result.errors};
@ -1363,9 +1338,7 @@ struct TypeChecker2
TypeId argTy = args->head[argOffset];
AstExpr* argLoc = argExprs->at(argOffset >= argExprs->size() ? argExprs->size() - 1 : argOffset);
std::unique_ptr<LiteralProperties> literalProperties{buildLiteralPropertiesSet(argLoc)};
if (auto errors = tryUnify(argLoc->location, argTy, paramTy, literalProperties.get()))
if (auto errors = testIsSubtype(argLoc->location, argTy, paramTy))
{
// Since we're stopping right here, we need to decide if this is a nonviable overload or if there is an arity mismatch.
// If it's a nonviable overload, then we need to keep going to get all type errors.
@ -1395,7 +1368,7 @@ struct TypeChecker2
}
else if (auto vtp = get<VariadicTypePack>(follow(paramIter.tail())))
{
if (auto errors = tryUnify(argExpr->location, args->head[argOffset], vtp->ty))
if (auto errors = testIsSubtype(argExpr->location, args->head[argOffset], vtp->ty))
argumentErrors.insert(argumentErrors.end(), errors->begin(), errors->end());
}
else if (get<GenericTypePack>(follow(paramIter.tail())))
@ -1414,7 +1387,7 @@ struct TypeChecker2
{
AstExpr* argExpr = argExprs->at(argExprs->size() - 1);
if (auto errors = tryUnify(argExpr->location, vtp->ty, *paramIter))
if (auto errors = testIsSubtype(argExpr->location, vtp->ty, *paramIter))
argumentErrors.insert(argumentErrors.end(), errors->begin(), errors->end());
}
else if (!isOptional(*paramIter))
@ -1439,7 +1412,7 @@ struct TypeChecker2
if (paramIter.tail() && args->tail)
{
if (auto errors = tryUnify(argLoc, *args->tail, *paramIter.tail()))
if (auto errors = testIsSubtype(argLoc, *args->tail, *paramIter.tail()))
argumentErrors.insert(argumentErrors.end(), errors->begin(), errors->end());
}
else if (paramIter.tail())
@ -1604,20 +1577,18 @@ struct TypeChecker2
visit(indexExpr->expr, ValueContext::RValue);
visit(indexExpr->index, ValueContext::RValue);
NotNull<Scope> scope = stack.back();
TypeId exprType = lookupType(indexExpr->expr);
TypeId indexType = lookupType(indexExpr->index);
if (auto tt = get<TableType>(exprType))
{
if (tt->indexer)
reportErrors(tryUnify(scope, indexExpr->index->location, indexType, tt->indexer->indexType));
testIsSubtype(indexType, tt->indexer->indexType, indexExpr->index->location);
else
reportError(CannotExtendTable{exprType, CannotExtendTable::Indexer, "indexer??"}, indexExpr->location);
}
else if (auto cls = get<ClassType>(exprType); cls && cls->indexer)
reportErrors(tryUnify(scope, indexExpr->index->location, indexType, cls->indexer->indexType));
testIsSubtype(indexType, cls->indexer->indexType, indexExpr->index->location);
else if (get<UnionType>(exprType) && isOptional(exprType))
reportError(OptionalValueAccess{exprType}, indexExpr->location);
}
@ -1668,11 +1639,7 @@ struct TypeChecker2
TypeId inferredArgTy = *argIt;
TypeId annotatedArgTy = lookupAnnotation(arg->annotation);
if (!isSubtype(inferredArgTy, annotatedArgTy, stack.back()) &&
!isErrorSuppressing(arg->location, inferredArgTy, arg->annotation->location, annotatedArgTy))
{
reportError(TypeMismatch{inferredArgTy, annotatedArgTy}, arg->location);
}
testIsSubtype(inferredArgTy, annotatedArgTy, arg->location);
}
++argIt;
@ -1699,7 +1666,6 @@ struct TypeChecker2
{
visit(expr->expr, ValueContext::RValue);
NotNull<Scope> scope = stack.back();
TypeId operandType = lookupType(expr->expr);
TypeId resultType = lookupType(expr);
@ -1717,7 +1683,7 @@ struct TypeChecker2
{
if (expr->op == AstExprUnary::Op::Len)
{
reportErrors(tryUnify(scope, expr->location, follow(*ret), builtinTypes->numberType));
testIsSubtype(follow(*ret), builtinTypes->numberType, expr->location);
}
}
else
@ -1737,12 +1703,9 @@ struct TypeChecker2
TypeId expectedFunction = testArena.addType(FunctionType{expectedArgs, expectedRet});
ErrorVec errors = tryUnify(scope, expr->location, *mm, expectedFunction);
if (!errors.empty() && !isErrorSuppressing(expr->expr->location, *firstArg, expr->expr->location, operandType))
{
reportError(TypeMismatch{*firstArg, operandType}, expr->location);
bool success = testIsSubtype(*mm, expectedFunction, expr->location);
if (!success)
return;
}
}
return;
@ -1768,7 +1731,7 @@ struct TypeChecker2
}
else if (expr->op == AstExprUnary::Op::Minus)
{
reportErrors(tryUnify(scope, expr->location, operandType, builtinTypes->numberType));
testIsSubtype(operandType, builtinTypes->numberType, expr->location);
}
else if (expr->op == AstExprUnary::Op::Not)
{
@ -1792,7 +1755,7 @@ struct TypeChecker2
TypeId leftType = lookupType(expr->left);
TypeId rightType = lookupType(expr->right);
TypeId expectedResult = lookupType(expr);
TypeId expectedResult = follow(lookupType(expr));
if (get<TypeFamilyInstanceType>(expectedResult))
{
@ -1928,7 +1891,7 @@ struct TypeChecker2
TypeId expectedTy = testArena.addType(FunctionType(expectedArgs, expectedRets));
reportErrors(tryUnify(scope, expr->location, follow(*mm), expectedTy));
testIsSubtype(follow(*mm), expectedTy, expr->location);
std::optional<TypeId> ret = first(ftv->retTypes);
if (ret)
@ -2022,13 +1985,13 @@ struct TypeChecker2
case AstExprBinary::Op::Mod:
LUAU_ASSERT(FFlag::LuauFloorDivision || expr->op != AstExprBinary::Op::FloorDiv);
reportErrors(tryUnify(scope, expr->left->location, leftType, builtinTypes->numberType));
reportErrors(tryUnify(scope, expr->right->location, rightType, builtinTypes->numberType));
testIsSubtype(leftType, builtinTypes->numberType, expr->left->location);
testIsSubtype(rightType, builtinTypes->numberType, expr->right->location);
return builtinTypes->numberType;
case AstExprBinary::Op::Concat:
reportErrors(tryUnify(scope, expr->left->location, leftType, builtinTypes->stringType));
reportErrors(tryUnify(scope, expr->right->location, rightType, builtinTypes->stringType));
testIsSubtype(leftType, builtinTypes->stringType, expr->left->location);
testIsSubtype(rightType, builtinTypes->stringType, expr->right->location);
return builtinTypes->stringType;
case AstExprBinary::Op::CompareGe:
@ -2041,12 +2004,12 @@ struct TypeChecker2
if (normLeft && normLeft->isExactlyNumber())
{
reportErrors(tryUnify(scope, expr->right->location, rightType, builtinTypes->numberType));
testIsSubtype(rightType, builtinTypes->numberType, expr->right->location);
return builtinTypes->numberType;
}
else if (normLeft && normLeft->isSubtypeOfString())
{
reportErrors(tryUnify(scope, expr->right->location, rightType, builtinTypes->stringType));
testIsSubtype(rightType, builtinTypes->stringType, expr->right->location);
return builtinTypes->stringType;
}
else
@ -2081,10 +2044,10 @@ struct TypeChecker2
TypeId computedType = lookupType(expr->expr);
// Note: As an optimization, we try 'number <: number | string' first, as that is the more likely case.
if (isSubtype(annotationType, computedType, stack.back(), true))
if (auto r = subtyping->isSubtype(annotationType, computedType); r.isSubtype || r.isErrorSuppressing)
return;
if (isSubtype(computedType, annotationType, stack.back(), true))
if (auto r = subtyping->isSubtype(computedType, annotationType); r.isSubtype || r.isErrorSuppressing)
return;
reportError(TypesAreUnrelated{computedType, annotationType}, expr->location);
@ -2424,33 +2387,30 @@ struct TypeChecker2
}
}
template<typename TID>
bool isSubtype(TID subTy, TID superTy, NotNull<Scope> scope, bool genericsOkay = false)
bool testIsSubtype(TypeId subTy, TypeId superTy, Location location)
{
TypeArena arena;
Unifier u{NotNull{&normalizer}, scope, Location{}, Covariant};
u.hideousFixMeGenericsAreActuallyFree = genericsOkay;
u.enableNewSolver();
SubtypingResult r = subtyping->isSubtype(subTy, superTy);
u.tryUnify(subTy, superTy);
const bool ok = u.errors.empty() && u.log.empty();
return ok;
if (r.normalizationTooComplex)
reportError(NormalizationTooComplex{}, location);
if (!r.isSubtype && !r.isErrorSuppressing)
reportError(TypeMismatch{superTy, subTy}, location);
return r.isSubtype;
}
template<typename TID>
ErrorVec tryUnify(NotNull<Scope> scope, const Location& location, TID subTy, TID superTy, CountMismatch::Context context = CountMismatch::Arg,
bool genericsOkay = false)
bool testIsSubtype(TypePackId subTy, TypePackId superTy, Location location)
{
Unifier u{NotNull{&normalizer}, scope, location, Covariant};
u.ctx = context;
u.hideousFixMeGenericsAreActuallyFree = genericsOkay;
u.enableNewSolver();
u.tryUnify(subTy, superTy);
SubtypingResult r = subtyping->isSubtype(subTy, superTy);
if (isErrorSuppressing(location, subTy, location, superTy))
return {};
if (r.normalizationTooComplex)
reportError(NormalizationTooComplex{}, location);
return std::move(u.errors);
if (!r.isSubtype && !r.isErrorSuppressing)
reportError(TypePackMismatch{superTy, subTy}, location);
return r.isSubtype;
}
void reportError(TypeErrorData data, const Location& location)

View File

@ -6,12 +6,13 @@
#include "Luau/DenseHash.h"
#include "Luau/Instantiation.h"
#include "Luau/Normalize.h"
#include "Luau/Simplify.h"
#include "Luau/Substitution.h"
#include "Luau/ToString.h"
#include "Luau/TxnLog.h"
#include "Luau/TypeCheckLimits.h"
#include "Luau/TypeUtils.h"
#include "Luau/Unifier.h"
#include "Luau/Unifier2.h"
#include "Luau/VisitType.h"
LUAU_DYNAMIC_FASTINTVARIABLE(LuauTypeFamilyGraphReductionMaximumSteps, 1'000'000);
@ -56,38 +57,23 @@ struct InstanceCollector : TypeOnceVisitor
struct FamilyReducer
{
ConstraintSolver* solver = nullptr;
// Conditionally from the solver if one is provided.
NotNull<TypeArena> arena;
NotNull<BuiltinTypes> builtins;
NotNull<Normalizer> normalizer;
TypeFamilyContext ctx;
std::deque<TypeId> queuedTys;
std::deque<TypePackId> queuedTps;
DenseHashSet<const void*> irreducible{nullptr};
FamilyGraphReductionResult result;
TxnLog* parentLog = nullptr;
TxnLog log;
bool force = false;
// Local to the constraint being reduced.
Location location;
NotNull<Scope> scope;
FamilyReducer(std::deque<TypeId> queuedTys, std::deque<TypePackId> queuedTps, Location location, NotNull<TypeArena> arena,
NotNull<BuiltinTypes> builtins, NotNull<Scope> scope, NotNull<Normalizer> normalizer, ConstraintSolver* solver, TxnLog* parentLog = nullptr, bool force = false)
: solver(solver)
, arena(arena)
, builtins(builtins)
, normalizer(normalizer)
FamilyReducer(std::deque<TypeId> queuedTys, std::deque<TypePackId> queuedTps, Location location, TypeFamilyContext ctx, bool force = false)
: ctx(ctx)
, queuedTys(std::move(queuedTys))
, queuedTps(std::move(queuedTps))
, parentLog(parentLog)
, log(parentLog)
, force(force)
, location(location)
, scope(scope)
{
}
@ -100,16 +86,16 @@ struct FamilyReducer
SkipTestResult testForSkippability(TypeId ty)
{
ty = log.follow(ty);
ty = follow(ty);
if (log.is<TypeFamilyInstanceType>(ty))
if (is<TypeFamilyInstanceType>(ty))
{
if (!irreducible.contains(ty))
return SkipTestResult::Defer;
else
return SkipTestResult::Irreducible;
}
else if (log.is<GenericType>(ty))
else if (is<GenericType>(ty))
{
return SkipTestResult::Irreducible;
}
@ -119,16 +105,16 @@ struct FamilyReducer
SkipTestResult testForSkippability(TypePackId ty)
{
ty = log.follow(ty);
ty = follow(ty);
if (log.is<TypeFamilyInstanceTypePack>(ty))
if (is<TypeFamilyInstanceTypePack>(ty))
{
if (!irreducible.contains(ty))
return SkipTestResult::Defer;
else
return SkipTestResult::Irreducible;
}
else if (log.is<GenericTypePack>(ty))
else if (is<GenericTypePack>(ty))
{
return SkipTestResult::Irreducible;
}
@ -139,10 +125,7 @@ struct FamilyReducer
template<typename T>
void replace(T subject, T replacement)
{
if (parentLog)
parentLog->replace(subject, Unifiable::Bound{replacement});
else
asMutable(subject)->ty.template emplace<Unifiable::Bound<T>>(replacement);
asMutable(subject)->ty.template emplace<Unifiable::Bound<T>>(replacement);
if constexpr (std::is_same_v<T, TypeId>)
result.reducedTypes.insert(subject);
@ -230,38 +213,36 @@ struct FamilyReducer
void stepType()
{
TypeId subject = log.follow(queuedTys.front());
TypeId subject = follow(queuedTys.front());
queuedTys.pop_front();
if (irreducible.contains(subject))
return;
if (const TypeFamilyInstanceType* tfit = log.get<TypeFamilyInstanceType>(subject))
if (const TypeFamilyInstanceType* tfit = get<TypeFamilyInstanceType>(subject))
{
if (!testParameters(subject, tfit))
return;
TypeFamilyReductionResult<TypeId> result =
tfit->family->reducer(tfit->typeArguments, tfit->packArguments, arena, builtins, NotNull{&log}, scope, normalizer, solver);
TypeFamilyReductionResult<TypeId> result = tfit->family->reducer(tfit->typeArguments, tfit->packArguments, NotNull{&ctx});
handleFamilyReduction(subject, result);
}
}
void stepPack()
{
TypePackId subject = log.follow(queuedTps.front());
TypePackId subject = follow(queuedTps.front());
queuedTps.pop_front();
if (irreducible.contains(subject))
return;
if (const TypeFamilyInstanceTypePack* tfit = log.get<TypeFamilyInstanceTypePack>(subject))
if (const TypeFamilyInstanceTypePack* tfit = get<TypeFamilyInstanceTypePack>(subject))
{
if (!testParameters(subject, tfit))
return;
TypeFamilyReductionResult<TypePackId> result =
tfit->family->reducer(tfit->typeArguments, tfit->packArguments, arena, builtins, NotNull{&log}, scope, normalizer, solver);
TypeFamilyReductionResult<TypePackId> result = tfit->family->reducer(tfit->typeArguments, tfit->packArguments, NotNull{&ctx});
handleFamilyReduction(subject, result);
}
}
@ -275,11 +256,10 @@ struct FamilyReducer
}
};
static FamilyGraphReductionResult reduceFamiliesInternal(std::deque<TypeId> queuedTys, std::deque<TypePackId> queuedTps, Location location,
NotNull<TypeArena> arena, NotNull<BuiltinTypes> builtins, NotNull<Scope> scope, NotNull<Normalizer> normalizer, ConstraintSolver* solver,
TxnLog* log, bool force)
static FamilyGraphReductionResult reduceFamiliesInternal(
std::deque<TypeId> queuedTys, std::deque<TypePackId> queuedTps, Location location, TypeFamilyContext ctx, bool force)
{
FamilyReducer reducer{std::move(queuedTys), std::move(queuedTps), location, arena, builtins, scope, normalizer, solver, log, force};
FamilyReducer reducer{std::move(queuedTys), std::move(queuedTps), location, ctx, force};
int iterationCount = 0;
while (!reducer.done())
@ -297,8 +277,7 @@ static FamilyGraphReductionResult reduceFamiliesInternal(std::deque<TypeId> queu
return std::move(reducer.result);
}
FamilyGraphReductionResult reduceFamilies(TypeId entrypoint, Location location, NotNull<TypeArena> arena, NotNull<BuiltinTypes> builtins,
NotNull<Scope> scope, NotNull<Normalizer> normalizer, TxnLog* log, bool force)
FamilyGraphReductionResult reduceFamilies(TypeId entrypoint, Location location, TypeFamilyContext ctx, bool force)
{
InstanceCollector collector;
@ -314,11 +293,10 @@ FamilyGraphReductionResult reduceFamilies(TypeId entrypoint, Location location,
if (collector.tys.empty() && collector.tps.empty())
return {};
return reduceFamiliesInternal(std::move(collector.tys), std::move(collector.tps), location, arena, builtins, scope, normalizer, nullptr, log, force);
return reduceFamiliesInternal(std::move(collector.tys), std::move(collector.tps), location, ctx, force);
}
FamilyGraphReductionResult reduceFamilies(TypePackId entrypoint, Location location, NotNull<TypeArena> arena, NotNull<BuiltinTypes> builtins,
NotNull<Scope> scope, NotNull<Normalizer> normalizer, TxnLog* log, bool force)
FamilyGraphReductionResult reduceFamilies(TypePackId entrypoint, Location location, TypeFamilyContext ctx, bool force)
{
InstanceCollector collector;
@ -334,94 +312,52 @@ FamilyGraphReductionResult reduceFamilies(TypePackId entrypoint, Location locati
if (collector.tys.empty() && collector.tps.empty())
return {};
return reduceFamiliesInternal(std::move(collector.tys), std::move(collector.tps), location, arena, builtins, scope, normalizer, nullptr, log, force);
return reduceFamiliesInternal(std::move(collector.tys), std::move(collector.tps), location, ctx, force);
}
FamilyGraphReductionResult reduceFamilies(
NotNull<ConstraintSolver> solver, TypeId entrypoint, Location location, NotNull<Scope> scope, TxnLog* log, bool force)
bool isPending(TypeId ty, ConstraintSolver* solver)
{
InstanceCollector collector;
try
{
collector.traverse(entrypoint);
}
catch (RecursionLimitException&)
{
return FamilyGraphReductionResult{};
}
if (collector.tys.empty() && collector.tps.empty())
return {};
return reduceFamiliesInternal(std::move(collector.tys), std::move(collector.tps), location, solver->arena, solver->builtinTypes, scope, solver->normalizer, solver.get(), log, force);
return is<BlockedType>(ty) || is<PendingExpansionType>(ty) || is<TypeFamilyInstanceType>(ty) || (solver && solver->hasUnresolvedConstraints(ty));
}
FamilyGraphReductionResult reduceFamilies(
NotNull<ConstraintSolver> solver, TypePackId entrypoint, Location location, NotNull<Scope> scope, TxnLog* log, bool force)
{
InstanceCollector collector;
try
{
collector.traverse(entrypoint);
}
catch (RecursionLimitException&)
{
return FamilyGraphReductionResult{};
}
if (collector.tys.empty() && collector.tps.empty())
return {};
return reduceFamiliesInternal(std::move(collector.tys), std::move(collector.tps), location, solver->arena, solver->builtinTypes, scope, solver->normalizer, solver.get(), log, force);
}
bool isPending(TypeId ty, NotNull<TxnLog> log, ConstraintSolver* solver)
{
return log->is<BlockedType>(ty) || log->is<PendingExpansionType>(ty) || log->is<TypeFamilyInstanceType>(ty)
|| (solver && solver->hasUnresolvedConstraints(ty));
}
TypeFamilyReductionResult<TypeId> addFamilyFn(std::vector<TypeId> typeParams, std::vector<TypePackId> packParams, NotNull<TypeArena> arena,
NotNull<BuiltinTypes> builtins, NotNull<TxnLog> log, NotNull<Scope> scope, NotNull<Normalizer> normalizer, ConstraintSolver* solver)
TypeFamilyReductionResult<TypeId> numericBinopFamilyFn(
std::vector<TypeId> typeParams, std::vector<TypePackId> packParams, NotNull<TypeFamilyContext> ctx, const std::string metamethod)
{
if (typeParams.size() != 2 || !packParams.empty())
{
// TODO: ICE?
ctx->ice->ice("encountered a type family instance without the required argument structure");
LUAU_ASSERT(false);
return {std::nullopt, true, {}, {}};
}
TypeId lhsTy = log->follow(typeParams.at(0));
TypeId rhsTy = log->follow(typeParams.at(1));
const NormalizedType* normLhsTy = normalizer->normalize(lhsTy);
const NormalizedType* normRhsTy = normalizer->normalize(rhsTy);
TypeId lhsTy = follow(typeParams.at(0));
TypeId rhsTy = follow(typeParams.at(1));
const NormalizedType* normLhsTy = ctx->normalizer->normalize(lhsTy);
const NormalizedType* normRhsTy = ctx->normalizer->normalize(rhsTy);
if (!normLhsTy || !normRhsTy)
{
return {std::nullopt, false, {}, {}};
}
else if (log->is<AnyType>(normLhsTy->tops) || log->is<AnyType>(normRhsTy->tops))
else if (is<AnyType>(normLhsTy->tops) || is<AnyType>(normRhsTy->tops))
{
return {builtins->anyType, false, {}, {}};
return {ctx->builtins->anyType, false, {}, {}};
}
else if ((normLhsTy->hasNumbers() || normLhsTy->hasTops()) && (normRhsTy->hasNumbers() || normRhsTy->hasTops()))
{
return {builtins->numberType, false, {}, {}};
return {ctx->builtins->numberType, false, {}, {}};
}
else if (log->is<ErrorType>(lhsTy) || log->is<ErrorType>(rhsTy))
else if (is<ErrorType>(lhsTy) || is<ErrorType>(rhsTy))
{
return {builtins->errorRecoveryType(), false, {}, {}};
return {ctx->builtins->errorRecoveryType(), false, {}, {}};
}
else if (log->is<NeverType>(lhsTy) || log->is<NeverType>(rhsTy))
else if (is<NeverType>(lhsTy) || is<NeverType>(rhsTy))
{
return {builtins->neverType, false, {}, {}};
return {ctx->builtins->neverType, false, {}, {}};
}
else if (isPending(lhsTy, log, solver))
else if (isPending(lhsTy, ctx->solver))
{
return {std::nullopt, false, {lhsTy}, {}};
}
else if (isPending(rhsTy, log, solver))
else if (isPending(rhsTy, ctx->solver))
{
return {std::nullopt, false, {rhsTy}, {}};
}
@ -430,28 +366,28 @@ TypeFamilyReductionResult<TypeId> addFamilyFn(std::vector<TypeId> typeParams, st
// the necessary state to do that, even if we intend to just eat the errors.
ErrorVec dummy;
std::optional<TypeId> addMm = findMetatableEntry(builtins, dummy, lhsTy, "__add", Location{});
std::optional<TypeId> mmType = findMetatableEntry(ctx->builtins, dummy, lhsTy, metamethod, Location{});
bool reversed = false;
if (!addMm)
if (!mmType)
{
addMm = findMetatableEntry(builtins, dummy, rhsTy, "__add", Location{});
mmType = findMetatableEntry(ctx->builtins, dummy, rhsTy, metamethod, Location{});
reversed = true;
}
if (!addMm)
if (!mmType)
return {std::nullopt, true, {}, {}};
if (isPending(log->follow(*addMm), log, solver))
return {std::nullopt, false, {log->follow(*addMm)}, {}};
mmType = follow(*mmType);
if (isPending(*mmType, ctx->solver))
return {std::nullopt, false, {*mmType}, {}};
const FunctionType* mmFtv = log->get<FunctionType>(log->follow(*addMm));
const FunctionType* mmFtv = get<FunctionType>(*mmType);
if (!mmFtv)
return {std::nullopt, true, {}, {}};
TypeCheckLimits limits; // TODO: We need to thread TypeCheckLimits in from Frontend to here.
if (std::optional<TypeId> instantiatedAddMm = instantiate(builtins, arena, NotNull{&limits}, scope, log->follow(*addMm)))
if (std::optional<TypeId> instantiatedMmType = instantiate(ctx->builtins, ctx->arena, ctx->limits, ctx->scope, *mmType))
{
if (const FunctionType* instantiatedMmFtv = get<FunctionType>(*instantiatedAddMm))
if (const FunctionType* instantiatedMmFtv = get<FunctionType>(*instantiatedMmType))
{
std::vector<TypeId> inferredArgs;
if (!reversed)
@ -459,22 +395,19 @@ TypeFamilyReductionResult<TypeId> addFamilyFn(std::vector<TypeId> typeParams, st
else
inferredArgs = {rhsTy, lhsTy};
TypePackId inferredArgPack = arena->addTypePack(std::move(inferredArgs));
Unifier u{normalizer, scope, Location{}, Variance::Covariant, log.get()};
u.tryUnify(inferredArgPack, instantiatedMmFtv->argTypes);
TypePackId inferredArgPack = ctx->arena->addTypePack(std::move(inferredArgs));
Unifier2 u2{ctx->arena, ctx->builtins, ctx->scope, ctx->ice};
if (!u2.unify(inferredArgPack, instantiatedMmFtv->argTypes))
return {std::nullopt, true, {}, {}}; // occurs check failed
if (std::optional<TypeId> ret = first(instantiatedMmFtv->retTypes); ret && u.errors.empty())
{
return {u.log.follow(*ret), false, {}, {}};
}
if (std::optional<TypeId> ret = first(instantiatedMmFtv->retTypes))
return {*ret, false, {}, {}};
else
{
return {std::nullopt, true, {}, {}};
}
}
else
{
return {builtins->errorRecoveryType(), false, {}, {}};
return {ctx->builtins->errorRecoveryType(), false, {}, {}};
}
}
else
@ -484,9 +417,171 @@ TypeFamilyReductionResult<TypeId> addFamilyFn(std::vector<TypeId> typeParams, st
}
}
TypeFamilyReductionResult<TypeId> addFamilyFn(std::vector<TypeId> typeParams, std::vector<TypePackId> packParams, NotNull<TypeFamilyContext> ctx)
{
if (typeParams.size() != 2 || !packParams.empty())
{
ctx->ice->ice("add type family: encountered a type family instance without the required argument structure");
LUAU_ASSERT(false);
}
return numericBinopFamilyFn(typeParams, packParams, ctx, "__add");
}
TypeFamilyReductionResult<TypeId> subFamilyFn(std::vector<TypeId> typeParams, std::vector<TypePackId> packParams, NotNull<TypeFamilyContext> ctx)
{
if (typeParams.size() != 2 || !packParams.empty())
{
ctx->ice->ice("sub type family: encountered a type family instance without the required argument structure");
LUAU_ASSERT(false);
}
return numericBinopFamilyFn(typeParams, packParams, ctx, "__sub");
}
TypeFamilyReductionResult<TypeId> mulFamilyFn(std::vector<TypeId> typeParams, std::vector<TypePackId> packParams, NotNull<TypeFamilyContext> ctx)
{
if (typeParams.size() != 2 || !packParams.empty())
{
ctx->ice->ice("mul type family: encountered a type family instance without the required argument structure");
LUAU_ASSERT(false);
}
return numericBinopFamilyFn(typeParams, packParams, ctx, "__mul");
}
TypeFamilyReductionResult<TypeId> divFamilyFn(std::vector<TypeId> typeParams, std::vector<TypePackId> packParams, NotNull<TypeFamilyContext> ctx)
{
if (typeParams.size() != 2 || !packParams.empty())
{
ctx->ice->ice("div type family: encountered a type family instance without the required argument structure");
LUAU_ASSERT(false);
}
return numericBinopFamilyFn(typeParams, packParams, ctx, "__div");
}
TypeFamilyReductionResult<TypeId> idivFamilyFn(std::vector<TypeId> typeParams, std::vector<TypePackId> packParams, NotNull<TypeFamilyContext> ctx)
{
if (typeParams.size() != 2 || !packParams.empty())
{
ctx->ice->ice("integer div type family: encountered a type family instance without the required argument structure");
LUAU_ASSERT(false);
}
return numericBinopFamilyFn(typeParams, packParams, ctx, "__idiv");
}
TypeFamilyReductionResult<TypeId> powFamilyFn(std::vector<TypeId> typeParams, std::vector<TypePackId> packParams, NotNull<TypeFamilyContext> ctx)
{
if (typeParams.size() != 2 || !packParams.empty())
{
ctx->ice->ice("pow type family: encountered a type family instance without the required argument structure");
LUAU_ASSERT(false);
}
return numericBinopFamilyFn(typeParams, packParams, ctx, "__pow");
}
TypeFamilyReductionResult<TypeId> modFamilyFn(std::vector<TypeId> typeParams, std::vector<TypePackId> packParams, NotNull<TypeFamilyContext> ctx)
{
if (typeParams.size() != 2 || !packParams.empty())
{
ctx->ice->ice("modulo type family: encountered a type family instance without the required argument structure");
LUAU_ASSERT(false);
}
return numericBinopFamilyFn(typeParams, packParams, ctx, "__mod");
}
TypeFamilyReductionResult<TypeId> andFamilyFn(std::vector<TypeId> typeParams, std::vector<TypePackId> packParams, NotNull<TypeFamilyContext> ctx)
{
if (typeParams.size() != 2 || !packParams.empty())
{
ctx->ice->ice("and type family: encountered a type family instance without the required argument structure");
LUAU_ASSERT(false);
}
TypeId lhsTy = follow(typeParams.at(0));
TypeId rhsTy = follow(typeParams.at(1));
if (isPending(lhsTy, ctx->solver))
{
return {std::nullopt, false, {lhsTy}, {}};
}
else if (isPending(rhsTy, ctx->solver))
{
return {std::nullopt, false, {rhsTy}, {}};
}
// And evaluates to the falsy part of the LHS type if the LHS is falsy, and to the RHS type if the LHS is truthy.
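// Illustrative example (not from the original source): with lhsTy = number? and rhsTy = string, the
// intersection below keeps only the falsy part of the LHS (nil), so the overall result simplifies to string | nil.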
SimplifyResult filteredLhs = simplifyIntersection(ctx->builtins, ctx->arena, lhsTy, ctx->builtins->falsyType);
SimplifyResult overallResult = simplifyUnion(ctx->builtins, ctx->arena, rhsTy, filteredLhs.result);
std::vector<TypeId> blockedTypes(filteredLhs.blockedTypes.begin(), filteredLhs.blockedTypes.end());
blockedTypes.insert(blockedTypes.end(), overallResult.blockedTypes.begin(), overallResult.blockedTypes.end());
return {overallResult.result, false, std::move(blockedTypes), {}};
}
TypeFamilyReductionResult<TypeId> orFamilyFn(std::vector<TypeId> typeParams, std::vector<TypePackId> packParams, NotNull<TypeFamilyContext> ctx)
{
if (typeParams.size() != 2 || !packParams.empty())
{
ctx->ice->ice("or type family: encountered a type family instance without the required argument structure");
LUAU_ASSERT(false);
}
TypeId lhsTy = follow(typeParams.at(0));
TypeId rhsTy = follow(typeParams.at(1));
if (isPending(lhsTy, ctx->solver))
{
return {std::nullopt, false, {lhsTy}, {}};
}
else if (isPending(rhsTy, ctx->solver))
{
return {std::nullopt, false, {rhsTy}, {}};
}
// Or evaluates to the truthy part of the LHS type if the LHS is truthy, and to the RHS type if the LHS is falsy.
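// Illustrative example (not from the original source): with lhsTy = number? and rhsTy = string, the
// intersection below keeps only the truthy part of the LHS (number), so the overall result simplifies to number | string.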
SimplifyResult filteredLhs = simplifyIntersection(ctx->builtins, ctx->arena, lhsTy, ctx->builtins->truthyType);
SimplifyResult overallResult = simplifyUnion(ctx->builtins, ctx->arena, rhsTy, filteredLhs.result);
std::vector<TypeId> blockedTypes(filteredLhs.blockedTypes.begin(), filteredLhs.blockedTypes.end());
blockedTypes.insert(blockedTypes.end(), overallResult.blockedTypes.begin(), overallResult.blockedTypes.end());
return {overallResult.result, false, std::move(blockedTypes), {}};
}
BuiltinTypeFamilies::BuiltinTypeFamilies()
: addFamily{"Add", addFamilyFn}
, subFamily{"Sub", subFamilyFn}
, mulFamily{"Mul", mulFamilyFn}
, divFamily{"Div", divFamilyFn}
, idivFamily{"FloorDiv", idivFamilyFn}
, powFamily{"Exp", powFamilyFn}
, modFamily{"Mod", modFamilyFn}
, andFamily{"And", andFamilyFn}
, orFamily{"Or", orFamilyFn}
{
}
void BuiltinTypeFamilies::addToScope(NotNull<TypeArena> arena, NotNull<Scope> scope) const
{
// make a type function for a two-argument type family
auto mkBinaryTypeFamily = [&](const TypeFamily* family) {
TypeId t = arena->addType(GenericType{"T"});
TypeId u = arena->addType(GenericType{"U"});
GenericTypeDefinition genericT{t};
GenericTypeDefinition genericU{u};
return TypeFun{{genericT, genericU}, arena->addType(TypeFamilyInstanceType{NotNull{family}, {t, u}, {}})};
};
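// Illustrative effect (not part of this change): each family below is exported under its name, so an
// annotation such as `Add<number, number>` resolves through the corresponding family and, for numeric
// operands, reduces to number via the metamethod-based reducer above.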
scope->exportedTypeBindings[addFamily.name] = mkBinaryTypeFamily(&addFamily);
scope->exportedTypeBindings[subFamily.name] = mkBinaryTypeFamily(&subFamily);
scope->exportedTypeBindings[mulFamily.name] = mkBinaryTypeFamily(&mulFamily);
scope->exportedTypeBindings[divFamily.name] = mkBinaryTypeFamily(&divFamily);
scope->exportedTypeBindings[idivFamily.name] = mkBinaryTypeFamily(&idivFamily);
scope->exportedTypeBindings[powFamily.name] = mkBinaryTypeFamily(&powFamily);
scope->exportedTypeBindings[modFamily.name] = mkBinaryTypeFamily(&modFamily);
}
} // namespace Luau

View File

@ -454,17 +454,21 @@ void Unifier::tryUnify_(TypeId subTy, TypeId superTy, bool isFunctionCall, bool
if (log.get<TypeFamilyInstanceType>(superTy))
{
// We do not report errors from reducing here. This is because we will
// "double-report" errors in some cases, like when trying to unify
// identical type family instantiations like Add<false, false> with
// Add<false, false>.
reduceFamilies(superTy, location, NotNull(types), builtinTypes, scope, normalizer, &log);
// FIXME: we should be ICEing here because the old unifier is legacy and should not interact with type families at all.
// Unfortunately, there are, at the time of writing, still uses of the old unifier under local type inference.
TypeCheckLimits limits;
reduceFamilies(
superTy, location, TypeFamilyContext{NotNull(types), builtinTypes, scope, normalizer, NotNull{sharedState.iceHandler}, NotNull{&limits}});
superTy = log.follow(superTy);
}
if (log.get<TypeFamilyInstanceType>(subTy))
{
reduceFamilies(subTy, location, NotNull(types), builtinTypes, scope, normalizer, &log);
// FIXME: we should be ICEing here because the old unifier is legacy and should not interact with type families at all.
// Unfortunately, there are, at the time of writing, still uses of the old unifier under local type inference.
TypeCheckLimits limits;
reduceFamilies(
subTy, location, TypeFamilyContext{NotNull(types), builtinTypes, scope, normalizer, NotNull{sharedState.iceHandler}, NotNull{&limits}});
subTy = log.follow(subTy);
}

View File

@ -154,6 +154,9 @@ bool Unifier2::unify(TypeId subTy, const FunctionType* superFn) {
if (shouldInstantiate)
{
std::optional<TypeId> instantiated = instantiate(builtinTypes, arena, NotNull{&limits}, scope, subTy);
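// Instantiation can fail (for example when type complexity limits are exceeded, an assumption about
// `instantiate` here); treat that as a failed unification rather than dereferencing an empty optional.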
if (!instantiated)
return false;
subFn = get<FunctionType>(*instantiated);
LUAU_ASSERT(subFn); // instantiation should not make a function type _not_ a function type.

View File

@ -795,6 +795,11 @@ public:
const AstArray<AstGenericTypePack>& genericPacks, const AstTypeList& params, const AstArray<AstArgumentName>& paramNames,
const AstTypeList& retTypes);
AstStatDeclareFunction(const Location& location, const AstName& name, const AstArray<AstGenericType>& generics,
const AstArray<AstGenericTypePack>& genericPacks, const AstTypeList& params, const AstArray<AstArgumentName>& paramNames,
const AstTypeList& retTypes, bool checkedFunction);
void visit(AstVisitor* visitor) override;
AstName name;
@ -803,6 +808,7 @@ public:
AstTypeList params;
AstArray<AstArgumentName> paramNames;
AstTypeList retTypes;
bool checkedFunction;
};
struct AstDeclaredClassProp
@ -903,6 +909,9 @@ public:
AstTypeFunction(const Location& location, const AstArray<AstGenericType>& generics, const AstArray<AstGenericTypePack>& genericPacks,
const AstTypeList& argTypes, const AstArray<std::optional<AstArgumentName>>& argNames, const AstTypeList& returnTypes);
AstTypeFunction(const Location& location, const AstArray<AstGenericType>& generics, const AstArray<AstGenericTypePack>& genericPacks,
const AstTypeList& argTypes, const AstArray<std::optional<AstArgumentName>>& argNames, const AstTypeList& returnTypes, bool checkedFunction);
void visit(AstVisitor* visitor) override;
AstArray<AstGenericType> generics;
@ -910,6 +919,7 @@ public:
AstTypeList argTypes;
AstArray<std::optional<AstArgumentName>> argNames;
AstTypeList returnTypes;
bool checkedFunction;
};
class AstTypeTypeof : public AstType

View File

@ -91,7 +91,6 @@ struct Lexeme
BrokenComment,
BrokenUnicode,
BrokenInterpDoubleBrace,
Error,
Reserved_BEGIN,
@ -116,6 +115,7 @@ struct Lexeme
ReservedTrue,
ReservedUntil,
ReservedWhile,
ReservedChecked,
Reserved_END
};

View File

@ -176,15 +176,16 @@ private:
AstTableIndexer* parseTableIndexer();
AstTypeOrPack parseFunctionType(bool allowPack);
AstTypeOrPack parseFunctionType(bool allowPack, bool isCheckedFunction = false);
AstType* parseFunctionTypeTail(const Lexeme& begin, AstArray<AstGenericType> generics, AstArray<AstGenericTypePack> genericPacks,
AstArray<AstType*> params, AstArray<std::optional<AstArgumentName>> paramNames, AstTypePack* varargAnnotation);
AstArray<AstType*> params, AstArray<std::optional<AstArgumentName>> paramNames, AstTypePack* varargAnnotation,
bool isCheckedFunction = false);
AstType* parseTableType();
AstTypeOrPack parseSimpleType(bool allowPack);
AstType* parseTableType(bool inDeclarationContext = false);
AstTypeOrPack parseSimpleType(bool allowPack, bool inDeclarationContext = false);
AstTypeOrPack parseTypeOrPack();
AstType* parseType();
AstType* parseType(bool inDeclarationContext = false);
AstTypePack* parseTypePack();
AstTypePack* parseVariadicArgumentTypePack();

View File

@ -709,6 +709,21 @@ AstStatDeclareFunction::AstStatDeclareFunction(const Location& location, const A
, params(params)
, paramNames(paramNames)
, retTypes(retTypes)
, checkedFunction(false)
{
}
AstStatDeclareFunction::AstStatDeclareFunction(const Location& location, const AstName& name, const AstArray<AstGenericType>& generics,
const AstArray<AstGenericTypePack>& genericPacks, const AstTypeList& params, const AstArray<AstArgumentName>& paramNames,
const AstTypeList& retTypes, bool checkedFunction)
: AstStat(ClassIndex(), location)
, name(name)
, generics(generics)
, genericPacks(genericPacks)
, params(params)
, paramNames(paramNames)
, retTypes(retTypes)
, checkedFunction(checkedFunction)
{
}
@ -817,6 +832,20 @@ AstTypeFunction::AstTypeFunction(const Location& location, const AstArray<AstGen
, argTypes(argTypes)
, argNames(argNames)
, returnTypes(returnTypes)
, checkedFunction(false)
{
LUAU_ASSERT(argNames.size == 0 || argNames.size == argTypes.types.size);
}
AstTypeFunction::AstTypeFunction(const Location& location, const AstArray<AstGenericType>& generics, const AstArray<AstGenericTypePack>& genericPacks,
const AstTypeList& argTypes, const AstArray<std::optional<AstArgumentName>>& argNames, const AstTypeList& returnTypes, bool checkedFunction)
: AstType(ClassIndex(), location)
, generics(generics)
, genericPacks(genericPacks)
, argTypes(argTypes)
, argNames(argNames)
, returnTypes(returnTypes)
, checkedFunction(checkedFunction)
{
LUAU_ASSERT(argNames.size == 0 || argNames.size == argTypes.types.size);
}

View File

@ -1,6 +1,7 @@
// This file is part of the Luau programming language and is licensed under MIT License; see LICENSE.txt for details
#include "Luau/Lexer.h"
#include "Luau/Common.h"
#include "Luau/Confusables.h"
#include "Luau/StringUtils.h"
@ -8,6 +9,7 @@
LUAU_FASTFLAGVARIABLE(LuauFloorDivision, false)
LUAU_FASTFLAGVARIABLE(LuauLexerLookaheadRemembersBraceType, false)
LUAU_FASTFLAGVARIABLE(LuauCheckedFunctionSyntax, false)
namespace Luau
{
@ -106,7 +108,7 @@ Lexeme::Lexeme(const Location& location, Type type, const char* name)
}
static const char* kReserved[] = {"and", "break", "do", "else", "elseif", "end", "false", "for", "function", "if", "in", "local", "nil", "not", "or",
"repeat", "return", "then", "true", "until", "while"};
"repeat", "return", "then", "true", "until", "while", "@checked"};
std::string Lexeme::toString() const
{
@ -709,7 +711,7 @@ Lexeme Lexer::readNumber(const Position& start, unsigned int startOffset)
std::pair<AstName, Lexeme::Type> Lexer::readName()
{
LUAU_ASSERT(isAlpha(peekch()) || peekch() == '_');
LUAU_ASSERT(isAlpha(peekch()) || peekch() == '_' || peekch() == '@');
unsigned int startOffset = offset;
@ -1007,7 +1009,20 @@ Lexeme Lexer::readNext()
return Lexeme(Location(start, 1), ch);
}
case '@':
{
if (FFlag::LuauCheckedFunctionSyntax)
{
// We're trying to lex the token @checked
LUAU_ASSERT(peekch() == '@');
std::pair<AstName, Lexeme::Type> maybeChecked = readName();
if (maybeChecked.second != Lexeme::ReservedChecked)
return Lexeme(Location(start, position()), Lexeme::Error);
return Lexeme(Location(start, position()), maybeChecked.second, maybeChecked.first.value);
}
}
default:
if (isDigit(peekch()))
{

View File

@ -1,6 +1,7 @@
// This file is part of the Luau programming language and is licensed under MIT License; see LICENSE.txt for details
#include "Luau/Parser.h"
#include "Luau/Common.h"
#include "Luau/TimeTrace.h"
#include <algorithm>
@ -15,6 +16,7 @@ LUAU_FASTINTVARIABLE(LuauRecursionLimit, 1000)
LUAU_FASTINTVARIABLE(LuauParseErrorLimit, 100)
LUAU_FASTFLAGVARIABLE(LuauParseDeclareClassIndexer, false)
LUAU_FASTFLAG(LuauFloorDivision)
LUAU_FASTFLAG(LuauCheckedFunctionSyntax)
namespace Luau
{
@ -823,8 +825,14 @@ AstStat* Parser::parseDeclaration(const Location& start)
if (lexer.current().type == Lexeme::ReservedFunction)
{
nextLexeme();
Name globalName = parseName("global function name");
bool checkedFunction = false;
if (FFlag::LuauCheckedFunctionSyntax && lexer.current().type == Lexeme::ReservedChecked)
{
checkedFunction = true;
nextLexeme();
}
Name globalName = parseName("global function name");
auto [generics, genericPacks] = parseGenericTypeList(/* withDefaultValues= */ false);
MatchLexeme matchParen = lexer.current();
@ -860,8 +868,8 @@ AstStat* Parser::parseDeclaration(const Location& start)
if (vararg && !varargAnnotation)
return reportStatError(Location(start, end), {}, {}, "All declaration parameters must be annotated");
return allocator.alloc<AstStatDeclareFunction>(
Location(start, end), globalName.name, generics, genericPacks, AstTypeList{copy(vars), varargAnnotation}, copy(varNames), retTypes);
return allocator.alloc<AstStatDeclareFunction>(Location(start, end), globalName.name, generics, genericPacks,
AstTypeList{copy(vars), varargAnnotation}, copy(varNames), retTypes, checkedFunction);
}
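// Illustrative example of the surface syntax this branch accepts (gated behind LuauCheckedFunctionSyntax;
// the function name is hypothetical):
//   declare function @checked abs(n: number): number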
else if (AstName(lexer.current().name) == "class")
{
@ -940,7 +948,7 @@ AstStat* Parser::parseDeclaration(const Location& start)
{
expectAndConsume(':', "global variable declaration");
AstType* type = parseType();
AstType* type = parseType(/* in declaration context */ true);
return allocator.alloc<AstStatDeclareGlobal>(Location(start, type->location), globalName->name, type);
}
else
@ -1302,7 +1310,7 @@ AstTableIndexer* Parser::parseTableIndexer()
// TablePropOrIndexer ::= TableProp | TableIndexer
// PropList ::= TablePropOrIndexer {fieldsep TablePropOrIndexer} [fieldsep]
// TableType ::= `{' PropList `}'
AstType* Parser::parseTableType()
AstType* Parser::parseTableType(bool inDeclarationContext)
{
incrementRecursionCounter("type annotation");
@ -1370,7 +1378,7 @@ AstType* Parser::parseTableType()
expectAndConsume(':', "table field");
AstType* type = parseType();
AstType* type = parseType(inDeclarationContext);
props.push_back({name->name, name->location, type});
}
@ -1396,7 +1404,7 @@ AstType* Parser::parseTableType()
// ReturnType ::= Type | `(' TypeList `)'
// FunctionType ::= [`<' varlist `>'] `(' [TypeList] `)' `->` ReturnType
AstTypeOrPack Parser::parseFunctionType(bool allowPack)
AstTypeOrPack Parser::parseFunctionType(bool allowPack, bool isCheckedFunction)
{
incrementRecursionCounter("type annotation");
@ -1444,11 +1452,11 @@ AstTypeOrPack Parser::parseFunctionType(bool allowPack)
AstArray<std::optional<AstArgumentName>> paramNames = copy(names);
return {parseFunctionTypeTail(begin, generics, genericPacks, paramTypes, paramNames, varargAnnotation), {}};
return {parseFunctionTypeTail(begin, generics, genericPacks, paramTypes, paramNames, varargAnnotation, isCheckedFunction), {}};
}
AstType* Parser::parseFunctionTypeTail(const Lexeme& begin, AstArray<AstGenericType> generics, AstArray<AstGenericTypePack> genericPacks,
AstArray<AstType*> params, AstArray<std::optional<AstArgumentName>> paramNames, AstTypePack* varargAnnotation)
AstArray<AstType*> params, AstArray<std::optional<AstArgumentName>> paramNames, AstTypePack* varargAnnotation, bool isCheckedFunction)
{
incrementRecursionCounter("type annotation");
@ -1472,7 +1480,8 @@ AstType* Parser::parseFunctionTypeTail(const Lexeme& begin, AstArray<AstGenericT
auto [endLocation, returnTypeList] = parseReturnType();
AstTypeList paramTypes = AstTypeList{params, varargAnnotation};
return allocator.alloc<AstTypeFunction>(Location(begin.location, endLocation), generics, genericPacks, paramTypes, paramNames, returnTypeList);
return allocator.alloc<AstTypeFunction>(
Location(begin.location, endLocation), generics, genericPacks, paramTypes, paramNames, returnTypeList, isCheckedFunction);
}
// Type ::=
@ -1565,14 +1574,14 @@ AstTypeOrPack Parser::parseTypeOrPack()
return {parseTypeSuffix(type, begin), {}};
}
AstType* Parser::parseType()
AstType* Parser::parseType(bool inDeclarationContext)
{
unsigned int oldRecursionCount = recursionCounter;
incrementRecursionCounter("type annotation");
Location begin = lexer.current().location;
AstType* type = parseSimpleType(/* allowPack= */ false).type;
AstType* type = parseSimpleType(/* allowPack= */ false, /* in declaration context */ inDeclarationContext).type;
recursionCounter = oldRecursionCount;
@ -1581,7 +1590,7 @@ AstType* Parser::parseType()
// Type ::= nil | Name[`.' Name] [ `<' Type [`,' ...] `>' ] | `typeof' `(' expr `)' | `{' [PropList] `}'
// | [`<' varlist `>'] `(' [TypeList] `)' `->` ReturnType
AstTypeOrPack Parser::parseSimpleType(bool allowPack)
AstTypeOrPack Parser::parseSimpleType(bool allowPack, bool inDeclarationContext)
{
incrementRecursionCounter("type annotation");
@ -1673,7 +1682,13 @@ AstTypeOrPack Parser::parseSimpleType(bool allowPack)
}
else if (lexer.current().type == '{')
{
return {parseTableType(), {}};
return {parseTableType(/* inDeclarationContext */ inDeclarationContext), {}};
}
else if (FFlag::LuauCheckedFunctionSyntax && inDeclarationContext && lexer.current().type == Lexeme::ReservedChecked)
{
LUAU_ASSERT(FFlag::LuauCheckedFunctionSyntax);
nextLexeme();
return parseFunctionType(allowPack, /* isCheckedFunction */ true);
}
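// Illustrative example (hypothetical declaration): in declaration context this branch accepts a checked
// function type, e.g. `declare sqrt: @checked (number) -> number`.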
else if (lexer.current().type == '(' || lexer.current().type == '<')
{

View File

@ -369,10 +369,9 @@ int main(int argc, char** argv)
stats.bytecode == 0 ? 0.0 : double(stats.codegen) / double(stats.bytecode), stats.readTime, stats.parseTime, stats.compileTime,
stats.codegenTime);
printf("Lowering stats:\n");
printf("- spills to stack: %d, spills to restore: %d, max spill slot %u\n", stats.lowerStats.spillsToSlot, stats.lowerStats.spillsToRestore,
printf("Lowering: regalloc failed: %d, lowering failed %d; spills to stack: %d, spills to restore: %d, max spill slot %u\n",
stats.lowerStats.regAllocErrors, stats.lowerStats.loweringErrors, stats.lowerStats.spillsToSlot, stats.lowerStats.spillsToRestore,
stats.lowerStats.maxSpillSlotsUsed);
printf("- regalloc failed: %d, lowering failed %d\n", stats.lowerStats.regAllocErrors, stats.lowerStats.loweringErrors);
}
return failed ? 1 : 0;

View File

@ -85,6 +85,7 @@ public:
void test(OperandX64 lhs, OperandX64 rhs);
void lea(OperandX64 lhs, OperandX64 rhs);
void setcc(ConditionX64 cond, OperandX64 op);
void cmov(ConditionX64 cond, RegisterX64 lhs, OperandX64 rhs);
void push(OperandX64 op);
void pop(OperandX64 op);

View File

@ -17,6 +17,8 @@ enum CodeGenFlags
{
// Only run native codegen for modules that have been marked with --!native
CodeGen_OnlyNativeModules = 1 << 0,
// Run native codegen for functions that the compiler considers not profitable
CodeGen_ColdFunctions = 1 << 1,
};
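// Illustrative usage (a sketch, not from this change): pass the flag when compiling so that functions the
// compiler marked as cold are included as well, e.g.
//   Luau::CodeGen::compile(L, -1, CodeGen_OnlyNativeModules | CodeGen_ColdFunctions, /*stats=*/nullptr);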
enum class CodeGenCompilationResult

View File

@ -27,6 +27,11 @@ static const char* setccTextForCondition[] = {"seto", "setno", "setc", "setnc",
"setge", "setnb", "setnbe", "setna", "setnae", "setne", "setnl", "setnle", "setng", "setnge", "setz", "setnz", "setp", "setnp"};
static_assert(sizeof(setccTextForCondition) / sizeof(setccTextForCondition[0]) == size_t(ConditionX64::Count), "all conditions have to be covered");
static const char* cmovTextForCondition[] = {"cmovo", "cmovno", "cmovc", "cmovnc", "cmovb", "cmovbe", "cmova", "cmovae", "cmove", "cmovl", "cmovle",
"cmovg", "cmovge", "cmovnb", "cmovnbe", "cmovna", "cmovnae", "cmovne", "cmovnl", "cmovnle", "cmovng", "cmovnge", "cmovz", "cmovnz", "cmovp",
"cmovnp"};
static_assert(sizeof(cmovTextForCondition) / sizeof(cmovTextForCondition[0]) == size_t(ConditionX64::Count), "all conditions have to be covered");
#define OP_PLUS_REG(op, reg) ((op) + (reg & 0x7))
#define OP_PLUS_CC(op, cc) ((op) + uint8_t(cc))
@ -404,6 +409,20 @@ void AssemblyBuilderX64::setcc(ConditionX64 cond, OperandX64 op)
commit();
}
void AssemblyBuilderX64::cmov(ConditionX64 cond, RegisterX64 lhs, OperandX64 rhs)
{
SizeX64 size = rhs.cat == CategoryX64::reg ? rhs.base.size : rhs.memSize;
LUAU_ASSERT(size != SizeX64::byte && size == lhs.size);
if (logText)
log(cmovTextForCondition[size_t(cond)], lhs, rhs);
placeRex(lhs, rhs);
place(0x0f);
place(0x40 | codeForCondition[size_t(cond)]);
placeRegAndModRegMem(lhs, rhs);
commit();
}
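// Usage sketch (illustrative, not part of this change): conditionally move edx into ecx when the preceding
// comparison set the "equal" flag; the register names and the cmp call are assumptions about the surrounding API.
//   build.cmp(eax, ebx);
//   build.cmov(ConditionX64::Equal, ecx, edx);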
void AssemblyBuilderX64::jcc(ConditionX64 cond, Label& label)
{
placeJcc(jccTextForCondition[size_t(cond)], label, codeForCondition[size_t(cond)]);

View File

@ -259,7 +259,7 @@ CodeGenCompilationResult compile(lua_State* L, int idx, unsigned int flags, Comp
return CodeGenCompilationResult::CodeGenNotInitialized;
std::vector<Proto*> protos;
gatherFunctions(protos, root);
gatherFunctions(protos, root, flags);
// Skip protos that have been compiled during previous invocations of CodeGen::compile
protos.erase(std::remove_if(protos.begin(), protos.end(),

View File

@ -5,6 +5,7 @@
#include "Luau/UnwindBuilder.h"
#include "BitUtils.h"
#include "CodeGenUtils.h"
#include "NativeState.h"
#include "EmitCommonA64.h"
@ -96,42 +97,27 @@ static void emitInterrupt(AssemblyBuilderA64& build)
build.br(x0);
}
static void emitReentry(AssemblyBuilderA64& build, ModuleHelpers& helpers)
static void emitContinueCall(AssemblyBuilderA64& build, ModuleHelpers& helpers)
{
// x0 = closure object to re-enter (equal to clvalue(L->ci->func))
// If the fallback requested an exit, we need to do this right away
build.cbz(x0, helpers.exitNoContinueVm);
emitUpdateBase(build);
// If the fallback yielded, we need to do this right away
// note: it's slightly cheaper to check x0 LSB; a valid Closure pointer must be aligned to 8 bytes
LUAU_ASSERT(CALL_FALLBACK_YIELD == 1);
build.tbnz(x0, 0, helpers.exitNoContinueVm);
// Need to update state of the current function before we jump away
build.ldr(x1, mem(x0, offsetof(Closure, l.p))); // cl->l.p aka proto
build.ldr(x2, mem(rState, offsetof(lua_State, ci))); // L->ci
// We need to check if the new frame can be executed natively
// TODO: .flags and .savedpc load below can be fused with ldp
build.ldr(w3, mem(x2, offsetof(CallInfo, flags)));
build.tbz(x3, countrz(LUA_CALLINFO_NATIVE), helpers.exitContinueVm);
build.ldr(x2, mem(x1, offsetof(Proto, exectarget)));
build.cbz(x2, helpers.exitContinueVm);
build.mov(rClosure, x0);
LUAU_ASSERT(offsetof(Proto, code) == offsetof(Proto, k) + 8);
build.ldp(rConstants, rCode, mem(x1, offsetof(Proto, k))); // proto->k, proto->code
// Get instruction index from instruction pointer
// To get instruction index from instruction pointer, we need to divide byte offset by 4
// But we will actually need to scale instruction index by 4 back to byte offset later so it cancels out
build.ldr(x2, mem(x2, offsetof(CallInfo, savedpc))); // L->ci->savedpc
build.sub(x2, x2, rCode);
// Get new instruction location and jump to it
LUAU_ASSERT(offsetof(Proto, exectarget) == offsetof(Proto, execdata) + 8);
build.ldp(x3, x4, mem(x1, offsetof(Proto, execdata)));
build.ldr(w2, mem(x3, x2));
build.add(x4, x4, x2);
build.br(x4);
build.br(x2);
}
void emitReturn(AssemblyBuilderA64& build, ModuleHelpers& helpers)
@ -326,11 +312,6 @@ void assembleHelpers(AssemblyBuilderA64& build, ModuleHelpers& helpers)
build.setLabel(helpers.exitNoContinueVm);
emitExit(build, /* continueInVm */ false);
if (build.logText)
build.logAppend("; reentry\n");
build.setLabel(helpers.reentry);
emitReentry(build, helpers);
if (build.logText)
build.logAppend("; interrupt\n");
build.setLabel(helpers.interrupt);
@ -340,6 +321,11 @@ void assembleHelpers(AssemblyBuilderA64& build, ModuleHelpers& helpers)
build.logAppend("; return\n");
build.setLabel(helpers.return_);
emitReturn(build, helpers);
if (build.logText)
build.logAppend("; continueCall\n");
build.setLabel(helpers.continueCall);
emitContinueCall(build, helpers);
}
} // namespace A64

View File

@ -46,7 +46,15 @@ template<typename AssemblyBuilder>
static std::string getAssemblyImpl(AssemblyBuilder& build, const TValue* func, AssemblyOptions options, LoweringStats* stats)
{
std::vector<Proto*> protos;
gatherFunctions(protos, clvalue(func)->l.p);
gatherFunctions(protos, clvalue(func)->l.p, /* flags= */ 0);
protos.erase(std::remove_if(protos.begin(), protos.end(), [](Proto* p) { return p == nullptr; }), protos.end());
if (protos.empty())
{
build.finalize(); // to avoid assertion in AssemblyBuilder dtor
return std::string();
}
ModuleHelpers helpers;
assembleHelpers(build, helpers);
@ -58,24 +66,23 @@ static std::string getAssemblyImpl(AssemblyBuilder& build, const TValue* func, A
}
for (Proto* p : protos)
if (p)
{
IrBuilder ir;
ir.buildFunctionIr(p);
if (options.includeAssembly || options.includeIr)
logFunctionHeader(build, p);
if (!lowerFunction(ir, build, helpers, p, options, stats))
{
IrBuilder ir;
ir.buildFunctionIr(p);
if (options.includeAssembly || options.includeIr)
logFunctionHeader(build, p);
if (!lowerFunction(ir, build, helpers, p, options, stats))
{
if (build.logText)
build.logAppend("; skipping (can't lower)\n");
}
if (build.logText)
build.logAppend("\n");
build.logAppend("; skipping (can't lower)\n");
}
if (build.logText)
build.logAppend("\n");
}
if (!build.finalize())
return std::string();

View File

@ -29,7 +29,7 @@ namespace Luau
namespace CodeGen
{
inline void gatherFunctions(std::vector<Proto*>& results, Proto* proto)
inline void gatherFunctions(std::vector<Proto*>& results, Proto* proto, unsigned int flags)
{
if (results.size() <= size_t(proto->bytecodeid))
results.resize(proto->bytecodeid + 1);
@ -38,10 +38,13 @@ inline void gatherFunctions(std::vector<Proto*>& results, Proto* proto)
if (results[proto->bytecodeid])
return;
results[proto->bytecodeid] = proto;
// Only compile cold functions if requested
if ((proto->flags & LPF_NATIVE_COLD) == 0 || (flags & CodeGen_ColdFunctions) != 0)
results[proto->bytecodeid] = proto;
// Recursively traverse child protos even if we aren't compiling this one
for (int i = 0; i < proto->sizep; i++)
gatherFunctions(results, proto->p[i]);
gatherFunctions(results, proto->p[i], flags);
}
template<typename AssemblyBuilder, typename IrLowering>

View File

@ -275,7 +275,7 @@ Closure* callFallback(lua_State* L, StkId ra, StkId argtop, int nresults)
// yield
if (n < 0)
return NULL;
return (Closure*)CALL_FALLBACK_YIELD;
// ci is our callinfo, cip is our parent
CallInfo* ci = L->ci;
@ -299,8 +299,7 @@ Closure* callFallback(lua_State* L, StkId ra, StkId argtop, int nresults)
L->top = (nresults == LUA_MULTRET) ? res : cip->top;
// keep executing current function
LUAU_ASSERT(isLua(cip));
return clvalue(cip->func);
return NULL;
}
}

View File

@ -17,6 +17,8 @@ void forgPrepXnextFallback(lua_State* L, TValue* ra, int pc);
Closure* callProlog(lua_State* L, TValue* ra, StkId argtop, int nresults);
void callEpilogC(lua_State* L, int nresults, int n);
#define CALL_FALLBACK_YIELD 1
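// Sentinel returned by callFallback when the call yielded; it is distinguishable from a real Closure*
// because valid Closure pointers are aligned to 8 bytes.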
Closure* callFallback(lua_State* L, StkId ra, StkId argtop, int nresults);
const Instruction* executeGETGLOBAL(lua_State* L, const Instruction* pc, StkId base, TValue* k);

View File

@ -240,11 +240,6 @@ void assembleHelpers(X64::AssemblyBuilderX64& build, ModuleHelpers& helpers)
build.setLabel(helpers.exitNoContinueVm);
emitExit(build, /* continueInVm */ false);
if (build.logText)
build.logAppend("; continueCallInVm\n");
build.setLabel(helpers.continueCallInVm);
emitContinueCallInVm(build);
if (build.logText)
build.logAppend("; interrupt\n");
build.setLabel(helpers.interrupt);

View File

@ -29,11 +29,8 @@ struct ModuleHelpers
Label return_;
Label interrupt;
// X64
Label continueCallInVm;
// A64
Label reentry; // x0: closure
Label continueCall; // x0: closure
};
} // namespace CodeGen

View File

@ -378,17 +378,6 @@ void emitUpdatePcForExit(AssemblyBuilderX64& build)
build.mov(qword[rax + offsetof(CallInfo, savedpc)], rdx);
}
void emitContinueCallInVm(AssemblyBuilderX64& build)
{
RegisterX64 proto = rcx; // Sync with emitInstCall
build.mov(rdx, qword[proto + offsetof(Proto, code)]);
build.mov(rax, qword[rState + offsetof(lua_State, ci)]);
build.mov(qword[rax + offsetof(CallInfo, savedpc)], rdx);
emitExit(build, /* continueInVm */ true);
}
void emitReturn(AssemblyBuilderX64& build, ModuleHelpers& helpers)
{
// input: res in rdi, number of written values in ecx

View File

@ -43,7 +43,7 @@ constexpr RegisterX64 rNativeContext = r13; // NativeContext* context
constexpr RegisterX64 rConstants = r12; // TValue* k
constexpr unsigned kExtraLocals = 3; // Number of 8 byte slots available for specialized local variables specified below
constexpr unsigned kSpillSlots = 5; // Number of 8 byte slots available for register allocator to spill data into
constexpr unsigned kSpillSlots = 13; // Number of 8 byte slots available for register allocator to spill data into
static_assert((kExtraLocals + kSpillSlots) * 8 % 16 == 0, "locals have to preserve 16 byte alignment");
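// e.g. with kExtraLocals = 3 and kSpillSlots = 13: (3 + 13) * 8 = 128 bytes, a multiple of 16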
constexpr uint8_t kWindowsFirstNonVolXmmReg = 6;
@ -216,7 +216,6 @@ void emitInterrupt(AssemblyBuilderX64& build);
void emitFallback(IrRegAllocX64& regs, AssemblyBuilderX64& build, int offset, int pcpos);
void emitUpdatePcForExit(AssemblyBuilderX64& build);
void emitContinueCallInVm(AssemblyBuilderX64& build);
void emitReturn(AssemblyBuilderX64& build, ModuleHelpers& helpers);

View File

@ -79,36 +79,34 @@ void emitInstCall(AssemblyBuilderX64& build, ModuleHelpers& helpers, int ra, int
// Set L->top to ci->top as most functions expect (no vararg)
build.mov(rax, qword[ci + offsetof(CallInfo, top)]);
build.mov(qword[rState + offsetof(lua_State, top)], rax);
// But if it is vararg, update it to 'argi'
Label skipVararg;
build.test(byte[proto + offsetof(Proto, is_vararg)], 1);
build.jcc(ConditionX64::Zero, skipVararg);
build.mov(rax, argi);
build.mov(qword[rState + offsetof(lua_State, top)], argi);
build.setLabel(skipVararg);
// Keep executing new function
build.mov(qword[rState + offsetof(lua_State, top)], rax);
// Switch current code
// ci->savedpc = p->code;
build.mov(rax, qword[proto + offsetof(Proto, code)]);
build.mov(sCode, rax); // note: this needs to be before the next store for optimal performance
build.mov(qword[ci + offsetof(CallInfo, savedpc)], rax);
// Get native function entry
build.mov(rax, qword[proto + offsetof(Proto, exectarget)]);
build.test(rax, rax);
build.jcc(ConditionX64::Zero, helpers.continueCallInVm);
// Mark call frame as native
build.mov(dword[ci + offsetof(CallInfo, flags)], LUA_CALLINFO_NATIVE);
// Switch current constants
build.mov(rConstants, qword[proto + offsetof(Proto, k)]);
// Switch current code
build.mov(rdx, qword[proto + offsetof(Proto, code)]);
build.mov(sCode, rdx);
// Get native function entry
build.mov(rax, qword[proto + offsetof(Proto, exectarget)]);
build.test(rax, rax);
build.jcc(ConditionX64::Zero, helpers.exitContinueVm);
// Mark call frame as native
build.mov(dword[ci + offsetof(CallInfo, flags)], LUA_CALLINFO_NATIVE);
build.jmp(rax);
}

View File

@ -146,6 +146,11 @@ void IrBuilder::buildFunctionIr(Proto* proto)
if (instIndexToBlock[i] != kNoAssociatedBlockIndex)
beginBlock(blockAtInst(i));
// Numeric for loops require additional processing to maintain loop stack
// Notably, this must be performed even when the block is dead so that we maintain the pairing FORNPREP-FORNLOOP
if (op == LOP_FORNPREP)
beforeInstForNPrep(*this, pc);
// We skip dead bytecode instructions when they appear after block was already terminated
if (!inTerminatedBlock)
{
@ -164,6 +169,10 @@ void IrBuilder::buildFunctionIr(Proto* proto)
}
}
// See above for FORNPREP..FORNLOOP processing
if (op == LOP_FORNLOOP)
afterInstForNLoop(*this, pc);
i = nexti;
LUAU_ASSERT(i <= proto->sizecode);

View File

@ -1515,8 +1515,10 @@ void IrLoweringA64::lowerInst(IrInst& inst, uint32_t index, const IrBlock& next)
build.ldr(x4, mem(rNativeContext, offsetof(NativeContext, callFallback)));
build.blr(x4);
// reentry with x0=closure (NULL will trigger exit)
build.b(helpers.reentry);
emitUpdateBase(build);
// reentry with x0=closure (NULL implies C function; CALL_FALLBACK_YIELD will trigger exit)
build.cbnz(x0, helpers.continueCall);
break;
case IrCmd::RETURN:
regs.spill(build, index);

View File

@ -10,7 +10,7 @@
#include <string.h>
LUAU_FASTFLAGVARIABLE(DebugLuauCodegenChaosA64, false)
LUAU_FASTFLAGVARIABLE(DebugCodegenChaosA64, false)
namespace Luau
{
@ -146,7 +146,7 @@ RegisterA64 IrRegAllocA64::allocReg(KindA64 kind, uint32_t index)
int reg = 31 - countlz(set.free);
if (FFlag::DebugLuauCodegenChaosA64)
if (FFlag::DebugCodegenChaosA64)
reg = countrz(set.free); // allocate from low end; this causes extra conflicts for calls
set.free &= ~(1u << reg);
@ -167,7 +167,7 @@ RegisterA64 IrRegAllocA64::allocTemp(KindA64 kind)
int reg = 31 - countlz(set.free);
if (FFlag::DebugLuauCodegenChaosA64)
if (FFlag::DebugCodegenChaosA64)
reg = countrz(set.free); // allocate from low end; this causes extra conflicts for calls
set.free &= ~(1u << reg);
@ -278,7 +278,7 @@ size_t IrRegAllocA64::spill(AssemblyBuilderA64& build, uint32_t index, std::init
uint32_t poisongpr = 0;
uint32_t poisonsimd = 0;
if (FFlag::DebugLuauCodegenChaosA64)
if (FFlag::DebugCodegenChaosA64)
{
poisongpr = gpr.base & ~gpr.free;
poisonsimd = simd.base & ~simd.free;
@ -370,7 +370,7 @@ size_t IrRegAllocA64::spill(AssemblyBuilderA64& build, uint32_t index, std::init
LUAU_ASSERT(set.free == set.base);
}
if (FFlag::DebugLuauCodegenChaosA64)
if (FFlag::DebugCodegenChaosA64)
{
for (int reg = 0; reg < 32; ++reg)
{
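
For reference, the chaos flag only flips which end of the free-register bitmask the allocator picks from; a small sketch of the two orders using standard bit operations (C++20 <bit>, illustrative only, assuming the mask is non-zero):

#include <bit>
#include <cstdint>

// Normally the highest free register is chosen (31 - countlz); under the
// chaos flag the lowest one is chosen (countrz), which deliberately collides
// with registers that calls clobber and stresses the spill paths.
int pickRegister(uint32_t freeMask, bool chaos)
{
    return chaos ? std::countr_zero(freeMask) : 31 - std::countl_zero(freeMask);
}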

View File

@ -83,6 +83,9 @@ static BuiltinImplResult translateBuiltin2NumberToNumberLibm(
IrOp va = builtinLoadDouble(build, build.vmReg(arg));
IrOp vb = builtinLoadDouble(build, args);
if (bfid == LBF_MATH_LDEXP)
vb = build.inst(IrCmd::NUM_TO_INT, vb);
IrOp res = build.inst(IrCmd::INVOKE_LIBM, build.constUint(bfid), va, vb);
build.inst(IrCmd::STORE_DOUBLE, build.vmReg(ra), res);
@ -93,30 +96,6 @@ static BuiltinImplResult translateBuiltin2NumberToNumberLibm(
return {BuiltinImplType::Full, 1};
}
static BuiltinImplResult translateBuiltinMathLdexp(
IrBuilder& build, LuauBuiltinFunction bfid, int nparams, int ra, int arg, IrOp args, int nresults, int pcpos)
{
if (nparams < 2 || nresults > 1)
return {BuiltinImplType::None, -1};
builtinCheckDouble(build, build.vmReg(arg), pcpos);
builtinCheckDouble(build, args, pcpos);
IrOp va = builtinLoadDouble(build, build.vmReg(arg));
IrOp vb = builtinLoadDouble(build, args);
IrOp vbi = build.inst(IrCmd::NUM_TO_INT, vb);
IrOp res = build.inst(IrCmd::INVOKE_LIBM, build.constUint(bfid), va, vbi);
build.inst(IrCmd::STORE_DOUBLE, build.vmReg(ra), res);
if (ra != arg)
build.inst(IrCmd::STORE_TAG, build.vmReg(ra), build.constTag(LUA_TNUMBER));
return {BuiltinImplType::Full, 1};
}
// (number, ...) -> (number, number)
static BuiltinImplResult translateBuiltinNumberTo2Number(
IrBuilder& build, LuauBuiltinFunction bfid, int nparams, int ra, int arg, IrOp args, int nresults, int pcpos)
@ -152,7 +131,7 @@ static BuiltinImplResult translateBuiltinAssert(IrBuilder& build, int nparams, i
return {BuiltinImplType::UsesFallback, 0};
}
static BuiltinImplResult translateBuiltinMathDeg(IrBuilder& build, int nparams, int ra, int arg, IrOp args, int nresults, int pcpos)
static BuiltinImplResult translateBuiltinMathDegRad(IrBuilder& build, IrCmd cmd, int nparams, int ra, int arg, IrOp args, int nresults, int pcpos)
{
if (nparams < 1 || nresults > 1)
return {BuiltinImplType::None, -1};
@ -162,26 +141,7 @@ static BuiltinImplResult translateBuiltinMathDeg(IrBuilder& build, int nparams,
const double rpd = (3.14159265358979323846 / 180.0);
IrOp varg = builtinLoadDouble(build, build.vmReg(arg));
IrOp value = build.inst(IrCmd::DIV_NUM, varg, build.constDouble(rpd));
build.inst(IrCmd::STORE_DOUBLE, build.vmReg(ra), value);
if (ra != arg)
build.inst(IrCmd::STORE_TAG, build.vmReg(ra), build.constTag(LUA_TNUMBER));
return {BuiltinImplType::Full, 1};
}
static BuiltinImplResult translateBuiltinMathRad(IrBuilder& build, int nparams, int ra, int arg, IrOp args, int nresults, int pcpos)
{
if (nparams < 1 || nresults > 1)
return {BuiltinImplType::None, -1};
builtinCheckDouble(build, build.vmReg(arg), pcpos);
const double rpd = (3.14159265358979323846 / 180.0);
IrOp varg = builtinLoadDouble(build, build.vmReg(arg));
IrOp value = build.inst(IrCmd::MUL_NUM, varg, build.constDouble(rpd));
IrOp value = build.inst(cmd, varg, build.constDouble(rpd));
build.inst(IrCmd::STORE_DOUBLE, build.vmReg(ra), value);
if (ra != arg)
@ -231,7 +191,7 @@ static BuiltinImplResult translateBuiltinMathLog(
return {BuiltinImplType::Full, 1};
}
static BuiltinImplResult translateBuiltinMathMin(IrBuilder& build, int nparams, int ra, int arg, IrOp args, int nresults, int pcpos)
static BuiltinImplResult translateBuiltinMathMinMax(IrBuilder& build, IrCmd cmd, int nparams, int ra, int arg, IrOp args, int nresults, int pcpos)
{
if (nparams < 2 || nparams > kMinMaxUnrolledParams || nresults > 1)
return {BuiltinImplType::None, -1};
@ -245,42 +205,12 @@ static BuiltinImplResult translateBuiltinMathMin(IrBuilder& build, int nparams,
IrOp varg1 = builtinLoadDouble(build, build.vmReg(arg));
IrOp varg2 = builtinLoadDouble(build, args);
IrOp res = build.inst(IrCmd::MIN_NUM, varg2, varg1); // Swapped arguments are required for consistency with VM builtins
IrOp res = build.inst(cmd, varg2, varg1); // Swapped arguments are required for consistency with VM builtins
for (int i = 3; i <= nparams; ++i)
{
IrOp arg = builtinLoadDouble(build, build.vmReg(vmRegOp(args) + (i - 2)));
res = build.inst(IrCmd::MIN_NUM, arg, res);
}
build.inst(IrCmd::STORE_DOUBLE, build.vmReg(ra), res);
if (ra != arg)
build.inst(IrCmd::STORE_TAG, build.vmReg(ra), build.constTag(LUA_TNUMBER));
return {BuiltinImplType::Full, 1};
}
static BuiltinImplResult translateBuiltinMathMax(IrBuilder& build, int nparams, int ra, int arg, IrOp args, int nresults, int pcpos)
{
if (nparams < 2 || nparams > kMinMaxUnrolledParams || nresults > 1)
return {BuiltinImplType::None, -1};
builtinCheckDouble(build, build.vmReg(arg), pcpos);
builtinCheckDouble(build, args, pcpos);
for (int i = 3; i <= nparams; ++i)
builtinCheckDouble(build, build.vmReg(vmRegOp(args) + (i - 2)), pcpos);
IrOp varg1 = builtinLoadDouble(build, build.vmReg(arg));
IrOp varg2 = builtinLoadDouble(build, args);
IrOp res = build.inst(IrCmd::MAX_NUM, varg2, varg1); // Swapped arguments are required for consistency with VM builtins
for (int i = 3; i <= nparams; ++i)
{
IrOp arg = builtinLoadDouble(build, build.vmReg(vmRegOp(args) + (i - 2)));
res = build.inst(IrCmd::MAX_NUM, arg, res);
res = build.inst(cmd, arg, res);
}
build.inst(IrCmd::STORE_DOUBLE, build.vmReg(ra), res);
@ -823,15 +753,15 @@ BuiltinImplResult translateBuiltin(IrBuilder& build, int bfid, int ra, int arg,
case LBF_ASSERT:
return translateBuiltinAssert(build, nparams, ra, arg, args, nresults, pcpos);
case LBF_MATH_DEG:
return translateBuiltinMathDeg(build, nparams, ra, arg, args, nresults, pcpos);
return translateBuiltinMathDegRad(build, IrCmd::DIV_NUM, nparams, ra, arg, args, nresults, pcpos);
case LBF_MATH_RAD:
return translateBuiltinMathRad(build, nparams, ra, arg, args, nresults, pcpos);
return translateBuiltinMathDegRad(build, IrCmd::MUL_NUM, nparams, ra, arg, args, nresults, pcpos);
case LBF_MATH_LOG:
return translateBuiltinMathLog(build, LuauBuiltinFunction(bfid), nparams, ra, arg, args, nresults, pcpos);
case LBF_MATH_MIN:
return translateBuiltinMathMin(build, nparams, ra, arg, args, nresults, pcpos);
return translateBuiltinMathMinMax(build, IrCmd::MIN_NUM, nparams, ra, arg, args, nresults, pcpos);
case LBF_MATH_MAX:
return translateBuiltinMathMax(build, nparams, ra, arg, args, nresults, pcpos);
return translateBuiltinMathMinMax(build, IrCmd::MAX_NUM, nparams, ra, arg, args, nresults, pcpos);
case LBF_MATH_CLAMP:
return translateBuiltinMathClamp(build, nparams, ra, arg, args, nresults, fallback, pcpos);
case LBF_MATH_FLOOR:
@ -861,9 +791,8 @@ BuiltinImplResult translateBuiltin(IrBuilder& build, int bfid, int ra, int arg,
case LBF_MATH_POW:
case LBF_MATH_FMOD:
case LBF_MATH_ATAN2:
return translateBuiltin2NumberToNumberLibm(build, LuauBuiltinFunction(bfid), nparams, ra, arg, args, nresults, pcpos);
case LBF_MATH_LDEXP:
return translateBuiltinMathLdexp(build, LuauBuiltinFunction(bfid), nparams, ra, arg, args, nresults, pcpos);
return translateBuiltin2NumberToNumberLibm(build, LuauBuiltinFunction(bfid), nparams, ra, arg, args, nresults, pcpos);
case LBF_MATH_FREXP:
case LBF_MATH_MODF:
return translateBuiltinNumberTo2Number(build, LuauBuiltinFunction(bfid), nparams, ra, arg, args, nresults, pcpos);
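
The deg/rad and min/max merges follow the same pattern: one translator parameterized by the IR opcode it emits. A hypothetical scalar model of the resulting behavior (not the IR itself) makes the mapping explicit:

enum class Cmd { DIV_NUM, MUL_NUM, MIN_NUM, MAX_NUM }; // stand-in for IrCmd

// math.deg(x) emits DIV_NUM(x, pi/180) and math.rad(x) emits MUL_NUM(x, pi/180);
// math.min/math.max fold their arguments with MIN_NUM/MAX_NUM, keeping the
// swapped-argument order noted in the diff for consistency with VM builtins.
double apply(Cmd cmd, double a, double b)
{
    switch (cmd)
    {
    case Cmd::DIV_NUM: return a / b;
    case Cmd::MUL_NUM: return a * b;
    case Cmd::MIN_NUM: return a < b ? a : b;
    case Cmd::MAX_NUM: return a > b ? a : b;
    }
    return 0;
}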

View File

@ -12,8 +12,9 @@
#include "lstate.h"
#include "ltm.h"
LUAU_FASTFLAGVARIABLE(LuauImproveForN, false)
LUAU_FASTFLAGVARIABLE(LuauImproveForN2, false)
LUAU_FASTFLAG(LuauReduceStackSpills)
LUAU_FASTFLAGVARIABLE(LuauInlineArrConstOffset, false)
namespace Luau
{
@ -631,6 +632,26 @@ static IrOp getLoopStepK(IrBuilder& build, int ra)
return build.undef();
}
void beforeInstForNPrep(IrBuilder& build, const Instruction* pc)
{
if (FFlag::LuauImproveForN2)
{
int ra = LUAU_INSN_A(*pc);
IrOp stepK = getLoopStepK(build, ra);
build.loopStepStack.push_back(stepK);
}
}
void afterInstForNLoop(IrBuilder& build, const Instruction* pc)
{
if (FFlag::LuauImproveForN2)
{
LUAU_ASSERT(!build.loopStepStack.empty());
build.loopStepStack.pop_back();
}
}
void translateInstForNPrep(IrBuilder& build, const Instruction* pc, int pcpos)
{
int ra = LUAU_INSN_A(*pc);
@ -638,10 +659,10 @@ void translateInstForNPrep(IrBuilder& build, const Instruction* pc, int pcpos)
IrOp loopStart = build.blockAtInst(pcpos + getOpLength(LuauOpcode(LUAU_INSN_OP(*pc))));
IrOp loopExit = build.blockAtInst(getJumpTarget(*pc, pcpos));
if (FFlag::LuauImproveForN)
if (FFlag::LuauImproveForN2)
{
IrOp stepK = getLoopStepK(build, ra);
build.loopStepStack.push_back(stepK);
LUAU_ASSERT(!build.loopStepStack.empty());
IrOp stepK = build.loopStepStack.back();
// When loop parameters are not numbers, VM tries to perform type coercion from string and raises an exception if that fails
// Performing that fallback in native code increases code size and complicates CFG, obscuring the values when they are constant
@ -733,7 +754,7 @@ void translateInstForNPrep(IrBuilder& build, const Instruction* pc, int pcpos)
// VM places interrupt in FORNLOOP, but that creates a likely spill point for short loops that use loop index as INTERRUPT always spills
// We place the interrupt at the beginning of the loop body instead; VM uses FORNLOOP because it doesn't want to waste an extra instruction.
// Because loop block may not have been started yet (as it's started when lowering the first instruction!), we need to defer INTERRUPT placement.
if (FFlag::LuauImproveForN)
if (FFlag::LuauImproveForN2)
build.interruptRequested = true;
}
@ -744,11 +765,10 @@ void translateInstForNLoop(IrBuilder& build, const Instruction* pc, int pcpos)
IrOp loopRepeat = build.blockAtInst(getJumpTarget(*pc, pcpos));
IrOp loopExit = build.blockAtInst(pcpos + getOpLength(LuauOpcode(LUAU_INSN_OP(*pc))));
if (FFlag::LuauImproveForN)
if (FFlag::LuauImproveForN2)
{
LUAU_ASSERT(!build.loopStepStack.empty());
IrOp stepK = build.loopStepStack.back();
build.loopStepStack.pop_back();
IrOp zero = build.constDouble(0.0);
IrOp limit = build.inst(IrCmd::LOAD_DOUBLE, build.vmReg(ra + 0));
@ -950,10 +970,20 @@ void translateInstGetTableN(IrBuilder& build, const Instruction* pc, int pcpos)
build.inst(IrCmd::CHECK_ARRAY_SIZE, vb, build.constInt(c), fallback);
build.inst(IrCmd::CHECK_NO_METATABLE, vb, fallback);
IrOp arrEl = build.inst(IrCmd::GET_ARR_ADDR, vb, build.constInt(c));
if (FFlag::LuauInlineArrConstOffset)
{
IrOp arrEl = build.inst(IrCmd::GET_ARR_ADDR, vb, build.constInt(0));
IrOp arrElTval = build.inst(IrCmd::LOAD_TVALUE, arrEl);
build.inst(IrCmd::STORE_TVALUE, build.vmReg(ra), arrElTval);
IrOp arrElTval = build.inst(IrCmd::LOAD_TVALUE, arrEl, build.constInt(c * sizeof(TValue)));
build.inst(IrCmd::STORE_TVALUE, build.vmReg(ra), arrElTval);
}
else
{
IrOp arrEl = build.inst(IrCmd::GET_ARR_ADDR, vb, build.constInt(c));
IrOp arrElTval = build.inst(IrCmd::LOAD_TVALUE, arrEl);
build.inst(IrCmd::STORE_TVALUE, build.vmReg(ra), arrElTval);
}
IrOp next = build.blockAtInst(pcpos + 1);
FallbackStreamScope scope(build, fallback, next);
@ -980,10 +1010,20 @@ void translateInstSetTableN(IrBuilder& build, const Instruction* pc, int pcpos)
build.inst(IrCmd::CHECK_NO_METATABLE, vb, fallback);
build.inst(IrCmd::CHECK_READONLY, vb, fallback);
IrOp arrEl = build.inst(IrCmd::GET_ARR_ADDR, vb, build.constInt(c));
if (FFlag::LuauInlineArrConstOffset)
{
IrOp arrEl = build.inst(IrCmd::GET_ARR_ADDR, vb, build.constInt(0));
IrOp tva = build.inst(IrCmd::LOAD_TVALUE, build.vmReg(ra));
build.inst(IrCmd::STORE_TVALUE, arrEl, tva);
IrOp tva = build.inst(IrCmd::LOAD_TVALUE, build.vmReg(ra));
build.inst(IrCmd::STORE_TVALUE, arrEl, tva, build.constInt(c * sizeof(TValue)));
}
else
{
IrOp arrEl = build.inst(IrCmd::GET_ARR_ADDR, vb, build.constInt(c));
IrOp tva = build.inst(IrCmd::LOAD_TVALUE, build.vmReg(ra));
build.inst(IrCmd::STORE_TVALUE, arrEl, tva);
}
build.inst(IrCmd::BARRIER_TABLE_FORWARD, vb, build.vmReg(ra), build.undef());
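
A pointer-arithmetic view of the two lowering shapes contrasted above (illustrative, assuming a 16-byte TValue): with the flag enabled the constant index no longer feeds GET_ARR_ADDR and instead becomes a byte displacement on the load/store, which lowers naturally to an addressing-mode offset.

struct TValue { unsigned char raw[16]; }; // illustrative slot size

const TValue* oldShape(const TValue* array, int c)
{
    return array + c; // GET_ARR_ADDR vb, c; then LOAD_TVALUE with no offset
}

const TValue* newShape(const TValue* array, int c)
{
    const unsigned char* base = reinterpret_cast<const unsigned char*>(array); // GET_ARR_ADDR vb, 0
    return reinterpret_cast<const TValue*>(base + c * sizeof(TValue));         // LOAD_TVALUE base, c * sizeof(TValue)
}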

View File

@ -65,5 +65,8 @@ void translateInstAndX(IrBuilder& build, const Instruction* pc, int pcpos, IrOp
void translateInstOrX(IrBuilder& build, const Instruction* pc, int pcpos, IrOp c);
void translateInstNewClosure(IrBuilder& build, const Instruction* pc, int pcpos);
void beforeInstForNPrep(IrBuilder& build, const Instruction* pc);
void afterInstForNLoop(IrBuilder& build, const Instruction* pc);
} // namespace CodeGen
} // namespace Luau

View File

@ -427,7 +427,7 @@ enum LuauBytecodeTag
// Bytecode version; runtime supports [MIN, MAX], compiler emits TARGET by default but may emit a higher version when flags are enabled
LBC_VERSION_MIN = 3,
LBC_VERSION_MAX = 4,
LBC_VERSION_TARGET = 3,
LBC_VERSION_TARGET = 4,
// Type encoding version
LBC_TYPE_VERSION = 1,
// Types of constant table entries
@ -575,4 +575,6 @@ enum LuauProtoFlag
{
// used to tag main proto for modules with --!native
LPF_NATIVE_MODULE = 1 << 0,
// used to tag individual protos as not profitable to compile natively
LPF_NATIVE_COLD = 1 << 1,
};

View File

@ -7,8 +7,6 @@
#include <algorithm>
#include <string.h>
LUAU_FASTFLAGVARIABLE(BytecodeVersion4, false)
LUAU_FASTFLAG(LuauFloorDivision)
namespace Luau
@ -586,12 +584,9 @@ void BytecodeBuilder::finalize()
bytecode = char(version);
if (FFlag::BytecodeVersion4)
{
uint8_t typesversion = getTypeEncodingVersion();
LUAU_ASSERT(typesversion == 1);
writeByte(bytecode, typesversion);
}
uint8_t typesversion = getTypeEncodingVersion();
LUAU_ASSERT(typesversion == 1);
writeByte(bytecode, typesversion);
writeStringTable(bytecode);
@ -615,13 +610,10 @@ void BytecodeBuilder::writeFunction(std::string& ss, uint32_t id, uint8_t flags)
writeByte(ss, func.numupvalues);
writeByte(ss, func.isvararg);
if (FFlag::BytecodeVersion4)
{
writeByte(ss, flags);
writeByte(ss, flags);
writeVarInt(ss, uint32_t(func.typeinfo.size()));
ss.append(func.typeinfo);
}
writeVarInt(ss, uint32_t(func.typeinfo.size()));
ss.append(func.typeinfo);
// instructions
writeVarInt(ss, uint32_t(insns.size()));
@ -1074,10 +1066,6 @@ std::string BytecodeBuilder::getError(const std::string& message)
uint8_t BytecodeBuilder::getVersion()
{
// This function usually returns LBC_VERSION_TARGET but may sometimes return a higher number (within LBC_VERSION_MIN/MAX) under fast flags
if (FFlag::BytecodeVersion4)
return 4;
return LBC_VERSION_TARGET;
}
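
With version 4 now the unconditional target, every function header carries a flags byte followed by the type info blob; a minimal sketch of that slice of writeFunction, using hypothetical stand-ins for the builder's write helpers:

#include <cstdint>
#include <string>

static void writeByte(std::string& ss, uint8_t b) { ss.push_back(char(b)); }
static void writeVarInt(std::string& ss, uint32_t v) // LEB128-style, illustrative
{
    do { uint8_t b = v & 127; v >>= 7; writeByte(ss, uint8_t(b | (v ? 0x80 : 0))); } while (v != 0);
}

// The unconditional writes left in writeFunction above: proto flags, then the
// (possibly empty) type info section.
void writeFunctionFlagsAndTypes(std::string& ss, uint8_t flags, const std::string& typeinfo)
{
    writeByte(ss, flags);
    writeVarInt(ss, uint32_t(typeinfo.size()));
    ss.append(typeinfo);
}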

View File

@ -26,7 +26,13 @@ LUAU_FASTINTVARIABLE(LuauCompileInlineThreshold, 25)
LUAU_FASTINTVARIABLE(LuauCompileInlineThresholdMaxBoost, 300)
LUAU_FASTINTVARIABLE(LuauCompileInlineDepth, 5)
LUAU_FASTFLAGVARIABLE(LuauCompileFenvNoBuiltinFold, false)
LUAU_FASTFLAGVARIABLE(LuauCompileTopCold, false)
LUAU_FASTFLAG(LuauFloorDivision)
LUAU_FASTFLAGVARIABLE(LuauCompileFixContinueValidation, false)
LUAU_FASTFLAGVARIABLE(LuauCompileContinueCloseUpvals, false)
namespace Luau
{
@ -259,6 +265,10 @@ struct Compiler
if (bytecode.getInstructionCount() > kMaxInstructionCount)
CompileError::raise(func->location, "Exceeded function instruction limit; split the function into parts to compile");
// since top-level code only executes once, it can be marked as cold if it has no loops (top-level code with loops might be profitable to compile natively)
if (FFlag::LuauCompileTopCold && func->functionDepth == 0 && !hasLoops)
protoflags |= LPF_NATIVE_COLD;
bytecode.endFunction(uint8_t(stackSize), uint8_t(upvals.size()), protoflags);
Function& f = functions[func];
@ -283,6 +293,7 @@ struct Compiler
upvals.clear(); // note: instead of std::move above, we copy & clear to preserve capacity for future pushes
stackSize = 0;
hasLoops = false;
return fid;
}
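
The cold-marking heuristic above reduces to a pure function of the per-function state that compileFunction resets; a small sketch (illustrative, with the fast-flag gate omitted and the flag value taken from the Bytecode.h hunk earlier):

#include <cstdint>

constexpr uint8_t LPF_NATIVE_COLD = 1 << 1;

// Top-level code runs once, so unless it contains a loop it is tagged cold,
// presumably allowing native codegen to skip such protos unless cold
// compilation is requested (the conformance tests below pass CodeGen_ColdFunctions).
uint8_t applyColdHeuristic(uint8_t protoflags, int functionDepth, bool hasLoops)
{
    if (functionDepth == 0 && !hasLoops)
        protoflags |= LPF_NATIVE_COLD;
    return protoflags;
}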
@ -646,11 +657,25 @@ struct Compiler
// apply all evaluated arguments to the compiler state
// note: locals use current startpc for debug info, although some of them have been computed earlier; this is similar to compileStatLocal
for (InlineArg& arg : args)
{
if (arg.value.type == Constant::Type_Unknown)
{
pushLocal(arg.local, arg.reg);
}
else
{
locstants[arg.local] = arg.value;
if (FFlag::LuauCompileFixContinueValidation)
{
// Mark that optimization skipped allocation of this local
Local& l = locals[arg.local];
LUAU_ASSERT(!l.skipped);
l.skipped = true;
}
}
}
// the inline frame will be used to compile return statements as well as to reject recursive inlining attempts
inlineFrames.push_back({func, oldLocals, target, targetCount});
@ -693,8 +718,27 @@ struct Compiler
// clean up constant state for future inlining attempts
for (size_t i = 0; i < func->args.size; ++i)
if (Constant* var = locstants.find(func->args.data[i]))
var->type = Constant::Type_Unknown;
{
AstLocal* local = func->args.data[i];
if (FFlag::LuauCompileFixContinueValidation)
{
if (Constant* var = locstants.find(local); var && var->type != Constant::Type_Unknown)
{
var->type = Constant::Type_Unknown;
// Restore local allocation skip flag as well
Local& l = locals[local];
LUAU_ASSERT(l.skipped);
l.skipped = false;
}
}
else
{
if (Constant* var = locstants.find(local))
var->type = Constant::Type_Unknown;
}
}
foldConstants(constants, variables, locstants, builtinsFold, builtinsFoldMathK, func->body);
}
@ -2469,7 +2513,7 @@ struct Compiler
AstStat* continueStatement = extractStatContinue(stat->thenbody);
// Optimization: body is a "continue" statement with no "else" => we can directly continue in "then" case
if (!stat->elsebody && continueStatement != nullptr && !areLocalsCaptured(loops.back().localOffset))
if (!stat->elsebody && continueStatement != nullptr && !areLocalsCaptured(loops.back().localOffsetContinue))
{
if (loops.back().untilCondition)
validateContinueUntil(continueStatement, loops.back().untilCondition);
@ -2533,7 +2577,8 @@ struct Compiler
size_t oldJumps = loopJumps.size();
size_t oldLocals = localStack.size();
loops.push_back({oldLocals, nullptr});
loops.push_back({oldLocals, oldLocals, nullptr});
hasLoops = true;
size_t loopLabel = bytecode.emitLabel();
@ -2568,7 +2613,8 @@ struct Compiler
size_t oldJumps = loopJumps.size();
size_t oldLocals = localStack.size();
loops.push_back({oldLocals, stat->condition});
loops.push_back({oldLocals, oldLocals, stat->condition});
hasLoops = true;
size_t loopLabel = bytecode.emitLabel();
@ -2579,8 +2625,17 @@ struct Compiler
RegScope rs(this);
for (size_t i = 0; i < body->body.size; ++i)
{
compileStat(body->body.data[i]);
// continue statement inside the repeat..until loop should not close upvalues defined directly in the loop body
// (but it must still close upvalues defined in more nested blocks)
// this is because the upvalues defined inside the loop body may be captured by a closure defined in the until
// expression that continue will jump to.
if (FFlag::LuauCompileContinueCloseUpvals)
loops.back().localOffsetContinue = localStack.size();
}
size_t contLabel = bytecode.emitLabel();
size_t endLabel;
@ -2707,7 +2762,19 @@ struct Compiler
{
// Optimization: we don't need to allocate and assign const locals, since their uses will be constant-folded
if (options.optimizationLevel >= 1 && options.debugLevel <= 1 && areLocalsRedundant(stat))
{
if (FFlag::LuauCompileFixContinueValidation)
{
// Mark that optimization skipped allocation of this local
for (AstLocal* local : stat->vars)
{
Local& l = locals[local];
l.skipped = true;
}
}
return;
}
// Optimization: for 1-1 local assignments, we can reuse the register *if* neither local is mutated
if (options.optimizationLevel >= 1 && stat->vars.size == 1 && stat->values.size == 1)
@ -2796,7 +2863,7 @@ struct Compiler
size_t oldLocals = localStack.size();
size_t oldJumps = loopJumps.size();
loops.push_back({oldLocals, nullptr});
loops.push_back({oldLocals, oldLocals, nullptr});
for (int iv = 0; iv < tripCount; ++iv)
{
@ -2847,7 +2914,8 @@ struct Compiler
size_t oldLocals = localStack.size();
size_t oldJumps = loopJumps.size();
loops.push_back({oldLocals, nullptr});
loops.push_back({oldLocals, oldLocals, nullptr});
hasLoops = true;
// register layout: limit, step, index
uint8_t regs = allocReg(stat, 3);
@ -2911,7 +2979,8 @@ struct Compiler
size_t oldLocals = localStack.size();
size_t oldJumps = loopJumps.size();
loops.push_back({oldLocals, nullptr});
loops.push_back({oldLocals, oldLocals, nullptr});
hasLoops = true;
// register layout: generator, state, index, variables...
uint8_t regs = allocReg(stat, 3);
@ -3327,7 +3396,7 @@ struct Compiler
// before continuing, we need to close all local variables that were captured in closures since loop start
// normally they are closed by the enclosing blocks, including the loop block, but we're skipping that here
closeLocals(loops.back().localOffset);
closeLocals(loops.back().localOffsetContinue);
size_t label = bytecode.emitLabel();
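
A compact way to read the localOffset/localOffsetContinue split introduced throughout this file: break and normal loop exit close locals down to localOffset, while continue closes only down to localOffsetContinue, which repeat..until advances past each loop-body local so closures in the until expression can still capture them. A minimal model (illustrative, not the compiler's actual data structures):

#include <cstddef>

struct Loop
{
    size_t localOffset;         // watermark closed on break / normal loop exit
    size_t localOffsetContinue; // watermark closed on continue
};

// For while/for loops both watermarks start equal; for repeat..until the
// continue watermark tracks the local stack top as the body is compiled,
// matching the loop-body hunk above.
void onBodyStatementCompiled(Loop& loop, size_t localStackSize, bool isRepeatUntil)
{
    if (isRepeatUntil)
        loop.localOffsetContinue = localStackSize;
}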
@ -3640,6 +3709,9 @@ struct Compiler
{
Local& l = self->locals[local];
if (FFlag::LuauCompileFixContinueValidation && l.skipped)
return;
if (!l.allocated && !undef)
undef = local;
}
@ -3765,6 +3837,7 @@ struct Compiler
uint8_t reg = 0;
bool allocated = false;
bool captured = false;
bool skipped = false;
uint32_t debugpc = 0;
};
@ -3783,6 +3856,7 @@ struct Compiler
struct Loop
{
size_t localOffset;
size_t localOffsetContinue;
AstExpr* untilCondition;
};
@ -3830,8 +3904,10 @@ struct Compiler
const DenseHashMap<AstExprCall*, int>* builtinsFold = nullptr;
bool builtinsFoldMathK = false;
// compileFunction state, gets reset for every function
unsigned int regTop = 0;
unsigned int stackSize = 0;
bool hasLoops = false;
bool getfenvUsed = false;
bool setfenvUsed = false;
@ -3877,8 +3953,15 @@ void compileOrThrow(BytecodeBuilder& bytecode, const ParseResult& parseResult, c
// this pass analyzes mutability of locals/globals and associates locals with their initial values
trackValues(compiler.globals, compiler.variables, root);
// this visitor tracks calls to getfenv/setfenv and disables some optimizations when they are found
if (options.optimizationLevel >= 1 && (names.get("getfenv").value || names.get("setfenv").value))
{
Compiler::FenvVisitor fenvVisitor(compiler.getfenvUsed, compiler.setfenvUsed);
root->visit(&fenvVisitor);
}
// builtin folding is enabled on optimization level 2 since we can't deoptimize folding at runtime
if (options.optimizationLevel >= 2)
if (options.optimizationLevel >= 2 && (!FFlag::LuauCompileFenvNoBuiltinFold || (!compiler.getfenvUsed && !compiler.setfenvUsed)))
{
compiler.builtinsFold = &compiler.builtins;
@ -3898,13 +3981,6 @@ void compileOrThrow(BytecodeBuilder& bytecode, const ParseResult& parseResult, c
predictTableShapes(compiler.tableShapes, root);
}
// this visitor tracks calls to getfenv/setfenv and disables some optimizations when they are found
if (options.optimizationLevel >= 1 && (names.get("getfenv").value || names.get("setfenv").value))
{
Compiler::FenvVisitor fenvVisitor(compiler.getfenvUsed, compiler.setfenvUsed);
root->visit(&fenvVisitor);
}
// gathers all functions with the invariant that all function references are to functions earlier in the list
// for example, function foo() return function() end end will result in two vector entries, [0] = anonymous and [1] = foo
std::vector<AstExprFunction*> functions;


View File

@ -236,7 +236,7 @@ target_sources(Luau.Analysis PRIVATE
Analysis/src/Linter.cpp
Analysis/src/LValue.cpp
Analysis/src/Module.cpp
Analysis/src/NonStrictTypeChecker.cpp
Analysis/src/NonStrictTypeChecker.cpp
Analysis/src/Normalize.cpp
Analysis/src/Quantify.cpp
Analysis/src/Refinement.cpp
@ -400,7 +400,7 @@ if(TARGET Luau.UnitTest)
tests/LValue.test.cpp
tests/Module.test.cpp
tests/NonstrictMode.test.cpp
tests/NonStrictTypeChecker.test.cpp
tests/NonStrictTypeChecker.test.cpp
tests/Normalize.test.cpp
tests/NotNull.test.cpp
tests/Parser.test.cpp

View File

@ -2,11 +2,15 @@
// This code is based on Lua 5.x implementation licensed under MIT License; see lua_LICENSE.txt for details
#include "lualib.h"
#include "lcommon.h"
#include <string.h>
#include <time.h>
#define LUA_STRFTIMEOPTIONS "aAbBcdHIjmMpSUwWxXyYzZ%"
LUAU_FASTFLAGVARIABLE(LuauOsTimegm, false)
#if defined(_WIN32)
static tm* gmtime_r(const time_t* timep, tm* result)
{
@ -20,10 +24,49 @@ static tm* localtime_r(const time_t* timep, tm* result)
static time_t timegm(struct tm* timep)
{
LUAU_ASSERT(!FFlag::LuauOsTimegm);
return _mkgmtime(timep);
}
#endif
static time_t os_timegm(struct tm* timep)
{
LUAU_ASSERT(FFlag::LuauOsTimegm);
// Julian day number calculation
int day = timep->tm_mday;
int month = timep->tm_mon + 1;
int year = timep->tm_year + 1900;
// year adjustment, pretend that it starts in March
int a = timep->tm_mon % 12 < 2 ? 1 : 0;
// also adjust for out-of-range month numbers in input
a -= timep->tm_mon / 12;
int y = year + 4800 - a;
int m = month + (12 * a) - 3;
int julianday = day + ((153 * m + 2) / 5) + (365 * y) + (y / 4) - (y / 100) + (y / 400) - 32045;
const int utcstartasjulianday = 2440588; // Jan 1st 1970 offset in Julian calendar
const int64_t utcstartasjuliansecond = utcstartasjulianday * 86400ll; // same in seconds
// fail the dates before UTC start
if (julianday < utcstartasjulianday)
return time_t(-1);
int64_t daysecond = timep->tm_hour * 3600ll + timep->tm_min * 60ll + timep->tm_sec;
int64_t julianseconds = int64_t(julianday) * 86400ull + daysecond;
if (julianseconds < utcstartasjuliansecond)
return time_t(-1);
int64_t utc = julianseconds - utcstartasjuliansecond;
return time_t(utc);
}
static int os_clock(lua_State* L)
{
lua_pushnumber(L, lua_clock());
@ -163,7 +206,10 @@ static int os_time(lua_State* L)
ts.tm_isdst = getboolfield(L, "isdst");
// Note: upstream Lua uses mktime() here which assumes input is local time, but we prefer UTC for consistency
t = timegm(&ts);
if (FFlag::LuauOsTimegm)
t = os_timegm(&ts);
else
t = timegm(&ts);
}
if (t == (time_t)(-1))
lua_pushnil(L);
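
The Julian day arithmetic in os_timegm can be spot-checked in isolation; the following standalone constexpr copy of the formula (illustrative only) confirms the utcstartasjulianday constant and the day count exercised by the conformance tests further down:

// month is 1..12 here, matching tm_mon + 1 in the function above
constexpr int julianDay(int year, int month, int day)
{
    int a = (month - 1) % 12 < 2 ? 1 : 0; // year adjustment, pretend it starts in March
    a -= (month - 1) / 12;                // also adjust for out-of-range months
    int y = year + 4800 - a;
    int m = month + 12 * a - 3;
    return day + (153 * m + 2) / 5 + 365 * y + y / 4 - y / 100 + y / 400 - 32045;
}

static_assert(julianDay(1970, 1, 1) == 2440588, "Jan 1st 1970 offset used above");
static_assert(julianDay(2000, 6, 10) - 2440588 == 11118, "days from epoch to 2000-06-10");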

View File

@ -2061,9 +2061,8 @@ function re.type(...)
return proxy[...] and proxy[...].name;
end;
for k, f in pairs(re_m) do
re[k] = f;
end;
-- TODO: table.foreach is currently used because top-level loops needlessly increase native code size for this module
table.foreach(re_m, function(k, f) re[k] = f end)
re_m = { __index = re_m };

View File

@ -264,6 +264,14 @@ TEST_CASE_FIXTURE(AssemblyBuilderX64Fixture, "FormsOfSetcc")
SINGLE_COMPARE(setcc(ConditionX64::BelowEqual, byte[rcx]), 0x0f, 0x96, 0x01);
}
TEST_CASE_FIXTURE(AssemblyBuilderX64Fixture, "FormsOfCmov")
{
SINGLE_COMPARE(cmov(ConditionX64::LessEqual, ebx, eax), 0x0f, 0x4e, 0xd8);
SINGLE_COMPARE(cmov(ConditionX64::NotZero, rbx, qword[rax]), 0x48, 0x0f, 0x45, 0x18);
SINGLE_COMPARE(cmov(ConditionX64::Zero, rbx, qword[rax + rcx]), 0x48, 0x0f, 0x44, 0x1c, 0x08);
SINGLE_COMPARE(cmov(ConditionX64::BelowEqual, r14d, r15d), 0x45, 0x0f, 0x46, 0xf7);
}
TEST_CASE_FIXTURE(AssemblyBuilderX64Fixture, "FormsOfAbsoluteJumps")
{
SINGLE_COMPARE(jmp(rax), 0xff, 0xe0);
@ -590,6 +598,7 @@ TEST_CASE("LogTest")
build.vroundsd(xmm1, xmm2, xmm3, RoundingModeX64::RoundToNearestEven);
build.add(rdx, qword[rcx - 12]);
build.pop(r12);
build.cmov(ConditionX64::AboveEqual, rax, rbx);
build.ret();
build.int3();
@ -634,6 +643,7 @@ TEST_CASE("LogTest")
vroundsd xmm1,xmm2,xmm3,8
add rdx,qword ptr [rcx-0Ch]
pop r12
cmovae rax,rbx
ret
int3
nop
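
The expected byte sequences follow the standard CMOVcc encoding (0F 40+cc /r, plus an optional REX prefix); a small illustrative encoder for the register-to-register forms reproduces two of the expectations above:

#include <cassert>
#include <cstdint>
#include <vector>

std::vector<uint8_t> encodeCmovRegReg(uint8_t cc, int dst, int src, bool is64)
{
    std::vector<uint8_t> out;
    uint8_t rex = uint8_t(0x40 | (is64 ? 0x08 : 0) | (dst >= 8 ? 0x04 : 0) | (src >= 8 ? 0x01 : 0));
    if (rex != 0x40)
        out.push_back(rex); // REX.W for 64-bit operands, REX.R/REX.B for r8-r15
    out.push_back(0x0f);
    out.push_back(uint8_t(0x40 + cc)); // cc: 0x4 Zero, 0x5 NotZero, 0x6 BelowEqual, 0xE LessEqual
    out.push_back(uint8_t(0xc0 | ((dst & 7) << 3) | (src & 7))); // ModRM with mod=11
    return out;
}

void selfCheck()
{
    assert((encodeCmovRegReg(0xE, 3 /*ebx*/, 0 /*eax*/, false) == std::vector<uint8_t>{0x0f, 0x4e, 0xd8}));
    assert((encodeCmovRegReg(0x6, 14 /*r14d*/, 15 /*r15d*/, false) == std::vector<uint8_t>{0x45, 0x0f, 0x46, 0xf7}));
}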

View File

@ -1772,6 +1772,8 @@ RETURN R0 0
TEST_CASE("LoopContinueUntil")
{
ScopedFastFlag sff("LuauCompileContinueCloseUpvals", true);
// it's valid to use locals defined inside the loop in until expression if they're defined before continue
CHECK_EQ("\n" + compileFunction0("repeat local r = math.random() if r > 0.5 then continue end r = r + 0.3 until r < 0.5"), R"(
L0: GETIMPORT R0 2 [math.random]
@ -1833,17 +1835,15 @@ L2: RETURN R0 0
L0: GETIMPORT R0 2 [math.random]
CALL R0 0 1
LOADK R1 K3 [0.5]
JUMPIFNOTLT R1 R0 L1
CLOSEUPVALS R0
JUMP L2
L1: ADDK R0 R0 K4 [0.29999999999999999]
L2: NEWCLOSURE R1 P0
JUMPIFLT R1 R0 L1
ADDK R0 R0 K4 [0.29999999999999999]
L1: NEWCLOSURE R1 P0
CAPTURE REF R0
CALL R1 0 1
JUMPIF R1 L3
JUMPIF R1 L2
CLOSEUPVALS R0
JUMPBACK L0
L3: CLOSEUPVALS R0
L2: CLOSEUPVALS R0
RETURN R0 0
)");
@ -1895,42 +1895,188 @@ L2: RETURN R0 0
L0: GETIMPORT R0 2 [math.random]
CALL R0 0 1
LOADK R1 K3 [0.5]
JUMPIFNOTLT R1 R0 L1
CLOSEUPVALS R0
JUMP L2
L1: ADDK R0 R0 K4 [0.29999999999999999]
L2: NEWCLOSURE R1 P0
JUMPIFLT R1 R0 L1
ADDK R0 R0 K4 [0.29999999999999999]
L1: NEWCLOSURE R1 P0
CAPTURE UPVAL U0
CAPTURE REF R0
CALL R1 0 1
JUMPIF R1 L3
JUMPIF R1 L2
CLOSEUPVALS R0
JUMPBACK L0
L3: CLOSEUPVALS R0
L2: CLOSEUPVALS R0
RETURN R0 0
)");
}
TEST_CASE("LoopContinueUntilOops")
TEST_CASE("LoopContinueIgnoresImplicitConstant")
{
ScopedFastFlag luauCompileFixContinueValidation{"LuauCompileFixContinueValidation", true};
// this used to crash the compiler :(
try
{
Luau::BytecodeBuilder bcb;
Luau::compileOrThrow(bcb, R"(
CHECK_EQ("\n" + compileFunction0(R"(
local _
repeat
continue
until not _
)"),
R"(
RETURN R0 0
RETURN R0 0
)");
}
TEST_CASE("LoopContinueIgnoresExplicitConstant")
{
ScopedFastFlag luauCompileFixContinueValidation{"LuauCompileFixContinueValidation", true};
// Constants do not allocate locals and 'continue' validation should skip them if their lifetime already started
CHECK_EQ("\n" + compileFunction0(R"(
local c = true
repeat
continue
until c
)"),
R"(
RETURN R0 0
RETURN R0 0
)");
}
TEST_CASE("LoopContinueRespectsExplicitConstant")
{
ScopedFastFlag luauCompileFixContinueValidation{"LuauCompileFixContinueValidation", true};
// If local lifetime hasn't started, even if it's a constant that will not receive an allocation, it cannot be jumped over
try
{
Luau::BytecodeBuilder bcb;
Luau::compileOrThrow(bcb, R"(
repeat
do continue end
local c = true
until c
)");
CHECK(!"Expected CompileError");
}
catch (Luau::CompileError& e)
{
CHECK_EQ(e.getLocation().begin.line + 1, 6);
CHECK_EQ(
std::string(e.what()), "Local _ used in the repeat..until condition is undefined because continue statement on line 4 jumps over it");
std::string(e.what()), "Local c used in the repeat..until condition is undefined because continue statement on line 3 jumps over it");
}
}
TEST_CASE("LoopContinueIgnoresImplicitConstantAfterInline")
{
ScopedFastFlag luauCompileFixContinueValidation{"LuauCompileFixContinueValidation", true};
// Inlining might also replace some locals with constants instead of allocating them
CHECK_EQ("\n" + compileFunction(R"(
local function inline(f)
repeat
continue
until f
end
local function test(...)
inline(true)
end
test()
)",
1, 2),
R"(
RETURN R0 0
RETURN R0 0
)");
}
TEST_CASE("LoopContinueUntilCapture")
{
ScopedFastFlag sff("LuauCompileContinueCloseUpvals", true);
// validate continue upvalue closing behavior: continue must close locals defined in the nested scopes
// but can't close locals defined in the loop scope - these are visible to the condition and will be closed
// when evaluating the condition instead.
CHECK_EQ("\n" + compileFunction(R"(
local a a = 0
repeat
local b b = 0
if a then
local c
print(function() c = 0 end)
if a then
continue -- must close c but not a/b
end
-- must close c
end
-- must close b but not a
until function() a = 0 b = 0 end
-- must close b on loop exit
-- must close a
)",
2),
R"(
LOADNIL R0
LOADN R0 0
L0: LOADNIL R1
LOADN R1 0
JUMPIFNOT R0 L2
LOADNIL R2
GETIMPORT R3 1 [print]
NEWCLOSURE R4 P0
CAPTURE REF R2
CALL R3 1 0
JUMPIFNOT R0 L1
CLOSEUPVALS R2
JUMP L2
L1: CLOSEUPVALS R2
L2: NEWCLOSURE R2 P1
CAPTURE REF R0
CAPTURE REF R1
JUMPIF R2 L3
CLOSEUPVALS R1
JUMPBACK L0
L3: CLOSEUPVALS R1
CLOSEUPVALS R0
RETURN R0 0
)");
// a simpler version of the above test doesn't need to close anything when evaluating continue
CHECK_EQ("\n" + compileFunction(R"(
local a a = 0
repeat
local b b = 0
if a then
continue -- must not close a/b
end
-- must close b but not a
until function() a = 0 b = 0 end
-- must close b on loop exit
-- must close a
)",
1),
R"(
LOADNIL R0
LOADN R0 0
L0: LOADNIL R1
LOADN R1 0
JUMPIF R0 L1
L1: NEWCLOSURE R2 P0
CAPTURE REF R0
CAPTURE REF R1
JUMPIF R2 L2
CLOSEUPVALS R1
JUMPBACK L0
L2: CLOSEUPVALS R1
CLOSEUPVALS R0
RETURN R0 0
)");
}
TEST_CASE("AndOrOptimizations")
{
// the OR/ORK optimization triggers for cutoff since lhs is simple
@ -6368,7 +6514,7 @@ return
math.log10(100),
math.log(1),
math.log(4, 2),
math.log(27, 3),
math.log(64, 4),
math.max(1, 2, 3),
math.min(1, 2, 3),
math.pow(3, 3),
@ -7408,4 +7554,27 @@ RETURN R0 1
)");
}
TEST_CASE("NoBuiltinFoldFenv")
{
ScopedFastFlag sff("LuauCompileFenvNoBuiltinFold", true);
// builtin folding is disabled when getfenv/setfenv is used in the module
CHECK_EQ("\n" + compileFunction(R"(
getfenv()
function test()
return math.pi, math.sin(0)
end
)",
0, 2),
R"(
GETIMPORT R0 2 [math.pi]
LOADN R2 0
FASTCALL1 24 R2 L0
GETIMPORT R1 4 [math.sin]
CALL R1 1 1
L0: RETURN R0 2
)");
}
TEST_SUITE_END();

View File

@ -10,6 +10,7 @@
#include "Luau/TypeInfer.h"
#include "Luau/BytecodeBuilder.h"
#include "Luau/Frontend.h"
#include "Luau/CodeGen.h"
#include "doctest.h"
#include "ScopedFlags.h"
@ -224,7 +225,7 @@ static StateRef runConformance(const char* name, void (*setup)(lua_State* L) = n
free(bytecode);
if (result == 0 && codegen && !skipCodegen && luau_codegen_supported())
luau_codegen_compile(L, -1);
Luau::CodeGen::compile(L, -1, Luau::CodeGen::CodeGen_ColdFunctions);
int status = (result == 0) ? lua_resume(L, nullptr, 0) : LUA_ERRSYNTAX;
@ -288,7 +289,7 @@ TEST_CASE("Assert")
TEST_CASE("Basic")
{
ScopedFastFlag sffs{"LuauFloorDivision", true};
ScopedFastFlag sfff{"LuauImproveForN", true};
ScopedFastFlag sfff{"LuauImproveForN2", true};
runConformance("basic.lua");
}
@ -379,6 +380,8 @@ TEST_CASE("Events")
TEST_CASE("Constructs")
{
ScopedFastFlag sff("LuauCompileContinueCloseUpvals", true);
runConformance("constructs.lua");
}
@ -1809,8 +1812,6 @@ TEST_CASE("Native")
TEST_CASE("NativeTypeAnnotations")
{
ScopedFastFlag bytecodeVersion4("BytecodeVersion4", true);
// This tests requires code to run natively, otherwise all 'is_native' checks will fail
if (!codegen || !luau_codegen_supported())
return;
@ -1891,7 +1892,7 @@ TEST_CASE("HugeFunction")
REQUIRE(result == 0);
if (codegen && luau_codegen_supported())
luau_codegen_compile(L, -1);
Luau::CodeGen::compile(L, -1, Luau::CodeGen::CodeGen_ColdFunctions);
int status = lua_resume(L, nullptr, 0);
REQUIRE(status == 0);

View File

@ -3,10 +3,28 @@
#include "Fixture.h"
#include "Luau/Common.h"
#include "Luau/Ast.h"
#include "Luau/ModuleResolver.h"
#include "ScopedFlags.h"
#include "doctest.h"
#include <iostream>
using namespace Luau;
struct NonStrictTypeCheckerFixture : Fixture
{
ParseResult parse(std::string source)
{
ParseOptions opts;
opts.allowDeclarationSyntax = true;
ScopedFastFlag sff{"LuauCheckedFunctionSyntax", true};
return tryParse(source, opts);
}
};
TEST_SUITE_BEGIN("NonStrictTypeCheckerTest");
TEST_CASE_FIXTURE(Fixture, "basic")
@ -14,4 +32,85 @@ TEST_CASE_FIXTURE(Fixture, "basic")
Luau::checkNonStrict(builtinTypes, nullptr);
}
TEST_CASE_FIXTURE(NonStrictTypeCheckerFixture, "parse_top_level_checked_fn")
{
std::string src = R"BUILTIN_SRC(
declare function @checked abs(n: number): number
)BUILTIN_SRC";
ParseResult pr = parse(src);
LUAU_ASSERT(pr.errors.size() == 0);
LUAU_ASSERT(pr.root->body.size == 1);
AstStat* root = *(pr.root->body.data);
auto func = root->as<AstStatDeclareFunction>();
LUAU_ASSERT(func);
LUAU_ASSERT(func->checkedFunction);
}
TEST_CASE_FIXTURE(NonStrictTypeCheckerFixture, "parse_declared_table_checked_member")
{
std::string src = R"BUILTIN_SRC(
declare math : {
abs : @checked (number) -> number
}
)BUILTIN_SRC";
ParseResult pr = parse(src);
LUAU_ASSERT(pr.errors.size() == 0);
LUAU_ASSERT(pr.root->body.size == 1);
AstStat* root = *(pr.root->body.data);
auto glob = root->as<AstStatDeclareGlobal>();
LUAU_ASSERT(glob);
auto tbl = glob->type->as<AstTypeTable>();
LUAU_ASSERT(tbl);
LUAU_ASSERT(tbl->props.size == 1);
auto prop = *tbl->props.data;
auto func = prop.type->as<AstTypeFunction>();
LUAU_ASSERT(func);
LUAU_ASSERT(func->checkedFunction);
}
TEST_CASE_FIXTURE(NonStrictTypeCheckerFixture, "parse_checked_outside_decl_fails")
{
auto src = R"(
local @checked = 3
)";
ParseResult pr = parse(src);
LUAU_ASSERT(pr.errors.size() > 0);
auto ts = pr.errors[1].getMessage();
}
TEST_CASE_FIXTURE(NonStrictTypeCheckerFixture, "parse_checked_in_and_out_of_decl_fails")
{
auto src = R"(
local @checked = 3
declare function @checked abs(n: number): number
)";
auto pr = parse(src);
LUAU_ASSERT(pr.errors.size() == 2);
LUAU_ASSERT(pr.errors[0].getLocation().begin.line == 1);
LUAU_ASSERT(pr.errors[1].getLocation().begin.line == 1);
}
TEST_CASE_FIXTURE(NonStrictTypeCheckerFixture, "parse_checked_as_function_name_fails")
{
auto pr = parse(R"(
function @checked(x: number) : number
end
)");
LUAU_ASSERT(pr.errors.size() > 0);
}
TEST_CASE_FIXTURE(NonStrictTypeCheckerFixture, "cannot_use_@_as_variable_name")
{
auto pr = parse(R"(
local @blah = 3
)");
LUAU_ASSERT(pr.errors.size() > 0);
}
TEST_SUITE_END();

View File

@ -18,7 +18,7 @@ using namespace Luau;
struct LimitFixture : BuiltinsFixture
{
#if defined(_NOOPT) || defined(_DEBUG)
ScopedFastInt LuauTypeInferRecursionLimit{"LuauTypeInferRecursionLimit", 100};
ScopedFastInt LuauTypeInferRecursionLimit{"LuauTypeInferRecursionLimit", 90};
#endif
};

View File

@ -598,6 +598,20 @@ TEST_CASE_FIXTURE(SubtypeFixture, "(number) -> () <!: <T>(T) -> ()")
CHECK_IS_NOT_SUBTYPE(numberToNothingType, genericTToNothingType);
}
TEST_CASE_FIXTURE(SubtypeFixture, "<T>() -> (T, T) <!: () -> (string, number)")
{
TypeId nothingToTwoTs = arena.addType(FunctionType{
{genericT},
{},
builtinTypes->emptyTypePack,
arena.addTypePack({genericT, genericT})
});
TypeId nothingToStringAndNumber = fn({}, {builtinTypes->stringType, builtinTypes->numberType});
CHECK_IS_NOT_SUBTYPE(nothingToTwoTs, nothingToStringAndNumber);
}
TEST_CASE_FIXTURE(SubtypeFixture, "<A...>(A...) -> A... <: (number) -> number")
{
CHECK_IS_SUBTYPE(genericAsToAsType, numberToNumberType);

View File

@ -2,6 +2,7 @@
#include "Luau/TypeFamily.h"
#include "Luau/ConstraintSolver.h"
#include "Luau/NotNull.h"
#include "Luau/TxnLog.h"
#include "Luau/Type.h"
@ -22,21 +23,20 @@ struct FamilyFixture : Fixture
{
swapFamily = TypeFamily{/* name */ "Swap",
/* reducer */
[](std::vector<TypeId> tys, std::vector<TypePackId> tps, NotNull<TypeArena> arena, NotNull<BuiltinTypes> builtins,
NotNull<const TxnLog> log, NotNull<Scope> scope, NotNull<Normalizer> normalizer, ConstraintSolver* solver) -> TypeFamilyReductionResult<TypeId> {
[](std::vector<TypeId> tys, std::vector<TypePackId> tps, NotNull<TypeFamilyContext> ctx) -> TypeFamilyReductionResult<TypeId> {
LUAU_ASSERT(tys.size() == 1);
TypeId param = log->follow(tys.at(0));
TypeId param = follow(tys.at(0));
if (isString(param))
{
return TypeFamilyReductionResult<TypeId>{builtins->numberType, false, {}, {}};
return TypeFamilyReductionResult<TypeId>{ctx->builtins->numberType, false, {}, {}};
}
else if (isNumber(param))
{
return TypeFamilyReductionResult<TypeId>{builtins->stringType, false, {}, {}};
return TypeFamilyReductionResult<TypeId>{ctx->builtins->stringType, false, {}, {}};
}
else if (log->get<BlockedType>(param) || log->get<PendingExpansionType>(param) ||
log->get<TypeFamilyInstanceType>(param) || (solver && solver->hasUnresolvedConstraints(param)))
else if (is<BlockedType>(param) || is<PendingExpansionType>(param) || is<TypeFamilyInstanceType>(param) ||
(ctx->solver && ctx->solver->hasUnresolvedConstraints(param)))
{
return TypeFamilyReductionResult<TypeId>{std::nullopt, false, {param}, {}};
}

View File

@ -198,10 +198,7 @@ TEST_CASE_FIXTURE(Fixture, "generic_aliases")
)");
LUAU_REQUIRE_ERROR_COUNT(1, result);
const std::string expected = R"(Type 'bad' could not be converted into 'T<number>'
caused by:
Property 'v' is not compatible.
Type 'string' could not be converted into 'number' in an invariant context)";
const std::string expected = "Type 'bad' could not be converted into 'T<number>'";
CHECK(result.errors[0].location == Location{{4, 31}, {4, 44}});
CHECK_EQ(expected, toString(result.errors[0]));
}
@ -220,13 +217,7 @@ TEST_CASE_FIXTURE(Fixture, "dependent_generic_aliases")
)");
LUAU_REQUIRE_ERROR_COUNT(1, result);
const std::string expected = R"(Type 'bad' could not be converted into 'U<number>'
caused by:
Property 't' is not compatible.
Type '{| v: string |}' could not be converted into 'T<number>'
caused by:
Property 'v' is not compatible.
Type 'string' could not be converted into 'number' in an invariant context)";
const std::string expected = "Type 'bad' could not be converted into 'U<number>'";
CHECK(result.errors[0].location == Location{{4, 31}, {4, 52}});
CHECK_EQ(expected, toString(result.errors[0]));

View File

@ -997,7 +997,7 @@ TEST_CASE_FIXTURE(Fixture, "cli_80596_simplify_degenerate_intersections")
local x: number = obj.x or 3
)");
LUAU_REQUIRE_NO_ERRORS(result);
LUAU_REQUIRE_ERRORS(result);
}
TEST_CASE_FIXTURE(Fixture, "cli_80596_simplify_more_realistic_intersections")
@ -1023,7 +1023,7 @@ TEST_CASE_FIXTURE(Fixture, "cli_80596_simplify_more_realistic_intersections")
local x: number = obj.x or 3
)");
LUAU_REQUIRE_NO_ERRORS(result);
LUAU_REQUIRE_ERRORS(result);
}
TEST_SUITE_END();

View File

@ -936,6 +936,128 @@ TEST_CASE_FIXTURE(Fixture, "infer_any_in_all_modes_when_lhs_is_unknown")
// the case right now, though.
}
TEST_CASE_FIXTURE(Fixture, "infer_type_for_generic_subtraction")
{
CheckResult result = check(Mode::Strict, R"(
local function f(x, y)
return x - y
end
)");
if (FFlag::DebugLuauDeferredConstraintResolution)
{
LUAU_REQUIRE_NO_ERRORS(result);
CHECK(toString(requireType("f")) == "<a, b>(a, b) -> Sub<a, b>");
}
else
{
LUAU_REQUIRE_ERROR_COUNT(1, result);
CHECK_EQ(toString(result.errors[0]), "Unknown type used in - operation; consider adding a type annotation to 'x'");
}
}
TEST_CASE_FIXTURE(Fixture, "infer_type_for_generic_multiplication")
{
CheckResult result = check(Mode::Strict, R"(
local function f(x, y)
return x * y
end
)");
if (FFlag::DebugLuauDeferredConstraintResolution)
{
LUAU_REQUIRE_NO_ERRORS(result);
CHECK(toString(requireType("f")) == "<a, b>(a, b) -> Mul<a, b>");
}
else
{
LUAU_REQUIRE_ERROR_COUNT(1, result);
CHECK_EQ(toString(result.errors[0]), "Unknown type used in * operation; consider adding a type annotation to 'x'");
}
}
TEST_CASE_FIXTURE(Fixture, "infer_type_for_generic_division")
{
CheckResult result = check(Mode::Strict, R"(
local function f(x, y)
return x / y
end
)");
if (FFlag::DebugLuauDeferredConstraintResolution)
{
LUAU_REQUIRE_NO_ERRORS(result);
CHECK(toString(requireType("f")) == "<a, b>(a, b) -> Div<a, b>");
}
else
{
LUAU_REQUIRE_ERROR_COUNT(1, result);
CHECK_EQ(toString(result.errors[0]), "Unknown type used in / operation; consider adding a type annotation to 'x'");
}
}
TEST_CASE_FIXTURE(Fixture, "infer_type_for_generic_floor_division")
{
ScopedFastFlag floorDiv{"LuauFloorDivision", true};
CheckResult result = check(Mode::Strict, R"(
local function f(x, y)
return x // y
end
)");
if (FFlag::DebugLuauDeferredConstraintResolution)
{
LUAU_REQUIRE_NO_ERRORS(result);
CHECK(toString(requireType("f")) == "<a, b>(a, b) -> FloorDiv<a, b>");
}
else
{
LUAU_REQUIRE_ERROR_COUNT(1, result);
CHECK_EQ(toString(result.errors[0]), "Unknown type used in // operation; consider adding a type annotation to 'x'");
}
}
TEST_CASE_FIXTURE(Fixture, "infer_type_for_generic_exponentiation")
{
CheckResult result = check(Mode::Strict, R"(
local function f(x, y)
return x ^ y
end
)");
if (FFlag::DebugLuauDeferredConstraintResolution)
{
LUAU_REQUIRE_NO_ERRORS(result);
CHECK(toString(requireType("f")) == "<a, b>(a, b) -> Exp<a, b>");
}
else
{
LUAU_REQUIRE_ERROR_COUNT(1, result);
CHECK_EQ(toString(result.errors[0]), "Unknown type used in ^ operation; consider adding a type annotation to 'x'");
}
}
TEST_CASE_FIXTURE(Fixture, "infer_type_for_generic_modulo")
{
CheckResult result = check(Mode::Strict, R"(
local function f(x, y)
return x % y
end
)");
if (FFlag::DebugLuauDeferredConstraintResolution)
{
LUAU_REQUIRE_NO_ERRORS(result);
CHECK(toString(requireType("f")) == "<a, b>(a, b) -> Mod<a, b>");
}
else
{
LUAU_REQUIRE_ERROR_COUNT(1, result);
CHECK_EQ(toString(result.errors[0]), "Unknown type used in % operation; consider adding a type annotation to 'x'");
}
}
TEST_CASE_FIXTURE(BuiltinsFixture, "equality_operations_succeed_if_any_union_branch_succeeds")
{
CheckResult result = check(R"(

View File

@ -1942,4 +1942,54 @@ TEST_CASE_FIXTURE(BuiltinsFixture, "conditional_refinement_should_stay_error_sup
LUAU_REQUIRE_NO_ERRORS(result);
}
TEST_CASE_FIXTURE(BuiltinsFixture, "globals_can_be_narrowed_too")
{
CheckResult result = check(R"(
if typeof(string) == 'string' then
local foo = string
end
)");
CHECK("never" == toString(requireTypeAtPosition(Position{2, 24})));
}
TEST_CASE_FIXTURE(BuiltinsFixture, "luau_polyfill_isindexkey_refine_conjunction")
{
CheckResult result = check(R"(
local function isIndexKey(k, contiguousLength)
return type(k) == "number"
and k <= contiguousLength -- nothing out of bounds
and 1 <= k -- nothing illegal for array indices
and math.floor(k) == k -- no float keys
end
)");
LUAU_REQUIRE_NO_ERRORS(result);
}
TEST_CASE_FIXTURE(BuiltinsFixture, "luau_polyfill_isindexkey_refine_conjunction_variant")
{
CheckResult result = check(R"(
local function isIndexKey(k, contiguousLength: number)
return type(k) == "number"
and k <= contiguousLength -- nothing out of bounds
and 1 <= k -- nothing illegal for array indices
and math.floor(k) == k -- no float keys
end
)");
LUAU_REQUIRE_NO_ERRORS(result);
}
TEST_CASE_FIXTURE(BuiltinsFixture, "globals_can_be_narrowed_too")
{
CheckResult result = check(R"(
if typeof(string) == 'string' then
local foo = string
end
)");
CHECK("never" == toString(requireTypeAtPosition(Position{2, 24})));
}
TEST_SUITE_END();

View File

@ -366,10 +366,7 @@ TEST_CASE_FIXTURE(Fixture, "parametric_tagged_union_alias")
LUAU_REQUIRE_ERROR_COUNT(1, result);
const std::string expectedError = R"(Type 'a' could not be converted into 'Err<number> | Ok<string>'
caused by:
None of the union options are compatible. For example:
Table type 'a' not compatible with type 'Err<number>' because the former is missing field 'error')";
const std::string expectedError = "Type 'a' could not be converted into 'Err<number> | Ok<string>'";
CHECK(toString(result.errors[0]) == expectedError);
}

View File

@ -2672,7 +2672,7 @@ TEST_CASE_FIXTURE(Fixture, "generalize_table_argument")
std::optional<TypeId> fooArg1 = first(fooType->argTypes);
REQUIRE(fooArg1);
const TableType* fooArg1Table = get<TableType>(*fooArg1);
const TableType* fooArg1Table = get<TableType>(follow(*fooArg1));
REQUIRE(fooArg1Table);
CHECK_EQ(fooArg1Table->state, TableState::Generic);

View File

@ -237,4 +237,19 @@ repeat
i = i+1
until i==c
-- validate continue upvalue close behavior
local function check_connected(writer, reader)
writer(1)
assert(reader() == 1)
return true
end
repeat
local value = nil
local function write(n)
value = n
end
continue
until check_connected(write, function() return value end)
return 'OK'

View File

@ -23,6 +23,29 @@ assert(os.time({ year = 1970, month = 1, day = 1, hour = 0, min = 0, sec = 0}) =
assert(os.time({ year = 3000, month = 12, day = 31, hour = 23, min = 59, sec = 59}) == 32535215999) -- just before Windows max range
assert(os.time({ year = 1970, month = 1, day = 1, hour = 0, min = 0, sec = -1}) == nil) -- going before using time fields
assert(os.time({ year = 2000, month = 6, day = 10, hour = 0, min = 0, sec = 0}) == 960595200)
assert(os.time({ year = 2000, month = 6, day = 10, hour = 0, min = 0, sec = -86400}) == 960508800)
assert(os.time({ year = 2000, month = 6, day = 10, hour = 0, min = 0, sec = 86400}) == 960681600)
assert(os.time({ year = 2000, month = 6, day = 10, hour = 0, min = -600, sec = 0}) == 960559200)
assert(os.time({ year = 2000, month = 6, day = 10, hour = 0, min = 600, sec = 0}) == 960631200)
assert(os.time({ year = 2000, month = 6, day = 10, hour = -600, min = 0, sec = 0}) == 958435200)
assert(os.time({ year = 2000, month = 6, day = 10, hour = 600, min = 0, sec = 0}) == 962755200)
assert(os.time({ year = 2000, month = 6, day = -100, hour = 0, min = 0, sec = 0}) == 951091200)
assert(os.time({ year = 2000, month = 6, day = 1000, hour = 0, min = 0, sec = 0}) == 1046131200)
assert(os.time({ year = 2000, month = -60, day = 10, hour = 0, min = 0, sec = 0}) == 787017600)
assert(os.time({ year = 2000, month = 60, day = 10, hour = 0, min = 0, sec = 0}) == 1102636800)
assert(os.time({ year = 2000, month = 6, day = 10, hour = 0, min = 0, sec = -86400000}) == 874195200)
assert(os.time({ year = 2000, month = 6, day = 10, hour = 0, min = 0, sec = 86400000}) == 1046995200)
assert(os.time({ year = 2000, month = 6, day = 10, hour = 0, min = -600000, sec = 0}) == 924595200)
assert(os.time({ year = 2000, month = 6, day = 10, hour = 0, min = 600000, sec = 0}) == 996595200)
assert(os.time({ year = 2100, month = 6, day = 10, hour = -600000, min = 0, sec = 0}) == 1956268800)
assert(os.time({ year = 2100, month = 6, day = 10, hour = 600000, min = 0, sec = 0}) == 6276268800)
assert(os.time({ year = 2100, month = 6, day = -10000, hour = 0, min = 0, sec = 0}) == 3251404800)
assert(os.time({ year = 2100, month = 6, day = 100000, hour = 0, min = 0, sec = 0}) == 12755404800)
assert(os.time({ year = 2100, month = -600, day = 10, hour = 0, min = 0, sec = 0}) == 2522707200)
assert(os.time({ year = 2100, month = 600, day = 10, hour = 0, min = 0, sec = 0}) == 5678380800)
local function checkDateTable (t)
local D = os.date("!*t", t)
assert(os.time(D) == t)
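
The baseline in the new asserts is easy to verify by hand: 2000-06-10 00:00:00 UTC is 11118 days after the epoch (10957 days for 1970..1999 including 7 leap days, plus 152 days for January through May of the leap year 2000, plus 9 more days), and out-of-range fields are folded in linearly before conversion. A compile-time spot check (illustrative):

#include <cstdint>

static_assert(int64_t(10957 + 152 + 9) * 86400 == 960595200, "2000-06-10 baseline");
static_assert(960595200 - 86400 == 960508800, "sec = -86400 moves back exactly one day");
static_assert(960595200 + 86400 == 960681600, "sec = 86400 moves forward exactly one day");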

View File

@ -346,7 +346,7 @@ assert(math.log10("10") == 1)
assert(math.log("0") == -inf)
assert(math.log("8", 2) == 3)
assert(math.log("10", 10) == 1)
assert(math.log("9", 3) == 2)
assert(math.log("16", 4) == 2)
assert(math.max("1", 2) == 2)
assert(math.max(2, "1") == 2)
assert(math.max(1, 2, "3") == 3)

View File

@ -203,6 +203,33 @@ local function arraySizeOpt2(a, i)
return a[i] + a[5]
end
assert(arraySizeOpt1({1}, 1) == 71)
assert(arraySizeOpt2({1}, 1) == 71)
function deadLoopBody(n)
local r = 0
if n and false then
for i = 1, n do
r += 1
end
end
return r
end
assert(deadLoopBody(5) == 0)
function arrayIndexingSpecialNumbers1(a, b, c)
local arr = table.create(100000)
arr[a] = 9
arr[b-1] = 80
arr[b] = 700
arr[b+1] = 6000
arr[c-1] = 50000
arr[c] = 400000
arr[c+1] = 3000000
return arr[1] + arr[255] + arr[256] + arr[257] + arr[65535] + arr[65536] + arr[65537]
end
assert(arrayIndexingSpecialNumbers1(1, 256, 65536) == 3456789)
return('OK')

View File

@ -24,6 +24,8 @@ assert(tostring(-0.17) == "-0.17")
assert(tostring(math.pi) == "3.141592653589793")
-- fuzzing corpus
-- Note: If the assert below fires it may indicate floating point denormalized values
-- are not handled as expected.
assert(tostring(5.4536123983019448e-311) == "5.453612398302e-311")
assert(tostring(5.4834368411298348e-311) == "5.48343684113e-311")
assert(tostring(4.4154895841930002e-305) == "4.415489584193e-305")

View File

@ -18,6 +18,10 @@
#include <windows.h> // IsDebuggerPresent
#endif
#if defined(__x86_64__) || defined(_M_X64)
#include <immintrin.h>
#endif
#ifdef __APPLE__
#include <sys/types.h>
#include <sys/sysctl.h>
@ -253,8 +257,21 @@ static void setFastFlags(const std::vector<doctest::String>& flags)
}
}
// This function performs system/architecture specific initialization prior to running tests.
static void initSystem()
{
#if defined(__x86_64__) || defined(_M_X64)
// Some unit tests make use of denormalized numbers, so the FPU modes that flush results to zero or treat denormal inputs as zero
// must be disabled for the expected behavior.
_MM_SET_FLUSH_ZERO_MODE(_MM_FLUSH_ZERO_OFF);
_MM_SET_DENORMALS_ZERO_MODE(_MM_DENORMALS_ZERO_OFF);
#endif
}
int main(int argc, char** argv)
{
initSystem();
Luau::assertHandler() = testAssertionHandler;
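
The reason for initSystem: several conformance asserts (see the tostring denormal note earlier) rely on denormalized doubles surviving arithmetic, which they may not when the SSE flush-to-zero / denormals-are-zero modes are left enabled by the environment. A minimal illustration (behavior depends on how the compiler and CRT configure MXCSR):

#include <cstdio>
#if defined(__x86_64__) || defined(_M_X64)
#include <immintrin.h>
#endif

int main()
{
#if defined(__x86_64__) || defined(_M_X64)
    _MM_SET_FLUSH_ZERO_MODE(_MM_FLUSH_ZERO_OFF);
    _MM_SET_DENORMALS_ZERO_MODE(_MM_DENORMALS_ZERO_OFF);
#endif
    volatile double tiny = 5.4536123983019448e-311; // denormal value from the tostring test
    // With DAZ enabled, this multiplication would treat 'tiny' as zero.
    printf("%.17g\n", tiny * 1.0);
    return 0;
}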

View File

@ -1,4 +1,6 @@
AnnotationTests.infer_type_of_value_a_via_typeof_with_assignment
AnnotationTests.two_type_params
AnnotationTests.use_generic_type_alias
AstQuery.last_argument_function_call_type
AstQuery::getDocumentationSymbolAtPosition.table_overloaded_function_prop
AutocompleteTest.anonymous_autofilled_generic_on_argument_type_pack_vararg
@ -24,6 +26,7 @@ AutocompleteTest.type_correct_suggestion_in_argument
AutocompleteTest.unsealed_table_2
BuiltinTests.aliased_string_format
BuiltinTests.assert_removes_falsy_types
BuiltinTests.assert_removes_falsy_types2
BuiltinTests.assert_removes_falsy_types_even_from_type_pack_tail_but_only_for_the_first_type
BuiltinTests.assert_returns_false_and_string_iff_it_knows_the_first_argument_cannot_be_truthy
BuiltinTests.bad_select_should_not_crash
@ -39,6 +42,7 @@ BuiltinTests.gmatch_capture_types_set_containing_lbracket
BuiltinTests.gmatch_definition
BuiltinTests.ipairs_iterator_should_infer_types_and_type_check
BuiltinTests.next_iterator_should_infer_types_and_type_check
BuiltinTests.os_time_takes_optional_date_table
BuiltinTests.pairs_iterator_should_infer_types_and_type_check
BuiltinTests.see_thru_select
BuiltinTests.see_thru_select_count
@ -47,6 +51,7 @@ BuiltinTests.select_way_out_of_range
BuiltinTests.select_with_decimal_argument_is_rounded_down
BuiltinTests.set_metatable_needs_arguments
BuiltinTests.setmetatable_should_not_mutate_persisted_types
BuiltinTests.sort
BuiltinTests.sort_with_bad_predicate
BuiltinTests.sort_with_predicate
BuiltinTests.string_format_arg_count_mismatch
@ -54,6 +59,7 @@ BuiltinTests.string_format_as_method
BuiltinTests.string_format_correctly_ordered_types
BuiltinTests.string_format_report_all_type_errors_at_correct_positions
BuiltinTests.string_format_use_correct_argument2
BuiltinTests.table_concat_returns_string
BuiltinTests.table_dot_remove_optionally_returns_generic
BuiltinTests.table_freeze_is_generic
BuiltinTests.table_insert_correctly_infers_type_of_array_2_args_overload
@ -90,92 +96,181 @@ ControlFlowAnalysis.type_alias_does_not_leak_out_continuing
DefinitionTests.class_definition_indexer
DefinitionTests.class_definition_overload_metamethods
DefinitionTests.class_definition_string_props
DefinitionTests.declaring_generic_functions
DefinitionTests.definition_file_classes
Differ.equal_generictp_cyclic
Differ.equal_table_A_B_C
Differ.equal_table_cyclic_diamonds_unraveled
Differ.equal_table_kind_A
Differ.equal_table_kind_B
Differ.equal_table_kind_C
Differ.equal_table_kind_D
Differ.equal_table_measuring_tapes
Differ.equal_table_two_cyclic_tables_are_not_different
Differ.equal_table_two_shifted_circles_are_not_different
Differ.generictp_normal
Differ.generictp_normal_2
Differ.left_cyclic_table_right_table_missing_property
Differ.left_cyclic_table_right_table_property_wrong
Differ.negation
Differ.right_cyclic_table_left_table_missing_property
Differ.right_cyclic_table_left_table_property_wrong
Differ.table_left_circle_right_measuring_tape
FrontendTest.accumulate_cached_errors_in_consistent_order
FrontendTest.it_should_be_safe_to_stringify_errors_when_full_type_graph_is_discarded
GenericsTests.apply_type_function_nested_generics1
GenericsTests.apply_type_function_nested_generics3
GenericsTests.better_mismatch_error_messages
GenericsTests.bound_tables_do_not_clone_original_fields
GenericsTests.check_generic_function
GenericsTests.check_generic_local_function
GenericsTests.check_generic_typepack_function
GenericsTests.check_mutual_generic_functions
GenericsTests.check_nested_generic_function
GenericsTests.check_recursive_generic_function
GenericsTests.correctly_instantiate_polymorphic_member_functions
GenericsTests.do_not_always_instantiate_generic_intersection_types
GenericsTests.do_not_infer_generic_functions
GenericsTests.dont_substitute_bound_types
GenericsTests.error_detailed_function_mismatch_generic_pack
GenericsTests.error_detailed_function_mismatch_generic_types
GenericsTests.factories_of_generics
GenericsTests.function_arguments_can_be_polytypes
GenericsTests.generic_argument_count_too_few
GenericsTests.generic_argument_count_too_many
GenericsTests.generic_factories
GenericsTests.generic_functions_dont_cache_type_parameters
GenericsTests.generic_functions_in_types
GenericsTests.generic_functions_should_be_memory_safe
GenericsTests.generic_type_pack_parentheses
GenericsTests.generic_type_pack_unification1
GenericsTests.generic_type_pack_unification2
GenericsTests.generic_type_pack_unification3
GenericsTests.higher_rank_polymorphism_should_not_accept_instantiated_arguments
GenericsTests.hof_subtype_instantiation_regression
GenericsTests.infer_generic_function
GenericsTests.infer_generic_function_function_argument
GenericsTests.infer_generic_function_function_argument_2
GenericsTests.infer_generic_function_function_argument_3
GenericsTests.infer_generic_function_function_argument_overloaded
GenericsTests.infer_generic_lib_function_function_argument
GenericsTests.infer_generic_local_function
GenericsTests.infer_generic_property
GenericsTests.infer_nested_generic_function
GenericsTests.inferred_local_vars_can_be_polytypes
GenericsTests.instantiated_function_argument_names
GenericsTests.mutable_state_polymorphism
GenericsTests.instantiation_sharing_types
GenericsTests.local_vars_can_be_polytypes
GenericsTests.no_stack_overflow_from_quantifying
GenericsTests.properties_can_be_instantiated_polytypes
GenericsTests.properties_can_be_polytypes
GenericsTests.quantify_functions_even_if_they_have_an_explicit_generic
GenericsTests.rank_N_types_via_typeof
GenericsTests.self_recursive_instantiated_param
GenericsTests.type_parameters_can_be_polytypes
GenericsTests.typefuns_sharing_types
IntersectionTypes.error_detailed_intersection_all
IntersectionTypes.error_detailed_intersection_part
IntersectionTypes.intersect_bool_and_false
IntersectionTypes.intersect_false_and_bool_and_false
IntersectionTypes.intersect_metatables
IntersectionTypes.intersect_saturate_overloaded_functions
IntersectionTypes.intersection_of_tables
IntersectionTypes.intersection_of_tables_with_never_properties
IntersectionTypes.intersection_of_tables_with_top_properties
IntersectionTypes.less_greedy_unification_with_intersection_types
IntersectionTypes.overloaded_functions_mentioning_generic
IntersectionTypes.overloaded_functions_mentioning_generic_packs
IntersectionTypes.overloaded_functions_mentioning_generics
IntersectionTypes.overloaded_functions_returning_intersections
IntersectionTypes.overloadeded_functions_with_never_arguments
IntersectionTypes.overloadeded_functions_with_never_result
IntersectionTypes.overloadeded_functions_with_overlapping_results_and_variadics
IntersectionTypes.overloadeded_functions_with_unknown_arguments
IntersectionTypes.overloadeded_functions_with_unknown_result
IntersectionTypes.overloadeded_functions_with_weird_typepacks_1
IntersectionTypes.overloadeded_functions_with_weird_typepacks_2
IntersectionTypes.overloadeded_functions_with_weird_typepacks_3
IntersectionTypes.overloadeded_functions_with_weird_typepacks_4
IntersectionTypes.select_correct_union_fn
IntersectionTypes.should_still_pick_an_overload_whose_arguments_are_unions
IntersectionTypes.table_intersection_write_sealed_indirect
IntersectionTypes.table_write_sealed_indirect
IntersectionTypes.union_saturate_overloaded_functions
Negations.cofinite_strings_can_be_compared_for_equality
Normalize.higher_order_function_with_annotation
Normalize.negations_of_tables
Normalize.specific_functions_cannot_be_negated
ProvisionalTests.assign_table_with_refined_property_with_a_similar_type_is_illegal
ProvisionalTests.bail_early_if_unification_is_too_complicated
ProvisionalTests.choose_the_right_overload_for_pcall
ProvisionalTests.do_not_ice_when_trying_to_pick_first_of_generic_type_pack
ProvisionalTests.error_on_eq_metamethod_returning_a_type_other_than_boolean
ProvisionalTests.expected_type_should_be_a_helpful_deduction_guide_for_function_calls
ProvisionalTests.floating_generics_should_not_be_allowed
ProvisionalTests.free_is_not_bound_to_any
ProvisionalTests.free_options_can_be_unified_together
ProvisionalTests.free_options_cannot_be_unified_together
ProvisionalTests.function_returns_many_things_but_first_of_it_is_forgotten
ProvisionalTests.generic_type_leak_to_module_interface
ProvisionalTests.greedy_inference_with_shared_self_triggers_function_with_no_returns
ProvisionalTests.it_should_be_agnostic_of_actual_size
ProvisionalTests.luau-polyfill.Array.filter
ProvisionalTests.luau-polyfill.Map.entries
ProvisionalTests.optional_class_instances_are_invariant
ProvisionalTests.pcall_returns_at_least_two_value_but_function_returns_nothing
ProvisionalTests.setmetatable_constrains_free_type_into_free_table
ProvisionalTests.specialization_binds_with_prototypes_too_early
ProvisionalTests.table_insert_with_a_singleton_argument
ProvisionalTests.table_unification_infinite_recursion
ProvisionalTests.typeguard_inference_incomplete
ProvisionalTests.while_body_are_also_refined
ProvisionalTests.xpcall_returns_what_f_returns
RefinementTest.call_an_incompatible_function_after_using_typeguard
RefinementTest.apply_refinements_on_astexprindexexpr_whose_subscript_expr_is_constant_string
RefinementTest.correctly_lookup_property_whose_base_was_previously_refined
RefinementTest.dataflow_analysis_can_tell_refinements_when_its_appropriate_to_refine_into_nil_or_never
RefinementTest.discriminate_from_truthiness_of_x
RefinementTest.fail_to_refine_a_property_of_subscript_expression
RefinementTest.falsiness_of_TruthyPredicate_narrows_into_nil
RefinementTest.impossible_type_narrow_is_not_an_error
RefinementTest.index_on_a_refined_property
RefinementTest.isa_type_refinement_must_be_known_ahead_of_time
RefinementTest.luau_polyfill_isindexkey_refine_conjunction
RefinementTest.luau_polyfill_isindexkey_refine_conjunction_variant
RefinementTest.narrow_property_of_a_bounded_variable
RefinementTest.nonoptional_type_can_narrow_to_nil_if_sense_is_true
RefinementTest.not_t_or_some_prop_of_t
RefinementTest.refine_a_property_of_some_global
RefinementTest.refine_unknown_to_table_then_clone_it
RefinementTest.string_not_equal_to_string_or_nil
RefinementTest.truthy_constraint_on_properties
RefinementTest.type_annotations_arent_relevant_when_doing_dataflow_analysis
RefinementTest.type_narrow_to_vector
RefinementTest.typeguard_cast_free_table_to_vector
RefinementTest.typeguard_in_assert_position
RefinementTest.x_as_any_if_x_is_instance_elseif_x_is_table
TableTests.a_free_shape_can_turn_into_a_scalar_directly
TableTests.a_free_shape_can_turn_into_a_scalar_if_it_is_compatible
TableTests.a_free_shape_cannot_turn_into_a_scalar_if_it_is_not_compatible
TableTests.accidentally_checked_prop_in_opposite_branch
TableTests.array_factory_function
TableTests.call_method
TableTests.cannot_change_type_of_unsealed_table_prop
TableTests.casting_tables_with_props_into_table_with_indexer2
TableTests.casting_tables_with_props_into_table_with_indexer3
TableTests.casting_tables_with_props_into_table_with_indexer4
TableTests.casting_unsealed_tables_with_props_into_table_with_indexer
TableTests.certain_properties_of_table_literal_arguments_can_be_covariant
TableTests.checked_prop_too_early
TableTests.cli_84607_missing_prop_in_array_or_dict
TableTests.common_table_element_general
TableTests.common_table_element_inner_index
TableTests.common_table_element_inner_prop
TableTests.common_table_element_list
TableTests.common_table_element_union_assignment
TableTests.common_table_element_union_in_call
TableTests.common_table_element_union_in_call_tail
TableTests.common_table_element_union_in_prop
TableTests.confusing_indexing
TableTests.cyclic_shifted_tables
TableTests.disallow_indexing_into_an_unsealed_table_with_no_indexer_in_strict_mode
TableTests.dont_crash_when_setmetatable_does_not_produce_a_metatabletypevar
@@ -183,29 +278,54 @@ TableTests.dont_extend_unsealed_tables_in_rvalue_position
TableTests.dont_hang_when_trying_to_look_up_in_cyclic_metatable_index
TableTests.dont_leak_free_table_props
TableTests.dont_quantify_table_that_belongs_to_outer_scope
TableTests.dont_seal_an_unsealed_table_by_passing_it_to_a_function_that_takes_a_sealed_table
TableTests.dont_suggest_exact_match_keys
TableTests.error_detailed_indexer_key
TableTests.error_detailed_indexer_value
TableTests.error_detailed_metatable_prop
TableTests.error_detailed_prop
TableTests.error_detailed_prop_nested
TableTests.expected_indexer_from_table_union
TableTests.expected_indexer_value_type_extra
TableTests.expected_indexer_value_type_extra_2
TableTests.explicitly_typed_table
TableTests.explicitly_typed_table_error
TableTests.explicitly_typed_table_with_indexer
TableTests.extend_unsealed_table_with_metatable
TableTests.generalize_table_argument
TableTests.generic_table_instantiation_potential_regression
TableTests.give_up_after_one_metatable_index_look_up
TableTests.indexer_mismatch
TableTests.indexer_on_sealed_table_must_unify_with_free_table
TableTests.indexers_get_quantified_too
TableTests.inequality_operators_imply_exactly_matching_types
TableTests.infer_array_2
TableTests.infer_indexer_for_left_unsealed_table_from_right_hand_table_with_indexer
TableTests.infer_indexer_from_its_function_return_type
TableTests.infer_indexer_from_value_property_in_literal
TableTests.inferred_return_type_of_free_table
TableTests.instantiate_table_cloning_3
TableTests.instantiate_tables_at_scope_level
TableTests.invariant_table_properties_means_instantiating_tables_in_assignment_is_unsound
TableTests.invariant_table_properties_means_instantiating_tables_in_call_is_unsound
TableTests.leaking_bad_metatable_errors
TableTests.less_exponential_blowup_please
TableTests.metatable_mismatch_should_fail
TableTests.missing_metatable_for_sealed_tables_do_not_get_inferred
TableTests.mixed_tables_with_implicit_numbered_keys
TableTests.ok_to_provide_a_subtype_during_construction
TableTests.ok_to_set_nil_even_on_non_lvalue_base_expr
TableTests.okay_to_add_property_to_unsealed_tables_by_assignment
TableTests.okay_to_add_property_to_unsealed_tables_by_function_call
TableTests.only_ascribe_synthetic_names_at_module_scope
TableTests.oop_indexer_works
TableTests.oop_polymorphic
TableTests.open_table_unification_2
TableTests.pass_a_union_of_tables_to_a_function_that_requires_a_table
TableTests.pass_a_union_of_tables_to_a_function_that_requires_a_table_2
TableTests.passing_compatible_unions_to_a_generic_table_without_crashing
TableTests.prop_access_on_key_whose_types_mismatches
TableTests.prop_access_on_unions_of_indexers_where_key_whose_types_mismatches
TableTests.quantify_even_that_table_was_never_exported_at_all
TableTests.quantify_metatables_of_metatables_of_table
TableTests.reasonable_error_when_adding_a_nonexistent_property_to_an_array_like_table
@@ -214,18 +334,25 @@ TableTests.right_table_missing_key2
TableTests.scalar_is_a_subtype_of_a_compatible_polymorphic_shape_type
TableTests.scalar_is_not_a_subtype_of_a_compatible_polymorphic_shape_type
TableTests.sealed_table_indexers_must_unify
TableTests.sealed_table_value_can_infer_an_indexer
TableTests.shared_selfs
TableTests.shared_selfs_from_free_param
TableTests.shared_selfs_through_metatables
TableTests.subproperties_can_also_be_covariantly_tested
TableTests.table_call_metamethod_basic
TableTests.table_call_metamethod_generic
TableTests.table_function_check_use_after_free
TableTests.table_insert_should_cope_with_optional_properties_in_strict
TableTests.table_param_width_subtyping_1
TableTests.table_param_width_subtyping_2
TableTests.table_param_width_subtyping_3
TableTests.table_simple_call
TableTests.table_subtyping_with_extra_props_dont_report_multiple_errors
TableTests.table_subtyping_with_missing_props_dont_report_multiple_errors
TableTests.table_subtyping_with_missing_props_dont_report_multiple_errors2
TableTests.table_unification_4
TableTests.table_unifies_into_map
TableTests.top_table_type
TableTests.type_mismatch_on_massive_table_is_cut_short
TableTests.unifying_tables_shouldnt_uaf1
TableTests.used_colon_instead_of_dot
@@ -236,31 +363,41 @@ ToString.exhaustive_toString_of_cyclic_table
ToString.free_types
ToString.named_metatable_toStringNamedFunction
ToString.pick_distinct_names_for_mixed_explicit_and_implicit_generics
ToString.tostring_error_mismatch
ToString.toStringDetailed2
ToString.toStringErrorPack
ToString.toStringNamedFunction_generic_pack
TryUnifyTests.members_of_failed_typepack_unification_are_unified_with_errorType
TryUnifyTests.result_of_failed_typepack_unification_is_constrained
TryUnifyTests.typepack_unification_should_trim_free_tails
TryUnifyTests.uninhabited_table_sub_anything
TryUnifyTests.uninhabited_table_sub_never
TryUnifyTests.variadics_should_use_reversed_properly
TypeAliases.alias_expands_to_bare_reference_to_imported_type
TypeAliases.dont_lose_track_of_PendingExpansionTypes_after_substitution
TypeAliases.free_variables_from_typeof_in_aliases
TypeAliases.generic_param_remap
TypeAliases.mismatched_generic_type_param
TypeAliases.mutually_recursive_aliases
TypeAliases.mutually_recursive_generic_aliases
TypeAliases.mutually_recursive_types_restriction_not_ok_1
TypeAliases.mutually_recursive_types_restriction_not_ok_2
TypeAliases.mutually_recursive_types_swapsies_not_ok
TypeAliases.recursive_types_restriction_not_ok
TypeAliases.report_shadowed_aliases
TypeAliases.stringify_optional_parameterized_alias
TypeAliases.type_alias_local_mutation
TypeAliases.type_alias_local_rename
TypeAliases.type_alias_locations
TypeAliases.type_alias_of_an_imported_recursive_generic_type
TypeAliases.use_table_name_and_generic_params_in_errors
TypeFamilyTests.add_family_at_work
TypeFamilyTests.family_as_fn_arg
TypeFamilyTests.family_as_fn_ret
TypeFamilyTests.function_internal_families
TypeFamilyTests.internal_families_raise_errors
TypeFamilyTests.table_internal_families
TypeFamilyTests.type_families_inhabited_with_normalization
TypeFamilyTests.unsolvable_family
TypeInfer.bidirectional_checking_of_callback_property
TypeInfer.check_expr_recursion_limit
@@ -269,13 +406,22 @@ TypeInfer.cli_39932_use_unifier_in_ensure_methods
TypeInfer.cli_50041_committing_txnlog_in_apollo_client_error
TypeInfer.dont_report_type_errors_within_an_AstExprError
TypeInfer.dont_report_type_errors_within_an_AstStatError
TypeInfer.follow_on_new_types_in_substitution
TypeInfer.fuzz_free_table_type_change_during_index_check
TypeInfer.infer_assignment_value_types_mutable_lval
TypeInfer.globals_are_banned_in_strict_mode
TypeInfer.if_statement
TypeInfer.infer_locals_via_assignment_from_its_call_site
TypeInfer.infer_locals_with_nil_value
TypeInfer.infer_through_group_expr
TypeInfer.infer_type_assertion_value_type
TypeInfer.interesting_local_type_inference_case
TypeInfer.no_stack_overflow_from_isoptional
TypeInfer.promote_tail_type_packs
TypeInfer.recursive_function_that_invokes_itself_with_a_refinement_of_its_parameter
TypeInfer.recursive_function_that_invokes_itself_with_a_refinement_of_its_parameter_2
TypeInfer.tc_after_error_recovery_no_replacement_name_in_error
TypeInfer.tc_error
TypeInfer.tc_error_2
TypeInfer.tc_if_else_expressions_expected_type_3
TypeInfer.type_infer_recursion_limit_no_ice
TypeInfer.type_infer_recursion_limit_normalizer
@@ -288,22 +434,36 @@ TypeInferAnyError.for_in_loop_iterator_returns_any
TypeInferAnyError.for_in_loop_iterator_returns_any2
TypeInferAnyError.intersection_of_any_can_have_props
TypeInferAnyError.replace_every_free_type_when_unifying_a_complex_function_with_any
TypeInferAnyError.union_of_types_regression_test
TypeInferAnyError.type_error_addition
TypeInferClasses.callable_classes
TypeInferClasses.can_read_prop_of_base_class_using_string
TypeInferClasses.cannot_unify_class_instance_with_primitive
TypeInferClasses.class_type_mismatch_with_name_conflict
TypeInferClasses.detailed_class_unification_error
TypeInferClasses.index_instance_property
TypeInferClasses.indexable_classes
TypeInferClasses.table_class_unification_reports_sane_errors_for_missing_properties
TypeInferClasses.table_indexers_are_invariant
TypeInferClasses.type_mismatch_invariance_required_for_error
TypeInferClasses.we_can_report_when_someone_is_trying_to_use_a_table_rather_than_a_class
TypeInferFunctions.apply_of_lambda_with_inferred_and_explicit_types
TypeInferFunctions.cannot_hoist_interior_defns_into_signature
TypeInferFunctions.check_function_bodies
TypeInferFunctions.concrete_functions_are_not_supertypes_of_function
TypeInferFunctions.dont_assert_when_the_tarjan_limit_is_exceeded_during_generalization
TypeInferFunctions.dont_give_other_overloads_message_if_only_one_argument_matching_overload_exists
TypeInferFunctions.dont_infer_parameter_types_for_functions_from_their_call_site
TypeInferFunctions.error_detailed_function_mismatch_arg
TypeInferFunctions.error_detailed_function_mismatch_arg_count
TypeInferFunctions.error_detailed_function_mismatch_ret
TypeInferFunctions.error_detailed_function_mismatch_ret_count
TypeInferFunctions.error_detailed_function_mismatch_ret_mult
TypeInferFunctions.function_cast_error_uses_correct_language
TypeInferFunctions.function_decl_non_self_sealed_overwrite_2
TypeInferFunctions.function_decl_non_self_unsealed_overwrite
TypeInferFunctions.function_does_not_return_enough_values
TypeInferFunctions.function_exprs_are_generalized_at_signature_scope_not_enclosing
TypeInferFunctions.function_is_supertype_of_concrete_functions
TypeInferFunctions.function_statement_sealed_table_assignment_through_indexer
TypeInferFunctions.generic_packs_are_not_variadic
TypeInferFunctions.higher_order_function_2
@@ -311,14 +471,22 @@ TypeInferFunctions.higher_order_function_3
TypeInferFunctions.higher_order_function_4
TypeInferFunctions.improved_function_arg_mismatch_errors
TypeInferFunctions.infer_anonymous_function_arguments
TypeInferFunctions.infer_anonymous_function_arguments_outside_call
TypeInferFunctions.infer_generic_function_function_argument
TypeInferFunctions.infer_generic_function_function_argument_overloaded
TypeInferFunctions.infer_generic_lib_function_function_argument
TypeInferFunctions.infer_return_type_from_selected_overload
TypeInferFunctions.infer_return_value_type
TypeInferFunctions.infer_that_function_does_not_return_a_table
TypeInferFunctions.inferred_higher_order_functions_are_quantified_at_the_right_time3
TypeInferFunctions.instantiated_type_packs_must_have_a_non_null_scope
TypeInferFunctions.it_is_ok_to_oversaturate_a_higher_order_function_argument
TypeInferFunctions.luau_subtyping_is_np_hard
TypeInferFunctions.no_lossy_function_type
TypeInferFunctions.num_is_solved_after_num_or_str
TypeInferFunctions.num_is_solved_before_num_or_str
TypeInferFunctions.occurs_check_failure_in_function_return_type
TypeInferFunctions.other_things_are_not_related_to_function
TypeInferFunctions.param_1_and_2_both_takes_the_same_generic_but_their_arguments_are_incompatible
TypeInferFunctions.param_1_and_2_both_takes_the_same_generic_but_their_arguments_are_incompatible_2
TypeInferFunctions.record_matching_overload
@@ -330,7 +498,9 @@ TypeInferFunctions.too_few_arguments_variadic_generic2
TypeInferFunctions.too_many_arguments_error_location
TypeInferFunctions.too_many_return_values_in_parentheses
TypeInferFunctions.too_many_return_values_no_function
TypeInferFunctions.vararg_function_is_quantified
TypeInferLoops.cli_68448_iterators_need_not_accept_nil
TypeInferLoops.dcr_iteration_explore_raycast_minimization
TypeInferLoops.dcr_iteration_on_never_gives_never
TypeInferLoops.dcr_xpath_candidates
TypeInferLoops.for_in_loop
@@ -341,8 +511,12 @@ TypeInferLoops.for_in_loop_on_non_function
TypeInferLoops.for_in_loop_with_custom_iterator
TypeInferLoops.for_in_loop_with_incompatible_args_to_iterator
TypeInferLoops.for_in_loop_with_next
TypeInferLoops.for_in_with_generic_next
TypeInferLoops.for_in_with_just_one_iterator_is_ok
TypeInferLoops.for_loop
TypeInferLoops.ipairs_produces_integral_indices
TypeInferLoops.iteration_no_table_passed
TypeInferLoops.iteration_regression_issue_69967
TypeInferLoops.iteration_regression_issue_69967_alt
TypeInferLoops.loop_iter_basic
TypeInferLoops.loop_iter_metamethod_nil
@@ -351,11 +525,14 @@ TypeInferLoops.loop_iter_metamethod_ok_with_inference
TypeInferLoops.loop_iter_trailing_nil
TypeInferLoops.loop_typecheck_crash_on_empty_optional
TypeInferLoops.properly_infer_iteratee_is_a_free_table
TypeInferLoops.repeat_loop
TypeInferLoops.unreachable_code_after_infinite_loop
TypeInferLoops.varlist_declared_by_for_in_loop_should_be_free
TypeInferLoops.while_loop
TypeInferModules.bound_free_table_export_is_ok
TypeInferModules.do_not_modify_imported_types_4
TypeInferModules.do_not_modify_imported_types_5
TypeInferModules.general_require_call_expression
TypeInferModules.module_type_conflict
TypeInferModules.module_type_conflict_instantiated
TypeInferModules.require_failed_module
@@ -371,13 +548,20 @@ TypeInferOOP.table_oop
TypeInferOperators.add_type_family_works
TypeInferOperators.and_binexps_dont_unify
TypeInferOperators.cli_38355_recursive_union
TypeInferOperators.compound_assign_mismatch_metatable
TypeInferOperators.compound_assign_mismatch_op
TypeInferOperators.compound_assign_mismatch_result
TypeInferOperators.compound_assign_result_must_be_compatible_with_var
TypeInferOperators.concat_op_on_free_lhs_and_string_rhs
TypeInferOperators.concat_op_on_string_lhs_and_free_rhs
TypeInferOperators.disallow_string_and_types_without_metatables_from_arithmetic_binary_ops
TypeInferOperators.luau-polyfill.Array.startswith
TypeInferOperators.luau_polyfill_is_array
TypeInferOperators.normalize_strings_comparison
TypeInferOperators.operator_eq_completely_incompatible
TypeInferOperators.operator_eq_verifies_types_do_intersect
TypeInferOperators.reducing_and
TypeInferOperators.refine_and_or
TypeInferOperators.strict_binary_op_where_lhs_unknown
TypeInferOperators.typecheck_overloaded_multiply_that_is_an_intersection
TypeInferOperators.typecheck_overloaded_multiply_that_is_an_intersection_on_rhs
@@ -388,22 +572,51 @@ TypeInferOperators.unrelated_classes_cannot_be_compared
TypeInferOperators.unrelated_primitives_cannot_be_compared
TypeInferPrimitives.CheckMethodsOfNumber
TypeInferPrimitives.string_index
TypeInferUnknownNever.array_like_table_of_never_is_inhabitable
TypeInferUnknownNever.index_on_union_of_tables_for_properties_that_is_never
TypeInferUnknownNever.index_on_union_of_tables_for_properties_that_is_sorta_never
TypeInferUnknownNever.length_of_never
TypeInferUnknownNever.math_operators_and_never
TypePackTests.fuzz_typepack_iter_follow_2
TypePackTests.pack_tail_unification_check
TypePackTests.parenthesized_varargs_returns_any
TypePackTests.type_alias_backwards_compatible
TypePackTests.type_alias_default_type_errors
TypePackTests.type_packs_with_tails_in_vararg_adjustment
TypePackTests.unify_variadic_tails_in_arguments
TypePackTests.unify_variadic_tails_in_arguments_free
TypePackTests.variadic_argument_tail
TypeSingletons.enums_using_singletons_mismatch
TypeSingletons.error_detailed_tagged_union_mismatch_bool
TypeSingletons.error_detailed_tagged_union_mismatch_string
TypeSingletons.function_args_infer_singletons
TypeSingletons.function_call_with_singletons
TypeSingletons.function_call_with_singletons_mismatch
TypeSingletons.overloaded_function_call_with_singletons
TypeSingletons.return_type_of_f_is_not_widened
TypeSingletons.table_properties_singleton_strings
TypeSingletons.table_properties_type_error_escapes
TypeSingletons.widen_the_supertype_if_it_is_free_and_subtype_has_singleton
TypeSingletons.widening_happens_almost_everywhere
UnionTypes.disallow_less_specific_assign
UnionTypes.disallow_less_specific_assign2
UnionTypes.error_detailed_optional
UnionTypes.error_detailed_union_all
UnionTypes.error_detailed_union_part
UnionTypes.generic_function_with_optional_arg
UnionTypes.index_on_a_union_type_with_missing_property
UnionTypes.less_greedy_unification_with_union_types
UnionTypes.optional_arguments_table2
UnionTypes.optional_index_error
UnionTypes.optional_length_error
UnionTypes.table_union_write_indirect
UnionTypes.unify_unsealed_table_union_check
UnionTypes.unify_sealed_table_union_check
UnionTypes.union_of_functions
UnionTypes.union_of_functions_mentioning_generic_typepacks
UnionTypes.union_of_functions_mentioning_generics
UnionTypes.union_of_functions_with_mismatching_arg_arities
UnionTypes.union_of_functions_with_mismatching_arg_variadics
UnionTypes.union_of_functions_with_mismatching_result_arities
UnionTypes.union_of_functions_with_mismatching_result_variadics
UnionTypes.union_of_functions_with_variadics
UnionTypes.union_true_and_false