Sync to upstream/release/606 (#1127)

New Solver
* Improvements to data flow analysis (see the sketch below)

Native Code Generation
* Block limit is now per-function instead of per-module
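
The data flow analysis change is easiest to see in the new TypeState tests added by this commit: a local that is captured by a function is now modeled as a phi node over every assignment to it, so inside the capture its type is the union of all versions. A minimal Luau sketch of that behavior, adapted from the captured_locals_are_unions_of_all_assignments test in this commit (the comment states the expectation the test asserts, not compiler output):

    local x = nil

    function f()
        -- `x` is captured, so it is a phi of every assignment to it across
        -- the whole script; here it is typed as (number | string)?
        print(x)
        x = "five"
    end

    x = 5
    f()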

Co-authored-by: Alexander McCord <amccord@roblox.com>
Co-authored-by: Andy Friesen <afriesen@roblox.com>
Co-authored-by: Aviral Goel <agoel@roblox.com>
Co-authored-by: Vyacheslav Egorov <vegorov@roblox.com>
vegorov-rbx 2023-12-08 23:50:16 +02:00 committed by GitHub
parent 2ea7193777
commit c26d820902
19 changed files with 546 additions and 214 deletions


@ -150,7 +150,7 @@ private:
*/
ScopePtr childScope(AstNode* node, const ScopePtr& parent);
std::optional<TypeId> lookup(Scope* scope, DefId def);
std::optional<TypeId> lookup(Scope* scope, DefId def, bool prototype = true);
/**
* Adds a new constraint with no dependencies to a given scope.


@ -74,8 +74,15 @@ private:
struct DfgScope
{
enum ScopeType
{
Linear,
Loop,
Function,
};
DfgScope* parent;
bool isLoopScope;
ScopeType scopeType;
using Bindings = DenseHashMap<Symbol, const Def*>;
using Props = DenseHashMap<const Def*, std::unordered_map<std::string, const Def*>>;
@ -117,7 +124,17 @@ private:
std::vector<std::unique_ptr<DfgScope>> scopes;
DfgScope* childScope(DfgScope* scope, bool isLoopScope = false);
struct FunctionCapture
{
std::vector<DefId> captureDefs;
std::vector<DefId> allVersions;
size_t versionOffset = 0;
};
DenseHashMap<Symbol, FunctionCapture> captures{Symbol{}};
void resolveCaptures();
DfgScope* childScope(DfgScope* scope, DfgScope::ScopeType scopeType = DfgScope::Linear);
void join(DfgScope* p, DfgScope* a, DfgScope* b);
void joinBindings(DfgScope::Bindings& p, const DfgScope::Bindings& a, const DfgScope::Bindings& b);
@ -167,11 +184,11 @@ private:
DataFlowResult visitExpr(DfgScope* scope, AstExprError* error);
void visitLValue(DfgScope* scope, AstExpr* e, DefId incomingDef, bool isCompoundAssignment = false);
void visitLValue(DfgScope* scope, AstExprLocal* l, DefId incomingDef, bool isCompoundAssignment);
void visitLValue(DfgScope* scope, AstExprGlobal* g, DefId incomingDef, bool isCompoundAssignment);
void visitLValue(DfgScope* scope, AstExprIndexName* i, DefId incomingDef);
void visitLValue(DfgScope* scope, AstExprIndexExpr* i, DefId incomingDef);
void visitLValue(DfgScope* scope, AstExprError* e, DefId incomingDef);
DefId visitLValue(DfgScope* scope, AstExprLocal* l, DefId incomingDef, bool isCompoundAssignment);
DefId visitLValue(DfgScope* scope, AstExprGlobal* g, DefId incomingDef, bool isCompoundAssignment);
DefId visitLValue(DfgScope* scope, AstExprIndexName* i, DefId incomingDef);
DefId visitLValue(DfgScope* scope, AstExprIndexExpr* i, DefId incomingDef);
DefId visitLValue(DfgScope* scope, AstExprError* e, DefId incomingDef);
void visitType(DfgScope* scope, AstType* t);
void visitType(DfgScope* scope, AstTypeReference* r);


@ -73,6 +73,7 @@ const T* get(DefId def)
}
bool containsSubscriptedDefinition(DefId def);
void collectOperands(DefId def, std::vector<DefId>* operands);
struct DefArena
{


@ -205,7 +205,7 @@ ScopePtr ConstraintGenerator::childScope(AstNode* node, const ScopePtr& parent)
return scope;
}
std::optional<TypeId> ConstraintGenerator::lookup(Scope* scope, DefId def)
std::optional<TypeId> ConstraintGenerator::lookup(Scope* scope, DefId def, bool prototype)
{
if (get<Cell>(def))
return scope->lookup(def);
@ -213,22 +213,24 @@ std::optional<TypeId> ConstraintGenerator::lookup(Scope* scope, DefId def)
{
if (auto found = scope->lookup(def))
return *found;
else if (!prototype)
return std::nullopt;
TypeId res = builtinTypes->neverType;
for (DefId operand : phi->operands)
{
// `scope->lookup(operand)` may return nothing because it could be a phi node of globals, but one of
// the operand of that global has never been assigned a type, and so it should be an error.
// e.g.
// ```
// if foo() then
// g = 5
// end
// -- `g` here is a phi node of the assignment to `g`, or the original revision of `g` before the branch.
// ```
TypeId ty = scope->lookup(operand).value_or(builtinTypes->errorRecoveryType());
res = simplifyUnion(builtinTypes, arena, res, ty).result;
// `scope->lookup(operand)` may return nothing because we only bind a type to that operand
// once we've seen that particular `DefId`. In this case, we need to prototype those types
// and use those at a later time.
std::optional<TypeId> ty = lookup(scope, operand, /*prototype*/false);
if (!ty)
{
ty = arena->addType(BlockedType{});
rootScope->lvalueTypes[operand] = *ty;
}
res = simplifyUnion(builtinTypes, arena, res, *ty).result;
}
scope->lvalueTypes[def] = res;
@ -861,7 +863,7 @@ ControlFlow ConstraintGenerator::visit(const ScopePtr& scope, AstStatFunction* f
DenseHashSet<Constraint*> excludeList{nullptr};
DefId def = dfg->getDef(function->name);
std::optional<TypeId> existingFunctionTy = scope->lookup(def);
std::optional<TypeId> existingFunctionTy = lookup(scope.get(), def);
if (AstExprLocal* localName = function->name->as<AstExprLocal>())
{
@ -1724,16 +1726,14 @@ Inference ConstraintGenerator::check(const ScopePtr& scope, AstExprGlobal* globa
/* prepopulateGlobalScope() has already added all global functions to the environment by this point, so any
* global that is not already in-scope is definitely an unknown symbol.
*/
if (auto ty = lookup(scope.get(), def))
return Inference{*ty, refinementArena.proposition(key, builtinTypes->truthyType)};
else if (auto ty = scope->lookup(global->name))
if (auto ty = lookup(scope.get(), def, /*prototype=*/false))
{
rootScope->lvalueTypes[def] = *ty;
return Inference{*ty, refinementArena.proposition(key, builtinTypes->truthyType)};
}
else
{
reportError(global->location, UnknownSymbol{global->name.value});
reportError(global->location, UnknownSymbol{global->name.value, UnknownSymbol::Binding});
return Inference{builtinTypes->errorRecoveryType()};
}
}
@ -3110,6 +3110,16 @@ struct GlobalPrepopulator : AstVisitor
return true;
}
bool visit(AstType*) override
{
return true;
}
bool visit(class AstTypePack* node) override
{
return true;
}
};
void ConstraintGenerator::prepopulateGlobalScope(const ScopePtr& globalScope, AstStatBlock* program)


@ -116,7 +116,7 @@ bool DfgScope::canUpdateDefinition(Symbol symbol) const
{
if (current->bindings.find(symbol))
return true;
else if (current->isLoopScope)
else if (current->scopeType == DfgScope::Loop)
return false;
}
@ -129,7 +129,7 @@ bool DfgScope::canUpdateDefinition(DefId def, const std::string& key) const
{
if (auto props = current->props.find(def))
return true;
else if (current->isLoopScope)
else if (current->scopeType == DfgScope::Loop)
return false;
}
@ -144,6 +144,7 @@ DataFlowGraph DataFlowGraphBuilder::build(AstStatBlock* block, NotNull<InternalE
builder.handle = handle;
builder.moduleScope = builder.childScope(nullptr); // nullptr is the root DFG scope.
builder.visitBlockWithoutChildScope(builder.moduleScope, block);
builder.resolveCaptures();
if (FFlag::DebugLuauFreezeArena)
{
@ -154,9 +155,27 @@ DataFlowGraph DataFlowGraphBuilder::build(AstStatBlock* block, NotNull<InternalE
return std::move(builder.graph);
}
DfgScope* DataFlowGraphBuilder::childScope(DfgScope* scope, bool isLoopScope)
void DataFlowGraphBuilder::resolveCaptures()
{
return scopes.emplace_back(new DfgScope{scope, isLoopScope}).get();
for (const auto& [_, capture] : captures)
{
std::vector<DefId> operands;
for (size_t i = capture.versionOffset; i < capture.allVersions.size(); ++i)
collectOperands(capture.allVersions[i], &operands);
for (DefId captureDef : capture.captureDefs)
{
Phi* phi = const_cast<Phi*>(get<Phi>(captureDef));
LUAU_ASSERT(phi);
LUAU_ASSERT(phi->operands.empty());
phi->operands = operands;
}
}
}
DfgScope* DataFlowGraphBuilder::childScope(DfgScope* scope, DfgScope::ScopeType scopeType)
{
return scopes.emplace_back(new DfgScope{scope, scopeType}).get();
}
void DataFlowGraphBuilder::join(DfgScope* p, DfgScope* a, DfgScope* b)
@ -227,24 +246,44 @@ void DataFlowGraphBuilder::joinProps(DfgScope::Props& p, const DfgScope::Props&
DefId DataFlowGraphBuilder::lookup(DfgScope* scope, Symbol symbol)
{
if (auto found = scope->lookup(symbol))
return *found;
else
for (DfgScope* current = scope; current; current = current->parent)
{
DefId result = defArena->freshCell();
if (symbol.local)
scope->bindings[symbol] = result;
else
moduleScope->bindings[symbol] = result;
return result;
if (auto found = current->bindings.find(symbol))
return NotNull{*found};
else if (current->scopeType == DfgScope::Function)
{
FunctionCapture& capture = captures[symbol];
DefId captureDef = defArena->phi({});
capture.captureDefs.push_back(captureDef);
scope->bindings[symbol] = captureDef;
return NotNull{captureDef};
}
}
DefId result = defArena->freshCell();
scope->bindings[symbol] = result;
captures[symbol].allVersions.push_back(result);
return result;
}
DefId DataFlowGraphBuilder::lookup(DfgScope* scope, DefId def, const std::string& key)
{
if (auto found = scope->lookup(def, key))
return *found;
else if (auto phi = get<Phi>(def))
for (DfgScope* current = scope; current; current = current->parent)
{
if (auto props = current->props.find(def))
{
if (auto it = props->find(key); it != props->end())
return NotNull{it->second};
}
else if (auto phi = get<Phi>(def); phi && phi->operands.empty()) // Unresolved phi nodes
{
DefId result = defArena->freshCell();
scope->props[def][key] = result;
return result;
}
}
if (auto phi = get<Phi>(def))
{
std::vector<DefId> defs;
for (DefId operand : phi->operands)
@ -361,7 +400,7 @@ ControlFlow DataFlowGraphBuilder::visit(DfgScope* scope, AstStatIf* i)
ControlFlow DataFlowGraphBuilder::visit(DfgScope* scope, AstStatWhile* w)
{
// TODO(controlflow): entry point has a back edge from exit point
DfgScope* whileScope = childScope(scope, /*isLoopScope=*/true);
DfgScope* whileScope = childScope(scope, DfgScope::Loop);
visitExpr(whileScope, w->condition);
visit(whileScope, w->body);
@ -373,7 +412,7 @@ ControlFlow DataFlowGraphBuilder::visit(DfgScope* scope, AstStatWhile* w)
ControlFlow DataFlowGraphBuilder::visit(DfgScope* scope, AstStatRepeat* r)
{
// TODO(controlflow): entry point has a back edge from exit point
DfgScope* repeatScope = childScope(scope, /*isLoopScope=*/true);
DfgScope* repeatScope = childScope(scope, DfgScope::Loop);
visitBlockWithoutChildScope(repeatScope, r->body);
visitExpr(repeatScope, r->condition);
@ -429,6 +468,7 @@ ControlFlow DataFlowGraphBuilder::visit(DfgScope* scope, AstStatLocal* l)
DefId def = defArena->freshCell(subscripted);
graph.localDefs[local] = def;
scope->bindings[local] = def;
captures[local].allVersions.push_back(def);
}
return ControlFlow::None;
@ -436,7 +476,7 @@ ControlFlow DataFlowGraphBuilder::visit(DfgScope* scope, AstStatLocal* l)
ControlFlow DataFlowGraphBuilder::visit(DfgScope* scope, AstStatFor* f)
{
DfgScope* forScope = childScope(scope, /*isLoopScope=*/true);
DfgScope* forScope = childScope(scope, DfgScope::Loop);
visitExpr(scope, f->from);
visitExpr(scope, f->to);
@ -449,6 +489,7 @@ ControlFlow DataFlowGraphBuilder::visit(DfgScope* scope, AstStatFor* f)
DefId def = defArena->freshCell();
graph.localDefs[f->var] = def;
scope->bindings[f->var] = def;
captures[f->var].allVersions.push_back(def);
// TODO(controlflow): entry point has a back edge from exit point
visit(forScope, f->body);
@ -460,7 +501,7 @@ ControlFlow DataFlowGraphBuilder::visit(DfgScope* scope, AstStatFor* f)
ControlFlow DataFlowGraphBuilder::visit(DfgScope* scope, AstStatForIn* f)
{
DfgScope* forScope = childScope(scope, /*isLoopScope=*/true);
DfgScope* forScope = childScope(scope, DfgScope::Loop);
for (AstLocal* local : f->vars)
{
@ -470,6 +511,7 @@ ControlFlow DataFlowGraphBuilder::visit(DfgScope* scope, AstStatForIn* f)
DefId def = defArena->freshCell();
graph.localDefs[local] = def;
forScope->bindings[local] = def;
captures[local].allVersions.push_back(def);
}
// TODO(controlflow): entry point has a back edge from exit point
@ -527,10 +569,21 @@ ControlFlow DataFlowGraphBuilder::visit(DfgScope* scope, AstStatFunction* f)
//
// which is evidence that references to variables must be a phi node of all possible definitions,
// but for bug compatibility, we'll assume the same thing here.
DefId prototype = defArena->freshCell();
visitLValue(scope, f->name, prototype);
visitLValue(scope, f->name, defArena->freshCell());
visitExpr(scope, f->func);
if (auto local = f->name->as<AstExprLocal>())
{
// local f
// function f()
// if cond() then
// f() -- should reference only the function version and other future version, and nothing prior
// end
// end
FunctionCapture& capture = captures[local->local];
capture.versionOffset = capture.allVersions.size() - 1;
}
return ControlFlow::None;
}
@ -539,6 +592,7 @@ ControlFlow DataFlowGraphBuilder::visit(DfgScope* scope, AstStatLocalFunction* l
DefId def = defArena->freshCell();
graph.localDefs[l->name] = def;
scope->bindings[l->name] = def;
captures[l->name].allVersions.push_back(def);
visitExpr(scope, l->func);
return ControlFlow::None;
@ -559,6 +613,7 @@ ControlFlow DataFlowGraphBuilder::visit(DfgScope* scope, AstStatDeclareGlobal* d
DefId def = defArena->freshCell();
graph.declaredDefs[d] = def;
scope->bindings[d->name] = def;
captures[d->name].allVersions.push_back(def);
visitType(scope, d->type);
@ -570,6 +625,7 @@ ControlFlow DataFlowGraphBuilder::visit(DfgScope* scope, AstStatDeclareFunction*
DefId def = defArena->freshCell();
graph.declaredDefs[d] = def;
scope->bindings[d->name] = def;
captures[d->name].allVersions.push_back(def);
DfgScope* unreachable = childScope(scope);
visitGenerics(unreachable, d->generics);
@ -669,14 +725,9 @@ DataFlowResult DataFlowGraphBuilder::visitExpr(DfgScope* scope, AstExprGroup* gr
DataFlowResult DataFlowGraphBuilder::visitExpr(DfgScope* scope, AstExprLocal* l)
{
// DfgScope::lookup is intentional here: we want to be able to ice.
if (auto def = scope->lookup(l->local))
{
const RefinementKey* key = keyArena->leaf(*def);
return {*def, key};
}
handle->ice("DFG: AstExprLocal came before its declaration?");
DefId def = lookup(scope, l->local);
const RefinementKey* key = keyArena->leaf(def);
return {def, key};
}
DataFlowResult DataFlowGraphBuilder::visitExpr(DfgScope* scope, AstExprGlobal* g)
@ -723,7 +774,7 @@ DataFlowResult DataFlowGraphBuilder::visitExpr(DfgScope* scope, AstExprIndexExpr
DataFlowResult DataFlowGraphBuilder::visitExpr(DfgScope* scope, AstExprFunction* f)
{
DfgScope* signatureScope = childScope(scope);
DfgScope* signatureScope = childScope(scope, DfgScope::Function);
if (AstLocal* self = f->self)
{
@ -733,6 +784,7 @@ DataFlowResult DataFlowGraphBuilder::visitExpr(DfgScope* scope, AstExprFunction*
DefId def = defArena->freshCell();
graph.localDefs[self] = def;
signatureScope->bindings[self] = def;
captures[self].allVersions.push_back(def);
}
for (AstLocal* param : f->args)
@ -743,6 +795,7 @@ DataFlowResult DataFlowGraphBuilder::visitExpr(DfgScope* scope, AstExprFunction*
DefId def = defArena->freshCell();
graph.localDefs[param] = def;
signatureScope->bindings[param] = def;
captures[param].allVersions.push_back(def);
}
if (f->varargAnnotation)
@ -827,41 +880,46 @@ DataFlowResult DataFlowGraphBuilder::visitExpr(DfgScope* scope, AstExprError* er
void DataFlowGraphBuilder::visitLValue(DfgScope* scope, AstExpr* e, DefId incomingDef, bool isCompoundAssignment)
{
if (auto l = e->as<AstExprLocal>())
return visitLValue(scope, l, incomingDef, isCompoundAssignment);
else if (auto g = e->as<AstExprGlobal>())
return visitLValue(scope, g, incomingDef, isCompoundAssignment);
else if (auto i = e->as<AstExprIndexName>())
return visitLValue(scope, i, incomingDef);
else if (auto i = e->as<AstExprIndexExpr>())
return visitLValue(scope, i, incomingDef);
else if (auto error = e->as<AstExprError>())
return visitLValue(scope, error, incomingDef);
else
handle->ice("Unknown AstExpr in DataFlowGraphBuilder::visitLValue");
auto go = [&]() {
if (auto l = e->as<AstExprLocal>())
return visitLValue(scope, l, incomingDef, isCompoundAssignment);
else if (auto g = e->as<AstExprGlobal>())
return visitLValue(scope, g, incomingDef, isCompoundAssignment);
else if (auto i = e->as<AstExprIndexName>())
return visitLValue(scope, i, incomingDef);
else if (auto i = e->as<AstExprIndexExpr>())
return visitLValue(scope, i, incomingDef);
else if (auto error = e->as<AstExprError>())
return visitLValue(scope, error, incomingDef);
else
handle->ice("Unknown AstExpr in DataFlowGraphBuilder::visitLValue");
};
graph.astDefs[e] = go();
}
void DataFlowGraphBuilder::visitLValue(DfgScope* scope, AstExprLocal* l, DefId incomingDef, bool isCompoundAssignment)
DefId DataFlowGraphBuilder::visitLValue(DfgScope* scope, AstExprLocal* l, DefId incomingDef, bool isCompoundAssignment)
{
// We need to keep the previous def around for a compound assignment.
if (isCompoundAssignment)
{
if (auto def = scope->lookup(l->local))
graph.compoundAssignDefs[l] = *def;
DefId def = lookup(scope, l->local);
graph.compoundAssignDefs[l] = def;
}
// In order to avoid alias tracking, we need to clip the reference to the parent def.
if (scope->canUpdateDefinition(l->local))
{
DefId updated = defArena->freshCell(containsSubscriptedDefinition(incomingDef));
graph.astDefs[l] = updated;
scope->bindings[l->local] = updated;
captures[l->local].allVersions.push_back(updated);
return updated;
}
else
visitExpr(scope, static_cast<AstExpr*>(l));
return visitExpr(scope, static_cast<AstExpr*>(l)).def;
}
void DataFlowGraphBuilder::visitLValue(DfgScope* scope, AstExprGlobal* g, DefId incomingDef, bool isCompoundAssignment)
DefId DataFlowGraphBuilder::visitLValue(DfgScope* scope, AstExprGlobal* g, DefId incomingDef, bool isCompoundAssignment)
{
// We need to keep the previous def around for a compound assignment.
if (isCompoundAssignment)
@ -874,28 +932,29 @@ void DataFlowGraphBuilder::visitLValue(DfgScope* scope, AstExprGlobal* g, DefId
if (scope->canUpdateDefinition(g->name))
{
DefId updated = defArena->freshCell(containsSubscriptedDefinition(incomingDef));
graph.astDefs[g] = updated;
scope->bindings[g->name] = updated;
captures[g->name].allVersions.push_back(updated);
return updated;
}
else
visitExpr(scope, static_cast<AstExpr*>(g));
return visitExpr(scope, static_cast<AstExpr*>(g)).def;
}
void DataFlowGraphBuilder::visitLValue(DfgScope* scope, AstExprIndexName* i, DefId incomingDef)
DefId DataFlowGraphBuilder::visitLValue(DfgScope* scope, AstExprIndexName* i, DefId incomingDef)
{
DefId parentDef = visitExpr(scope, i->expr).def;
if (scope->canUpdateDefinition(parentDef, i->index.value))
{
DefId updated = defArena->freshCell(containsSubscriptedDefinition(incomingDef));
graph.astDefs[i] = updated;
scope->props[parentDef][i->index.value] = updated;
return updated;
}
else
visitExpr(scope, static_cast<AstExpr*>(i));
return visitExpr(scope, static_cast<AstExpr*>(i)).def;
}
void DataFlowGraphBuilder::visitLValue(DfgScope* scope, AstExprIndexExpr* i, DefId incomingDef)
DefId DataFlowGraphBuilder::visitLValue(DfgScope* scope, AstExprIndexExpr* i, DefId incomingDef)
{
DefId parentDef = visitExpr(scope, i->expr).def;
visitExpr(scope, i->index);
@ -905,20 +964,19 @@ void DataFlowGraphBuilder::visitLValue(DfgScope* scope, AstExprIndexExpr* i, Def
if (scope->canUpdateDefinition(parentDef, string->value.data))
{
DefId updated = defArena->freshCell(containsSubscriptedDefinition(incomingDef));
graph.astDefs[i] = updated;
scope->props[parentDef][string->value.data] = updated;
return updated;
}
else
visitExpr(scope, static_cast<AstExpr*>(i));
return visitExpr(scope, static_cast<AstExpr*>(i)).def;
}
graph.astDefs[i] = defArena->freshCell();
else
return defArena->freshCell(/*subscripted=*/true);
}
void DataFlowGraphBuilder::visitLValue(DfgScope* scope, AstExprError* error, DefId incomingDef)
DefId DataFlowGraphBuilder::visitLValue(DfgScope* scope, AstExprError* error, DefId incomingDef)
{
DefId def = visitExpr(scope, error).def;
graph.astDefs[error] = def;
return visitExpr(scope, error).def;
}
void DataFlowGraphBuilder::visitType(DfgScope* scope, AstType* t)


@ -19,17 +19,13 @@ bool containsSubscriptedDefinition(DefId def)
return false;
}
DefId DefArena::freshCell(bool subscripted)
void collectOperands(DefId def, std::vector<DefId>* operands)
{
return NotNull{allocator.allocate(Def{Cell{subscripted}})};
}
static void collectOperands(DefId def, std::vector<DefId>& operands)
{
if (std::find(operands.begin(), operands.end(), def) != operands.end())
LUAU_ASSERT(operands);
if (std::find(operands->begin(), operands->end(), def) != operands->end())
return;
else if (get<Cell>(def))
operands.push_back(def);
operands->push_back(def);
else if (auto phi = get<Phi>(def))
{
for (const Def* operand : phi->operands)
@ -37,6 +33,11 @@ static void collectOperands(DefId def, std::vector<DefId>& operands)
}
}
DefId DefArena::freshCell(bool subscripted)
{
return NotNull{allocator.allocate(Def{Cell{subscripted}})};
}
DefId DefArena::phi(DefId a, DefId b)
{
return phi({a, b});
@ -46,7 +47,7 @@ DefId DefArena::phi(const std::vector<DefId>& defs)
{
std::vector<DefId> operands;
for (DefId operand : defs)
collectOperands(operand, operands);
collectOperands(operand, &operands);
// There's no need to allocate a Phi node for a singleton set.
if (operands.size() == 1)


@ -32,11 +32,9 @@ LUAU_FASTINT(LuauTypeInferRecursionLimit)
LUAU_FASTINT(LuauTarjanChildLimit)
LUAU_FASTFLAG(LuauInferInNoCheckMode)
LUAU_FASTFLAGVARIABLE(LuauKnowsTheDataModel3, false)
LUAU_FASTINTVARIABLE(LuauAutocompleteCheckTimeoutMs, 100) // TODO: Remove with FFlagLuauTypecheckLimitControls
LUAU_FASTFLAGVARIABLE(DebugLuauDeferredConstraintResolution, false)
LUAU_FASTFLAGVARIABLE(DebugLuauLogSolverToJson, false)
LUAU_FASTFLAGVARIABLE(DebugLuauReadWriteProperties, false)
LUAU_FASTFLAGVARIABLE(LuauTypecheckLimitControls, false)
LUAU_FASTFLAGVARIABLE(CorrectEarlyReturnInMarkDirty, false)
LUAU_FASTFLAGVARIABLE(LuauDefinitionFileSetModuleName, false)
@ -902,82 +900,41 @@ void Frontend::checkBuildQueueItem(BuildQueueItem& item)
TypeCheckLimits typeCheckLimits;
if (FFlag::LuauTypecheckLimitControls)
if (item.options.moduleTimeLimitSec)
typeCheckLimits.finishTime = TimeTrace::getClock() + *item.options.moduleTimeLimitSec;
else
typeCheckLimits.finishTime = std::nullopt;
// TODO: This is a dirty ad hoc solution for autocomplete timeouts
// We are trying to dynamically adjust our existing limits to lower total typechecking time under the limit
// so that we'll have type information for the whole file at lower quality instead of a full abort in the middle
if (item.options.applyInternalLimitScaling)
{
if (item.options.moduleTimeLimitSec)
typeCheckLimits.finishTime = TimeTrace::getClock() + *item.options.moduleTimeLimitSec;
if (FInt::LuauTarjanChildLimit > 0)
typeCheckLimits.instantiationChildLimit = std::max(1, int(FInt::LuauTarjanChildLimit * sourceNode.autocompleteLimitsMult));
else
typeCheckLimits.finishTime = std::nullopt;
typeCheckLimits.instantiationChildLimit = std::nullopt;
// TODO: This is a dirty ad hoc solution for autocomplete timeouts
// We are trying to dynamically adjust our existing limits to lower total typechecking time under the limit
// so that we'll have type information for the whole file at lower quality instead of a full abort in the middle
if (item.options.applyInternalLimitScaling)
{
if (FInt::LuauTarjanChildLimit > 0)
typeCheckLimits.instantiationChildLimit = std::max(1, int(FInt::LuauTarjanChildLimit * sourceNode.autocompleteLimitsMult));
else
typeCheckLimits.instantiationChildLimit = std::nullopt;
if (FInt::LuauTypeInferIterationLimit > 0)
typeCheckLimits.unifierIterationLimit = std::max(1, int(FInt::LuauTypeInferIterationLimit * sourceNode.autocompleteLimitsMult));
else
typeCheckLimits.unifierIterationLimit = std::nullopt;
}
typeCheckLimits.cancellationToken = item.options.cancellationToken;
if (FInt::LuauTypeInferIterationLimit > 0)
typeCheckLimits.unifierIterationLimit = std::max(1, int(FInt::LuauTypeInferIterationLimit * sourceNode.autocompleteLimitsMult));
else
typeCheckLimits.unifierIterationLimit = std::nullopt;
}
typeCheckLimits.cancellationToken = item.options.cancellationToken;
if (item.options.forAutocomplete)
{
double autocompleteTimeLimit = FInt::LuauAutocompleteCheckTimeoutMs / 1000.0;
if (!FFlag::LuauTypecheckLimitControls)
{
// The autocomplete typecheck is always in strict mode with DM awareness
// to provide better type information for IDE features
if (autocompleteTimeLimit != 0.0)
typeCheckLimits.finishTime = TimeTrace::getClock() + autocompleteTimeLimit;
else
typeCheckLimits.finishTime = std::nullopt;
// TODO: This is a dirty ad hoc solution for autocomplete timeouts
// We are trying to dynamically adjust our existing limits to lower total typechecking time under the limit
// so that we'll have type information for the whole file at lower quality instead of a full abort in the middle
if (FInt::LuauTarjanChildLimit > 0)
typeCheckLimits.instantiationChildLimit = std::max(1, int(FInt::LuauTarjanChildLimit * sourceNode.autocompleteLimitsMult));
else
typeCheckLimits.instantiationChildLimit = std::nullopt;
if (FInt::LuauTypeInferIterationLimit > 0)
typeCheckLimits.unifierIterationLimit = std::max(1, int(FInt::LuauTypeInferIterationLimit * sourceNode.autocompleteLimitsMult));
else
typeCheckLimits.unifierIterationLimit = std::nullopt;
typeCheckLimits.cancellationToken = item.options.cancellationToken;
}
// The autocomplete typecheck is always in strict mode with DM awareness to provide better type information for IDE features
ModulePtr moduleForAutocomplete = check(sourceModule, Mode::Strict, requireCycles, environmentScope, /*forAutocomplete*/ true,
/*recordJsonLog*/ false, typeCheckLimits);
double duration = getTimestamp() - timestamp;
if (FFlag::LuauTypecheckLimitControls)
{
moduleForAutocomplete->checkDurationSec = duration;
moduleForAutocomplete->checkDurationSec = duration;
if (item.options.moduleTimeLimitSec && item.options.applyInternalLimitScaling)
applyInternalLimitScaling(sourceNode, moduleForAutocomplete, *item.options.moduleTimeLimitSec);
}
else
{
if (moduleForAutocomplete->timeout)
sourceNode.autocompleteLimitsMult = sourceNode.autocompleteLimitsMult / 2.0;
else if (duration < autocompleteTimeLimit / 2.0)
sourceNode.autocompleteLimitsMult = std::min(sourceNode.autocompleteLimitsMult * 2.0, 1.0);
}
if (item.options.moduleTimeLimitSec && item.options.applyInternalLimitScaling)
applyInternalLimitScaling(sourceNode, moduleForAutocomplete, *item.options.moduleTimeLimitSec);
item.stats.timeCheck += duration;
item.stats.filesStrict += 1;
@ -986,29 +943,16 @@ void Frontend::checkBuildQueueItem(BuildQueueItem& item)
return;
}
if (!FFlag::LuauTypecheckLimitControls)
{
typeCheckLimits.cancellationToken = item.options.cancellationToken;
}
ModulePtr module = check(sourceModule, mode, requireCycles, environmentScope, /*forAutocomplete*/ false, item.recordJsonLog, typeCheckLimits);
if (FFlag::LuauTypecheckLimitControls)
{
double duration = getTimestamp() - timestamp;
double duration = getTimestamp() - timestamp;
module->checkDurationSec = duration;
module->checkDurationSec = duration;
if (item.options.moduleTimeLimitSec && item.options.applyInternalLimitScaling)
applyInternalLimitScaling(sourceNode, module, *item.options.moduleTimeLimitSec);
item.stats.timeCheck += duration;
}
else
{
item.stats.timeCheck += getTimestamp() - timestamp;
}
if (item.options.moduleTimeLimitSec && item.options.applyInternalLimitScaling)
applyInternalLimitScaling(sourceNode, module, *item.options.moduleTimeLimitSec);
item.stats.timeCheck += duration;
item.stats.filesStrict += mode == Mode::Strict;
item.stats.filesNonstrict += mode == Mode::Nonstrict;


@ -124,7 +124,7 @@ static bool analyzeFile(const char* name, const unsigned nestingLimit, std::vect
{
Luau::BytecodeBuilder bcb;
compileOrThrow(bcb, source.value(), copts());
compileOrThrow(bcb, *source, copts());
const std::string& bytecode = bcb.getBytecode();


@ -42,8 +42,18 @@
LUAU_FASTFLAGVARIABLE(DebugCodegenNoOpt, false)
LUAU_FASTFLAGVARIABLE(DebugCodegenOptSize, false)
LUAU_FASTFLAGVARIABLE(DebugCodegenSkipNumbering, false)
// Per-module IR instruction count limit
LUAU_FASTINTVARIABLE(CodegenHeuristicsInstructionLimit, 1'048'576) // 1 M
LUAU_FASTINTVARIABLE(CodegenHeuristicsBlockLimit, 65'536) // 64 K
// Per-function IR block limit
// Current value is based on some member variables being limited to 16 bits
// Because block check is made before optimization passes and optimization can generate new blocks, limit is lowered 2x
// The limit will probably be adjusted in the future to avoid performance issues with analysis that's more complex than O(n)
LUAU_FASTINTVARIABLE(CodegenHeuristicsBlockLimit, 32'768) // 32 K
// Per-function IR instruction limit
// Current value is based on some member variables being limited to 16 bits
LUAU_FASTINTVARIABLE(CodegenHeuristicsBlockInstructionLimit, 65'536) // 64 K
namespace Luau
@ -104,11 +114,18 @@ static void logPerfFunction(Proto* p, uintptr_t addr, unsigned size)
}
template<typename AssemblyBuilder>
static std::optional<NativeProto> createNativeFunction(AssemblyBuilder& build, ModuleHelpers& helpers, Proto* proto)
static std::optional<NativeProto> createNativeFunction(AssemblyBuilder& build, ModuleHelpers& helpers, Proto* proto, uint32_t& totalIrInstCount)
{
IrBuilder ir;
ir.buildFunctionIr(proto);
unsigned instCount = unsigned(ir.function.instructions.size());
if (totalIrInstCount + instCount >= unsigned(FInt::CodegenHeuristicsInstructionLimit.value))
return std::nullopt;
totalIrInstCount += instCount;
if (!lowerFunction(ir, build, helpers, proto, {}, /* stats */ nullptr))
return std::nullopt;
@ -291,9 +308,13 @@ CodeGenCompilationResult compile(lua_State* L, int idx, unsigned int flags, Comp
std::vector<NativeProto> results;
results.reserve(protos.size());
uint32_t totalIrInstCount = 0;
for (Proto* p : protos)
if (std::optional<NativeProto> np = createNativeFunction(build, helpers, p))
{
if (std::optional<NativeProto> np = createNativeFunction(build, helpers, p, totalIrInstCount))
results.push_back(*np);
}
// Very large modules might result in overflowing a jump offset; in this case we currently abandon the entire module
if (!build.finalize())


@ -253,11 +253,6 @@ inline bool lowerIr(A64::AssemblyBuilderA64& build, IrBuilder& ir, const std::ve
template<typename AssemblyBuilder>
inline bool lowerFunction(IrBuilder& ir, AssemblyBuilder& build, ModuleHelpers& helpers, Proto* proto, AssemblyOptions options, LoweringStats* stats)
{
helpers.bytecodeInstructionCount += unsigned(ir.function.instructions.size());
if (helpers.bytecodeInstructionCount >= unsigned(FInt::CodegenHeuristicsInstructionLimit.value))
return false;
killUnusedBlocks(ir.function);
unsigned preOptBlockCount = 0;
@ -268,9 +263,7 @@ inline bool lowerFunction(IrBuilder& ir, AssemblyBuilder& build, ModuleHelpers&
preOptBlockCount += (block.kind != IrBlockKind::Dead);
unsigned blockInstructions = block.finish - block.start;
maxBlockInstructions = std::max(maxBlockInstructions, blockInstructions);
};
helpers.preOptBlockCount += preOptBlockCount;
}
// we update stats before checking the heuristic so that even if we bail out
// our stats include information about the limit that was exceeded.
@ -280,9 +273,7 @@ inline bool lowerFunction(IrBuilder& ir, AssemblyBuilder& build, ModuleHelpers&
stats->maxBlockInstructions = maxBlockInstructions;
}
// we use helpers.blocksPreOpt instead of stats.blocksPreOpt since
// stats can be null across some code paths.
if (helpers.preOptBlockCount >= unsigned(FInt::CodegenHeuristicsBlockLimit.value))
if (preOptBlockCount >= unsigned(FInt::CodegenHeuristicsBlockLimit.value))
return false;
if (maxBlockInstructions >= unsigned(FInt::CodegenHeuristicsBlockInstructionLimit.value))


@ -31,9 +31,6 @@ struct ModuleHelpers
// A64
Label continueCall; // x0: closure
unsigned bytecodeInstructionCount = 0;
unsigned preOptBlockCount = 0;
};
} // namespace CodeGen


@ -14,7 +14,7 @@ inline bool isFlagExperimental(const char* flag)
"LuauInstantiateInSubtyping", // requires some fixes to lua-apps code
"LuauTinyControlFlowAnalysis", // waiting for updates to packages depended by internal builtin plugins
"LuauFixIndexerSubtypingOrdering", // requires some small fixes to lua-apps code since this fixes a false negative
"LuauUpdatedRequireByStringSemantics", // requires some small fixes to fully implement some proposed changes
"LuauUpdatedRequireByStringSemantics", // requires some small fixes to fully implement some proposed changes
// makes sure we always have at least one entry
nullptr,
};


@ -33,6 +33,7 @@ LUAU_FASTFLAG(LuauCodeGenFixByteLower);
LUAU_FASTFLAG(LuauCompileBufferAnnotation);
LUAU_FASTFLAG(LuauLoopInterruptFix);
LUAU_DYNAMIC_FASTFLAG(LuauStricterUtf8);
LUAU_FASTINT(CodegenHeuristicsInstructionLimit);
static lua_CompileOptions defaultOptions()
{
@ -2020,6 +2021,64 @@ TEST_CASE("HugeFunction")
CHECK(lua_tonumber(L, -1) == 42);
}
TEST_CASE("IrInstructionLimit")
{
if (!codegen || !luau_codegen_supported())
return;
ScopedFastInt codegenHeuristicsInstructionLimit{FInt::CodegenHeuristicsInstructionLimit, 50'000};
std::string source;
// Generate a hundred fat functions
for (int fn = 0; fn < 100; fn++)
{
source += "local function fn" + std::to_string(fn) + "(...)\n";
source += "if ... then\n";
source += "local p1, p2 = ...\n";
source += "local _ = {\n";
for (int i = 0; i < 100; ++i)
{
source += "p1*0." + std::to_string(i) + ",";
source += "p2+0." + std::to_string(i) + ",";
}
source += "}\n";
source += "return _\n";
source += "end\n";
source += "end\n";
}
StateRef globalState(luaL_newstate(), lua_close);
lua_State* L = globalState.get();
luau_codegen_create(L);
luaL_openlibs(L);
luaL_sandbox(L);
luaL_sandboxthread(L);
size_t bytecodeSize = 0;
char* bytecode = luau_compile(source.data(), source.size(), nullptr, &bytecodeSize);
int result = luau_load(L, "=HugeFunction", bytecode, bytecodeSize, 0);
free(bytecode);
REQUIRE(result == 0);
Luau::CodeGen::CompilationStats nativeStats = {};
Luau::CodeGen::CodeGenCompilationResult nativeResult = Luau::CodeGen::compile(L, -1, Luau::CodeGen::CodeGen_ColdFunctions, &nativeStats);
// Limit is not hit immediately, so with some functions compiled it should be a success
CHECK(nativeResult != Luau::CodeGen::CodeGenCompilationResult::CodeGenFailed);
// We should be able to compile at least one of our functions
CHECK(nativeStats.functionsCompiled > 0);
// But because of the limit, not all of them (101 because there's an extra global function)
CHECK(nativeStats.functionsCompiled < 101);
}
TEST_CASE("BytecodeDistributionPerFunctionTest")
{
const char* source = R"(


@ -341,6 +341,7 @@ TEST_CASE_FIXTURE(DataFlowGraphFixture, "property_lookup_on_a_phi_node")
const Phi* phi = get<Phi>(x3);
REQUIRE(phi);
REQUIRE(phi->operands.size() == 2);
CHECK(phi->operands.at(0) == x1);
CHECK(phi->operands.at(1) == x2);
}
@ -368,6 +369,7 @@ TEST_CASE_FIXTURE(DataFlowGraphFixture, "property_lookup_on_a_phi_node_2")
const Phi* phi = get<Phi>(x3);
REQUIRE(phi);
REQUIRE(phi->operands.size() == 2);
CHECK(phi->operands.at(0) == x2);
CHECK(phi->operands.at(1) == x1);
}
@ -408,8 +410,154 @@ TEST_CASE_FIXTURE(DataFlowGraphFixture, "property_lookup_on_a_phi_node_3")
const Phi* phi = get<Phi>(x3);
REQUIRE(phi);
REQUIRE(phi->operands.size() == 2);
CHECK(phi->operands.at(0) == x1);
CHECK(phi->operands.at(1) == x2);
}
TEST_CASE_FIXTURE(DataFlowGraphFixture, "function_captures_are_phi_nodes_of_all_versions")
{
dfg(R"(
local x = 5
function f()
print(x)
x = nil
end
f()
x = "five"
)");
DefId x1 = graph->getDef(query<AstStatLocal>(module)->vars.data[0]);
DefId x2 = getDef<AstExprLocal, 1>(); // print(x)
DefId x3 = getDef<AstExprLocal, 2>(); // x = nil
DefId x4 = getDef<AstExprLocal, 3>(); // x = "five"
CHECK(x1 != x2);
CHECK(x2 != x3);
CHECK(x3 != x4);
const Phi* phi = get<Phi>(x2);
REQUIRE(phi);
REQUIRE(phi->operands.size() == 3);
CHECK(phi->operands.at(0) == x1);
CHECK(phi->operands.at(1) == x3);
CHECK(phi->operands.at(2) == x4);
}
TEST_CASE_FIXTURE(DataFlowGraphFixture, "function_captures_are_phi_nodes_of_all_versions_properties")
{
dfg(R"(
local t = {}
t.x = 5
function f()
print(t.x)
t.x = nil
end
f()
t.x = "five"
)");
DefId x1 = getDef<AstExprIndexName, 1>(); // t.x = 5
DefId x2 = getDef<AstExprIndexName, 2>(); // print(t.x)
DefId x3 = getDef<AstExprIndexName, 3>(); // t.x = nil
DefId x4 = getDef<AstExprIndexName, 4>(); // t.x = "five"
CHECK(x1 != x2);
CHECK(x2 != x3);
CHECK(x3 != x4);
// When a local is referenced within a function, it is not pointer identical.
// Instead, it's a phi node of all possible versions, including just one version.
DefId t1 = graph->getDef(query<AstStatLocal>(module)->vars.data[0]);
DefId t2 = getDef<AstExprLocal, 2>(); // print(t.x)
const Phi* phi = get<Phi>(t2);
REQUIRE(phi);
REQUIRE(phi->operands.size() == 1);
CHECK(phi->operands.at(0) == t1);
}
TEST_CASE_FIXTURE(DataFlowGraphFixture, "local_f_which_is_prototyped_enclosed_by_function")
{
dfg(R"(
local f
function f()
if cond() then
f()
end
end
)");
DefId f1 = graph->getDef(query<AstStatLocal>(module)->vars.data[0]);
DefId f2 = getDef<AstExprLocal, 1>(); // function f()
DefId f3 = getDef<AstExprLocal, 2>(); // f()
CHECK(f1 != f2);
CHECK(f2 != f3);
const Phi* phi = get<Phi>(f3);
REQUIRE(phi);
REQUIRE(phi->operands.size() == 1);
CHECK(phi->operands.at(0) == f2);
}
TEST_CASE_FIXTURE(DataFlowGraphFixture, "local_f_which_is_prototyped_enclosed_by_function_has_some_prior_versions")
{
dfg(R"(
local f
f = 5
function f()
if cond() then
f()
end
end
)");
DefId f1 = graph->getDef(query<AstStatLocal>(module)->vars.data[0]);
DefId f2 = getDef<AstExprLocal, 1>(); // f = 5
DefId f3 = getDef<AstExprLocal, 2>(); // function f()
DefId f4 = getDef<AstExprLocal, 3>(); // f()
CHECK(f1 != f2);
CHECK(f2 != f3);
CHECK(f3 != f4);
const Phi* phi = get<Phi>(f4);
REQUIRE(phi);
REQUIRE(phi->operands.size() == 1);
CHECK(phi->operands.at(0) == f3);
}
TEST_CASE_FIXTURE(DataFlowGraphFixture, "local_f_which_is_prototyped_enclosed_by_function_has_some_future_versions")
{
dfg(R"(
local f
function f()
if cond() then
f()
end
end
f = 5
)");
DefId f1 = graph->getDef(query<AstStatLocal>(module)->vars.data[0]);
DefId f2 = getDef<AstExprLocal, 1>(); // function f()
DefId f3 = getDef<AstExprLocal, 2>(); // f()
DefId f4 = getDef<AstExprLocal, 3>(); // f = 5
CHECK(f1 != f2);
CHECK(f2 != f3);
CHECK(f3 != f4);
const Phi* phi = get<Phi>(f3);
REQUIRE(phi);
REQUIRE(phi->operands.size() == 2);
CHECK(phi->operands.at(0) == f2);
CHECK(phi->operands.at(1) == f4);
}
TEST_SUITE_END();


@ -71,6 +71,7 @@ public:
luauDirAbs += "/luau";
}
if (type == PathType::Relative)
return luauDirRel;
if (type == PathType::Absolute)
@ -214,7 +215,6 @@ TEST_CASE("PathNormalization")
}
}
TEST_CASE_FIXTURE(ReplWithPathFixture, "RequireSimpleRelativePath")
{
ScopedFastFlag sff{FFlag::LuauUpdatedRequireByStringSemantics, true};


@ -736,7 +736,6 @@ TEST_CASE_FIXTURE(BuiltinsFixture, "mutual_recursion")
)");
LUAU_REQUIRE_NO_ERRORS(result);
dumpErrors(result);
}
TEST_CASE_FIXTURE(BuiltinsFixture, "toposort_doesnt_break_mutual_recursion")


@ -1925,13 +1925,10 @@ TEST_CASE_FIXTURE(BuiltinsFixture, "refine_unknown_to_table")
// this test is DCR-only as an instance of DCR fixing a bug in the old solver
CheckResult result = check(R"(
local idx, val
local function f(a: unknown)
if typeof(a) == "table" then
for i, v in a do
idx = i
val = v
return i, v
end
end
end
@ -1939,17 +1936,7 @@ TEST_CASE_FIXTURE(BuiltinsFixture, "refine_unknown_to_table")
LUAU_REQUIRE_NO_ERRORS(result);
if (FFlag::DebugLuauDeferredConstraintResolution)
{
// Bug: We do not simplify at the right time
CHECK_EQ("unknown?", toString(requireType("idx")));
CHECK_EQ("unknown?", toString(requireType("val")));
}
else
{
CHECK_EQ("unknown", toString(requireType("idx")));
CHECK_EQ("unknown", toString(requireType("val")));
}
CHECK_EQ("(unknown) -> (unknown, unknown)", toString(requireType("f")));
}
TEST_CASE_FIXTURE(BuiltinsFixture, "conditional_refinement_should_stay_error_suppressing")


@ -315,4 +315,88 @@ TEST_CASE_FIXTURE(TypeStateFixture, "local_t_is_assigned_a_fresh_table_with_x_as
CHECK("boolean | number | number | string" == toString(requireType("x")));
}
TEST_CASE_FIXTURE(TypeStateFixture, "captured_locals_are_unions_of_all_assignments")
{
CheckResult result = check(R"(
local x = nil
function f()
print(x)
x = "five"
end
x = 5
f()
)");
LUAU_REQUIRE_NO_ERRORS(result);
CHECK("(number | string)?" == toString(requireTypeAtPosition({4, 18})));
}
TEST_CASE_FIXTURE(TypeStateFixture, "captured_locals_are_unions_of_all_assignments_2")
{
CheckResult result = check(R"(
local t = {x = nil}
function f()
print(t.x)
t = {x = "five"}
end
t = {x = 5}
f()
)");
LUAU_REQUIRE_NO_ERRORS(result);
CHECK("{ x: nil } | { x: number } | { x: string }" == toString(requireTypeAtPosition({4, 18}), {true}));
CHECK("(number | string)?" == toString(requireTypeAtPosition({4, 20})));
}
TEST_CASE_FIXTURE(TypeStateFixture, "prototyped_recursive_functions")
{
CheckResult result = check(R"(
local f
function f()
if math.random() > 0.5 then
f()
end
end
)");
LUAU_REQUIRE_NO_ERRORS(result);
CHECK("(() -> ())?" == toString(requireType("f")));
}
TEST_CASE_FIXTURE(TypeStateFixture, "prototyped_recursive_functions_but_has_future_assignments")
{
CheckResult result = check(R"(
local f
function f()
if math.random() > 0.5 then
f()
end
end
f = 5
)");
LUAU_REQUIRE_ERROR_COUNT(1, result);
CHECK("((() -> ()) | number)?" == toString(requireType("f")));
}
TEST_CASE_FIXTURE(TypeStateFixture, "prototyped_recursive_functions_but_has_previous_assignments")
{
CheckResult result = check(R"(
local f
f = 5
function f()
if math.random() > 0.5 then
f()
end
end
)");
LUAU_REQUIRE_NO_ERRORS(result);
CHECK("((() -> ()) | number)?" == toString(requireType("f")));
}
TEST_SUITE_END();


@ -268,9 +268,12 @@ ProvisionalTests.typeguard_inference_incomplete
ProvisionalTests.while_body_are_also_refined
ProvisionalTests.xpcall_returns_what_f_returns
RefinementTest.assert_a_to_be_truthy_then_assert_a_to_be_number
RefinementTest.assert_non_binary_expressions_actually_resolve_constraints
RefinementTest.correctly_lookup_a_shadowed_local_that_which_was_previously_refined
RefinementTest.correctly_lookup_property_whose_base_was_previously_refined
RefinementTest.dataflow_analysis_can_tell_refinements_when_its_appropriate_to_refine_into_nil_or_never
RefinementTest.discriminate_from_truthiness_of_x
RefinementTest.either_number_or_string
RefinementTest.fail_to_refine_a_property_of_subscript_expression
RefinementTest.falsiness_of_TruthyPredicate_narrows_into_nil
RefinementTest.function_call_with_colon_after_refining_not_to_be_nil
@ -288,6 +291,7 @@ RefinementTest.string_not_equal_to_string_or_nil
RefinementTest.truthy_constraint_on_properties
RefinementTest.type_annotations_arent_relevant_when_doing_dataflow_analysis
RefinementTest.type_comparison_ifelse_expression
RefinementTest.type_guard_narrowed_into_nothingness
RefinementTest.type_narrow_to_vector
RefinementTest.typeguard_cast_free_table_to_vector
RefinementTest.typeguard_in_assert_position
@ -394,6 +398,7 @@ TableTests.table_unifies_into_map
TableTests.top_table_type
TableTests.type_mismatch_on_massive_table_is_cut_short
TableTests.unification_of_unions_in_a_self_referential_type
TableTests.unifying_tables_shouldnt_uaf1
TableTests.used_colon_instead_of_dot
TableTests.used_dot_instead_of_colon
TableTests.used_dot_instead_of_colon_but_correctly
@ -403,6 +408,7 @@ ToDot.function
ToString.exhaustive_toString_of_cyclic_table
ToString.free_types
ToString.named_metatable_toStringNamedFunction
ToString.no_parentheses_around_cyclic_function_type_in_intersection
ToString.pick_distinct_names_for_mixed_explicit_and_implicit_generics
ToString.primitive
ToString.tostring_unsee_ttv_if_array
@ -466,6 +472,7 @@ TypeInfer.no_stack_overflow_from_isoptional
TypeInfer.promote_tail_type_packs
TypeInfer.recursive_function_that_invokes_itself_with_a_refinement_of_its_parameter
TypeInfer.recursive_function_that_invokes_itself_with_a_refinement_of_its_parameter_2
TypeInfer.statements_are_topologically_sorted
TypeInfer.stringify_nested_unions_with_optionals
TypeInfer.tc_after_error_recovery_no_replacement_name_in_error
TypeInfer.type_infer_recursion_limit_no_ice
@ -481,6 +488,7 @@ TypeInferAnyError.for_in_loop_iterator_is_error
TypeInferAnyError.for_in_loop_iterator_is_error2
TypeInferAnyError.for_in_loop_iterator_returns_any
TypeInferAnyError.intersection_of_any_can_have_props
TypeInferAnyError.metatable_of_any_can_be_a_table
TypeInferAnyError.quantify_any_does_not_bind_to_itself
TypeInferAnyError.replace_every_free_type_when_unifying_a_complex_function_with_any
TypeInferAnyError.type_error_addition
@ -502,6 +510,7 @@ TypeInferFunctions.apply_of_lambda_with_inferred_and_explicit_types
TypeInferFunctions.calling_function_with_anytypepack_doesnt_leak_free_types
TypeInferFunctions.cannot_hoist_interior_defns_into_signature
TypeInferFunctions.check_function_bodies
TypeInferFunctions.complicated_return_types_require_an_explicit_annotation
TypeInferFunctions.concrete_functions_are_not_supertypes_of_function
TypeInferFunctions.dont_assert_when_the_tarjan_limit_is_exceeded_during_generalization
TypeInferFunctions.dont_give_other_overloads_message_if_only_one_argument_matching_overload_exists
@ -538,7 +547,9 @@ TypeInferFunctions.it_is_ok_to_oversaturate_a_higher_order_function_argument
TypeInferFunctions.list_all_overloads_if_no_overload_takes_given_argument_count
TypeInferFunctions.list_only_alternative_overloads_that_match_argument_count
TypeInferFunctions.luau_subtyping_is_np_hard
TypeInferFunctions.mutual_recursion
TypeInferFunctions.no_lossy_function_type
TypeInferFunctions.num_is_solved_after_num_or_str
TypeInferFunctions.occurs_check_failure_in_function_return_type
TypeInferFunctions.other_things_are_not_related_to_function
TypeInferFunctions.param_1_and_2_both_takes_the_same_generic_but_their_arguments_are_incompatible
@ -554,6 +565,7 @@ TypeInferFunctions.too_many_arguments
TypeInferFunctions.too_many_arguments_error_location
TypeInferFunctions.too_many_return_values_in_parentheses
TypeInferFunctions.too_many_return_values_no_function
TypeInferFunctions.toposort_doesnt_break_mutual_recursion
TypeInferFunctions.vararg_function_is_quantified
TypeInferLoops.cli_68448_iterators_need_not_accept_nil
TypeInferLoops.dcr_iteration_explore_raycast_minimization
@ -598,6 +610,7 @@ TypeInferOOP.dont_suggest_using_colon_rather_than_dot_if_another_overload_works
TypeInferOOP.dont_suggest_using_colon_rather_than_dot_if_it_wont_help_2
TypeInferOOP.dont_suggest_using_colon_rather_than_dot_if_not_defined_with_colon
TypeInferOOP.inferring_hundreds_of_self_calls_should_not_suffocate_memory
TypeInferOOP.method_depends_on_table
TypeInferOOP.methods_are_topologically_sorted
TypeInferOOP.object_constructor_can_refer_to_method_of_self
TypeInferOOP.promise_type_error_too_complex
@ -662,6 +675,7 @@ UnionTypes.disallow_less_specific_assign2
UnionTypes.error_detailed_optional
UnionTypes.error_detailed_union_all
UnionTypes.error_detailed_union_part
UnionTypes.error_takes_optional_arguments
UnionTypes.generic_function_with_optional_arg
UnionTypes.index_on_a_union_type_with_missing_property
UnionTypes.index_on_a_union_type_with_mixed_types
@ -681,6 +695,7 @@ UnionTypes.optional_union_follow
UnionTypes.optional_union_functions
UnionTypes.optional_union_members
UnionTypes.optional_union_methods
UnionTypes.return_types_can_be_disjoint
UnionTypes.table_union_write_indirect
UnionTypes.unify_unsealed_table_union_check
UnionTypes.union_of_functions