Sync to upstream/release/564

Vyacheslav Egorov 2023-02-17 16:53:37 +02:00
parent b388e27995
commit 5c77305609
33 changed files with 1044 additions and 380 deletions

View File

@ -90,6 +90,8 @@ struct NameConstraint
TypeId namedType;
std::string name;
bool synthetic = false;
std::vector<TypeId> typeParameters;
std::vector<TypePackId> typePackParameters;
};
// target ~ inst target
@ -101,7 +103,6 @@ struct TypeAliasExpansionConstraint
struct FunctionCallConstraint
{
std::vector<NotNull<const struct Constraint>> innerConstraints;
TypeId fn;
TypePackId argsPack;
TypePackId result;

View File

@ -162,10 +162,10 @@ struct ConstraintGraphBuilder
void visit(const ScopePtr& scope, AstStatDeclareFunction* declareFunction);
void visit(const ScopePtr& scope, AstStatError* error);
InferencePack checkPack(const ScopePtr& scope, AstArray<AstExpr*> exprs, const std::vector<TypeId>& expectedTypes = {});
InferencePack checkPack(const ScopePtr& scope, AstExpr* expr, const std::vector<TypeId>& expectedTypes = {});
InferencePack checkPack(const ScopePtr& scope, AstArray<AstExpr*> exprs, const std::vector<std::optional<TypeId>>& expectedTypes = {});
InferencePack checkPack(const ScopePtr& scope, AstExpr* expr, const std::vector<std::optional<TypeId>>& expectedTypes = {});
InferencePack checkPack(const ScopePtr& scope, AstExprCall* call, const std::vector<TypeId>& expectedTypes);
InferencePack checkPack(const ScopePtr& scope, AstExprCall* call);
/**
* Checks an expression that is expected to evaluate to one type.
@ -244,8 +244,10 @@ struct ConstraintGraphBuilder
**/
TypePackId resolveTypePack(const ScopePtr& scope, const AstTypeList& list, bool inTypeArguments);
std::vector<std::pair<Name, GenericTypeDefinition>> createGenerics(const ScopePtr& scope, AstArray<AstGenericType> generics);
std::vector<std::pair<Name, GenericTypePackDefinition>> createGenericPacks(const ScopePtr& scope, AstArray<AstGenericTypePack> packs);
std::vector<std::pair<Name, GenericTypeDefinition>> createGenerics(
const ScopePtr& scope, AstArray<AstGenericType> generics, bool useCache = false);
std::vector<std::pair<Name, GenericTypePackDefinition>> createGenericPacks(
const ScopePtr& scope, AstArray<AstGenericTypePack> packs, bool useCache = false);
Inference flattenPack(const ScopePtr& scope, Location location, InferencePack pack);

View File

@ -183,7 +183,7 @@ struct ConstraintSolver
/** Pushes a new solver constraint to the solver.
* @param cv the body of the constraint.
**/
void pushConstraint(NotNull<Scope> scope, const Location& location, ConstraintV cv);
NotNull<Constraint> pushConstraint(NotNull<Scope> scope, const Location& location, ConstraintV cv);
/**
* Attempts to resolve a module from its module information. Returns the

View File

@ -162,7 +162,7 @@ struct Frontend
ScopePtr getGlobalScope();
private:
ModulePtr check(const SourceModule& sourceModule, Mode mode, const ScopePtr& environmentScope, std::vector<RequireCycle> requireCycles,
ModulePtr check(const SourceModule& sourceModule, Mode mode, std::vector<RequireCycle> requireCycles,
bool forAutocomplete = false);
std::pair<SourceNode*, SourceModule*> getSourceNode(const ModuleName& name);
@ -202,4 +202,16 @@ private:
ScopePtr globalScope;
};
ModulePtr check(
const SourceModule& sourceModule,
const std::vector<RequireCycle>& requireCycles,
NotNull<BuiltinTypes> builtinTypes,
NotNull<InternalErrorReporter> iceHandler,
NotNull<ModuleResolver> moduleResolver,
NotNull<FileResolver> fileResolver,
const ScopePtr& globalScope,
NotNull<UnifierSharedState> unifierState,
FrontendOptions options
);
} // namespace Luau

View File

@ -16,7 +16,6 @@
LUAU_FASTFLAGVARIABLE(LuauCompleteTableKeysBetter, false);
LUAU_FASTFLAGVARIABLE(LuauFixAutocompleteInWhile, false);
LUAU_FASTFLAGVARIABLE(LuauFixAutocompleteInFor, false);
LUAU_FASTFLAGVARIABLE(LuauAutocompleteStringContent, false);
static const std::unordered_set<std::string> kStatementStartingKeywords = {
"while", "if", "local", "repeat", "function", "do", "for", "return", "break", "continue", "type", "export"};
@ -1268,9 +1267,6 @@ static bool isSimpleInterpolatedString(const AstNode* node)
static std::optional<std::string> getStringContents(const AstNode* node)
{
if (!FFlag::LuauAutocompleteStringContent)
return std::nullopt;
if (const AstExprConstantString* string = node->as<AstExprConstantString>())
{
return std::string(string->value.data, string->value.size);

View File

@ -15,8 +15,6 @@
#include <algorithm>
LUAU_FASTFLAGVARIABLE(LuauBuiltInMetatableNoBadSynthetic, false)
/** FIXME: Many of these type definitions are not quite completely accurate.
*
* Some of them require richer generics than we have. For instance, we do not yet have a way to talk
@ -558,8 +556,6 @@ static std::optional<WithPredicate<TypePackId>> magicFunctionSetMetaTable(
if (tableName == metatableName)
mtv.syntheticName = tableName;
else if (!FFlag::LuauBuiltInMetatableNoBadSynthetic)
mtv.syntheticName = "{ @metatable: " + metatableName + ", " + tableName + " }";
}
TypeId mtTy = arena.addType(mtv);

View File

@ -11,6 +11,8 @@
#include "Luau/TypeUtils.h"
#include "Luau/Type.h"
#include <algorithm>
LUAU_FASTINT(LuauCheckRecursionLimit);
LUAU_FASTFLAG(DebugLuauLogSolverToJson);
LUAU_FASTFLAG(DebugLuauMagicTypes);
@ -334,13 +336,12 @@ void ConstraintGraphBuilder::visitBlockWithoutChildScope(const ScopePtr& scope,
// In order to enable mutually-recursive type aliases, we need to
// populate the type bindings before we actually check any of the
// alias statements. Since we're not ready to actually resolve
// any of the annotations, we just use a fresh type for now.
// alias statements.
for (AstStat* stat : block->body)
{
if (auto alias = stat->as<AstStatTypeAlias>())
{
if (scope->privateTypeBindings.count(alias->name.value) != 0)
if (scope->exportedTypeBindings.count(alias->name.value) || scope->privateTypeBindings.count(alias->name.value))
{
auto it = aliasDefinitionLocations.find(alias->name.value);
LUAU_ASSERT(it != aliasDefinitionLocations.end());
@ -348,30 +349,28 @@ void ConstraintGraphBuilder::visitBlockWithoutChildScope(const ScopePtr& scope,
continue;
}
bool hasGenerics = alias->generics.size > 0 || alias->genericPacks.size > 0;
ScopePtr defnScope = childScope(alias, scope);
ScopePtr defnScope = scope;
if (hasGenerics)
{
defnScope = childScope(alias, scope);
}
TypeId initialType = arena->addType(BlockedType{});
TypeFun initialFun{initialType};
TypeId initialType = freshType(scope);
TypeFun initialFun = TypeFun{initialType};
for (const auto& [name, gen] : createGenerics(defnScope, alias->generics))
for (const auto& [name, gen] : createGenerics(defnScope, alias->generics, /* useCache */ true))
{
initialFun.typeParams.push_back(gen);
defnScope->privateTypeBindings[name] = TypeFun{gen.ty};
}
for (const auto& [name, genPack] : createGenericPacks(defnScope, alias->genericPacks))
for (const auto& [name, genPack] : createGenericPacks(defnScope, alias->genericPacks, /* useCache */ true))
{
initialFun.typePackParams.push_back(genPack);
defnScope->privateTypePackBindings[name] = genPack.tp;
}
if (alias->exported)
scope->exportedTypeBindings[alias->name.value] = std::move(initialFun);
else
scope->privateTypeBindings[alias->name.value] = std::move(initialFun);
astTypeAliasDefiningScopes[alias] = defnScope;
aliasDefinitionLocations[alias->name.value] = alias->location;
}
@ -387,42 +386,46 @@ void ConstraintGraphBuilder::visit(const ScopePtr& scope, AstStat* stat)
if (auto s = stat->as<AstStatBlock>())
visit(scope, s);
else if (auto i = stat->as<AstStatIf>())
visit(scope, i);
else if (auto s = stat->as<AstStatWhile>())
visit(scope, s);
else if (auto s = stat->as<AstStatRepeat>())
visit(scope, s);
else if (stat->is<AstStatBreak>() || stat->is<AstStatContinue>())
{
// Nothing
}
else if (auto r = stat->as<AstStatReturn>())
visit(scope, r);
else if (auto e = stat->as<AstStatExpr>())
checkPack(scope, e->expr);
else if (auto s = stat->as<AstStatLocal>())
visit(scope, s);
else if (auto s = stat->as<AstStatFor>())
visit(scope, s);
else if (auto s = stat->as<AstStatForIn>())
visit(scope, s);
else if (auto s = stat->as<AstStatWhile>())
visit(scope, s);
else if (auto s = stat->as<AstStatRepeat>())
visit(scope, s);
else if (auto f = stat->as<AstStatFunction>())
visit(scope, f);
else if (auto f = stat->as<AstStatLocalFunction>())
visit(scope, f);
else if (auto r = stat->as<AstStatReturn>())
visit(scope, r);
else if (auto a = stat->as<AstStatAssign>())
visit(scope, a);
else if (auto a = stat->as<AstStatCompoundAssign>())
visit(scope, a);
else if (auto e = stat->as<AstStatExpr>())
checkPack(scope, e->expr);
else if (auto i = stat->as<AstStatIf>())
visit(scope, i);
else if (auto f = stat->as<AstStatFunction>())
visit(scope, f);
else if (auto f = stat->as<AstStatLocalFunction>())
visit(scope, f);
else if (auto a = stat->as<AstStatTypeAlias>())
visit(scope, a);
else if (auto s = stat->as<AstStatDeclareGlobal>())
visit(scope, s);
else if (auto s = stat->as<AstStatDeclareClass>())
visit(scope, s);
else if (auto s = stat->as<AstStatDeclareFunction>())
visit(scope, s);
else if (auto s = stat->as<AstStatDeclareClass>())
visit(scope, s);
else if (auto s = stat->as<AstStatError>())
visit(scope, s);
else
LUAU_ASSERT(0);
LUAU_ASSERT(0 && "Internal error: Unknown AstStat type");
}
void ConstraintGraphBuilder::visit(const ScopePtr& scope, AstStatLocal* local)
@ -482,7 +485,7 @@ void ConstraintGraphBuilder::visit(const ScopePtr& scope, AstStatLocal* local)
}
else
{
std::vector<TypeId> expectedTypes;
std::vector<std::optional<TypeId>> expectedTypes;
if (hasAnnotation)
expectedTypes.insert(begin(expectedTypes), begin(varTypes) + i, end(varTypes));
@ -680,6 +683,7 @@ void ConstraintGraphBuilder::visit(const ScopePtr& scope, AstStatFunction* funct
TypeId generalizedType = arena->addType(BlockedType{});
Checkpoint start = checkpoint(this);
FunctionSignature sig = checkFunctionSignature(scope, function->func);
if (AstExprLocal* localName = function->name->as<AstExprLocal>())
@ -724,7 +728,6 @@ void ConstraintGraphBuilder::visit(const ScopePtr& scope, AstStatFunction* funct
if (generalizedType == nullptr)
ice->ice("generalizedType == nullptr", function->location);
Checkpoint start = checkpoint(this);
checkFunctionBody(sig.bodyScope, function->func);
Checkpoint end = checkpoint(this);
@ -745,7 +748,7 @@ void ConstraintGraphBuilder::visit(const ScopePtr& scope, AstStatReturn* ret)
// interesting in it is if the function has an explicit return annotation.
// If this is the case, then we can expect that the return expression
// conforms to that.
std::vector<TypeId> expectedTypes;
std::vector<std::optional<TypeId>> expectedTypes;
for (TypeId ty : scope->returnType)
expectedTypes.push_back(ty);
@ -764,8 +767,21 @@ void ConstraintGraphBuilder::visit(const ScopePtr& scope, AstStatAssign* assign)
{
TypePackId varPackId = checkLValues(scope, assign->vars);
TypePack expectedTypes = extendTypePack(*arena, builtinTypes, varPackId, assign->values.size);
TypePackId valuePack = checkPack(scope, assign->values, expectedTypes.head).tp;
TypePack expectedPack = extendTypePack(*arena, builtinTypes, varPackId, assign->values.size);
std::vector<std::optional<TypeId>> expectedTypes;
expectedTypes.reserve(expectedPack.head.size());
for (TypeId ty : expectedPack.head)
{
ty = follow(ty);
if (get<FreeType>(ty))
expectedTypes.push_back(std::nullopt);
else
expectedTypes.push_back(ty);
}
TypePackId valuePack = checkPack(scope, assign->values, expectedTypes).tp;
addConstraint(scope, assign->location, PackSubtypeConstraint{valuePack, varPackId});
}
@ -800,35 +816,70 @@ void ConstraintGraphBuilder::visit(const ScopePtr& scope, AstStatIf* ifStatement
}
}
static bool occursCheck(TypeId needle, TypeId haystack)
{
LUAU_ASSERT(get<BlockedType>(needle));
haystack = follow(haystack);
auto checkHaystack = [needle](TypeId haystack) {
return occursCheck(needle, haystack);
};
if (needle == haystack)
return true;
else if (auto ut = get<UnionType>(haystack))
return std::any_of(begin(ut), end(ut), checkHaystack);
else if (auto it = get<IntersectionType>(haystack))
return std::any_of(begin(it), end(it), checkHaystack);
return false;
}
void ConstraintGraphBuilder::visit(const ScopePtr& scope, AstStatTypeAlias* alias)
{
auto bindingIt = scope->privateTypeBindings.find(alias->name.value);
ScopePtr* defnIt = astTypeAliasDefiningScopes.find(alias);
ScopePtr* defnScope = astTypeAliasDefiningScopes.find(alias);
std::unordered_map<Name, TypeFun>* typeBindings;
if (alias->exported)
typeBindings = &scope->exportedTypeBindings;
else
typeBindings = &scope->privateTypeBindings;
// These will be undefined if the alias was a duplicate definition, in which
// case we just skip over it.
if (bindingIt == scope->privateTypeBindings.end() || defnIt == nullptr)
{
auto bindingIt = typeBindings->find(alias->name.value);
if (bindingIt == typeBindings->end() || defnScope == nullptr)
return;
}
ScopePtr resolvingScope = *defnIt;
TypeId ty = resolveType(resolvingScope, alias->type, /* inTypeArguments */ false);
TypeId ty = resolveType(*defnScope, alias->type, /* inTypeArguments */ false);
if (alias->exported)
TypeId aliasTy = bindingIt->second.type;
LUAU_ASSERT(get<BlockedType>(aliasTy));
if (occursCheck(aliasTy, ty))
{
Name typeName(alias->name.value);
scope->exportedTypeBindings[typeName] = TypeFun{ty};
asMutable(aliasTy)->ty.emplace<BoundType>(builtinTypes->anyType);
reportError(alias->nameLocation, OccursCheckFailed{});
}
else
asMutable(aliasTy)->ty.emplace<BoundType>(ty);
LUAU_ASSERT(get<FreeType>(bindingIt->second.type));
std::vector<TypeId> typeParams;
for (auto tyParam : createGenerics(*defnScope, alias->generics, /* useCache */ true))
typeParams.push_back(tyParam.second.ty);
// Rather than using a subtype constraint, we instead directly bind
// the free type we generated in the first pass to the resolved type.
// This prevents a case where you could cause another constraint to
// bind the free alias type to an unrelated type, causing havoc.
asMutable(bindingIt->second.type)->ty.emplace<BoundType>(ty);
std::vector<TypePackId> typePackParams;
for (auto tpParam : createGenericPacks(*defnScope, alias->genericPacks, /* useCache */ true))
typePackParams.push_back(tpParam.second.tp);
addConstraint(scope, alias->location, NameConstraint{ty, alias->name.value});
addConstraint(scope, alias->type->location,
NameConstraint{
ty,
alias->name.value,
/*synthetic=*/false,
std::move(typeParams),
std::move(typePackParams),
});
}
void ConstraintGraphBuilder::visit(const ScopePtr& scope, AstStatDeclareGlobal* global)
@ -997,7 +1048,8 @@ void ConstraintGraphBuilder::visit(const ScopePtr& scope, AstStatError* error)
check(scope, expr);
}
InferencePack ConstraintGraphBuilder::checkPack(const ScopePtr& scope, AstArray<AstExpr*> exprs, const std::vector<TypeId>& expectedTypes)
InferencePack ConstraintGraphBuilder::checkPack(
const ScopePtr& scope, AstArray<AstExpr*> exprs, const std::vector<std::optional<TypeId>>& expectedTypes)
{
std::vector<TypeId> head;
std::optional<TypePackId> tail;
@ -1010,11 +1062,11 @@ InferencePack ConstraintGraphBuilder::checkPack(const ScopePtr& scope, AstArray<
std::optional<TypeId> expectedType;
if (i < expectedTypes.size())
expectedType = expectedTypes[i];
head.push_back(check(scope, expr).ty);
head.push_back(check(scope, expr, expectedType).ty);
}
else
{
std::vector<TypeId> expectedTailTypes;
std::vector<std::optional<TypeId>> expectedTailTypes;
if (i < expectedTypes.size())
expectedTailTypes.assign(begin(expectedTypes) + i, end(expectedTypes));
tail = checkPack(scope, expr, expectedTailTypes).tp;
@ -1027,7 +1079,7 @@ InferencePack ConstraintGraphBuilder::checkPack(const ScopePtr& scope, AstArray<
return InferencePack{arena->addTypePack(TypePack{std::move(head), tail})};
}
InferencePack ConstraintGraphBuilder::checkPack(const ScopePtr& scope, AstExpr* expr, const std::vector<TypeId>& expectedTypes)
InferencePack ConstraintGraphBuilder::checkPack(const ScopePtr& scope, AstExpr* expr, const std::vector<std::optional<TypeId>>& expectedTypes)
{
RecursionCounter counter{&recursionCount};
@ -1040,7 +1092,7 @@ InferencePack ConstraintGraphBuilder::checkPack(const ScopePtr& scope, AstExpr*
InferencePack result;
if (AstExprCall* call = expr->as<AstExprCall>())
result = checkPack(scope, call, expectedTypes);
result = checkPack(scope, call);
else if (AstExprVarargs* varargs = expr->as<AstExprVarargs>())
{
if (scope->varargPack)
@ -1062,7 +1114,7 @@ InferencePack ConstraintGraphBuilder::checkPack(const ScopePtr& scope, AstExpr*
return result;
}
InferencePack ConstraintGraphBuilder::checkPack(const ScopePtr& scope, AstExprCall* call, const std::vector<TypeId>& expectedTypes)
InferencePack ConstraintGraphBuilder::checkPack(const ScopePtr& scope, AstExprCall* call)
{
std::vector<AstExpr*> exprArgs;
@ -1164,7 +1216,7 @@ InferencePack ConstraintGraphBuilder::checkPack(const ScopePtr& scope, AstExprCa
}
else
{
auto [tp, refis] = checkPack(scope, arg, {}); // FIXME? not sure about expectedTypes here
auto [tp, refis] = checkPack(scope, arg, {});
argTail = tp;
argumentRefinements.insert(argumentRefinements.end(), refis.begin(), refis.end());
}
@ -1209,24 +1261,13 @@ InferencePack ConstraintGraphBuilder::checkPack(const ScopePtr& scope, AstExprCa
if (matchAssert(*call) && !argumentRefinements.empty())
applyRefinements(scope, call->args.data[0]->location, argumentRefinements[0]);
TypeId instantiatedType = arena->addType(BlockedType{});
// TODO: How do expectedTypes play into this? Do they?
TypePackId rets = arena->addTypePack(BlockedTypePack{});
TypePackId argPack = arena->addTypePack(TypePack{args, argTail});
FunctionType ftv(TypeLevel{}, scope.get(), argPack, rets);
TypeId inferredFnType = arena->addType(ftv);
unqueuedConstraints.push_back(
std::make_unique<Constraint>(NotNull{scope.get()}, call->func->location, InstantiationConstraint{instantiatedType, fnType}));
NotNull<const Constraint> ic(unqueuedConstraints.back().get());
unqueuedConstraints.push_back(
std::make_unique<Constraint>(NotNull{scope.get()}, call->func->location, SubtypeConstraint{instantiatedType, inferredFnType}));
NotNull<Constraint> sc(unqueuedConstraints.back().get());
NotNull<Constraint> fcc = addConstraint(scope, call->func->location,
FunctionCallConstraint{
{ic, sc},
fnType,
argPack,
rets,
@ -1276,12 +1317,7 @@ Inference ConstraintGraphBuilder::check(const ScopePtr& scope, AstExpr* expr, st
else if (expr->is<AstExprVarargs>())
result = flattenPack(scope, expr->location, checkPack(scope, expr));
else if (auto call = expr->as<AstExprCall>())
{
std::vector<TypeId> expectedTypes;
if (expectedType)
expectedTypes.push_back(*expectedType);
result = flattenPack(scope, expr->location, checkPack(scope, call, expectedTypes)); // TODO: needs predicates too
}
result = flattenPack(scope, expr->location, checkPack(scope, call)); // TODO: needs predicates too
else if (auto a = expr->as<AstExprFunction>())
{
Checkpoint startCheckpoint = checkpoint(this);
@ -1883,6 +1919,7 @@ ConstraintGraphBuilder::FunctionSignature ConstraintGraphBuilder::checkFunctionS
}
std::vector<TypeId> argTypes;
std::vector<std::optional<FunctionArgument>> argNames;
TypePack expectedArgPack;
const FunctionType* expectedFunction = expectedType ? get<FunctionType>(*expectedType) : nullptr;
@ -1895,14 +1932,27 @@ ConstraintGraphBuilder::FunctionSignature ConstraintGraphBuilder::checkFunctionS
genericTypePacks = expectedFunction->genericPacks;
}
if (fn->self)
{
TypeId selfType = freshType(signatureScope);
argTypes.push_back(selfType);
argNames.emplace_back(FunctionArgument{fn->self->name.value, fn->self->location});
signatureScope->bindings[fn->self] = Binding{selfType, fn->self->location};
}
for (size_t i = 0; i < fn->args.size; ++i)
{
AstLocal* local = fn->args.data[i];
TypeId t = freshType(signatureScope);
argTypes.push_back(t);
argNames.emplace_back(FunctionArgument{local->name.value, local->location});
signatureScope->bindings[local] = Binding{t, local->location};
auto def = dfg->getDef(local);
LUAU_ASSERT(def);
signatureScope->dcrRefinements[*def] = t;
TypeId annotationTy = t;
if (local->annotation)
@ -1918,12 +1968,6 @@ ConstraintGraphBuilder::FunctionSignature ConstraintGraphBuilder::checkFunctionS
{
addConstraint(signatureScope, local->location, SubtypeConstraint{t, expectedArgPack.head[i]});
}
// HACK: This is the one case where the type of the definition will diverge from the type of the binding.
// We need to do this because there are cases where type refinements needs to have the information available
// at constraint generation time.
if (auto def = dfg->getDef(local))
signatureScope->dcrRefinements[*def] = annotationTy;
}
TypePackId varargPack = nullptr;
@ -1978,6 +2022,7 @@ ConstraintGraphBuilder::FunctionSignature ConstraintGraphBuilder::checkFunctionS
actualFunction.hasNoGenerics = !hasGenerics;
actualFunction.generics = std::move(genericTypes);
actualFunction.genericPacks = std::move(genericTypePacks);
actualFunction.argNames = std::move(argNames);
TypeId actualFunctionType = arena->addType(std::move(actualFunction));
LUAU_ASSERT(actualFunctionType);
@ -2085,11 +2130,6 @@ TypeId ConstraintGraphBuilder::resolveType(const ScopePtr& scope, AstType* ty, b
}
else
{
std::string typeName;
if (ref->prefix)
typeName = std::string(ref->prefix->value) + ".";
typeName += ref->name.value;
result = builtinTypes->errorRecoveryType();
}
}
@ -2245,6 +2285,8 @@ TypePackId ConstraintGraphBuilder::resolveTypePack(const ScopePtr& scope, AstTyp
else if (auto var = tp->as<AstTypePackVariadic>())
{
TypeId ty = resolveType(scope, var->variadicType, inTypeArgument);
if (get<ErrorType>(follow(ty)))
ty = freshType(scope);
result = arena->addTypePack(TypePackVar{VariadicTypePack{ty}});
}
else if (auto gen = tp->as<AstTypePackGeneric>())
@ -2287,12 +2329,22 @@ TypePackId ConstraintGraphBuilder::resolveTypePack(const ScopePtr& scope, const
return arena->addTypePack(TypePack{head, tail});
}
std::vector<std::pair<Name, GenericTypeDefinition>> ConstraintGraphBuilder::createGenerics(const ScopePtr& scope, AstArray<AstGenericType> generics)
std::vector<std::pair<Name, GenericTypeDefinition>> ConstraintGraphBuilder::createGenerics(
const ScopePtr& scope, AstArray<AstGenericType> generics, bool useCache)
{
std::vector<std::pair<Name, GenericTypeDefinition>> result;
for (const auto& generic : generics)
{
TypeId genericTy = arena->addType(GenericType{scope.get(), generic.name.value});
TypeId genericTy = nullptr;
if (auto it = scope->parent->typeAliasTypeParameters.find(generic.name.value); useCache && it != scope->parent->typeAliasTypeParameters.end())
genericTy = it->second;
else
{
genericTy = arena->addType(GenericType{scope.get(), generic.name.value});
scope->parent->typeAliasTypeParameters[generic.name.value] = genericTy;
}
std::optional<TypeId> defaultTy = std::nullopt;
if (generic.defaultValue)
@ -2305,12 +2357,22 @@ std::vector<std::pair<Name, GenericTypeDefinition>> ConstraintGraphBuilder::crea
}
std::vector<std::pair<Name, GenericTypePackDefinition>> ConstraintGraphBuilder::createGenericPacks(
const ScopePtr& scope, AstArray<AstGenericTypePack> generics)
const ScopePtr& scope, AstArray<AstGenericTypePack> generics, bool useCache)
{
std::vector<std::pair<Name, GenericTypePackDefinition>> result;
for (const auto& generic : generics)
{
TypePackId genericTy = arena->addTypePack(TypePackVar{GenericTypePack{scope.get(), generic.name.value}});
TypePackId genericTy;
if (auto it = scope->parent->typeAliasTypePackParameters.find(generic.name.value);
useCache && it != scope->parent->typeAliasTypePackParameters.end())
genericTy = it->second;
else
{
genericTy = arena->addTypePack(TypePackVar{GenericTypePack{scope.get(), generic.name.value}});
scope->parent->typeAliasTypePackParameters[generic.name.value] = genericTy;
}
std::optional<TypePackId> defaultTy = std::nullopt;
if (generic.defaultValue)

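The useCache path above reuses generic parameters recorded on the parent scope, so repeated passes over the same type alias see identical generic TypeIds instead of freshly minted ones. A minimal standalone sketch of that lookup-or-create pattern, with simplified stand-ins for the scope, arena, and TypeId (not the actual Luau headers):

#include <map>
#include <memory>
#include <string>
#include <vector>

// Hypothetical stand-ins; the real types live in the Luau arena headers.
struct GenericType { std::string name; };
using TypeId = const GenericType*;

struct Scope
{
    Scope* parent = nullptr;
    std::map<std::string, TypeId> typeAliasTypeParameters; // per-alias cache
};

struct Arena
{
    std::vector<std::unique_ptr<GenericType>> storage;
    TypeId addType(GenericType g)
    {
        storage.push_back(std::make_unique<GenericType>(std::move(g)));
        return storage.back().get();
    }
};

// Lookup-or-create: with useCache set, revisiting the same alias returns the
// TypeId created on the first pass instead of minting a fresh generic.
TypeId resolveGenericParam(Arena& arena, Scope& scope, const std::string& name, bool useCache)
{
    auto& cache = scope.parent->typeAliasTypeParameters;
    if (auto it = cache.find(name); useCache && it != cache.end())
        return it->second;

    TypeId fresh = arena.addType(GenericType{name});
    cache[name] = fresh;
    return fresh;
}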
View File

@ -217,12 +217,6 @@ void dump(ConstraintSolver* cs, ToStringOptions& opts)
int blockCount = it == cs->blockedConstraints.end() ? 0 : int(it->second);
printf("\t%d\t\t%s\n", blockCount, toString(*dep, opts).c_str());
}
if (auto fcc = get<FunctionCallConstraint>(*c))
{
for (NotNull<const Constraint> inner : fcc->innerConstraints)
printf("\t ->\t\t%s\n", toString(*inner, opts).c_str());
}
}
}
@ -531,32 +525,19 @@ bool ConstraintSolver::tryDispatch(const UnaryConstraint& c, NotNull<const Const
}
else if (std::optional<TypeId> mm = findMetatableEntry(builtinTypes, errors, operandType, "__unm", constraint->location))
{
const FunctionType* ftv = get<FunctionType>(follow(*mm));
TypeId mmTy = follow(*mm);
if (!ftv)
{
if (std::optional<TypeId> callMm = findMetatableEntry(builtinTypes, errors, follow(*mm), "__call", constraint->location))
{
ftv = get<FunctionType>(follow(*callMm));
}
}
if (get<FreeType>(mmTy) && !force)
return block(mmTy, constraint);
if (!ftv)
{
asMutable(c.resultType)->ty.emplace<BoundType>(builtinTypes->errorRecoveryType());
return true;
}
TypePackId argPack = arena->addTypePack(TypePack{{operandType}, {}});
TypePackId retPack = arena->addTypePack(BlockedTypePack{});
TypePackId argsPack = arena->addTypePack({operandType});
unify(ftv->argTypes, argsPack, constraint->scope);
asMutable(c.resultType)->ty.emplace<FreeType>(constraint->scope);
TypeId result = builtinTypes->errorRecoveryType();
if (ftv)
{
result = first(ftv->retTypes).value_or(builtinTypes->errorRecoveryType());
}
pushConstraint(constraint->scope, constraint->location, PackSubtypeConstraint{retPack, arena->addTypePack(TypePack{{c.resultType}})});
asMutable(c.resultType)->ty.emplace<BoundType>(result);
pushConstraint(constraint->scope, constraint->location, FunctionCallConstraint{mmTy, argPack, retPack, nullptr});
}
else
{
@ -884,7 +865,11 @@ bool ConstraintSolver::tryDispatch(const NameConstraint& c, NotNull<const Constr
if (c.synthetic && !ttv->name)
ttv->syntheticName = c.name;
else
{
ttv->name = c.name;
ttv->instantiatedTypeParams = c.typeParameters;
ttv->instantiatedTypePackParams = c.typePackParameters;
}
}
else if (MetatableType* mtv = getMutable<MetatableType>(target))
mtv->syntheticName = c.name;
@ -1032,6 +1017,7 @@ bool ConstraintSolver::tryDispatch(const TypeAliasExpansionConstraint& c, NotNul
{
// TODO (CLI-56761): Report an error.
bindResult(errorRecoveryType());
reportError(GenericError{"Recursive type being used with different parameters"}, constraint->location);
return true;
}
@ -1119,9 +1105,10 @@ bool ConstraintSolver::tryDispatch(const TypeAliasExpansionConstraint& c, NotNul
bool ConstraintSolver::tryDispatch(const FunctionCallConstraint& c, NotNull<const Constraint> constraint)
{
TypeId fn = follow(c.fn);
TypePackId argsPack = follow(c.argsPack);
TypePackId result = follow(c.result);
if (isBlocked(c.fn))
if (isBlocked(fn))
{
return block(c.fn, constraint);
}
@ -1156,12 +1143,8 @@ bool ConstraintSolver::tryDispatch(const FunctionCallConstraint& c, NotNull<cons
for (TypeId arg : c.argsPack)
args.push_back(arg);
TypeId instantiatedType = arena->addType(BlockedType{});
TypeId inferredFnType = arena->addType(FunctionType(TypeLevel{}, constraint->scope.get(), arena->addTypePack(TypePack{args, {}}), c.result));
asMutable(*c.innerConstraints.at(0)).c = InstantiationConstraint{instantiatedType, *callMm};
asMutable(*c.innerConstraints.at(1)).c = SubtypeConstraint{inferredFnType, instantiatedType};
argsPack = arena->addTypePack(TypePack{args, {}});
fn = *callMm;
asMutable(c.result)->ty.emplace<FreeTypePack>(constraint->scope);
}
else
@ -1178,19 +1161,7 @@ bool ConstraintSolver::tryDispatch(const FunctionCallConstraint& c, NotNull<cons
ftv->dcrMagicRefinement(MagicRefinementContext{constraint->scope, c.callSite, c.discriminantTypes});
}
if (usedMagic)
{
// There are constraints that are blocked on these constraints. If we
// are never going to even examine them, then we should not block
// anything else on them.
//
// TODO CLI-58842
#if 0
for (auto& c: c.innerConstraints)
unblock(c);
#endif
}
else
if (!usedMagic)
asMutable(c.result)->ty.emplace<FreeTypePack>(constraint->scope);
}
@ -1209,22 +1180,24 @@ bool ConstraintSolver::tryDispatch(const FunctionCallConstraint& c, NotNull<cons
*asMutable(follow(*ty)) = BoundType{builtinTypes->anyType};
}
// Alter the inner constraints.
LUAU_ASSERT(c.innerConstraints.size() == 2);
TypeId instantiatedTy = arena->addType(BlockedType{});
TypeId inferredTy = arena->addType(FunctionType{TypeLevel{}, constraint->scope.get(), argsPack, c.result});
// Anything that is blocked on this constraint must also be blocked on our inner constraints
auto ic = pushConstraint(constraint->scope, constraint->location, InstantiationConstraint{instantiatedTy, fn});
auto sc = pushConstraint(constraint->scope, constraint->location, SubtypeConstraint{instantiatedTy, inferredTy});
// Anything that is blocked on this constraint must also be blocked on our
// synthesized constraints.
auto blockedIt = blocked.find(constraint.get());
if (blockedIt != blocked.end())
{
for (const auto& ic : c.innerConstraints)
{
for (const auto& blockedConstraint : blockedIt->second)
{
block(ic, blockedConstraint);
block(sc, blockedConstraint);
}
}
unsolvedConstraints.insert(end(unsolvedConstraints), begin(c.innerConstraints), end(c.innerConstraints));
unblock(c.result);
return true;
}
@ -1914,12 +1887,14 @@ void ConstraintSolver::unify(TypePackId subPack, TypePackId superPack, NotNull<S
unblock(changedPacks);
}
void ConstraintSolver::pushConstraint(NotNull<Scope> scope, const Location& location, ConstraintV cv)
NotNull<Constraint> ConstraintSolver::pushConstraint(NotNull<Scope> scope, const Location& location, ConstraintV cv)
{
std::unique_ptr<Constraint> c = std::make_unique<Constraint>(scope, location, std::move(cv));
NotNull<Constraint> borrow = NotNull(c.get());
solverConstraints.push_back(std::move(c));
unsolvedConstraints.push_back(borrow);
return borrow;
}
TypeId ConstraintSolver::resolveModule(const ModuleInfo& info, const Location& location)

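pushConstraint now returns a non-owning NotNull<Constraint> to the constraint it just stored, which is what lets the FunctionCallConstraint dispatch above block other work on the synthesized instantiation and subtype constraints. A small sketch of that own-then-borrow pattern, using simplified Constraint and NotNull stand-ins rather than the real headers:

#include <cassert>
#include <memory>
#include <vector>

struct Constraint { int payload = 0; };

// Minimal stand-in for Luau::NotNull: a pointer asserted to be non-null.
template<typename T>
struct NotNull
{
    explicit NotNull(T* p) : ptr(p) { assert(p); }
    T* get() const { return ptr; }
    T* operator->() const { return ptr; }
private:
    T* ptr;
};

struct Solver
{
    std::vector<std::unique_ptr<Constraint>> solverConstraints; // owns storage
    std::vector<NotNull<Constraint>> unsolvedConstraints;       // borrows only

    // Store the constraint, keep ownership, and hand back a borrow that the
    // caller can use to block other constraints on it.
    NotNull<Constraint> pushConstraint(Constraint c)
    {
        auto owned = std::make_unique<Constraint>(std::move(c));
        NotNull<Constraint> borrow{owned.get()};
        solverConstraints.push_back(std::move(owned));
        unsolvedConstraints.push_back(borrow);
        return borrow;
    }
};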
View File

@ -372,7 +372,7 @@ ExpressionFlowGraph DataFlowGraphBuilder::visitExpr(DfgScope* scope, AstExprInde
ExpressionFlowGraph DataFlowGraphBuilder::visitExpr(DfgScope* scope, AstExprIndexExpr* i)
{
visitExpr(scope, i->expr);
visitExpr(scope, i->expr);
visitExpr(scope, i->index);
if (i->index->as<AstExprConstantString>())
{
@ -405,6 +405,13 @@ ExpressionFlowGraph DataFlowGraphBuilder::visitExpr(DfgScope* scope, AstExprFunc
ExpressionFlowGraph DataFlowGraphBuilder::visitExpr(DfgScope* scope, AstExprTable* t)
{
for (AstExprTable::Item item : t->items)
{
if (item.key)
visitExpr(scope, item.key);
visitExpr(scope, item.value);
}
return {};
}

View File

@ -104,7 +104,7 @@ LoadDefinitionFileResult Frontend::loadDefinitionFile(std::string_view source, c
module.root = parseResult.root;
module.mode = Mode::Definition;
ModulePtr checkedModule = check(module, Mode::Definition, globalScope, {});
ModulePtr checkedModule = check(module, Mode::Definition, {});
if (checkedModule->errors.size() > 0)
return LoadDefinitionFileResult{false, parseResult, checkedModule};
@ -517,7 +517,7 @@ CheckResult Frontend::check(const ModuleName& name, std::optional<FrontendOption
typeCheckerForAutocomplete.unifierIterationLimit = std::nullopt;
ModulePtr moduleForAutocomplete = FFlag::DebugLuauDeferredConstraintResolution
? check(sourceModule, mode, environmentScope, requireCycles, /*forAutocomplete*/ true)
? check(sourceModule, mode, requireCycles, /*forAutocomplete*/ true)
: typeCheckerForAutocomplete.check(sourceModule, Mode::Strict, environmentScope);
moduleResolverForAutocomplete.modules[moduleName] = moduleForAutocomplete;
@ -544,7 +544,7 @@ CheckResult Frontend::check(const ModuleName& name, std::optional<FrontendOption
typeChecker.requireCycles = requireCycles;
ModulePtr module = FFlag::DebugLuauDeferredConstraintResolution ? check(sourceModule, mode, environmentScope, requireCycles)
ModulePtr module = FFlag::DebugLuauDeferredConstraintResolution ? check(sourceModule, mode, requireCycles)
: typeChecker.check(sourceModule, mode, environmentScope);
stats.timeCheck += getTimestamp() - timestamp;
@ -855,11 +855,19 @@ ScopePtr Frontend::getGlobalScope()
return globalScope;
}
ModulePtr Frontend::check(
const SourceModule& sourceModule, Mode mode, const ScopePtr& environmentScope, std::vector<RequireCycle> requireCycles, bool forAutocomplete)
{
ModulePtr check(
const SourceModule& sourceModule,
const std::vector<RequireCycle>& requireCycles,
NotNull<BuiltinTypes> builtinTypes,
NotNull<InternalErrorReporter> iceHandler,
NotNull<ModuleResolver> moduleResolver,
NotNull<FileResolver> fileResolver,
const ScopePtr& globalScope,
NotNull<UnifierSharedState> unifierState,
FrontendOptions options
) {
ModulePtr result = std::make_shared<Module>();
result->reduction = std::make_unique<TypeReduction>(NotNull{&result->internalTypes}, builtinTypes, NotNull{&iceHandler});
result->reduction = std::make_unique<TypeReduction>(NotNull{&result->internalTypes}, builtinTypes, iceHandler);
std::unique_ptr<DcrLogger> logger;
if (FFlag::DebugLuauLogSolverToJson)
@ -872,20 +880,17 @@ ModulePtr Frontend::check(
}
}
DataFlowGraph dfg = DataFlowGraphBuilder::build(sourceModule.root, NotNull{&iceHandler});
DataFlowGraph dfg = DataFlowGraphBuilder::build(sourceModule.root, iceHandler);
const NotNull<ModuleResolver> mr{forAutocomplete ? &moduleResolverForAutocomplete : &moduleResolver};
const ScopePtr& globalScope{forAutocomplete ? typeCheckerForAutocomplete.globalScope : typeChecker.globalScope};
Normalizer normalizer{&result->internalTypes, builtinTypes, NotNull{&typeChecker.unifierState}};
Normalizer normalizer{&result->internalTypes, builtinTypes, unifierState};
ConstraintGraphBuilder cgb{
sourceModule.name,
result,
&result->internalTypes,
mr,
moduleResolver,
builtinTypes,
NotNull(&iceHandler),
iceHandler,
globalScope,
logger.get(),
NotNull{&dfg},
@ -894,7 +899,7 @@ ModulePtr Frontend::check(
cgb.visit(sourceModule.root);
result->errors = std::move(cgb.errors);
ConstraintSolver cs{NotNull{&normalizer}, NotNull(cgb.rootScope), borrowConstraints(cgb.constraints), sourceModule.name, NotNull(&moduleResolver),
ConstraintSolver cs{NotNull{&normalizer}, NotNull(cgb.rootScope), borrowConstraints(cgb.constraints), sourceModule.name, moduleResolver,
requireCycles, logger.get()};
if (options.randomizeConstraintResolutionSeed)
@ -908,7 +913,7 @@ ModulePtr Frontend::check(
result->scopes = std::move(cgb.scopes);
result->type = sourceModule.type;
result->clonePublicInterface(builtinTypes, iceHandler);
result->clonePublicInterface(builtinTypes, *iceHandler);
Luau::check(builtinTypes, logger.get(), sourceModule, result.get());
@ -929,6 +934,22 @@ ModulePtr Frontend::check(
return result;
}
ModulePtr Frontend::check(
const SourceModule& sourceModule, Mode mode, std::vector<RequireCycle> requireCycles, bool forAutocomplete)
{
return Luau::check(
sourceModule,
requireCycles,
builtinTypes,
NotNull{&iceHandler},
NotNull{forAutocomplete ? &moduleResolverForAutocomplete : &moduleResolver},
NotNull{fileResolver},
forAutocomplete ? typeCheckerForAutocomplete.globalScope : typeChecker.globalScope,
NotNull{&typeChecker.unifierState},
options
);
}
// Read AST into sourceModules if necessary. Trace require()s. Report parse errors.
std::pair<SourceNode*, SourceModule*> Frontend::getSourceNode(const ModuleName& name)
{

View File

@ -227,6 +227,9 @@ void Module::clonePublicInterface(NotNull<BuiltinTypes> builtinTypes, InternalEr
// Copy external stuff over to Module itself
this->returnType = moduleScope->returnType;
if (FFlag::DebugLuauDeferredConstraintResolution)
this->exportedTypeBindings = moduleScope->exportedTypeBindings;
else
this->exportedTypeBindings = std::move(moduleScope->exportedTypeBindings);
}

View File

@ -24,7 +24,6 @@ LUAU_FASTFLAG(DebugLuauFreezeArena)
LUAU_FASTINTVARIABLE(LuauTypeMaximumStringifierLength, 500)
LUAU_FASTINTVARIABLE(LuauTableTypeMaximumStringifierLength, 0)
LUAU_FASTINT(LuauTypeInferRecursionLimit)
LUAU_FASTFLAGVARIABLE(LuauMaybeGenericIntersectionTypes, false)
LUAU_FASTFLAG(LuauInstantiateInSubtyping)
LUAU_FASTFLAGVARIABLE(LuauMatchReturnsOptionalString, false);
@ -358,8 +357,6 @@ bool maybeGeneric(TypeId ty)
{
LUAU_ASSERT(!FFlag::LuauInstantiateInSubtyping);
if (FFlag::LuauMaybeGenericIntersectionTypes)
{
ty = follow(ty);
if (get<FreeType>(ty))
@ -380,19 +377,6 @@ bool maybeGeneric(TypeId ty)
return isGeneric(ty);
}
ty = follow(ty);
if (get<FreeType>(ty))
return true;
else if (auto ttv = get<TableType>(ty))
{
// TODO: recurse on table types CLI-39914
(void)ttv;
return true;
}
else
return isGeneric(ty);
}
bool maybeSingleton(TypeId ty)
{
ty = follow(ty);

View File

@ -204,12 +204,6 @@ struct TypeChecker2
bestLocation = scopeBounds;
}
}
else if (scopeBounds.begin > location.end)
{
// TODO: Is this sound? This relies on the fact that scopes are inserted
// into the scope list in the order that they appear in the AST.
break;
}
}
return bestScope;
@ -676,18 +670,7 @@ struct TypeChecker2
void visit(AstStatTypeAlias* stat)
{
for (const AstGenericType& el : stat->generics)
{
if (el.defaultValue)
visit(el.defaultValue);
}
for (const AstGenericTypePack& el : stat->genericPacks)
{
if (el.defaultValue)
visit(el.defaultValue);
}
visitGenerics(stat->generics, stat->genericPacks);
visit(stat->type);
}
@ -701,6 +684,7 @@ struct TypeChecker2
void visit(AstStatDeclareFunction* stat)
{
visitGenerics(stat->generics, stat->genericPacks);
visit(stat->params);
visit(stat->retTypes);
}
@ -973,8 +957,9 @@ struct TypeChecker2
void visit(AstExprIndexName* indexName, ValueContext context)
{
TypeId leftType = lookupType(indexName->expr);
visit(indexName->expr, RValue);
TypeId leftType = lookupType(indexName->expr);
const NormalizedType* norm = normalizer.normalize(leftType);
if (!norm)
reportError(NormalizationTooComplex{}, indexName->indexLocation);
@ -993,11 +978,18 @@ struct TypeChecker2
{
auto StackPusher = pushStack(fn);
visitGenerics(fn->generics, fn->genericPacks);
TypeId inferredFnTy = lookupType(fn);
const FunctionType* inferredFtv = get<FunctionType>(inferredFnTy);
LUAU_ASSERT(inferredFtv);
// There is no way to write an annotation for the self argument, so we
// cannot do anything to check it.
auto argIt = begin(inferredFtv->argTypes);
if (fn->self)
++argIt;
for (const auto& arg : fn->args)
{
if (argIt == end(inferredFtv->argTypes))
@ -1037,6 +1029,7 @@ struct TypeChecker2
NotNull<Scope> scope = stack.back();
TypeId operandType = lookupType(expr->expr);
TypeId resultType = lookupType(expr);
if (get<AnyType>(operandType) || get<ErrorType>(operandType) || get<NeverType>(operandType))
return;
@ -1048,9 +1041,6 @@ struct TypeChecker2
{
if (const FunctionType* ftv = get<FunctionType>(follow(*mm)))
{
TypePackId expectedArgs = testArena.addTypePack({operandType});
reportErrors(tryUnify(scope, expr->location, expectedArgs, ftv->argTypes));
if (std::optional<TypeId> ret = first(ftv->retTypes))
{
if (expr->op == AstExprUnary::Op::Len)
@ -1062,6 +1052,25 @@ struct TypeChecker2
{
reportError(GenericError{format("Metamethod '%s' must return a value", it->second)}, expr->location);
}
std::optional<TypeId> firstArg = first(ftv->argTypes);
if (!firstArg)
{
reportError(GenericError{"__unm metamethod must accept one argument"}, expr->location);
return;
}
TypePackId expectedArgs = testArena.addTypePack({operandType});
TypePackId expectedRet = testArena.addTypePack({resultType});
TypeId expectedFunction = testArena.addType(FunctionType{expectedArgs, expectedRet});
ErrorVec errors = tryUnify(scope, expr->location, *mm, expectedFunction);
if (!errors.empty())
{
reportError(TypeMismatch{*firstArg, operandType}, expr->location);
return;
}
}
return;
@ -1413,6 +1422,33 @@ struct TypeChecker2
ice.ice("flattenPack got a weird pack!");
}
void visitGenerics(AstArray<AstGenericType> generics, AstArray<AstGenericTypePack> genericPacks)
{
DenseHashSet<AstName> seen{AstName{}};
for (const auto& g : generics)
{
if (seen.contains(g.name))
reportError(DuplicateGenericParameter{g.name.value}, g.location);
else
seen.insert(g.name);
if (g.defaultValue)
visit(g.defaultValue);
}
for (const auto& g : genericPacks)
{
if (seen.contains(g.name))
reportError(DuplicateGenericParameter{g.name.value}, g.location);
else
seen.insert(g.name);
if (g.defaultValue)
visit(g.defaultValue);
}
}
void visit(AstType* ty)
{
if (auto t = ty->as<AstTypeReference>())
@ -1579,8 +1615,7 @@ struct TypeChecker2
void visit(AstTypeFunction* ty)
{
// TODO!
visitGenerics(ty->generics, ty->genericPacks);
visit(ty->argTypes);
visit(ty->returnTypes);
}

View File

@ -17,7 +17,6 @@
LUAU_FASTINT(LuauTypeInferTypePackLoopLimit)
LUAU_FASTFLAG(LuauErrorRecoveryType)
LUAU_FASTFLAGVARIABLE(LuauUnifyAnyTxnLog, false)
LUAU_FASTFLAGVARIABLE(LuauInstantiateInSubtyping, false)
LUAU_FASTFLAGVARIABLE(LuauScalarShapeUnifyToMtOwner2, false)
LUAU_FASTFLAGVARIABLE(LuauUninhabitedSubAnything2, false)
@ -475,8 +474,6 @@ void Unifier::tryUnify_(TypeId subTy, TypeId superTy, bool isFunctionCall, bool
return;
}
if (FFlag::LuauUnifyAnyTxnLog)
{
if (log.get<AnyType>(superTy))
return tryUnifyWithAny(subTy, builtinTypes->anyType);
@ -494,21 +491,6 @@ void Unifier::tryUnify_(TypeId subTy, TypeId superTy, bool isFunctionCall, bool
if (log.get<NeverType>(subTy))
return tryUnifyWithAny(superTy, builtinTypes->neverType);
}
else
{
if (get<ErrorType>(superTy) || get<AnyType>(superTy) || get<UnknownType>(superTy))
return tryUnifyWithAny(subTy, superTy);
if (get<AnyType>(subTy))
return tryUnifyWithAny(superTy, subTy);
if (log.get<ErrorType>(subTy))
return tryUnifyWithAny(superTy, subTy);
if (log.get<NeverType>(subTy))
return tryUnifyWithAny(superTy, subTy);
}
auto& cache = sharedState.cachedUnify;
@ -2535,18 +2517,9 @@ void Unifier::tryUnifyWithAny(TypeId subTy, TypeId anyTy)
{
LUAU_ASSERT(get<AnyType>(anyTy) || get<ErrorType>(anyTy) || get<UnknownType>(anyTy) || get<NeverType>(anyTy));
if (FFlag::LuauUnifyAnyTxnLog)
{
// These types are not visited in general loop below
if (log.get<PrimitiveType>(subTy) || log.get<AnyType>(subTy) || log.get<ClassType>(subTy))
return;
}
else
{
// These types are not visited in general loop below
if (get<PrimitiveType>(subTy) || get<AnyType>(subTy) || get<ClassType>(subTy))
return;
}
TypePackId anyTp = types->addTypePack(TypePackVar{VariadicTypePack{anyTy}});

View File

@ -319,6 +319,9 @@ std::string runCode(lua_State* L, const std::string& source)
lua_insert(T, 1);
lua_pcall(T, n, 0, 0);
}
lua_pop(L, 1);
return std::string();
}
else
{
@ -336,11 +339,9 @@ std::string runCode(lua_State* L, const std::string& source)
error += "\nstack backtrace:\n";
error += lua_debugtrace(T);
fprintf(stdout, "%s", error.c_str());
}
lua_pop(L, 1);
return std::string();
return error;
}
}
// Replaces the top of the lua stack with the metatable __index for the value

View File

@ -55,7 +55,7 @@ enum class IrCmd : uint8_t
// Get pointer (TValue) to table array at index
// A: pointer (Table)
// B: unsigned int
// B: int
GET_ARR_ADDR,
// Get pointer (LuaNode) to table node element at the active cached slot index
@ -177,7 +177,7 @@ enum class IrCmd : uint8_t
// A: pointer (Table)
DUP_TABLE,
// Try to convert a double number into a table index or jump if it's not an integer
// Try to convert a double number into a table index (int) or jump if it's not an integer
// A: double
// B: block
NUM_TO_INDEX,
@ -216,10 +216,10 @@ enum class IrCmd : uint8_t
// B: unsigned int (import path)
GET_IMPORT,
// Concatenate multiple TValues
// A: Rn (where to store the result)
// B: unsigned int (index of the first VM stack slot)
// C: unsigned int (number of stack slots to go over)
// Concatenate multiple TValues into a string
// A: Rn (value start)
// B: unsigned int (number of registers to go over)
// Note: result is stored in the register specified in 'A'
CONCAT,
// Load function upvalue into stack slot
@ -262,7 +262,8 @@ enum class IrCmd : uint8_t
// Guard against index overflowing the table array size
// A: pointer (Table)
// B: block
// B: int (index)
// C: block
CHECK_ARRAY_SIZE,
// Guard against cached table node slot not matching the actual table node slot for a key
@ -451,8 +452,12 @@ enum class IrCmd : uint8_t
// Prepare loop variables for a generic for loop, jump to the loop backedge unconditionally
// A: unsigned int (bytecode instruction index)
// B: Rn (loop state, updates Rn Rn+1 Rn+2)
// B: block
// C: block
FALLBACK_FORGPREP,
// Instruction that passes value through, it is produced by constant folding and users substitute it with the value
SUBSTITUTE,
// A: operand of any type
};
enum class IrConstKind : uint8_t
@ -659,6 +664,12 @@ struct IrFunction
LUAU_ASSERT(value.kind == IrConstKind::Double);
return value.valueDouble;
}
IrCondition conditionOp(IrOp op)
{
LUAU_ASSERT(op.kind == IrOpKind::Condition);
return IrCondition(op.index);
}
};
} // namespace CodeGen

View File

@ -10,6 +10,8 @@ namespace Luau
namespace CodeGen
{
struct IrBuilder;
inline bool isJumpD(LuauOpcode op)
{
switch (op)
@ -138,6 +140,7 @@ inline bool hasResult(IrCmd cmd)
case IrCmd::DUP_TABLE:
case IrCmd::NUM_TO_INDEX:
case IrCmd::INT_TO_NUM:
case IrCmd::SUBSTITUTE:
return true;
default:
break;
@ -153,6 +156,12 @@ inline bool hasSideEffects(IrCmd cmd)
return !hasResult(cmd);
}
inline bool isPseudo(IrCmd cmd)
{
// Instructions that are used for internal needs and are not a part of final lowering
return cmd == IrCmd::NOP || cmd == IrCmd::SUBSTITUTE;
}
// Remove a single instruction
void kill(IrFunction& function, IrInst& inst);
@ -172,5 +181,17 @@ void replace(IrFunction& function, IrOp& original, IrOp replacement);
// Target instruction index instead of reference is used to handle introduction of a new block terminator
void replace(IrFunction& function, uint32_t instIdx, IrInst replacement);
// Replace instruction with a different value (using IrCmd::SUBSTITUTE)
void substitute(IrFunction& function, IrInst& inst, IrOp replacement);
// Replace instruction arguments that point to substitutions with target values
void applySubstitutions(IrFunction& function, IrOp& op);
void applySubstitutions(IrFunction& function, IrInst& inst);
// Perform constant folding on instruction at index
// For most instructions, successful folding results in a IrCmd::SUBSTITUTE
// But it can also be successful on conditional control-flow, replacing it with an unconditional IrCmd::JUMP
void foldConstants(IrBuilder& build, IrFunction& function, uint32_t instIdx);
} // namespace CodeGen
} // namespace Luau
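A toy illustration of the SUBSTITUTE flow described above: constant folding rewrites a foldable instruction into SUBSTITUTE with the folded value in operand A, and a later pass redirects operands that still point at it. The Cmd/Op/Inst types here are invented for the sketch and are not the real IrData.h definitions:

#include <cstdint>
#include <vector>

// Invented toy IR for illustration only.
enum class Cmd { ADD_INT, SUBSTITUTE, USE };
struct Op { bool isInst = false; uint32_t index = 0; int constant = 0; };
struct Inst { Cmd cmd; Op a, b; };

// Folding: if both operands are constants, replace the instruction body with
// SUBSTITUTE carrying the result, mirroring substitute()/foldConstants above.
// The addition is done on unsigned values to sidestep signed-overflow UB.
void foldAdd(std::vector<Inst>& code, uint32_t idx)
{
    Inst& inst = code[idx];
    if (inst.cmd == Cmd::ADD_INT && !inst.a.isInst && !inst.b.isInst)
    {
        int sum = int32_t(uint32_t(inst.a.constant) + uint32_t(inst.b.constant));
        inst = Inst{Cmd::SUBSTITUTE, Op{false, 0, sum}, Op{}};
    }
}

// Users of the folded instruction pick up its payload, mirroring applySubstitutions.
void applySubstitution(std::vector<Inst>& code, Op& op)
{
    if (op.isInst && code[op.index].cmd == Cmd::SUBSTITUTE)
        op = code[op.index].a; // payload: a constant or another instruction's result
}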

View File

@ -246,6 +246,8 @@ const char* getCmdName(IrCmd cmd)
return "FALLBACK_DUPCLOSURE";
case IrCmd::FALLBACK_FORGPREP:
return "FALLBACK_FORGPREP";
case IrCmd::SUBSTITUTE:
return "SUBSTITUTE";
}
LUAU_UNREACHABLE();
@ -423,8 +425,8 @@ std::string toString(IrFunction& function, bool includeDetails)
{
IrInst& inst = function.instructions[index];
// Nop is used to replace dead instructions in-place, so it's not that useful to see them
if (inst.cmd == IrCmd::NOP)
// Skip pseudo instructions unless they are still referenced
if (isPseudo(inst.cmd) && inst.useCount == 0)
continue;
append(ctx.result, " ");

View File

@ -20,7 +20,7 @@ namespace Luau
namespace CodeGen
{
static RegisterX64 gprAlocOrder[] = {rax, rdx, rcx, rbx, rsi, rdi, r8, r9, r10, r11};
static const RegisterX64 kGprAllocOrder[] = {rax, rdx, rcx, rbx, rsi, rdi, r8, r9, r10, r11};
IrLoweringX64::IrLoweringX64(AssemblyBuilderX64& build, ModuleHelpers& helpers, NativeState& data, Proto* proto, IrFunction& function)
: build(build)
@ -111,7 +111,7 @@ void IrLoweringX64::lower(AssemblyOptions options)
if (options.includeIr)
{
build.logAppend("# ");
toStringDetailed(ctx, block, uint32_t(i));
toStringDetailed(ctx, block, blockIndex);
}
build.setLabel(block.label);
@ -133,9 +133,9 @@ void IrLoweringX64::lower(AssemblyOptions options)
IrInst& inst = function.instructions[index];
// Nop is used to replace dead instructions in-place
// Because it doesn't have any effects aside from output (when enabled), we skip it completely
if (inst.cmd == IrCmd::NOP)
// Skip pseudo instructions, but make sure they are not used at this stage
// This also prevents them from getting into text output when that's enabled
if (isPseudo(inst.cmd))
{
LUAU_ASSERT(inst.useCount == 0);
continue;
@ -263,8 +263,8 @@ void IrLoweringX64::lowerInst(IrInst& inst, uint32_t index, IrBlock& next)
build.mov(inst.regX64, qword[regOp(inst.a) + offsetof(Table, array)]);
if (uintOp(inst.b) != 0)
build.lea(inst.regX64, addr[inst.regX64 + uintOp(inst.b) * sizeof(TValue)]);
if (intOp(inst.b) != 0)
build.lea(inst.regX64, addr[inst.regX64 + intOp(inst.b) * sizeof(TValue)]);
}
else
{
@ -688,9 +688,11 @@ void IrLoweringX64::lowerInst(IrInst& inst, uint32_t index, IrBlock& next)
emitInstGetImportFallback(build, inst.a.index, uintOp(inst.b));
break;
case IrCmd::CONCAT:
LUAU_ASSERT(inst.a.kind == IrOpKind::VmReg);
build.mov(rArg1, rState);
build.mov(dwordReg(rArg2), uintOp(inst.a));
build.mov(dwordReg(rArg3), uintOp(inst.b));
build.mov(dwordReg(rArg2), uintOp(inst.b));
build.mov(dwordReg(rArg3), inst.a.index + uintOp(inst.b) - 1);
build.call(qword[rNativeContext + offsetof(NativeContext, luaV_concat)]);
emitUpdateBase(build);
@ -778,7 +780,7 @@ void IrLoweringX64::lowerInst(IrInst& inst, uint32_t index, IrBlock& next)
if (inst.b.kind == IrOpKind::Inst)
build.cmp(dword[regOp(inst.a) + offsetof(Table, sizearray)], regOp(inst.b));
else if (inst.b.kind == IrOpKind::Constant)
build.cmp(dword[regOp(inst.a) + offsetof(Table, sizearray)], uintOp(inst.b));
build.cmp(dword[regOp(inst.a) + offsetof(Table, sizearray)], intOp(inst.b));
else
LUAU_ASSERT(!"Unsupported instruction form");
@ -897,6 +899,7 @@ void IrLoweringX64::lowerInst(IrInst& inst, uint32_t index, IrBlock& next)
LUAU_ASSERT(inst.c.kind == IrOpKind::VmReg);
emitInstNameCall(build, pc, uintOp(inst.a), proto->k, blockOp(inst.d).label, blockOp(inst.e).label);
jumpOrFallthrough(blockOp(inst.d), next);
break;
}
case IrCmd::LOP_CALL:
@ -1133,7 +1136,7 @@ RegisterX64 IrLoweringX64::allocGprReg(SizeX64 preferredSize)
LUAU_ASSERT(
preferredSize == SizeX64::byte || preferredSize == SizeX64::word || preferredSize == SizeX64::dword || preferredSize == SizeX64::qword);
for (RegisterX64 reg : gprAlocOrder)
for (RegisterX64 reg : kGprAllocOrder)
{
if (freeGprMap[reg.index])
{

View File

@ -695,10 +695,10 @@ void translateInstGetTableN(IrBuilder& build, const Instruction* pc, int pcpos)
IrOp vb = build.inst(IrCmd::LOAD_POINTER, build.vmReg(rb));
build.inst(IrCmd::CHECK_ARRAY_SIZE, vb, build.constUint(c), fallback);
build.inst(IrCmd::CHECK_ARRAY_SIZE, vb, build.constInt(c), fallback);
build.inst(IrCmd::CHECK_NO_METATABLE, vb, fallback);
IrOp arrEl = build.inst(IrCmd::GET_ARR_ADDR, vb, build.constUint(c));
IrOp arrEl = build.inst(IrCmd::GET_ARR_ADDR, vb, build.constInt(c));
// TODO: per-component loads and stores might be preferable
IrOp arrElTval = build.inst(IrCmd::LOAD_TVALUE, arrEl);
@ -725,11 +725,11 @@ void translateInstSetTableN(IrBuilder& build, const Instruction* pc, int pcpos)
IrOp vb = build.inst(IrCmd::LOAD_POINTER, build.vmReg(rb));
build.inst(IrCmd::CHECK_ARRAY_SIZE, vb, build.constUint(c), fallback);
build.inst(IrCmd::CHECK_ARRAY_SIZE, vb, build.constInt(c), fallback);
build.inst(IrCmd::CHECK_NO_METATABLE, vb, fallback);
build.inst(IrCmd::CHECK_READONLY, vb, fallback);
IrOp arrEl = build.inst(IrCmd::GET_ARR_ADDR, vb, build.constUint(c));
IrOp arrEl = build.inst(IrCmd::GET_ARR_ADDR, vb, build.constInt(c));
// TODO: per-component loads and stores might be preferable
IrOp tva = build.inst(IrCmd::LOAD_TVALUE, build.vmReg(ra));
@ -969,7 +969,7 @@ void translateInstConcat(IrBuilder& build, const Instruction* pc, int pcpos)
int rc = LUAU_INSN_C(*pc);
build.inst(IrCmd::SET_SAVEDPC, build.constUint(pcpos + 1));
build.inst(IrCmd::CONCAT, build.constUint(rc - rb + 1), build.constUint(rc));
build.inst(IrCmd::CONCAT, build.vmReg(rb), build.constUint(rc - rb + 1));
// TODO: per-component loads and stores might be preferable
IrOp tvb = build.inst(IrCmd::LOAD_TVALUE, build.vmReg(rb));

View File

@ -1,6 +1,14 @@
// This file is part of the Luau programming language and is licensed under MIT License; see LICENSE.txt for details
#include "Luau/IrUtils.h"
#include "Luau/IrBuilder.h"
#include "lua.h"
#include "lnumutils.h"
#include <limits.h>
#include <math.h>
namespace Luau
{
namespace CodeGen
@ -8,16 +16,19 @@ namespace CodeGen
static uint32_t getBlockEnd(IrFunction& function, uint32_t start)
{
LUAU_ASSERT(start < function.instructions.size());
uint32_t end = start;
// Find previous block terminator
while (!isBlockTerminator(function.instructions[end].cmd))
end++;
LUAU_ASSERT(end < function.instructions.size());
return end;
}
static void addUse(IrFunction& function, IrOp op)
void addUse(IrFunction& function, IrOp op)
{
if (op.kind == IrOpKind::Inst)
function.instructions[op.index].useCount++;
@ -25,7 +36,7 @@ static void addUse(IrFunction& function, IrOp op)
function.blocks[op.index].useCount++;
}
static void removeUse(IrFunction& function, IrOp op)
void removeUse(IrFunction& function, IrOp op)
{
if (op.kind == IrOpKind::Inst)
removeUse(function, function.instructions[op.index]);
@ -44,6 +55,12 @@ void kill(IrFunction& function, IrInst& inst)
removeUse(function, inst.c);
removeUse(function, inst.d);
removeUse(function, inst.e);
inst.a = {};
inst.b = {};
inst.c = {};
inst.d = {};
inst.e = {};
}
void kill(IrFunction& function, uint32_t start, uint32_t end)
@ -51,6 +68,7 @@ void kill(IrFunction& function, uint32_t start, uint32_t end)
// Kill instructions in reverse order to avoid killing instructions that are still marked as used
for (int i = int(end); i >= int(start); i--)
{
LUAU_ASSERT(unsigned(i) < function.instructions.size());
IrInst& curr = function.instructions[i];
if (curr.cmd == IrCmd::NOP)
@ -102,7 +120,6 @@ void replace(IrFunction& function, IrOp& original, IrOp replacement)
void replace(IrFunction& function, uint32_t instIdx, IrInst replacement)
{
IrInst& inst = function.instructions[instIdx];
IrCmd prevCmd = inst.cmd;
// Add uses before removing new ones if those are the last ones keeping target operand alive
addUse(function, replacement.a);
@ -111,6 +128,20 @@ void replace(IrFunction& function, uint32_t instIdx, IrInst replacement)
addUse(function, replacement.d);
addUse(function, replacement.e);
// If we introduced an earlier terminating instruction, all following instructions become dead
if (!isBlockTerminator(inst.cmd) && isBlockTerminator(replacement.cmd))
{
uint32_t start = instIdx + 1;
// If we are in the process of constructing a block, replacement might happen at the last instruction
if (start < function.instructions.size())
{
uint32_t end = getBlockEnd(function, start);
kill(function, start, end);
}
}
removeUse(function, inst.a);
removeUse(function, inst.b);
removeUse(function, inst.c);
@ -118,14 +149,227 @@ void replace(IrFunction& function, uint32_t instIdx, IrInst replacement)
removeUse(function, inst.e);
inst = replacement;
}
// If we introduced an earlier terminating instruction, all following instructions become dead
if (!isBlockTerminator(prevCmd) && isBlockTerminator(inst.cmd))
void substitute(IrFunction& function, IrInst& inst, IrOp replacement)
{
uint32_t start = instIdx + 1;
uint32_t end = getBlockEnd(function, start);
LUAU_ASSERT(!isBlockTerminator(inst.cmd));
kill(function, start, end);
inst.cmd = IrCmd::SUBSTITUTE;
removeUse(function, inst.a);
removeUse(function, inst.b);
removeUse(function, inst.c);
removeUse(function, inst.d);
removeUse(function, inst.e);
inst.a = replacement;
inst.b = {};
inst.c = {};
inst.d = {};
inst.e = {};
}
void applySubstitutions(IrFunction& function, IrOp& op)
{
if (op.kind == IrOpKind::Inst)
{
IrInst& src = function.instructions[op.index];
if (src.cmd == IrCmd::SUBSTITUTE)
{
op.kind = src.a.kind;
op.index = src.a.index;
// If we substitute with the result of a different instruction, update the use count
if (op.kind == IrOpKind::Inst)
{
IrInst& dst = function.instructions[op.index];
LUAU_ASSERT(dst.cmd != IrCmd::SUBSTITUTE && "chained substitutions are not allowed");
dst.useCount++;
}
LUAU_ASSERT(src.useCount > 0);
src.useCount--;
}
}
}
void applySubstitutions(IrFunction& function, IrInst& inst)
{
applySubstitutions(function, inst.a);
applySubstitutions(function, inst.b);
applySubstitutions(function, inst.c);
applySubstitutions(function, inst.d);
applySubstitutions(function, inst.e);
}
static bool compare(double a, double b, IrCondition cond)
{
switch (cond)
{
case IrCondition::Equal:
return a == b;
case IrCondition::NotEqual:
return a != b;
case IrCondition::Less:
return a < b;
case IrCondition::NotLess:
return !(a < b);
case IrCondition::LessEqual:
return a <= b;
case IrCondition::NotLessEqual:
return !(a <= b);
case IrCondition::Greater:
return a > b;
case IrCondition::NotGreater:
return !(a > b);
case IrCondition::GreaterEqual:
return a >= b;
case IrCondition::NotGreaterEqual:
return !(a >= b);
default:
LUAU_ASSERT(!"unsupported conidtion");
}
return false;
}
void foldConstants(IrBuilder& build, IrFunction& function, uint32_t index)
{
IrInst& inst = function.instructions[index];
switch (inst.cmd)
{
case IrCmd::ADD_INT:
if (inst.a.kind == IrOpKind::Constant && inst.b.kind == IrOpKind::Constant)
{
// We need to avoid signed integer overflow, but we also have to produce a result
// So we add numbers as unsigned and use fixed-width integer types to force a two's complement evaluation
int32_t lhs = function.intOp(inst.a);
int32_t rhs = function.intOp(inst.b);
int sum = int32_t(uint32_t(lhs) + uint32_t(rhs));
substitute(function, inst, build.constInt(sum));
}
break;
case IrCmd::SUB_INT:
if (inst.a.kind == IrOpKind::Constant && inst.b.kind == IrOpKind::Constant)
{
// We need to avoid signed integer overflow, but we also have to produce a result
// So we subtract numbers as unsigned and use fixed-width integer types to force a two's complement evaluation
int32_t lhs = function.intOp(inst.a);
int32_t rhs = function.intOp(inst.b);
int sum = int32_t(uint32_t(lhs) - uint32_t(rhs));
substitute(function, inst, build.constInt(sum));
}
break;
case IrCmd::ADD_NUM:
if (inst.a.kind == IrOpKind::Constant && inst.b.kind == IrOpKind::Constant)
substitute(function, inst, build.constDouble(function.doubleOp(inst.a) + function.doubleOp(inst.b)));
break;
case IrCmd::SUB_NUM:
if (inst.a.kind == IrOpKind::Constant && inst.b.kind == IrOpKind::Constant)
substitute(function, inst, build.constDouble(function.doubleOp(inst.a) - function.doubleOp(inst.b)));
break;
case IrCmd::MUL_NUM:
if (inst.a.kind == IrOpKind::Constant && inst.b.kind == IrOpKind::Constant)
substitute(function, inst, build.constDouble(function.doubleOp(inst.a) * function.doubleOp(inst.b)));
break;
case IrCmd::DIV_NUM:
if (inst.a.kind == IrOpKind::Constant && inst.b.kind == IrOpKind::Constant)
substitute(function, inst, build.constDouble(function.doubleOp(inst.a) / function.doubleOp(inst.b)));
break;
case IrCmd::MOD_NUM:
if (inst.a.kind == IrOpKind::Constant && inst.b.kind == IrOpKind::Constant)
substitute(function, inst, build.constDouble(luai_nummod(function.doubleOp(inst.a), function.doubleOp(inst.b))));
break;
case IrCmd::POW_NUM:
if (inst.a.kind == IrOpKind::Constant && inst.b.kind == IrOpKind::Constant)
substitute(function, inst, build.constDouble(pow(function.doubleOp(inst.a), function.doubleOp(inst.b))));
break;
case IrCmd::UNM_NUM:
if (inst.a.kind == IrOpKind::Constant)
substitute(function, inst, build.constDouble(-function.doubleOp(inst.a)));
break;
case IrCmd::NOT_ANY:
if (inst.a.kind == IrOpKind::Constant)
{
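// In Luau only nil and false are falsy: a nil tag folds to 1, any other non-boolean tag folds to 0,
// and a constant boolean folds to the logical negation of its value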
uint8_t a = function.tagOp(inst.a);
if (a == LUA_TNIL)
substitute(function, inst, build.constInt(1));
else if (a != LUA_TBOOLEAN)
substitute(function, inst, build.constInt(0));
else if (inst.b.kind == IrOpKind::Constant)
substitute(function, inst, build.constInt(function.intOp(inst.b) == 1 ? 0 : 1));
}
break;
case IrCmd::JUMP_EQ_TAG:
if (inst.a.kind == IrOpKind::Constant && inst.b.kind == IrOpKind::Constant)
{
if (function.tagOp(inst.a) == function.tagOp(inst.b))
replace(function, index, {IrCmd::JUMP, inst.c});
else
replace(function, index, {IrCmd::JUMP, inst.d});
}
break;
case IrCmd::JUMP_EQ_INT:
if (inst.a.kind == IrOpKind::Constant && inst.b.kind == IrOpKind::Constant)
{
if (function.intOp(inst.a) == function.intOp(inst.b))
replace(function, index, {IrCmd::JUMP, inst.c});
else
replace(function, index, {IrCmd::JUMP, inst.d});
}
break;
case IrCmd::JUMP_CMP_NUM:
if (inst.a.kind == IrOpKind::Constant && inst.b.kind == IrOpKind::Constant)
{
if (compare(function.doubleOp(inst.a), function.doubleOp(inst.b), function.conditionOp(inst.c)))
replace(function, index, {IrCmd::JUMP, inst.d});
else
replace(function, index, {IrCmd::JUMP, inst.e});
}
break;
case IrCmd::NUM_TO_INDEX:
if (inst.a.kind == IrOpKind::Constant)
{
double value = function.doubleOp(inst.a);
// To avoid undefined behavior of casting a value not representable in the target type, we check the range
if (value >= INT_MIN && value <= INT_MAX)
{
int arrIndex = int(value);
if (double(arrIndex) == value)
substitute(function, inst, build.constInt(arrIndex));
else
replace(function, index, {IrCmd::JUMP, inst.b});
}
else
{
replace(function, index, {IrCmd::JUMP, inst.b});
}
}
break;
case IrCmd::INT_TO_NUM:
if (inst.a.kind == IrOpKind::Constant)
substitute(function, inst, build.constDouble(double(function.intOp(inst.a))));
break;
case IrCmd::CHECK_TAG:
if (inst.a.kind == IrOpKind::Constant && inst.b.kind == IrOpKind::Constant)
{
if (function.tagOp(inst.a) == function.tagOp(inst.b))
kill(function, inst);
else
replace(function, index, {IrCmd::JUMP, inst.c}); // Shows a conflict in assumptions on this path
}
break;
default:
break;
}
}
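// Worked example (editor's sketch, not part of this change): the two numeric tricks used above in
// isolation. Converting an out-of-range uint32_t back to int32_t relies on two's complement, just as
// the folding code does, and the double->int round-trip check decides between constInt() and a JUMP.
static void foldingArithmeticExample()
{
    // ADD_INT folding: adding as unsigned wraps instead of invoking signed-overflow UB
    int32_t wrapped = int32_t(uint32_t(INT_MAX) + uint32_t(1));
    LUAU_ASSERT(wrapped == INT_MIN); // matches the 'STORE_INT R0, -2147483648i' line in the Numeric test

    // NUM_TO_INDEX folding: a double becomes an array index only when the int round-trip is exact
    LUAU_ASSERT(double(int(4.0)) == 4.0); // folds to constInt(4)
    LUAU_ASSERT(double(int(1.2)) != 1.2); // folds to a JUMP to the fallback block
}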

View File

@@ -10,7 +10,7 @@ inline bool isFlagExperimental(const char* flag)
{
// Flags in this list are disabled by default in various command-line tools. They may have behavior that is not fully final,
// or critical bugs that are found after the code has been submitted.
static const char* kList[] = {
static const char* const kList[] = {
"LuauInstantiateInSubtyping", // requires some fixes to lua-apps code
"LuauTryhardAnd", // waiting for a fix in graphql-lua -> apollo-client-lia -> lua-apps
"LuauTypecheckTypeguards", // requires some fixes to lua-apps code (CLI-67030)

View File

@@ -3442,8 +3442,6 @@ TEST_CASE_FIXTURE(ACFixture, "type_reduction_is_hooked_up_to_autocomplete")
TEST_CASE_FIXTURE(ACFixture, "string_contents_is_available_to_callback")
{
ScopedFastFlag luauAutocompleteStringContent{"LuauAutocompleteStringContent", true};
loadDefinition(R"(
declare function require(path: string): any
)");

View File

@@ -1,4 +1,5 @@
// This file is part of the Luau programming language and is licensed under MIT License; see LICENSE.txt for details
#pragma once
#include "Fixture.h"

View File

@@ -176,6 +176,21 @@ AstStatBlock* Fixture::parse(const std::string& source, const ParseOptions& pars
{
frontend.lint(*sourceModule);
if (FFlag::DebugLuauDeferredConstraintResolution)
{
Luau::check(
*sourceModule,
{},
frontend.builtinTypes,
NotNull{&ice},
NotNull{&moduleResolver},
NotNull{&fileResolver},
typeChecker.globalScope,
NotNull{&typeChecker.unifierState},
frontend.options
);
}
else
typeChecker.check(*sourceModule, sourceModule->mode.value_or(Luau::Mode::Nonstrict));
}

View File

@@ -2,16 +2,83 @@
#include "Luau/IrBuilder.h"
#include "Luau/IrAnalysis.h"
#include "Luau/IrDump.h"
#include "Luau/IrUtils.h"
#include "Luau/OptimizeFinalX64.h"
#include "doctest.h"
#include <limits.h>
using namespace Luau::CodeGen;
class IrBuilderFixture
{
public:
void constantFold()
{
for (size_t i = 0; i < build.function.instructions.size(); i++)
{
IrInst& inst = build.function.instructions[i];
applySubstitutions(build.function, inst);
foldConstants(build, build.function, uint32_t(i));
}
}
template<typename F>
void withOneBlock(F&& f)
{
IrOp main = build.block(IrBlockKind::Internal);
IrOp a = build.block(IrBlockKind::Internal);
build.beginBlock(main);
f(a);
build.beginBlock(a);
build.inst(IrCmd::LOP_RETURN, build.constUint(1));
};
template<typename F>
void withTwoBlocks(F&& f)
{
IrOp main = build.block(IrBlockKind::Internal);
IrOp a = build.block(IrBlockKind::Internal);
IrOp b = build.block(IrBlockKind::Internal);
build.beginBlock(main);
f(a, b);
build.beginBlock(a);
build.inst(IrCmd::LOP_RETURN, build.constUint(1));
build.beginBlock(b);
build.inst(IrCmd::LOP_RETURN, build.constUint(2));
};
void checkEq(IrOp lhs, IrOp rhs)
{
CHECK_EQ(lhs.kind, rhs.kind);
LUAU_ASSERT(lhs.kind != IrOpKind::Constant && "can't compare constants, each ref is unique");
CHECK_EQ(lhs.index, rhs.index);
}
void checkEq(IrOp instOp, const IrInst& inst)
{
const IrInst& target = build.function.instOp(instOp);
CHECK(target.cmd == inst.cmd);
checkEq(target.a, inst.a);
checkEq(target.b, inst.b);
checkEq(target.c, inst.c);
checkEq(target.d, inst.d);
checkEq(target.e, inst.e);
}
IrBuilder build;
// Luau.VM headers are not accessible
static const int tnil = 0;
static const int tboolean = 1;
static const int tnumber = 3;
};
TEST_SUITE_BEGIN("Optimization");
@@ -153,7 +220,7 @@ TEST_CASE_FIXTURE(IrBuilderFixture, "FinalX64OptEqTag3")
build.beginBlock(block);
IrOp table = build.inst(IrCmd::LOAD_POINTER, build.vmReg(1));
IrOp arrElem = build.inst(IrCmd::GET_ARR_ADDR, table, build.constUint(0));
IrOp arrElem = build.inst(IrCmd::GET_ARR_ADDR, table, build.constInt(0));
IrOp opA = build.inst(IrCmd::LOAD_TAG, arrElem);
build.inst(IrCmd::JUMP_EQ_TAG, opA, build.constTag(0), trueBlock, falseBlock);
build.inst(IrCmd::LOP_RETURN, build.constUint(0));
@@ -171,7 +238,7 @@ TEST_CASE_FIXTURE(IrBuilderFixture, "FinalX64OptEqTag3")
CHECK("\n" + toString(build.function, /* includeDetails */ false) == R"(
bb_0:
%0 = LOAD_POINTER R1
%1 = GET_ARR_ADDR %0, 0u
%1 = GET_ARR_ADDR %0, 0i
%2 = LOAD_TAG %1
JUMP_EQ_TAG %2, tnil, bb_1, bb_2
@@ -221,3 +288,247 @@ bb_2:
}
TEST_SUITE_END();
TEST_SUITE_BEGIN("ConstantFolding");
TEST_CASE_FIXTURE(IrBuilderFixture, "Numeric")
{
IrOp block = build.block(IrBlockKind::Internal);
build.beginBlock(block);
build.inst(IrCmd::STORE_INT, build.vmReg(0), build.inst(IrCmd::ADD_INT, build.constInt(10), build.constInt(20)));
build.inst(IrCmd::STORE_INT, build.vmReg(0), build.inst(IrCmd::ADD_INT, build.constInt(INT_MAX), build.constInt(1)));
build.inst(IrCmd::STORE_INT, build.vmReg(0), build.inst(IrCmd::SUB_INT, build.constInt(10), build.constInt(20)));
build.inst(IrCmd::STORE_INT, build.vmReg(0), build.inst(IrCmd::SUB_INT, build.constInt(INT_MIN), build.constInt(1)));
build.inst(IrCmd::STORE_DOUBLE, build.vmReg(0), build.inst(IrCmd::ADD_NUM, build.constDouble(2), build.constDouble(5)));
build.inst(IrCmd::STORE_DOUBLE, build.vmReg(0), build.inst(IrCmd::SUB_NUM, build.constDouble(2), build.constDouble(5)));
build.inst(IrCmd::STORE_DOUBLE, build.vmReg(0), build.inst(IrCmd::MUL_NUM, build.constDouble(2), build.constDouble(5)));
build.inst(IrCmd::STORE_DOUBLE, build.vmReg(0), build.inst(IrCmd::DIV_NUM, build.constDouble(2), build.constDouble(5)));
build.inst(IrCmd::STORE_DOUBLE, build.vmReg(0), build.inst(IrCmd::MOD_NUM, build.constDouble(5), build.constDouble(2)));
build.inst(IrCmd::STORE_DOUBLE, build.vmReg(0), build.inst(IrCmd::POW_NUM, build.constDouble(5), build.constDouble(2)));
build.inst(IrCmd::STORE_DOUBLE, build.vmReg(0), build.inst(IrCmd::UNM_NUM, build.constDouble(5)));
build.inst(IrCmd::STORE_INT, build.vmReg(0), build.inst(IrCmd::NOT_ANY, build.constTag(tnil), build.inst(IrCmd::LOAD_DOUBLE, build.vmReg(1))));
build.inst(IrCmd::STORE_INT, build.vmReg(0), build.inst(IrCmd::NOT_ANY, build.constTag(tnumber), build.inst(IrCmd::LOAD_DOUBLE, build.vmReg(1))));
build.inst(IrCmd::STORE_INT, build.vmReg(0), build.inst(IrCmd::NOT_ANY, build.constTag(tboolean), build.constInt(0)));
build.inst(IrCmd::STORE_INT, build.vmReg(0), build.inst(IrCmd::NOT_ANY, build.constTag(tboolean), build.constInt(1)));
build.inst(IrCmd::STORE_DOUBLE, build.vmReg(0), build.inst(IrCmd::INT_TO_NUM, build.constInt(8)));
build.inst(IrCmd::LOP_RETURN, build.constUint(0));
updateUseCounts(build.function);
constantFold();
CHECK("\n" + toString(build.function, /* includeDetails */ false) == R"(
bb_0:
STORE_INT R0, 30i
STORE_INT R0, -2147483648i
STORE_INT R0, -10i
STORE_INT R0, 2147483647i
STORE_DOUBLE R0, 7
STORE_DOUBLE R0, -3
STORE_DOUBLE R0, 10
STORE_DOUBLE R0, 0.40000000000000002
STORE_DOUBLE R0, 1
STORE_DOUBLE R0, 25
STORE_DOUBLE R0, -5
STORE_INT R0, 1i
STORE_INT R0, 0i
STORE_INT R0, 1i
STORE_INT R0, 0i
STORE_DOUBLE R0, 8
LOP_RETURN 0u
)");
}
TEST_CASE_FIXTURE(IrBuilderFixture, "ControlFlowEq")
{
withTwoBlocks([this](IrOp a, IrOp b) {
build.inst(IrCmd::JUMP_EQ_TAG, build.constTag(tnil), build.constTag(tnil), a, b);
});
withTwoBlocks([this](IrOp a, IrOp b) {
build.inst(IrCmd::JUMP_EQ_TAG, build.constTag(tnil), build.constTag(tnumber), a, b);
});
withTwoBlocks([this](IrOp a, IrOp b) {
build.inst(IrCmd::JUMP_EQ_INT, build.constInt(0), build.constInt(0), a, b);
});
withTwoBlocks([this](IrOp a, IrOp b) {
build.inst(IrCmd::JUMP_EQ_INT, build.constInt(0), build.constInt(1), a, b);
});
updateUseCounts(build.function);
constantFold();
CHECK("\n" + toString(build.function, /* includeDetails */ false) == R"(
bb_0:
JUMP bb_1
bb_1:
LOP_RETURN 1u
bb_3:
JUMP bb_5
bb_5:
LOP_RETURN 2u
bb_6:
JUMP bb_7
bb_7:
LOP_RETURN 1u
bb_9:
JUMP bb_11
bb_11:
LOP_RETURN 2u
)");
}
TEST_CASE_FIXTURE(IrBuilderFixture, "NumToIndex")
{
withOneBlock([this](IrOp a) {
build.inst(IrCmd::STORE_INT, build.vmReg(0), build.inst(IrCmd::NUM_TO_INDEX, build.constDouble(4), a));
build.inst(IrCmd::LOP_RETURN, build.constUint(0));
});
withOneBlock([this](IrOp a) {
build.inst(IrCmd::STORE_INT, build.vmReg(0), build.inst(IrCmd::NUM_TO_INDEX, build.constDouble(1.2), a));
build.inst(IrCmd::LOP_RETURN, build.constUint(0));
});
withOneBlock([this](IrOp a) {
IrOp nan = build.inst(IrCmd::DIV_NUM, build.constDouble(0.0), build.constDouble(0.0));
build.inst(IrCmd::STORE_INT, build.vmReg(0), build.inst(IrCmd::NUM_TO_INDEX, nan, a));
build.inst(IrCmd::LOP_RETURN, build.constUint(0));
});
updateUseCounts(build.function);
constantFold();
CHECK("\n" + toString(build.function, /* includeDetails */ false) == R"(
bb_0:
STORE_INT R0, 4i
LOP_RETURN 0u
bb_2:
JUMP bb_3
bb_3:
LOP_RETURN 1u
bb_4:
JUMP bb_5
bb_5:
LOP_RETURN 1u
)");
}
TEST_CASE_FIXTURE(IrBuilderFixture, "Guards")
{
withOneBlock([this](IrOp a) {
build.inst(IrCmd::CHECK_TAG, build.constTag(tnumber), build.constTag(tnumber), a);
build.inst(IrCmd::LOP_RETURN, build.constUint(0));
});
withOneBlock([this](IrOp a) {
build.inst(IrCmd::CHECK_TAG, build.constTag(tnil), build.constTag(tnumber), a);
build.inst(IrCmd::LOP_RETURN, build.constUint(0));
});
updateUseCounts(build.function);
constantFold();
CHECK("\n" + toString(build.function, /* includeDetails */ false) == R"(
bb_0:
LOP_RETURN 0u
bb_2:
JUMP bb_3
bb_3:
LOP_RETURN 1u
)");
}
TEST_CASE_FIXTURE(IrBuilderFixture, "ControlFlowCmpNum")
{
IrOp nan = build.inst(IrCmd::DIV_NUM, build.constDouble(0.0), build.constDouble(0.0));
auto compareFold = [this](IrOp lhs, IrOp rhs, IrCondition cond, bool result) {
IrOp instOp;
IrInst instExpected;
withTwoBlocks([&](IrOp a, IrOp b) {
instOp = build.inst(IrCmd::JUMP_CMP_NUM, lhs, rhs, build.cond(cond), a, b);
instExpected = IrInst{IrCmd::JUMP, result ? a : b};
});
updateUseCounts(build.function);
constantFold();
checkEq(instOp, instExpected);
};
compareFold(build.constDouble(1), build.constDouble(1), IrCondition::Equal, true);
compareFold(build.constDouble(1), build.constDouble(2), IrCondition::Equal, false);
compareFold(nan, nan, IrCondition::Equal, false);
compareFold(build.constDouble(1), build.constDouble(1), IrCondition::NotEqual, false);
compareFold(build.constDouble(1), build.constDouble(2), IrCondition::NotEqual, true);
compareFold(nan, nan, IrCondition::NotEqual, true);
compareFold(build.constDouble(1), build.constDouble(1), IrCondition::Less, false);
compareFold(build.constDouble(1), build.constDouble(2), IrCondition::Less, true);
compareFold(build.constDouble(2), build.constDouble(1), IrCondition::Less, false);
compareFold(build.constDouble(1), nan, IrCondition::Less, false);
compareFold(build.constDouble(1), build.constDouble(1), IrCondition::NotLess, true);
compareFold(build.constDouble(1), build.constDouble(2), IrCondition::NotLess, false);
compareFold(build.constDouble(2), build.constDouble(1), IrCondition::NotLess, true);
compareFold(build.constDouble(1), nan, IrCondition::NotLess, true);
compareFold(build.constDouble(1), build.constDouble(1), IrCondition::LessEqual, true);
compareFold(build.constDouble(1), build.constDouble(2), IrCondition::LessEqual, true);
compareFold(build.constDouble(2), build.constDouble(1), IrCondition::LessEqual, false);
compareFold(build.constDouble(1), nan, IrCondition::LessEqual, false);
compareFold(build.constDouble(1), build.constDouble(1), IrCondition::NotLessEqual, false);
compareFold(build.constDouble(1), build.constDouble(2), IrCondition::NotLessEqual, false);
compareFold(build.constDouble(2), build.constDouble(1), IrCondition::NotLessEqual, true);
compareFold(build.constDouble(1), nan, IrCondition::NotLessEqual, true);
compareFold(build.constDouble(1), build.constDouble(1), IrCondition::Greater, false);
compareFold(build.constDouble(1), build.constDouble(2), IrCondition::Greater, false);
compareFold(build.constDouble(2), build.constDouble(1), IrCondition::Greater, true);
compareFold(build.constDouble(1), nan, IrCondition::Greater, false);
compareFold(build.constDouble(1), build.constDouble(1), IrCondition::NotGreater, true);
compareFold(build.constDouble(1), build.constDouble(2), IrCondition::NotGreater, true);
compareFold(build.constDouble(2), build.constDouble(1), IrCondition::NotGreater, false);
compareFold(build.constDouble(1), nan, IrCondition::NotGreater, true);
compareFold(build.constDouble(1), build.constDouble(1), IrCondition::GreaterEqual, true);
compareFold(build.constDouble(1), build.constDouble(2), IrCondition::GreaterEqual, false);
compareFold(build.constDouble(2), build.constDouble(1), IrCondition::GreaterEqual, true);
compareFold(build.constDouble(1), nan, IrCondition::GreaterEqual, false);
compareFold(build.constDouble(1), build.constDouble(1), IrCondition::NotGreaterEqual, false);
compareFold(build.constDouble(1), build.constDouble(2), IrCondition::NotGreaterEqual, true);
compareFold(build.constDouble(2), build.constDouble(1), IrCondition::NotGreaterEqual, false);
compareFold(build.constDouble(1), nan, IrCondition::NotGreaterEqual, true);
}
TEST_SUITE_END();

View File

@@ -226,7 +226,7 @@ TEST_CASE_FIXTURE(Fixture, "dependent_generic_aliases")
LUAU_REQUIRE_ERROR_COUNT(1, result);
const char* expectedError;
std::string expectedError;
if (FFlag::LuauTypeMismatchInvarianceInError)
expectedError = "Type 'bad' could not be converted into 'U<number>'\n"
"caused by:\n"

View File

@@ -95,6 +95,26 @@ TEST_CASE_FIXTURE(Fixture, "infer_that_function_does_not_return_a_table")
CHECK_EQ(result.errors[0], (TypeError{Location{Position{5, 8}, Position{5, 24}}, NotATable{typeChecker.numberType}}));
}
TEST_CASE_FIXTURE(Fixture, "generalize_table_property")
{
CheckResult result = check(R"(
local T = {}
T.foo = function(x)
return x
end
)");
LUAU_REQUIRE_NO_ERRORS(result);
TypeId t = requireType("T");
const TableType* tt = get<TableType>(follow(t));
REQUIRE(tt);
TypeId fooTy = tt->props.at("foo").type;
CHECK("<a>(a) -> a" == toString(fooTy));
}
TEST_CASE_FIXTURE(Fixture, "vararg_functions_should_allow_calls_of_any_types_and_size")
{
CheckResult result = check(R"(

View File

@@ -303,14 +303,9 @@ TEST_CASE_FIXTURE(Fixture, "calling_self_generic_methods")
end
)");
if (FFlag::DebugLuauDeferredConstraintResolution)
LUAU_REQUIRE_NO_ERRORS(result);
else
{
// TODO: Should typecheck but currently errors CLI-39916
// TODO: Should typecheck but currently errors CLI-54277
LUAU_REQUIRE_ERRORS(result);
}
}
TEST_CASE_FIXTURE(Fixture, "infer_generic_property")
{
@@ -1053,8 +1048,11 @@ TEST_CASE_FIXTURE(BuiltinsFixture, "infer_generic_function_function_argument")
)");
LUAU_REQUIRE_NO_ERRORS(result);
}
result = check(R"(
TEST_CASE_FIXTURE(BuiltinsFixture, "infer_generic_function_function_argument_2")
{
CheckResult result = check(R"(
local function map<a, b>(arr: {a}, f: (a) -> b)
local r = {}
for i,v in ipairs(arr) do
@@ -1068,8 +1066,11 @@ TEST_CASE_FIXTURE(BuiltinsFixture, "infer_generic_function_function_argument")
LUAU_REQUIRE_NO_ERRORS(result);
REQUIRE_EQ("{boolean}", toString(requireType("r")));
}
check(R"(
TEST_CASE_FIXTURE(BuiltinsFixture, "infer_generic_function_function_argument_3")
{
CheckResult result = check(R"(
local function foldl<a, b>(arr: {a}, init: b, f: (b, a) -> b)
local r = init
for i,v in ipairs(arr) do
@@ -1214,10 +1215,6 @@ TEST_CASE_FIXTURE(Fixture, "quantify_functions_even_if_they_have_an_explicit_gen
TEST_CASE_FIXTURE(Fixture, "do_not_always_instantiate_generic_intersection_types")
{
ScopedFastFlag sff[] = {
{"LuauMaybeGenericIntersectionTypes", true},
};
CheckResult result = check(R"(
--!strict
type Array<T> = { [number]: T }

View File

@@ -464,7 +464,7 @@ TEST_CASE_FIXTURE(BuiltinsFixture, "typecheck_unary_minus")
local foo
local mt = {}
mt.__unm = function(val: typeof(foo)): string
mt.__unm = function(val): string
return tostring(val.value) .. "test"
end

View File

@@ -1478,8 +1478,6 @@ end
TEST_CASE_FIXTURE(BuiltinsFixture, "refine_unknown_to_table_then_take_the_length")
{
ScopedFastFlag sff{"DebugLuauDeferredConstraintResolution", true};
CheckResult result = check(R"(
local function f(x: unknown)
if typeof(x) == "table" then
@@ -1488,8 +1486,16 @@ TEST_CASE_FIXTURE(BuiltinsFixture, "refine_unknown_to_table_then_take_the_length
end
)");
if (FFlag::DebugLuauDeferredConstraintResolution)
{
LUAU_REQUIRE_NO_ERRORS(result);
CHECK_EQ("table", toString(requireTypeAtPosition({3, 29})));
CHECK_EQ("a & table", toString(requireTypeAtPosition({3, 29})));
}
else
{
LUAU_REQUIRE_ERROR_COUNT(1, result);
CHECK_EQ("unknown", toString(requireTypeAtPosition({3, 29})));
}
}
TEST_CASE_FIXTURE(RefinementClassFixture, "refine_a_param_that_got_resolved_during_constraint_solving_stage")

View File

@@ -362,8 +362,6 @@ TEST_CASE_FIXTURE(TryUnifyFixture, "fuzz_tail_unification_issue")
TEST_CASE_FIXTURE(BuiltinsFixture, "fuzz_unify_any_should_check_log")
{
ScopedFastFlag luauUnifyAnyTxnLog{"LuauUnifyAnyTxnLog", true};
CheckResult result = check(R"(
repeat
_._,_ = nil

View File

@@ -1,8 +1,4 @@
AnnotationTests.corecursive_types_error_on_tight_loop
AnnotationTests.duplicate_type_param_name
AnnotationTests.generic_aliases_are_cloned_properly
AnnotationTests.occurs_check_on_cyclic_intersection_type
AnnotationTests.occurs_check_on_cyclic_union_type
AnnotationTests.instantiate_type_fun_should_not_trip_rbxassert
AnnotationTests.too_many_type_params
AnnotationTests.two_type_params
AstQuery.last_argument_function_call_type
@@ -14,9 +10,6 @@ AutocompleteTest.autocomplete_oop_implicit_self
AutocompleteTest.autocomplete_string_singleton_equality
AutocompleteTest.do_compatible_self_calls
AutocompleteTest.do_wrong_compatible_self_calls
AutocompleteTest.type_correct_expected_argument_type_pack_suggestion
AutocompleteTest.type_correct_expected_argument_type_suggestion_self
AutocompleteTest.type_correct_expected_return_type_pack_suggestion
AutocompleteTest.type_correct_expected_return_type_suggestion
AutocompleteTest.type_correct_suggestion_for_overloads
BuiltinTests.aliased_string_format
@@ -51,41 +44,32 @@ BuiltinTests.table_pack_variadic
DefinitionTests.class_definition_overload_metamethods
DefinitionTests.class_definition_string_props
DefinitionTests.definition_file_classes
DefinitionTests.definitions_symbols_are_generated_for_recursively_referenced_types
DefinitionTests.single_class_type_identity_in_global_types
FrontendTest.environments
FrontendTest.nocheck_cycle_used_by_checked
FrontendTest.reexport_cyclic_type
GenericsTests.apply_type_function_nested_generics1
GenericsTests.apply_type_function_nested_generics2
GenericsTests.better_mismatch_error_messages
GenericsTests.bound_tables_do_not_clone_original_fields
GenericsTests.check_mutual_generic_functions
GenericsTests.correctly_instantiate_polymorphic_member_functions
GenericsTests.do_not_infer_generic_functions
GenericsTests.duplicate_generic_type_packs
GenericsTests.duplicate_generic_types
GenericsTests.generic_argument_count_too_few
GenericsTests.generic_argument_count_too_many
GenericsTests.generic_functions_should_be_memory_safe
GenericsTests.generic_table_method
GenericsTests.generic_type_pack_parentheses
GenericsTests.higher_rank_polymorphism_should_not_accept_instantiated_arguments
GenericsTests.infer_generic_function_function_argument
GenericsTests.infer_generic_function_function_argument_2
GenericsTests.infer_generic_function_function_argument_3
GenericsTests.infer_generic_function_function_argument_overloaded
GenericsTests.infer_generic_lib_function_function_argument
GenericsTests.infer_generic_property
GenericsTests.instantiated_function_argument_names
GenericsTests.instantiation_sharing_types
GenericsTests.no_stack_overflow_from_quantifying
GenericsTests.reject_clashing_generic_and_pack_names
GenericsTests.self_recursive_instantiated_param
IntersectionTypes.no_stack_overflow_from_flattenintersection
IntersectionTypes.select_correct_union_fn
IntersectionTypes.should_still_pick_an_overload_whose_arguments_are_unions
IntersectionTypes.table_intersection_write_sealed
IntersectionTypes.table_intersection_write_sealed_indirect
IntersectionTypes.table_write_sealed_indirect
ModuleTests.any_persistance_does_not_leak
ModuleTests.clone_self_property
ModuleTests.deepClone_cyclic_table
NonstrictModeTests.for_in_iterator_variables_are_any
@@ -102,10 +86,6 @@ NonstrictModeTests.parameters_having_type_any_are_optional
NonstrictModeTests.table_dot_insert_and_recursive_calls
NonstrictModeTests.table_props_are_any
Normalize.cyclic_table_normalizes_sensibly
ParseErrorRecovery.generic_type_list_recovery
ParseErrorRecovery.recovery_of_parenthesized_expressions
ParserTests.parse_nesting_based_end_detection_failsafe_earlier
ParserTests.parse_nesting_based_end_detection_local_function
ProvisionalTests.assign_table_with_refined_property_with_a_similar_type_is_illegal
ProvisionalTests.bail_early_if_unification_is_too_complicated
ProvisionalTests.do_not_ice_when_trying_to_pick_first_of_generic_type_pack
@@ -114,7 +94,6 @@ ProvisionalTests.free_options_cannot_be_unified_together
ProvisionalTests.generic_type_leak_to_module_interface_variadic
ProvisionalTests.greedy_inference_with_shared_self_triggers_function_with_no_returns
ProvisionalTests.pcall_returns_at_least_two_value_but_function_returns_nothing
ProvisionalTests.refine_unknown_to_table_then_clone_it
ProvisionalTests.setmetatable_constrains_free_type_into_free_table
ProvisionalTests.specialization_binds_with_prototypes_too_early
ProvisionalTests.table_insert_with_a_singleton_argument
@@ -122,10 +101,11 @@ ProvisionalTests.typeguard_inference_incomplete
ProvisionalTests.weirditer_should_not_loop_forever
RefinementTest.apply_refinements_on_astexprindexexpr_whose_subscript_expr_is_constant_string
RefinementTest.discriminate_tag
RefinementTest.else_with_no_explicit_expression_should_also_refine_the_tagged_union
RefinementTest.falsiness_of_TruthyPredicate_narrows_into_nil
RefinementTest.narrow_property_of_a_bounded_variable
RefinementTest.nonoptional_type_can_narrow_to_nil_if_sense_is_true
RefinementTest.refine_a_param_that_got_resolved_during_constraint_solving_stage
RefinementTest.refine_unknowns
RefinementTest.type_guard_can_filter_for_intersection_of_tables
RefinementTest.type_narrow_for_all_the_userdata
RefinementTest.type_narrow_to_vector
@@ -157,7 +137,6 @@ TableTests.found_like_key_in_table_property_access
TableTests.found_multiple_like_keys
TableTests.function_calls_produces_sealed_table_given_unsealed_table
TableTests.fuzz_table_unify_instantiated_table
TableTests.fuzz_table_unify_instantiated_table_with_prop_realloc
TableTests.generic_table_instantiation_potential_regression
TableTests.give_up_after_one_metatable_index_look_up
TableTests.indexer_on_sealed_table_must_unify_with_free_table
@@ -198,39 +177,31 @@ TableTests.table_simple_call
TableTests.table_subtyping_with_extra_props_dont_report_multiple_errors
TableTests.table_subtyping_with_missing_props_dont_report_multiple_errors
TableTests.table_unification_4
TableTests.tc_member_function_2
TableTests.unifying_tables_shouldnt_uaf2
TableTests.used_colon_instead_of_dot
TableTests.used_dot_instead_of_colon
ToString.exhaustive_toString_of_cyclic_table
ToString.function_type_with_argument_names_generic
ToString.named_metatable_toStringNamedFunction
ToString.toStringDetailed2
ToString.toStringErrorPack
ToString.toStringNamedFunction_generic_pack
ToString.toStringNamedFunction_hide_self_param
ToString.toStringNamedFunction_hide_type_params
ToString.toStringNamedFunction_id
ToString.toStringNamedFunction_include_self_param
ToString.toStringNamedFunction_map
ToString.toStringNamedFunction_variadics
TryUnifyTests.cli_41095_concat_log_in_sealed_table_unification
TryUnifyTests.members_of_failed_typepack_unification_are_unified_with_errorType
TryUnifyTests.result_of_failed_typepack_unification_is_constrained
TryUnifyTests.typepack_unification_should_trim_free_tails
TryUnifyTests.variadics_should_use_reversed_properly
TypeAliases.cannot_create_cyclic_type_with_unknown_module
TypeAliases.corecursive_types_generic
TypeAliases.forward_declared_alias_is_not_clobbered_by_prior_unification_with_any
TypeAliases.generic_param_remap
TypeAliases.mismatched_generic_type_param
TypeAliases.mutually_recursive_types_errors
TypeAliases.mutually_recursive_types_restriction_not_ok_1
TypeAliases.mutually_recursive_types_restriction_not_ok_2
TypeAliases.mutually_recursive_types_swapsies_not_ok
TypeAliases.recursive_types_restriction_not_ok
TypeAliases.report_shadowed_aliases
TypeAliases.stringify_type_alias_of_recursive_template_table_type
TypeAliases.type_alias_local_mutation
TypeAliases.type_alias_local_rename
TypeAliases.type_alias_of_an_imported_recursive_generic_type
@@ -298,7 +269,7 @@ TypeInferLoops.loop_iter_trailing_nil
TypeInferLoops.properly_infer_iteratee_is_a_free_table
TypeInferLoops.unreachable_code_after_infinite_loop
TypeInferModules.custom_require_global
TypeInferModules.do_not_modify_imported_types_4
TypeInferModules.do_not_modify_imported_types_5
TypeInferModules.module_type_conflict
TypeInferModules.module_type_conflict_instantiated
TypeInferModules.type_error_of_unknown_qualified_type
@@ -312,6 +283,7 @@ TypeInferOperators.cannot_compare_tables_that_do_not_have_the_same_metatable
TypeInferOperators.cannot_indirectly_compare_types_that_do_not_have_a_metatable
TypeInferOperators.cannot_indirectly_compare_types_that_do_not_offer_overloaded_ordering_operators
TypeInferOperators.cli_38355_recursive_union
TypeInferOperators.compound_assign_metatable
TypeInferOperators.compound_assign_mismatch_metatable
TypeInferOperators.disallow_string_and_types_without_metatables_from_arithmetic_binary_ops
TypeInferOperators.in_nonstrict_mode_strip_nil_from_intersections_when_considering_relational_operators
@@ -319,6 +291,7 @@ TypeInferOperators.infer_any_in_all_modes_when_lhs_is_unknown
TypeInferOperators.operator_eq_completely_incompatible
TypeInferOperators.typecheck_overloaded_multiply_that_is_an_intersection
TypeInferOperators.typecheck_overloaded_multiply_that_is_an_intersection_on_rhs
TypeInferOperators.typecheck_unary_len_error
TypeInferOperators.UnknownGlobalCompoundAssign
TypeInferOperators.unrelated_classes_cannot_be_compared
TypeInferOperators.unrelated_primitives_cannot_be_compared
@@ -341,11 +314,8 @@ TypePackTests.type_alias_defaults_confusing_types
TypePackTests.type_alias_defaults_recursive_type
TypePackTests.type_alias_type_pack_multi
TypePackTests.type_alias_type_pack_variadic
TypePackTests.type_alias_type_packs
TypePackTests.type_alias_type_packs_errors
TypePackTests.type_alias_type_packs_import
TypePackTests.type_alias_type_packs_nested
TypePackTests.type_pack_type_parameters
TypePackTests.unify_variadic_tails_in_arguments
TypePackTests.unify_variadic_tails_in_arguments_free
TypePackTests.variadic_packs
@@ -360,7 +330,6 @@ TypeSingletons.table_properties_type_error_escapes
TypeSingletons.taking_the_length_of_union_of_string_singleton
TypeSingletons.widen_the_supertype_if_it_is_free_and_subtype_has_singleton
TypeSingletons.widening_happens_almost_everywhere
TypeSingletons.widening_happens_almost_everywhere_except_for_tables
UnionTypes.index_on_a_union_type_with_missing_property
UnionTypes.optional_assignment_errors
UnionTypes.optional_call_error