Mirror of https://github.com/luau-lang/luau.git, synced 2024-11-15 06:15:44 +08:00
Sync to upstream/release/568 (#865)
* A small subset of control-flow refinements has been added to recognize type options that are unreachable after a conditional/unconditional code block (fixes https://github.com/Roblox/luau/issues/356). For example:

  ```lua
  local function f(x: string?)
      if not x then
          return
      end
      -- x is 'string' here
  end
  ```

  Throwing calls like `error` or `assert(false)` in place of the `return` are also recognized (see the sketch below this summary). Existing complex refinements like `type`/`typeof` and tagged-union checks are expected to work, among others. To enable this feature, the `LuauTinyControlFlowAnalysis` exclusion has to be removed from `ExperimentalFlags.h`. It will become enabled unconditionally in the near future.
* The linter has been integrated into typechecker analysis so that type-aware lint warnings can work in any mode. The `Frontend::lint` methods were deprecated; `Frontend::check` has to be used instead with the `runLintChecks` option set. The resulting lint warnings are located inside `CheckResult`.
* Fixed a large performance drop and increased memory consumption when an array is filled at an offset (fixes https://github.com/Roblox/luau/issues/590).
* Part of the [Type error suppression RFC](https://github.com/Roblox/luau/blob/master/rfcs/type-error-suppression.md) was implemented, making subtyping checks with the `any` type transitive.

---

In our work on the new type solver:

* `--!nocheck` mode no longer reports type errors
* The new solver will not be used for `--!nonstrict` modules until all issues with strict-mode typechecking are fixed
* Added the control-flow aware type refinements mentioned earlier

In native code generation:

* `LOP_NAMECALL` has been translated to IR
* The `type` and `typeof` builtin fastcalls have been translated to IR/assembly
* Additional steps were taken towards arm64 support
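For illustration only (these snippets are hand-written for this summary, not taken from the changeset), throwing calls compose with the existing tagged-union refinements like so:

```lua
type Ok = { tag: "ok", value: number }
type Err = { tag: "err", message: string }

local function unwrap(result: Ok | Err): number
    if result.tag == "err" then
        error(result.message) -- the block always throws, so the 'err' option is unreachable below
    end
    -- result is refined to 'Ok' here
    return result.value
end
```

And one way to hit the array-fill pattern mentioned above (again, an illustrative sketch rather than a reproduction from the issue):

```lua
local t = {}
for i = 1, 100000 do
    t[i + 1000] = i -- filling the array starting at an offset previously triggered the performance/memory issue
end
```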
parent 9311c0c57a · commit 42a2805f85
@@ -2,12 +2,13 @@
#pragma once

#include "Luau/Ast.h"
#include "Luau/Refinement.h"
#include "Luau/Constraint.h"
#include "Luau/ControlFlow.h"
#include "Luau/DataFlowGraph.h"
#include "Luau/Module.h"
#include "Luau/ModuleResolver.h"
#include "Luau/NotNull.h"
#include "Luau/Refinement.h"
#include "Luau/Symbol.h"
#include "Luau/Type.h"
#include "Luau/Variant.h"
@@ -141,26 +142,26 @@ struct ConstraintGraphBuilder
*/
void visit(AstStatBlock* block);

void visitBlockWithoutChildScope(const ScopePtr& scope, AstStatBlock* block);
ControlFlow visitBlockWithoutChildScope(const ScopePtr& scope, AstStatBlock* block);

void visit(const ScopePtr& scope, AstStat* stat);
void visit(const ScopePtr& scope, AstStatBlock* block);
void visit(const ScopePtr& scope, AstStatLocal* local);
void visit(const ScopePtr& scope, AstStatFor* for_);
void visit(const ScopePtr& scope, AstStatForIn* forIn);
void visit(const ScopePtr& scope, AstStatWhile* while_);
void visit(const ScopePtr& scope, AstStatRepeat* repeat);
void visit(const ScopePtr& scope, AstStatLocalFunction* function);
void visit(const ScopePtr& scope, AstStatFunction* function);
void visit(const ScopePtr& scope, AstStatReturn* ret);
void visit(const ScopePtr& scope, AstStatAssign* assign);
void visit(const ScopePtr& scope, AstStatCompoundAssign* assign);
void visit(const ScopePtr& scope, AstStatIf* ifStatement);
void visit(const ScopePtr& scope, AstStatTypeAlias* alias);
void visit(const ScopePtr& scope, AstStatDeclareGlobal* declareGlobal);
void visit(const ScopePtr& scope, AstStatDeclareClass* declareClass);
void visit(const ScopePtr& scope, AstStatDeclareFunction* declareFunction);
void visit(const ScopePtr& scope, AstStatError* error);
ControlFlow visit(const ScopePtr& scope, AstStat* stat);
ControlFlow visit(const ScopePtr& scope, AstStatBlock* block);
ControlFlow visit(const ScopePtr& scope, AstStatLocal* local);
ControlFlow visit(const ScopePtr& scope, AstStatFor* for_);
ControlFlow visit(const ScopePtr& scope, AstStatForIn* forIn);
ControlFlow visit(const ScopePtr& scope, AstStatWhile* while_);
ControlFlow visit(const ScopePtr& scope, AstStatRepeat* repeat);
ControlFlow visit(const ScopePtr& scope, AstStatLocalFunction* function);
ControlFlow visit(const ScopePtr& scope, AstStatFunction* function);
ControlFlow visit(const ScopePtr& scope, AstStatReturn* ret);
ControlFlow visit(const ScopePtr& scope, AstStatAssign* assign);
ControlFlow visit(const ScopePtr& scope, AstStatCompoundAssign* assign);
ControlFlow visit(const ScopePtr& scope, AstStatIf* ifStatement);
ControlFlow visit(const ScopePtr& scope, AstStatTypeAlias* alias);
ControlFlow visit(const ScopePtr& scope, AstStatDeclareGlobal* declareGlobal);
ControlFlow visit(const ScopePtr& scope, AstStatDeclareClass* declareClass);
ControlFlow visit(const ScopePtr& scope, AstStatDeclareFunction* declareFunction);
ControlFlow visit(const ScopePtr& scope, AstStatError* error);

InferencePack checkPack(const ScopePtr& scope, AstArray<AstExpr*> exprs, const std::vector<std::optional<TypeId>>& expectedTypes = {});
InferencePack checkPack(const ScopePtr& scope, AstExpr* expr, const std::vector<std::optional<TypeId>>& expectedTypes = {});
@@ -143,6 +143,14 @@ struct ConstraintSolver
bool block(TypeId target, NotNull<const Constraint> constraint);
bool block(TypePackId target, NotNull<const Constraint> constraint);

/**
 * For all constraints that are blocked on one constraint, make them block
 * on a new constraint.
 * @param source the constraint to copy blocks from.
 * @param addition the constraint that other constraints should now block on.
 */
void inheritBlocks(NotNull<const Constraint> source, NotNull<const Constraint> addition);

// Traverse the type. If any blocked or pending types are found, block
// the constraint on them.
//
Analysis/include/Luau/ControlFlow.h (new file, 36 lines)
@@ -0,0 +1,36 @@
// This file is part of the Luau programming language and is licensed under MIT License; see LICENSE.txt for details
#pragma once

#include <memory>

namespace Luau
{

struct Scope;
using ScopePtr = std::shared_ptr<Scope>;

enum class ControlFlow
{
    None = 0b00001,
    Returns = 0b00010,
    Throws = 0b00100,
    Break = 0b01000,    // Currently unused.
    Continue = 0b10000, // Currently unused.
};

inline ControlFlow operator&(ControlFlow a, ControlFlow b)
{
    return ControlFlow(int(a) & int(b));
}

inline ControlFlow operator|(ControlFlow a, ControlFlow b)
{
    return ControlFlow(int(a) | int(b));
}

inline bool matches(ControlFlow a, ControlFlow b)
{
    return (a & b) != ControlFlow(0);
}

} // namespace Luau
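To relate these flags to source-level behaviour, here is a rough Luau sketch (written for this note, not part of the diff) of how the statement visitors later in this change classify blocks; `Break` and `Continue` are reserved, and per the TODO in the constraint graph builder, `break`/`continue` still report `None` for now:

```lua
local function describe(x: number?): string
    if not x then
        return "missing"  -- this block ends in 'return': ControlFlow::Returns
    end
    if x < 0 then
        error("negative") -- this block always throws: ControlFlow::Throws
    end
    local text = tostring(x) -- ordinary statements leave the flow at ControlFlow::None
    return text
end
```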
@@ -89,14 +89,21 @@ struct FrontendOptions
// order to get more precise type information)
bool forAutocomplete = false;

bool runLintChecks = false;

// If not empty, randomly shuffle the constraint set before attempting to
// solve. Use this value to seed the random number generator.
std::optional<unsigned> randomizeConstraintResolutionSeed;

std::optional<LintOptions> enabledLintWarnings;
};

struct CheckResult
{
std::vector<TypeError> errors;

LintResult lintResult;

std::vector<ModuleName> timeoutHits;
};

@@ -133,8 +140,9 @@ struct Frontend

CheckResult check(const ModuleName& name, std::optional<FrontendOptions> optionOverride = {}); // new shininess

LintResult lint(const ModuleName& name, std::optional<LintOptions> enabledLintWarnings = {});
LintResult lint(const SourceModule& module, std::optional<LintOptions> enabledLintWarnings = {});
// Use 'check' with 'runLintChecks' set to true in FrontendOptions (enabledLintWarnings can be set there as well)
LintResult lint_DEPRECATED(const ModuleName& name, std::optional<LintOptions> enabledLintWarnings = {});
LintResult lint_DEPRECATED(const SourceModule& module, std::optional<LintOptions> enabledLintWarnings = {});

bool isDirty(const ModuleName& name, bool forAutocomplete = false) const;
void markDirty(const ModuleName& name, std::vector<ModuleName>* markedDirty = nullptr);
@@ -2,6 +2,7 @@
#pragma once

#include "Luau/Error.h"
#include "Luau/Linter.h"
#include "Luau/FileResolver.h"
#include "Luau/ParseOptions.h"
#include "Luau/ParseResult.h"
@@ -88,6 +89,7 @@ struct Module

std::unordered_map<Name, TypeId> declaredGlobals;
ErrorVec errors;
LintResult lintResult;
Mode mode;
SourceCode::Type type;
bool timeout = false;
@@ -19,6 +19,8 @@ using ModulePtr = std::shared_ptr<Module>;

bool isSubtype(TypeId subTy, TypeId superTy, NotNull<Scope> scope, NotNull<BuiltinTypes> builtinTypes, InternalErrorReporter& ice);
bool isSubtype(TypePackId subTy, TypePackId superTy, NotNull<Scope> scope, NotNull<BuiltinTypes> builtinTypes, InternalErrorReporter& ice);
bool isConsistentSubtype(TypeId subTy, TypeId superTy, NotNull<Scope> scope, NotNull<BuiltinTypes> builtinTypes, InternalErrorReporter& ice);
bool isConsistentSubtype(TypePackId subTy, TypePackId superTy, NotNull<Scope> scope, NotNull<BuiltinTypes> builtinTypes, InternalErrorReporter& ice);

class TypeIds
{
@@ -203,7 +205,7 @@ struct NormalizedFunctionType
};

// A normalized generic/free type is a union, where each option is of the form (X & T) where
// * X is either a free type or a generic
// * X is either a free type, a generic or a blocked type.
// * T is a normalized type.
struct NormalizedType;
using NormalizedTyvars = std::unordered_map<TypeId, std::unique_ptr<NormalizedType>>;
@@ -214,7 +216,7 @@ bool isInhabited_DEPRECATED(const NormalizedType& norm);
// * P is a union of primitive types (including singletons, classes and the error type)
// * T is a union of table types
// * F is a union of an intersection of function types
// * G is a union of generic/free normalized types, intersected with a normalized type
// * G is a union of generic/free/blocked types, intersected with a normalized type
struct NormalizedType
{
// The top part of the type.
@@ -66,6 +66,7 @@ struct Scope

RefinementMap refinements;
DenseHashMap<const Def*, TypeId> dcrRefinements{nullptr};
void inheritRefinements(const ScopePtr& childScope);

// For mutually recursive type aliases, it's important that
// they use the same types for the same names.
@@ -102,7 +102,7 @@ struct BlockedType
BlockedType();
int index;

static int nextIndex;
static int DEPRECATED_nextIndex;
};

struct PrimitiveType
@@ -2,14 +2,15 @@
#pragma once

#include "Luau/Anyification.h"
#include "Luau/Predicate.h"
#include "Luau/ControlFlow.h"
#include "Luau/Error.h"
#include "Luau/Module.h"
#include "Luau/Symbol.h"
#include "Luau/Predicate.h"
#include "Luau/Substitution.h"
#include "Luau/Symbol.h"
#include "Luau/TxnLog.h"
#include "Luau/TypePack.h"
#include "Luau/Type.h"
#include "Luau/TypePack.h"
#include "Luau/Unifier.h"
#include "Luau/UnifierSharedState.h"

@@ -87,28 +88,28 @@ struct TypeChecker

std::vector<std::pair<Location, ScopePtr>> getScopes() const;

void check(const ScopePtr& scope, const AstStat& statement);
void check(const ScopePtr& scope, const AstStatBlock& statement);
void check(const ScopePtr& scope, const AstStatIf& statement);
void check(const ScopePtr& scope, const AstStatWhile& statement);
void check(const ScopePtr& scope, const AstStatRepeat& statement);
void check(const ScopePtr& scope, const AstStatReturn& return_);
void check(const ScopePtr& scope, const AstStatAssign& assign);
void check(const ScopePtr& scope, const AstStatCompoundAssign& assign);
void check(const ScopePtr& scope, const AstStatLocal& local);
void check(const ScopePtr& scope, const AstStatFor& local);
void check(const ScopePtr& scope, const AstStatForIn& forin);
void check(const ScopePtr& scope, TypeId ty, const ScopePtr& funScope, const AstStatFunction& function);
void check(const ScopePtr& scope, TypeId ty, const ScopePtr& funScope, const AstStatLocalFunction& function);
void check(const ScopePtr& scope, const AstStatTypeAlias& typealias);
void check(const ScopePtr& scope, const AstStatDeclareClass& declaredClass);
void check(const ScopePtr& scope, const AstStatDeclareFunction& declaredFunction);
ControlFlow check(const ScopePtr& scope, const AstStat& statement);
ControlFlow check(const ScopePtr& scope, const AstStatBlock& statement);
ControlFlow check(const ScopePtr& scope, const AstStatIf& statement);
ControlFlow check(const ScopePtr& scope, const AstStatWhile& statement);
ControlFlow check(const ScopePtr& scope, const AstStatRepeat& statement);
ControlFlow check(const ScopePtr& scope, const AstStatReturn& return_);
ControlFlow check(const ScopePtr& scope, const AstStatAssign& assign);
ControlFlow check(const ScopePtr& scope, const AstStatCompoundAssign& assign);
ControlFlow check(const ScopePtr& scope, const AstStatLocal& local);
ControlFlow check(const ScopePtr& scope, const AstStatFor& local);
ControlFlow check(const ScopePtr& scope, const AstStatForIn& forin);
ControlFlow check(const ScopePtr& scope, TypeId ty, const ScopePtr& funScope, const AstStatFunction& function);
ControlFlow check(const ScopePtr& scope, TypeId ty, const ScopePtr& funScope, const AstStatLocalFunction& function);
ControlFlow check(const ScopePtr& scope, const AstStatTypeAlias& typealias);
ControlFlow check(const ScopePtr& scope, const AstStatDeclareClass& declaredClass);
ControlFlow check(const ScopePtr& scope, const AstStatDeclareFunction& declaredFunction);

void prototype(const ScopePtr& scope, const AstStatTypeAlias& typealias, int subLevel = 0);
void prototype(const ScopePtr& scope, const AstStatDeclareClass& declaredClass);

void checkBlock(const ScopePtr& scope, const AstStatBlock& statement);
void checkBlockWithoutRecursionCheck(const ScopePtr& scope, const AstStatBlock& statement);
ControlFlow checkBlock(const ScopePtr& scope, const AstStatBlock& statement);
ControlFlow checkBlockWithoutRecursionCheck(const ScopePtr& scope, const AstStatBlock& statement);
void checkBlockTypeAliases(const ScopePtr& scope, std::vector<AstStat*>& sorted);

WithPredicate<TypeId> checkExpr(
@@ -81,6 +81,8 @@ namespace Luau::Unifiable

using Name = std::string;

int freshIndex();

struct Free
{
explicit Free(TypeLevel level);
@@ -58,6 +58,7 @@ struct Unifier

NotNull<Scope> scope; // const Scope maybe
TxnLog log;
bool failure = false;
ErrorVec errors;
Location location;
Variance variance = Covariant;
@@ -93,7 +94,7 @@ private:

// Traverse the two types provided and block on any BlockedTypes we find.
// Returns true if any types were blocked on.
bool blockOnBlockedTypes(TypeId subTy, TypeId superTy);
bool DEPRECATED_blockOnBlockedTypes(TypeId subTy, TypeId superTy);

void tryUnifyTypeWithUnion(TypeId subTy, TypeId superTy, const UnionType* uv, bool cacheEnabled, bool isFunctionCall);
void tryUnifyTypeWithIntersection(TypeId subTy, TypeId superTy, const IntersectionType* uv);
@ -5,6 +5,7 @@
|
||||
#include "Luau/Breadcrumb.h"
|
||||
#include "Luau/Common.h"
|
||||
#include "Luau/Constraint.h"
|
||||
#include "Luau/ControlFlow.h"
|
||||
#include "Luau/DcrLogger.h"
|
||||
#include "Luau/ModuleResolver.h"
|
||||
#include "Luau/RecursionCounter.h"
|
||||
@ -22,6 +23,7 @@ LUAU_FASTFLAG(LuauNegatedClassTypes);
|
||||
namespace Luau
|
||||
{
|
||||
|
||||
bool doesCallError(const AstExprCall* call); // TypeInfer.cpp
|
||||
const AstStat* getFallthrough(const AstStat* node); // TypeInfer.cpp
|
||||
|
||||
static std::optional<AstExpr*> matchRequire(const AstExprCall& call)
|
||||
@ -344,14 +346,14 @@ void ConstraintGraphBuilder::visit(AstStatBlock* block)
|
||||
logger->captureGenerationModule(module);
|
||||
}
|
||||
|
||||
void ConstraintGraphBuilder::visitBlockWithoutChildScope(const ScopePtr& scope, AstStatBlock* block)
|
||||
ControlFlow ConstraintGraphBuilder::visitBlockWithoutChildScope(const ScopePtr& scope, AstStatBlock* block)
|
||||
{
|
||||
RecursionCounter counter{&recursionCount};
|
||||
|
||||
if (recursionCount >= FInt::LuauCheckRecursionLimit)
|
||||
{
|
||||
reportCodeTooComplex(block->location);
|
||||
return;
|
||||
return ControlFlow::None;
|
||||
}
|
||||
|
||||
std::unordered_map<Name, Location> aliasDefinitionLocations;
|
||||
@ -396,59 +398,77 @@ void ConstraintGraphBuilder::visitBlockWithoutChildScope(const ScopePtr& scope,
|
||||
}
|
||||
}
|
||||
|
||||
std::optional<ControlFlow> firstControlFlow;
|
||||
for (AstStat* stat : block->body)
|
||||
visit(scope, stat);
|
||||
{
|
||||
ControlFlow cf = visit(scope, stat);
|
||||
if (cf != ControlFlow::None && !firstControlFlow)
|
||||
firstControlFlow = cf;
|
||||
}
|
||||
|
||||
return firstControlFlow.value_or(ControlFlow::None);
|
||||
}
|
||||
|
||||
void ConstraintGraphBuilder::visit(const ScopePtr& scope, AstStat* stat)
|
||||
ControlFlow ConstraintGraphBuilder::visit(const ScopePtr& scope, AstStat* stat)
|
||||
{
|
||||
RecursionLimiter limiter{&recursionCount, FInt::LuauCheckRecursionLimit};
|
||||
|
||||
if (auto s = stat->as<AstStatBlock>())
|
||||
visit(scope, s);
|
||||
return visit(scope, s);
|
||||
else if (auto i = stat->as<AstStatIf>())
|
||||
visit(scope, i);
|
||||
return visit(scope, i);
|
||||
else if (auto s = stat->as<AstStatWhile>())
|
||||
visit(scope, s);
|
||||
return visit(scope, s);
|
||||
else if (auto s = stat->as<AstStatRepeat>())
|
||||
visit(scope, s);
|
||||
return visit(scope, s);
|
||||
else if (stat->is<AstStatBreak>() || stat->is<AstStatContinue>())
|
||||
{
|
||||
// Nothing
|
||||
return ControlFlow::None; // TODO: ControlFlow::Break/Continue
|
||||
}
|
||||
else if (auto r = stat->as<AstStatReturn>())
|
||||
visit(scope, r);
|
||||
return visit(scope, r);
|
||||
else if (auto e = stat->as<AstStatExpr>())
|
||||
{
|
||||
checkPack(scope, e->expr);
|
||||
|
||||
if (auto call = e->expr->as<AstExprCall>(); call && doesCallError(call))
|
||||
return ControlFlow::Throws;
|
||||
|
||||
return ControlFlow::None;
|
||||
}
|
||||
else if (auto s = stat->as<AstStatLocal>())
|
||||
visit(scope, s);
|
||||
return visit(scope, s);
|
||||
else if (auto s = stat->as<AstStatFor>())
|
||||
visit(scope, s);
|
||||
return visit(scope, s);
|
||||
else if (auto s = stat->as<AstStatForIn>())
|
||||
visit(scope, s);
|
||||
return visit(scope, s);
|
||||
else if (auto a = stat->as<AstStatAssign>())
|
||||
visit(scope, a);
|
||||
return visit(scope, a);
|
||||
else if (auto a = stat->as<AstStatCompoundAssign>())
|
||||
visit(scope, a);
|
||||
return visit(scope, a);
|
||||
else if (auto f = stat->as<AstStatFunction>())
|
||||
visit(scope, f);
|
||||
return visit(scope, f);
|
||||
else if (auto f = stat->as<AstStatLocalFunction>())
|
||||
visit(scope, f);
|
||||
return visit(scope, f);
|
||||
else if (auto a = stat->as<AstStatTypeAlias>())
|
||||
visit(scope, a);
|
||||
return visit(scope, a);
|
||||
else if (auto s = stat->as<AstStatDeclareGlobal>())
|
||||
visit(scope, s);
|
||||
return visit(scope, s);
|
||||
else if (auto s = stat->as<AstStatDeclareFunction>())
|
||||
visit(scope, s);
|
||||
return visit(scope, s);
|
||||
else if (auto s = stat->as<AstStatDeclareClass>())
|
||||
visit(scope, s);
|
||||
return visit(scope, s);
|
||||
else if (auto s = stat->as<AstStatError>())
|
||||
visit(scope, s);
|
||||
return visit(scope, s);
|
||||
else
|
||||
{
|
||||
LUAU_ASSERT(0 && "Internal error: Unknown AstStat type");
|
||||
return ControlFlow::None;
|
||||
}
|
||||
}
|
||||
|
||||
void ConstraintGraphBuilder::visit(const ScopePtr& scope, AstStatLocal* local)
|
||||
ControlFlow ConstraintGraphBuilder::visit(const ScopePtr& scope, AstStatLocal* local)
|
||||
{
|
||||
std::vector<TypeId> varTypes;
|
||||
varTypes.reserve(local->vars.size);
|
||||
@ -534,7 +554,7 @@ void ConstraintGraphBuilder::visit(const ScopePtr& scope, AstStatLocal* local)
|
||||
}
|
||||
}
|
||||
|
||||
if (local->vars.size == 1 && local->values.size == 1 && firstValueType)
|
||||
if (local->vars.size == 1 && local->values.size == 1 && firstValueType && scope.get() == rootScope)
|
||||
{
|
||||
AstLocal* var = local->vars.data[0];
|
||||
AstExpr* value = local->values.data[0];
|
||||
@ -592,9 +612,11 @@ void ConstraintGraphBuilder::visit(const ScopePtr& scope, AstStatLocal* local)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return ControlFlow::None;
|
||||
}
|
||||
|
||||
void ConstraintGraphBuilder::visit(const ScopePtr& scope, AstStatFor* for_)
|
||||
ControlFlow ConstraintGraphBuilder::visit(const ScopePtr& scope, AstStatFor* for_)
|
||||
{
|
||||
TypeId annotationTy = builtinTypes->numberType;
|
||||
if (for_->var->annotation)
|
||||
@ -619,9 +641,11 @@ void ConstraintGraphBuilder::visit(const ScopePtr& scope, AstStatFor* for_)
|
||||
forScope->dcrRefinements[bc->def] = annotationTy;
|
||||
|
||||
visit(forScope, for_->body);
|
||||
|
||||
return ControlFlow::None;
|
||||
}
|
||||
|
||||
void ConstraintGraphBuilder::visit(const ScopePtr& scope, AstStatForIn* forIn)
|
||||
ControlFlow ConstraintGraphBuilder::visit(const ScopePtr& scope, AstStatForIn* forIn)
|
||||
{
|
||||
ScopePtr loopScope = childScope(forIn, scope);
|
||||
|
||||
@ -645,27 +669,33 @@ void ConstraintGraphBuilder::visit(const ScopePtr& scope, AstStatForIn* forIn)
|
||||
addConstraint(loopScope, getLocation(forIn->values), IterableConstraint{iterator, variablePack});
|
||||
|
||||
visit(loopScope, forIn->body);
|
||||
|
||||
return ControlFlow::None;
|
||||
}
|
||||
|
||||
void ConstraintGraphBuilder::visit(const ScopePtr& scope, AstStatWhile* while_)
|
||||
ControlFlow ConstraintGraphBuilder::visit(const ScopePtr& scope, AstStatWhile* while_)
|
||||
{
|
||||
check(scope, while_->condition);
|
||||
|
||||
ScopePtr whileScope = childScope(while_, scope);
|
||||
|
||||
visit(whileScope, while_->body);
|
||||
|
||||
return ControlFlow::None;
|
||||
}
|
||||
|
||||
void ConstraintGraphBuilder::visit(const ScopePtr& scope, AstStatRepeat* repeat)
|
||||
ControlFlow ConstraintGraphBuilder::visit(const ScopePtr& scope, AstStatRepeat* repeat)
|
||||
{
|
||||
ScopePtr repeatScope = childScope(repeat, scope);
|
||||
|
||||
visitBlockWithoutChildScope(repeatScope, repeat->body);
|
||||
|
||||
check(repeatScope, repeat->condition);
|
||||
|
||||
return ControlFlow::None;
|
||||
}
|
||||
|
||||
void ConstraintGraphBuilder::visit(const ScopePtr& scope, AstStatLocalFunction* function)
|
||||
ControlFlow ConstraintGraphBuilder::visit(const ScopePtr& scope, AstStatLocalFunction* function)
|
||||
{
|
||||
// Local
|
||||
// Global
|
||||
@ -699,9 +729,11 @@ void ConstraintGraphBuilder::visit(const ScopePtr& scope, AstStatLocalFunction*
|
||||
});
|
||||
|
||||
addConstraint(scope, std::move(c));
|
||||
|
||||
return ControlFlow::None;
|
||||
}
|
||||
|
||||
void ConstraintGraphBuilder::visit(const ScopePtr& scope, AstStatFunction* function)
|
||||
ControlFlow ConstraintGraphBuilder::visit(const ScopePtr& scope, AstStatFunction* function)
|
||||
{
|
||||
// Name could be AstStatLocal, AstStatGlobal, AstStatIndexName.
|
||||
// With or without self
|
||||
@ -779,9 +811,11 @@ void ConstraintGraphBuilder::visit(const ScopePtr& scope, AstStatFunction* funct
|
||||
});
|
||||
|
||||
addConstraint(scope, std::move(c));
|
||||
|
||||
return ControlFlow::None;
|
||||
}
|
||||
|
||||
void ConstraintGraphBuilder::visit(const ScopePtr& scope, AstStatReturn* ret)
|
||||
ControlFlow ConstraintGraphBuilder::visit(const ScopePtr& scope, AstStatReturn* ret)
|
||||
{
|
||||
// At this point, the only way scope->returnType should have anything
|
||||
// interesting in it is if the function has an explicit return annotation.
|
||||
@ -793,13 +827,18 @@ void ConstraintGraphBuilder::visit(const ScopePtr& scope, AstStatReturn* ret)
|
||||
|
||||
TypePackId exprTypes = checkPack(scope, ret->list, expectedTypes).tp;
|
||||
addConstraint(scope, ret->location, PackSubtypeConstraint{exprTypes, scope->returnType});
|
||||
|
||||
return ControlFlow::Returns;
|
||||
}
|
||||
|
||||
void ConstraintGraphBuilder::visit(const ScopePtr& scope, AstStatBlock* block)
|
||||
ControlFlow ConstraintGraphBuilder::visit(const ScopePtr& scope, AstStatBlock* block)
|
||||
{
|
||||
ScopePtr innerScope = childScope(block, scope);
|
||||
|
||||
visitBlockWithoutChildScope(innerScope, block);
|
||||
ControlFlow flow = visitBlockWithoutChildScope(innerScope, block);
|
||||
scope->inheritRefinements(innerScope);
|
||||
|
||||
return flow;
|
||||
}
|
||||
|
||||
static void bindFreeType(TypeId a, TypeId b)
|
||||
@ -819,7 +858,7 @@ static void bindFreeType(TypeId a, TypeId b)
|
||||
asMutable(b)->ty.emplace<BoundType>(a);
|
||||
}
|
||||
|
||||
void ConstraintGraphBuilder::visit(const ScopePtr& scope, AstStatAssign* assign)
|
||||
ControlFlow ConstraintGraphBuilder::visit(const ScopePtr& scope, AstStatAssign* assign)
|
||||
{
|
||||
std::vector<TypeId> varTypes = checkLValues(scope, assign->vars);
|
||||
|
||||
@ -839,9 +878,11 @@ void ConstraintGraphBuilder::visit(const ScopePtr& scope, AstStatAssign* assign)
|
||||
TypePackId varPack = arena->addTypePack({varTypes});
|
||||
|
||||
addConstraint(scope, assign->location, PackSubtypeConstraint{exprPack, varPack});
|
||||
|
||||
return ControlFlow::None;
|
||||
}
|
||||
|
||||
void ConstraintGraphBuilder::visit(const ScopePtr& scope, AstStatCompoundAssign* assign)
|
||||
ControlFlow ConstraintGraphBuilder::visit(const ScopePtr& scope, AstStatCompoundAssign* assign)
|
||||
{
|
||||
// We need to tweak the BinaryConstraint that we emit, so we cannot use the
|
||||
// strategy of falsifying an AST fragment.
|
||||
@@ -852,23 +893,34 @@
addConstraint(scope, assign->location,
    BinaryConstraint{assign->op, varTy, valueTy, resultType, assign, &module->astOriginalCallTypes, &module->astOverloadResolvedTypes});
addConstraint(scope, assign->location, SubtypeConstraint{resultType, varTy});

return ControlFlow::None;
}

void ConstraintGraphBuilder::visit(const ScopePtr& scope, AstStatIf* ifStatement)
ControlFlow ConstraintGraphBuilder::visit(const ScopePtr& scope, AstStatIf* ifStatement)
{
ScopePtr condScope = childScope(ifStatement->condition, scope);
RefinementId refinement = check(condScope, ifStatement->condition, std::nullopt).refinement;
RefinementId refinement = check(scope, ifStatement->condition, std::nullopt).refinement;

ScopePtr thenScope = childScope(ifStatement->thenbody, scope);
applyRefinements(thenScope, ifStatement->condition->location, refinement);
visit(thenScope, ifStatement->thenbody);

ScopePtr elseScope = childScope(ifStatement->elsebody ? ifStatement->elsebody : ifStatement, scope);
applyRefinements(elseScope, ifStatement->elseLocation.value_or(ifStatement->condition->location), refinementArena.negation(refinement));

ControlFlow thencf = visit(thenScope, ifStatement->thenbody);
ControlFlow elsecf = ControlFlow::None;
if (ifStatement->elsebody)
{
ScopePtr elseScope = childScope(ifStatement->elsebody, scope);
applyRefinements(elseScope, ifStatement->elseLocation.value_or(ifStatement->condition->location), refinementArena.negation(refinement));
visit(elseScope, ifStatement->elsebody);
}
elsecf = visit(elseScope, ifStatement->elsebody);

if (matches(thencf, ControlFlow::Returns | ControlFlow::Throws) && elsecf == ControlFlow::None)
    scope->inheritRefinements(elseScope);
else if (thencf == ControlFlow::None && matches(elsecf, ControlFlow::Returns | ControlFlow::Throws))
    scope->inheritRefinements(thenScope);

if (matches(thencf, ControlFlow::Returns | ControlFlow::Throws) && matches(elsecf, ControlFlow::Returns | ControlFlow::Throws))
    return ControlFlow::Returns;
else
    return ControlFlow::None;
}
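A small hand-written Luau example of the last case above (not from the commit): when only one branch returns or throws, the surrounding scope inherits the other branch's refinements, which is what makes the early-return pattern from the commit message work; when both branches diverge, the whole `if` statement reports `Returns` to the enclosing block.

```lua
local function sign(x: number): number
    if x >= 0 then
        return 1
    else
        return -1
    end
    -- both branches return, so this entire 'if' is reported as ControlFlow::Returns
    -- to the block that contains it
end
```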
static bool occursCheck(TypeId needle, TypeId haystack)
|
||||
@ -890,7 +942,7 @@ static bool occursCheck(TypeId needle, TypeId haystack)
|
||||
return false;
|
||||
}
|
||||
|
||||
void ConstraintGraphBuilder::visit(const ScopePtr& scope, AstStatTypeAlias* alias)
|
||||
ControlFlow ConstraintGraphBuilder::visit(const ScopePtr& scope, AstStatTypeAlias* alias)
|
||||
{
|
||||
ScopePtr* defnScope = astTypeAliasDefiningScopes.find(alias);
|
||||
|
||||
@ -904,7 +956,7 @@ void ConstraintGraphBuilder::visit(const ScopePtr& scope, AstStatTypeAlias* alia
|
||||
// case we just skip over it.
|
||||
auto bindingIt = typeBindings->find(alias->name.value);
|
||||
if (bindingIt == typeBindings->end() || defnScope == nullptr)
|
||||
return;
|
||||
return ControlFlow::None;
|
||||
|
||||
TypeId ty = resolveType(*defnScope, alias->type, /* inTypeArguments */ false);
|
||||
|
||||
@ -935,9 +987,11 @@ void ConstraintGraphBuilder::visit(const ScopePtr& scope, AstStatTypeAlias* alia
|
||||
std::move(typeParams),
|
||||
std::move(typePackParams),
|
||||
});
|
||||
|
||||
return ControlFlow::None;
|
||||
}
|
||||
|
||||
void ConstraintGraphBuilder::visit(const ScopePtr& scope, AstStatDeclareGlobal* global)
|
||||
ControlFlow ConstraintGraphBuilder::visit(const ScopePtr& scope, AstStatDeclareGlobal* global)
|
||||
{
|
||||
LUAU_ASSERT(global->type);
|
||||
|
||||
@ -949,6 +1003,8 @@ void ConstraintGraphBuilder::visit(const ScopePtr& scope, AstStatDeclareGlobal*
|
||||
|
||||
BreadcrumbId bc = dfg->getBreadcrumb(global);
|
||||
rootScope->dcrRefinements[bc->def] = globalTy;
|
||||
|
||||
return ControlFlow::None;
|
||||
}
|
||||
|
||||
static bool isMetamethod(const Name& name)
|
||||
@ -958,7 +1014,7 @@ static bool isMetamethod(const Name& name)
|
||||
name == "__metatable" || name == "__eq" || name == "__lt" || name == "__le" || name == "__mode" || name == "__iter" || name == "__len";
|
||||
}
|
||||
|
||||
void ConstraintGraphBuilder::visit(const ScopePtr& scope, AstStatDeclareClass* declaredClass)
|
||||
ControlFlow ConstraintGraphBuilder::visit(const ScopePtr& scope, AstStatDeclareClass* declaredClass)
|
||||
{
|
||||
std::optional<TypeId> superTy = FFlag::LuauNegatedClassTypes ? std::make_optional(builtinTypes->classType) : std::nullopt;
|
||||
if (declaredClass->superName)
|
||||
@ -969,7 +1025,7 @@ void ConstraintGraphBuilder::visit(const ScopePtr& scope, AstStatDeclareClass* d
|
||||
if (!lookupType)
|
||||
{
|
||||
reportError(declaredClass->location, UnknownSymbol{superName, UnknownSymbol::Type});
|
||||
return;
|
||||
return ControlFlow::None;
|
||||
}
|
||||
|
||||
// We don't have generic classes, so this assertion _should_ never be hit.
|
||||
@ -981,7 +1037,7 @@ void ConstraintGraphBuilder::visit(const ScopePtr& scope, AstStatDeclareClass* d
|
||||
reportError(declaredClass->location,
|
||||
GenericError{format("Cannot use non-class type '%s' as a superclass of class '%s'", superName.c_str(), declaredClass->name.value)});
|
||||
|
||||
return;
|
||||
return ControlFlow::None;
|
||||
}
|
||||
}
|
||||
|
||||
@ -1056,9 +1112,11 @@ void ConstraintGraphBuilder::visit(const ScopePtr& scope, AstStatDeclareClass* d
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return ControlFlow::None;
|
||||
}
|
||||
|
||||
void ConstraintGraphBuilder::visit(const ScopePtr& scope, AstStatDeclareFunction* global)
|
||||
ControlFlow ConstraintGraphBuilder::visit(const ScopePtr& scope, AstStatDeclareFunction* global)
|
||||
{
|
||||
std::vector<std::pair<Name, GenericTypeDefinition>> generics = createGenerics(scope, global->generics);
|
||||
std::vector<std::pair<Name, GenericTypePackDefinition>> genericPacks = createGenericPacks(scope, global->genericPacks);
|
||||
@ -1097,14 +1155,18 @@ void ConstraintGraphBuilder::visit(const ScopePtr& scope, AstStatDeclareFunction
|
||||
|
||||
BreadcrumbId bc = dfg->getBreadcrumb(global);
|
||||
rootScope->dcrRefinements[bc->def] = fnType;
|
||||
|
||||
return ControlFlow::None;
|
||||
}
|
||||
|
||||
void ConstraintGraphBuilder::visit(const ScopePtr& scope, AstStatError* error)
|
||||
ControlFlow ConstraintGraphBuilder::visit(const ScopePtr& scope, AstStatError* error)
|
||||
{
|
||||
for (AstStat* stat : error->statements)
|
||||
visit(scope, stat);
|
||||
for (AstExpr* expr : error->expressions)
|
||||
check(scope, expr);
|
||||
|
||||
return ControlFlow::None;
|
||||
}
|
||||
|
||||
InferencePack ConstraintGraphBuilder::checkPack(
|
||||
|
@ -1273,19 +1273,11 @@ bool ConstraintSolver::tryDispatch(const FunctionCallConstraint& c, NotNull<cons
|
||||
auto ic = pushConstraintGreedy(InstantiationConstraint{instantiatedTy, fn});
|
||||
auto sc = pushConstraintGreedy(SubtypeConstraint{instantiatedTy, inferredTy});
|
||||
|
||||
// Anything that is blocked on this constraint must also be blocked on our
|
||||
// synthesized constraints.
|
||||
auto blockedIt = blocked.find(constraint.get());
|
||||
if (blockedIt != blocked.end())
|
||||
{
|
||||
for (const auto& blockedConstraint : blockedIt->second)
|
||||
{
|
||||
if (ic)
|
||||
block(NotNull{ic}, blockedConstraint);
|
||||
if (sc)
|
||||
block(NotNull{sc}, blockedConstraint);
|
||||
}
|
||||
}
|
||||
if (ic)
|
||||
inheritBlocks(constraint, NotNull{ic});
|
||||
|
||||
if (sc)
|
||||
inheritBlocks(constraint, NotNull{sc});
|
||||
|
||||
unblock(c.result);
|
||||
return true;
|
||||
@ -1330,7 +1322,7 @@ bool ConstraintSolver::tryDispatch(const HasPropConstraint& c, NotNull<const Con
|
||||
return false;
|
||||
}
|
||||
|
||||
asMutable(c.resultType)->ty.emplace<BoundType>(result.value_or(builtinTypes->errorRecoveryType()));
|
||||
asMutable(c.resultType)->ty.emplace<BoundType>(result.value_or(builtinTypes->anyType));
|
||||
unblock(c.resultType);
|
||||
return true;
|
||||
}
|
||||
@ -1796,13 +1788,23 @@ bool ConstraintSolver::tryDispatchIterableFunction(
|
||||
return false;
|
||||
}
|
||||
|
||||
const TypeId firstIndex = isNil(firstIndexTy) ? arena->freshType(constraint->scope) // FIXME: Surely this should be a union (free | nil)
|
||||
: firstIndexTy;
|
||||
TypeId firstIndex;
|
||||
TypeId retIndex;
|
||||
if (isNil(firstIndexTy) || isOptional(firstIndexTy))
|
||||
{
|
||||
firstIndex = arena->addType(UnionType{{arena->freshType(constraint->scope), builtinTypes->nilType}});
|
||||
retIndex = firstIndex;
|
||||
}
|
||||
else
|
||||
{
|
||||
firstIndex = firstIndexTy;
|
||||
retIndex = arena->addType(UnionType{{firstIndexTy, builtinTypes->nilType}});
|
||||
}
|
||||
|
||||
// nextTy : (tableTy, indexTy?) -> (indexTy?, valueTailTy...)
|
||||
const TypePackId nextArgPack = arena->addTypePack({tableTy, arena->addType(UnionType{{firstIndex, builtinTypes->nilType}})});
|
||||
const TypePackId nextArgPack = arena->addTypePack({tableTy, firstIndex});
|
||||
const TypePackId valueTailTy = arena->addTypePack(FreeTypePack{constraint->scope});
|
||||
const TypePackId nextRetPack = arena->addTypePack(TypePack{{firstIndex}, valueTailTy});
|
||||
const TypePackId nextRetPack = arena->addTypePack(TypePack{{retIndex}, valueTailTy});
|
||||
|
||||
const TypeId expectedNextTy = arena->addType(FunctionType{TypeLevel{}, constraint->scope, nextArgPack, nextRetPack});
|
||||
unify(nextTy, expectedNextTy, constraint->scope);
|
||||
@ -1825,7 +1827,8 @@ bool ConstraintSolver::tryDispatchIterableFunction(
|
||||
modifiedNextRetHead.push_back(*it);
|
||||
|
||||
TypePackId modifiedNextRetPack = arena->addTypePack(std::move(modifiedNextRetHead), it.tail());
|
||||
pushConstraint(constraint->scope, constraint->location, PackSubtypeConstraint{c.variables, modifiedNextRetPack});
|
||||
auto psc = pushConstraint(constraint->scope, constraint->location, PackSubtypeConstraint{c.variables, modifiedNextRetPack});
|
||||
inheritBlocks(constraint, psc);
|
||||
|
||||
return true;
|
||||
}
|
||||
@ -1883,7 +1886,17 @@ std::pair<std::vector<TypeId>, std::optional<TypeId>> ConstraintSolver::lookupTa
|
||||
TypeId indexType = follow(indexProp->second.type);
|
||||
|
||||
if (auto ft = get<FunctionType>(indexType))
|
||||
return {{}, first(ft->retTypes)};
|
||||
{
|
||||
TypePack rets = extendTypePack(*arena, builtinTypes, ft->retTypes, 1);
|
||||
if (1 == rets.head.size())
|
||||
return {{}, rets.head[0]};
|
||||
else
|
||||
{
|
||||
// This should probably be an error: We need the first result of the MT.__index method,
|
||||
// but it returns 0 values. See CLI-68672
|
||||
return {{}, builtinTypes->nilType};
|
||||
}
|
||||
}
|
||||
else
|
||||
return lookupTableProp(indexType, propName, seen);
|
||||
}
|
||||
@ -2009,6 +2022,20 @@ bool ConstraintSolver::block(TypePackId target, NotNull<const Constraint> constr
|
||||
return false;
|
||||
}
|
||||
|
||||
void ConstraintSolver::inheritBlocks(NotNull<const Constraint> source, NotNull<const Constraint> addition)
|
||||
{
|
||||
// Anything that is blocked on this constraint must also be blocked on our
|
||||
// synthesized constraints.
|
||||
auto blockedIt = blocked.find(source.get());
|
||||
if (blockedIt != blocked.end())
|
||||
{
|
||||
for (const auto& blockedConstraint : blockedIt->second)
|
||||
{
|
||||
block(addition, blockedConstraint);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
struct Blocker : TypeOnceVisitor
|
||||
{
|
||||
NotNull<ConstraintSolver> solver;
|
||||
|
@ -29,6 +29,7 @@ LUAU_FASTINT(LuauTypeInferRecursionLimit)
|
||||
LUAU_FASTINT(LuauTarjanChildLimit)
|
||||
LUAU_FASTFLAG(LuauInferInNoCheckMode)
|
||||
LUAU_FASTFLAGVARIABLE(LuauKnowsTheDataModel3, false)
|
||||
LUAU_FASTFLAGVARIABLE(LuauLintInTypecheck, false)
|
||||
LUAU_FASTINTVARIABLE(LuauAutocompleteCheckTimeoutMs, 100)
|
||||
LUAU_FASTFLAGVARIABLE(DebugLuauDeferredConstraintResolution, false)
|
||||
LUAU_FASTFLAGVARIABLE(LuauDefinitionFileSourceModule, false)
|
||||
@ -330,7 +331,7 @@ std::optional<std::string> pathExprToModuleName(const ModuleName& currentModuleN
|
||||
namespace
|
||||
{
|
||||
|
||||
ErrorVec accumulateErrors(
|
||||
static ErrorVec accumulateErrors(
|
||||
const std::unordered_map<ModuleName, SourceNode>& sourceNodes, const std::unordered_map<ModuleName, ModulePtr>& modules, const ModuleName& name)
|
||||
{
|
||||
std::unordered_set<ModuleName> seen;
|
||||
@ -375,6 +376,25 @@ ErrorVec accumulateErrors(
|
||||
return result;
|
||||
}
|
||||
|
||||
static void filterLintOptions(LintOptions& lintOptions, const std::vector<HotComment>& hotcomments, Mode mode)
|
||||
{
|
||||
LUAU_ASSERT(FFlag::LuauLintInTypecheck);
|
||||
|
||||
uint64_t ignoreLints = LintWarning::parseMask(hotcomments);
|
||||
|
||||
lintOptions.warningMask &= ~ignoreLints;
|
||||
|
||||
if (mode != Mode::NoCheck)
|
||||
{
|
||||
lintOptions.disableWarning(Luau::LintWarning::Code_UnknownGlobal);
|
||||
}
|
||||
|
||||
if (mode == Mode::Strict)
|
||||
{
|
||||
lintOptions.disableWarning(Luau::LintWarning::Code_ImplicitReturn);
|
||||
}
|
||||
}
|
||||
|
||||
// Given a source node (start), find all requires that start a transitive dependency path that ends back at start
|
||||
// For each such path, record the full path and the location of the require in the starting module.
|
||||
// Note that this is O(V^2) for a fully connected graph and produces O(V) paths of length O(V)
|
||||
@ -514,8 +534,24 @@ CheckResult Frontend::check(const ModuleName& name, std::optional<FrontendOption
|
||||
throw InternalCompilerError("Frontend::modules does not have data for " + name, name);
|
||||
}
|
||||
|
||||
return CheckResult{
|
||||
accumulateErrors(sourceNodes, frontendOptions.forAutocomplete ? moduleResolverForAutocomplete.modules : moduleResolver.modules, name)};
|
||||
if (FFlag::LuauLintInTypecheck)
|
||||
{
|
||||
std::unordered_map<ModuleName, ModulePtr>& modules =
|
||||
frontendOptions.forAutocomplete ? moduleResolverForAutocomplete.modules : moduleResolver.modules;
|
||||
|
||||
checkResult.errors = accumulateErrors(sourceNodes, modules, name);
|
||||
|
||||
// Get lint result only for top checked module
|
||||
if (auto it = modules.find(name); it != modules.end())
|
||||
checkResult.lintResult = it->second->lintResult;
|
||||
|
||||
return checkResult;
|
||||
}
|
||||
else
|
||||
{
|
||||
return CheckResult{accumulateErrors(
|
||||
sourceNodes, frontendOptions.forAutocomplete ? moduleResolverForAutocomplete.modules : moduleResolver.modules, name)};
|
||||
}
|
||||
}
|
||||
|
||||
std::vector<ModuleName> buildQueue;
|
||||
@ -579,7 +615,7 @@ CheckResult Frontend::check(const ModuleName& name, std::optional<FrontendOption
|
||||
else
|
||||
typeCheckerForAutocomplete.unifierIterationLimit = std::nullopt;
|
||||
|
||||
ModulePtr moduleForAutocomplete = FFlag::DebugLuauDeferredConstraintResolution
|
||||
ModulePtr moduleForAutocomplete = (FFlag::DebugLuauDeferredConstraintResolution && mode == Mode::Strict)
|
||||
? check(sourceModule, mode, requireCycles, /*forAutocomplete*/ true, /*recordJsonLog*/ false)
|
||||
: typeCheckerForAutocomplete.check(sourceModule, Mode::Strict, environmentScope);
|
||||
|
||||
@ -609,8 +645,9 @@ CheckResult Frontend::check(const ModuleName& name, std::optional<FrontendOption
|
||||
|
||||
const bool recordJsonLog = FFlag::DebugLuauLogSolverToJson && moduleName == name;
|
||||
|
||||
ModulePtr module = FFlag::DebugLuauDeferredConstraintResolution ? check(sourceModule, mode, requireCycles, /*forAutocomplete*/ false, recordJsonLog)
|
||||
: typeChecker.check(sourceModule, mode, environmentScope);
|
||||
ModulePtr module = (FFlag::DebugLuauDeferredConstraintResolution && mode == Mode::Strict)
|
||||
? check(sourceModule, mode, requireCycles, /*forAutocomplete*/ false, recordJsonLog)
|
||||
: typeChecker.check(sourceModule, mode, environmentScope);
|
||||
|
||||
stats.timeCheck += getTimestamp() - timestamp;
|
||||
stats.filesStrict += mode == Mode::Strict;
|
||||
@ -619,6 +656,28 @@ CheckResult Frontend::check(const ModuleName& name, std::optional<FrontendOption
|
||||
if (module == nullptr)
|
||||
throw InternalCompilerError("Frontend::check produced a nullptr module for " + moduleName, moduleName);
|
||||
|
||||
if (FFlag::DebugLuauDeferredConstraintResolution && mode == Mode::NoCheck)
|
||||
module->errors.clear();
|
||||
|
||||
if (frontendOptions.runLintChecks)
|
||||
{
|
||||
LUAU_TIMETRACE_SCOPE("lint", "Frontend");
|
||||
|
||||
LUAU_ASSERT(FFlag::LuauLintInTypecheck);
|
||||
|
||||
LintOptions lintOptions = frontendOptions.enabledLintWarnings.value_or(config.enabledLint);
|
||||
filterLintOptions(lintOptions, sourceModule.hotcomments, mode);
|
||||
|
||||
double timestamp = getTimestamp();
|
||||
|
||||
std::vector<LintWarning> warnings =
|
||||
Luau::lint(sourceModule.root, *sourceModule.names, environmentScope, module.get(), sourceModule.hotcomments, lintOptions);
|
||||
|
||||
stats.timeLint += getTimestamp() - timestamp;
|
||||
|
||||
module->lintResult = classifyLints(warnings, config);
|
||||
}
|
||||
|
||||
if (!frontendOptions.retainFullTypeGraphs)
|
||||
{
|
||||
// copyErrors needs to allocate into interfaceTypes as it copies
|
||||
@ -665,6 +724,16 @@ CheckResult Frontend::check(const ModuleName& name, std::optional<FrontendOption
|
||||
sourceNode.dirtyModule = false;
|
||||
}
|
||||
|
||||
if (FFlag::LuauLintInTypecheck)
|
||||
{
|
||||
// Get lint result only for top checked module
|
||||
std::unordered_map<ModuleName, ModulePtr>& modules =
|
||||
frontendOptions.forAutocomplete ? moduleResolverForAutocomplete.modules : moduleResolver.modules;
|
||||
|
||||
if (auto it = modules.find(name); it != modules.end())
|
||||
checkResult.lintResult = it->second->lintResult;
|
||||
}
|
||||
|
||||
return checkResult;
|
||||
}
|
||||
|
||||
@ -793,8 +862,10 @@ ScopePtr Frontend::getModuleEnvironment(const SourceModule& module, const Config
|
||||
return result;
|
||||
}
|
||||
|
||||
LintResult Frontend::lint(const ModuleName& name, std::optional<Luau::LintOptions> enabledLintWarnings)
|
||||
LintResult Frontend::lint_DEPRECATED(const ModuleName& name, std::optional<Luau::LintOptions> enabledLintWarnings)
|
||||
{
|
||||
LUAU_ASSERT(!FFlag::LuauLintInTypecheck);
|
||||
|
||||
LUAU_TIMETRACE_SCOPE("Frontend::lint", "Frontend");
|
||||
LUAU_TIMETRACE_ARGUMENT("name", name.c_str());
|
||||
|
||||
@ -803,11 +874,13 @@ LintResult Frontend::lint(const ModuleName& name, std::optional<Luau::LintOption
|
||||
if (!sourceModule)
|
||||
return LintResult{}; // FIXME: We really should do something a bit more obvious when a file is too broken to lint.
|
||||
|
||||
return lint(*sourceModule, enabledLintWarnings);
|
||||
return lint_DEPRECATED(*sourceModule, enabledLintWarnings);
|
||||
}
|
||||
|
||||
LintResult Frontend::lint(const SourceModule& module, std::optional<Luau::LintOptions> enabledLintWarnings)
|
||||
LintResult Frontend::lint_DEPRECATED(const SourceModule& module, std::optional<Luau::LintOptions> enabledLintWarnings)
|
||||
{
|
||||
LUAU_ASSERT(!FFlag::LuauLintInTypecheck);
|
||||
|
||||
LUAU_TIMETRACE_SCOPE("Frontend::lint", "Frontend");
|
||||
LUAU_TIMETRACE_ARGUMENT("module", module.name.c_str());
|
||||
|
||||
|
@ -20,8 +20,10 @@ LUAU_FASTINTVARIABLE(LuauNormalizeCacheLimit, 100000);
|
||||
LUAU_FASTFLAGVARIABLE(LuauNegatedClassTypes, false);
|
||||
LUAU_FASTFLAGVARIABLE(LuauNegatedFunctionTypes, false);
|
||||
LUAU_FASTFLAGVARIABLE(LuauNegatedTableTypes, false);
|
||||
LUAU_FASTFLAGVARIABLE(LuauNormalizeBlockedTypes, false);
|
||||
LUAU_FASTFLAG(DebugLuauDeferredConstraintResolution)
|
||||
LUAU_FASTFLAG(LuauUninhabitedSubAnything2)
|
||||
LUAU_FASTFLAG(LuauTransitiveSubtyping)
|
||||
|
||||
namespace Luau
|
||||
{
|
||||
@ -325,6 +327,8 @@ static int tyvarIndex(TypeId ty)
|
||||
return gtv->index;
|
||||
else if (const FreeType* ftv = get<FreeType>(ty))
|
||||
return ftv->index;
|
||||
else if (const BlockedType* btv = get<BlockedType>(ty))
|
||||
return btv->index;
|
||||
else
|
||||
return 0;
|
||||
}
|
||||
@ -529,7 +533,7 @@ static bool areNormalizedClasses(const NormalizedClassType& tys)
|
||||
|
||||
static bool isPlainTyvar(TypeId ty)
|
||||
{
|
||||
return (get<FreeType>(ty) || get<GenericType>(ty));
|
||||
return (get<FreeType>(ty) || get<GenericType>(ty) || (FFlag::LuauNormalizeBlockedTypes && get<BlockedType>(ty)));
|
||||
}
|
||||
|
||||
static bool isNormalizedTyvar(const NormalizedTyvars& tyvars)
|
||||
@ -1271,6 +1275,8 @@ void Normalizer::unionTables(TypeIds& heres, const TypeIds& theres)
|
||||
bool Normalizer::unionNormals(NormalizedType& here, const NormalizedType& there, int ignoreSmallerTyvars)
|
||||
{
|
||||
TypeId tops = unionOfTops(here.tops, there.tops);
|
||||
if (FFlag::LuauTransitiveSubtyping && get<UnknownType>(tops) && (get<ErrorType>(here.errors) || get<ErrorType>(there.errors)))
|
||||
tops = builtinTypes->anyType;
|
||||
if (!get<NeverType>(tops))
|
||||
{
|
||||
clearNormal(here);
|
||||
@ -1341,12 +1347,21 @@ bool Normalizer::unionNormalWithTy(NormalizedType& here, TypeId there, int ignor
|
||||
if (get<AnyType>(there) || get<UnknownType>(there))
|
||||
{
|
||||
TypeId tops = unionOfTops(here.tops, there);
|
||||
if (FFlag::LuauTransitiveSubtyping && get<UnknownType>(tops) && get<ErrorType>(here.errors))
|
||||
tops = builtinTypes->anyType;
|
||||
clearNormal(here);
|
||||
here.tops = tops;
|
||||
return true;
|
||||
}
|
||||
else if (get<NeverType>(there) || !get<NeverType>(here.tops))
|
||||
else if (!FFlag::LuauTransitiveSubtyping && (get<NeverType>(there) || !get<NeverType>(here.tops)))
|
||||
return true;
|
||||
else if (FFlag::LuauTransitiveSubtyping && (get<NeverType>(there) || get<AnyType>(here.tops)))
|
||||
return true;
|
||||
else if (FFlag::LuauTransitiveSubtyping && get<ErrorType>(there) && get<UnknownType>(here.tops))
|
||||
{
|
||||
here.tops = builtinTypes->anyType;
|
||||
return true;
|
||||
}
|
||||
else if (const UnionType* utv = get<UnionType>(there))
|
||||
{
|
||||
for (UnionTypeIterator it = begin(utv); it != end(utv); ++it)
|
||||
@ -1363,7 +1378,9 @@ bool Normalizer::unionNormalWithTy(NormalizedType& here, TypeId there, int ignor
|
||||
return false;
|
||||
return unionNormals(here, norm);
|
||||
}
|
||||
else if (get<GenericType>(there) || get<FreeType>(there))
|
||||
else if (FFlag::LuauTransitiveSubtyping && get<UnknownType>(here.tops))
|
||||
return true;
|
||||
else if (get<GenericType>(there) || get<FreeType>(there) || (FFlag::LuauNormalizeBlockedTypes && get<BlockedType>(there)))
|
||||
{
|
||||
if (tyvarIndex(there) <= ignoreSmallerTyvars)
|
||||
return true;
|
||||
@ -1441,7 +1458,7 @@ bool Normalizer::unionNormalWithTy(NormalizedType& here, TypeId there, int ignor
|
||||
if (!unionNormals(here, *tn))
|
||||
return false;
|
||||
}
|
||||
else if (get<BlockedType>(there))
|
||||
else if (!FFlag::LuauNormalizeBlockedTypes && get<BlockedType>(there))
|
||||
LUAU_ASSERT(!"Internal error: Trying to normalize a BlockedType");
|
||||
else
|
||||
LUAU_ASSERT(!"Unreachable");
|
||||
@ -2527,7 +2544,7 @@ bool Normalizer::intersectNormalWithTy(NormalizedType& here, TypeId there)
|
||||
return false;
|
||||
return true;
|
||||
}
|
||||
else if (get<GenericType>(there) || get<FreeType>(there))
|
||||
else if (get<GenericType>(there) || get<FreeType>(there) || (FFlag::LuauNormalizeBlockedTypes && get<BlockedType>(there)))
|
||||
{
|
||||
NormalizedType thereNorm{builtinTypes};
|
||||
NormalizedType topNorm{builtinTypes};
|
||||
@ -2802,6 +2819,32 @@ TypeId Normalizer::typeFromNormal(const NormalizedType& norm)
|
||||
}
|
||||
|
||||
bool isSubtype(TypeId subTy, TypeId superTy, NotNull<Scope> scope, NotNull<BuiltinTypes> builtinTypes, InternalErrorReporter& ice)
|
||||
{
|
||||
if (!FFlag::LuauTransitiveSubtyping)
|
||||
return isConsistentSubtype(subTy, superTy, scope, builtinTypes, ice);
|
||||
UnifierSharedState sharedState{&ice};
|
||||
TypeArena arena;
|
||||
Normalizer normalizer{&arena, builtinTypes, NotNull{&sharedState}};
|
||||
Unifier u{NotNull{&normalizer}, Mode::Strict, scope, Location{}, Covariant};
|
||||
|
||||
u.tryUnify(subTy, superTy);
|
||||
return !u.failure;
|
||||
}
|
||||
|
||||
bool isSubtype(TypePackId subPack, TypePackId superPack, NotNull<Scope> scope, NotNull<BuiltinTypes> builtinTypes, InternalErrorReporter& ice)
|
||||
{
|
||||
if (!FFlag::LuauTransitiveSubtyping)
|
||||
return isConsistentSubtype(subPack, superPack, scope, builtinTypes, ice);
|
||||
UnifierSharedState sharedState{&ice};
|
||||
TypeArena arena;
|
||||
Normalizer normalizer{&arena, builtinTypes, NotNull{&sharedState}};
|
||||
Unifier u{NotNull{&normalizer}, Mode::Strict, scope, Location{}, Covariant};
|
||||
|
||||
u.tryUnify(subPack, superPack);
|
||||
return !u.failure;
|
||||
}
|
||||
|
||||
bool isConsistentSubtype(TypeId subTy, TypeId superTy, NotNull<Scope> scope, NotNull<BuiltinTypes> builtinTypes, InternalErrorReporter& ice)
|
||||
{
|
||||
UnifierSharedState sharedState{&ice};
|
||||
TypeArena arena;
|
||||
@ -2813,7 +2856,7 @@ bool isSubtype(TypeId subTy, TypeId superTy, NotNull<Scope> scope, NotNull<Built
|
||||
return ok;
|
||||
}
|
||||
|
||||
bool isSubtype(TypePackId subPack, TypePackId superPack, NotNull<Scope> scope, NotNull<BuiltinTypes> builtinTypes, InternalErrorReporter& ice)
|
||||
bool isConsistentSubtype(TypePackId subPack, TypePackId superPack, NotNull<Scope> scope, NotNull<BuiltinTypes> builtinTypes, InternalErrorReporter& ice)
|
||||
{
|
||||
UnifierSharedState sharedState{&ice};
|
||||
TypeArena arena;
|
||||
|
@ -27,7 +27,6 @@ struct Quantifier final : TypeOnceVisitor
|
||||
explicit Quantifier(TypeLevel level)
|
||||
: level(level)
|
||||
{
|
||||
LUAU_ASSERT(!FFlag::DebugLuauDeferredConstraintResolution);
|
||||
}
|
||||
|
||||
/// @return true if outer encloses inner
|
||||
|
@@ -149,6 +149,28 @@ std::optional<Binding> Scope::linearSearchForBinding(const std::string& name, bo
return std::nullopt;
}

// Updates the `this` scope with the refinements from the `childScope`, excluding ones that don't exist in `this`.
void Scope::inheritRefinements(const ScopePtr& childScope)
{
if (FFlag::DebugLuauDeferredConstraintResolution)
{
    for (const auto& [k, a] : childScope->dcrRefinements)
    {
        if (lookup(NotNull{k}))
            dcrRefinements[k] = a;
    }
}
else
{
    for (const auto& [k, a] : childScope->refinements)
    {
        Symbol symbol = getBaseSymbol(k);
        if (lookup(symbol))
            refinements[k] = a;
    }
}
}

bool subsumesStrict(Scope* left, Scope* right)
{
while (right)
@ -25,6 +25,7 @@ LUAU_FASTINTVARIABLE(LuauTypeMaximumStringifierLength, 500)
|
||||
LUAU_FASTINTVARIABLE(LuauTableTypeMaximumStringifierLength, 0)
|
||||
LUAU_FASTINT(LuauTypeInferRecursionLimit)
|
||||
LUAU_FASTFLAG(LuauInstantiateInSubtyping)
|
||||
LUAU_FASTFLAG(LuauNormalizeBlockedTypes)
|
||||
LUAU_FASTFLAGVARIABLE(LuauMatchReturnsOptionalString, false);
|
||||
|
||||
namespace Luau
|
||||
@ -431,11 +432,11 @@ bool hasLength(TypeId ty, DenseHashSet<TypeId>& seen, int* recursionCount)
|
||||
}
|
||||
|
||||
BlockedType::BlockedType()
|
||||
: index(++nextIndex)
|
||||
: index(FFlag::LuauNormalizeBlockedTypes ? Unifiable::freshIndex() : ++DEPRECATED_nextIndex)
|
||||
{
|
||||
}
|
||||
|
||||
int BlockedType::nextIndex = 0;
|
||||
int BlockedType::DEPRECATED_nextIndex = 0;
|
||||
|
||||
PendingExpansionType::PendingExpansionType(
|
||||
std::optional<AstName> prefix, AstName name, std::vector<TypeId> typeArguments, std::vector<TypePackId> packArguments)
|
||||
|
@ -43,6 +43,8 @@ LUAU_FASTFLAG(LuauNegatedClassTypes)
|
||||
LUAU_FASTFLAGVARIABLE(LuauAllowIndexClassParameters, false)
|
||||
LUAU_FASTFLAG(LuauUninhabitedSubAnything2)
|
||||
LUAU_FASTFLAGVARIABLE(LuauTypecheckTypeguards, false)
|
||||
LUAU_FASTFLAGVARIABLE(LuauTinyControlFlowAnalysis, false)
|
||||
LUAU_FASTFLAGVARIABLE(LuauReducingAndOr, false)
|
||||
|
||||
namespace Luau
|
||||
{
|
||||
@ -344,42 +346,54 @@ ModulePtr TypeChecker::checkWithoutRecursionCheck(const SourceModule& module, Mo
return std::move(currentModule);
}

void TypeChecker::check(const ScopePtr& scope, const AstStat& program)
ControlFlow TypeChecker::check(const ScopePtr& scope, const AstStat& program)
{
if (finishTime && TimeTrace::getClock() > *finishTime)
throw TimeLimitError(iceHandler->moduleName);

if (auto block = program.as<AstStatBlock>())
check(scope, *block);
return check(scope, *block);
else if (auto if_ = program.as<AstStatIf>())
check(scope, *if_);
return check(scope, *if_);
else if (auto while_ = program.as<AstStatWhile>())
check(scope, *while_);
return check(scope, *while_);
else if (auto repeat = program.as<AstStatRepeat>())
check(scope, *repeat);
else if (program.is<AstStatBreak>())
return check(scope, *repeat);
else if (program.is<AstStatBreak>() || program.is<AstStatContinue>())
{
} // Nothing to do
else if (program.is<AstStatContinue>())
{
} // Nothing to do
// Nothing to do
return ControlFlow::None;
}
else if (auto return_ = program.as<AstStatReturn>())
check(scope, *return_);
return check(scope, *return_);
else if (auto expr = program.as<AstStatExpr>())
{
checkExprPack(scope, *expr->expr);

if (FFlag::LuauTinyControlFlowAnalysis)
{
if (auto call = expr->expr->as<AstExprCall>(); call && doesCallError(call))
return ControlFlow::Throws;
}

return ControlFlow::None;
}
else if (auto local = program.as<AstStatLocal>())
check(scope, *local);
return check(scope, *local);
else if (auto for_ = program.as<AstStatFor>())
check(scope, *for_);
return check(scope, *for_);
else if (auto forIn = program.as<AstStatForIn>())
check(scope, *forIn);
return check(scope, *forIn);
else if (auto assign = program.as<AstStatAssign>())
check(scope, *assign);
return check(scope, *assign);
else if (auto assign = program.as<AstStatCompoundAssign>())
check(scope, *assign);
return check(scope, *assign);
else if (program.is<AstStatFunction>())
ice("Should not be calling two-argument check() on a function statement", program.location);
else if (program.is<AstStatLocalFunction>())
ice("Should not be calling two-argument check() on a function statement", program.location);
else if (auto typealias = program.as<AstStatTypeAlias>())
check(scope, *typealias);
return check(scope, *typealias);
else if (auto global = program.as<AstStatDeclareGlobal>())
{
TypeId globalType = resolveType(scope, *global->type);
@ -387,11 +401,13 @@ void TypeChecker::check(const ScopePtr& scope, const AstStat& program)

currentModule->declaredGlobals[globalName] = globalType;
currentModule->getModuleScope()->bindings[global->name] = Binding{globalType, global->location};

return ControlFlow::None;
}
else if (auto global = program.as<AstStatDeclareFunction>())
check(scope, *global);
return check(scope, *global);
else if (auto global = program.as<AstStatDeclareClass>())
check(scope, *global);
return check(scope, *global);
else if (auto errorStatement = program.as<AstStatError>())
{
const size_t oldSize = currentModule->errors.size();
@ -405,37 +421,40 @@ void TypeChecker::check(const ScopePtr& scope, const AstStat& program)
// HACK: We want to run typechecking on the contents of the AstStatError, but
// we don't think the type errors will be useful most of the time.
currentModule->errors.resize(oldSize);

return ControlFlow::None;
}
else
ice("Unknown AstStat");

if (finishTime && TimeTrace::getClock() > *finishTime)
throw TimeLimitError(iceHandler->moduleName);
}

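With the dispatch above, every statement-level `check` call now reports a `ControlFlow` value: `return` statements report `Returns`, and an expression statement whose call is known to diverge (`doesCallError`) reports `Throws` when `LuauTinyControlFlowAnalysis` is enabled. A minimal Luau sketch of code this classification describes (names are illustrative, and the exact refinements depend on the flag and mode in use):

```lua
local function validate(n: number?)
    if n == nil then
        -- an expression statement whose call always errors is classified
        -- as ControlFlow::Throws rather than ControlFlow::None
        error("n is required")
    end
    -- only the non-throwing path reaches this statement
    return n
end
```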
// This particular overload is for do...end. If you need to not increase the scope level, use checkBlock directly.
void TypeChecker::check(const ScopePtr& scope, const AstStatBlock& block)
ControlFlow TypeChecker::check(const ScopePtr& scope, const AstStatBlock& block)
{
ScopePtr child = childScope(scope, block.location);
checkBlock(child, block);

ControlFlow flow = checkBlock(child, block);
scope->inheritRefinements(child);

return flow;
}

void TypeChecker::checkBlock(const ScopePtr& scope, const AstStatBlock& block)
ControlFlow TypeChecker::checkBlock(const ScopePtr& scope, const AstStatBlock& block)
{
RecursionCounter _rc(&checkRecursionCount);
if (FInt::LuauCheckRecursionLimit > 0 && checkRecursionCount >= FInt::LuauCheckRecursionLimit)
{
reportErrorCodeTooComplex(block.location);
return;
return ControlFlow::None;
}
try
{
checkBlockWithoutRecursionCheck(scope, block);
return checkBlockWithoutRecursionCheck(scope, block);
}
catch (const RecursionLimitException&)
{
reportErrorCodeTooComplex(block.location);
return;
return ControlFlow::None;
}
}

@ -488,7 +507,7 @@ struct InplaceDemoter : TypeOnceVisitor
}
};

void TypeChecker::checkBlockWithoutRecursionCheck(const ScopePtr& scope, const AstStatBlock& block)
ControlFlow TypeChecker::checkBlockWithoutRecursionCheck(const ScopePtr& scope, const AstStatBlock& block)
{
int subLevel = 0;

@ -528,6 +547,7 @@ void TypeChecker::checkBlockWithoutRecursionCheck(const ScopePtr& scope, const A
}
};

std::optional<ControlFlow> firstFlow;
while (protoIter != sorted.end())
{
// protoIter walks forward
@ -570,7 +590,9 @@ void TypeChecker::checkBlockWithoutRecursionCheck(const ScopePtr& scope, const A

// We do check the current element, so advance checkIter beyond it.
++checkIter;
check(scope, **protoIter);
ControlFlow flow = check(scope, **protoIter);
if (flow != ControlFlow::None && !firstFlow)
firstFlow = flow;
}
else if (auto fun = (*protoIter)->as<AstStatFunction>())
{
@ -631,7 +653,11 @@ void TypeChecker::checkBlockWithoutRecursionCheck(const ScopePtr& scope, const A
scope->bindings[fun->name] = {funTy, fun->name->location};
}
else
check(scope, **protoIter);
{
ControlFlow flow = check(scope, **protoIter);
if (flow != ControlFlow::None && !firstFlow)
firstFlow = flow;
}

++protoIter;
}
@ -643,6 +669,8 @@ void TypeChecker::checkBlockWithoutRecursionCheck(const ScopePtr& scope, const A
}

checkBlockTypeAliases(scope, sorted);

return firstFlow.value_or(ControlFlow::None);
}

LUAU_NOINLINE void TypeChecker::checkBlockTypeAliases(const ScopePtr& scope, std::vector<AstStat*>& sorted)
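`checkBlockWithoutRecursionCheck` now tracks `firstFlow`, the control flow of the first statement in the block that is not `ControlFlow::None`; that value is what `checkBlock` and the `do ... end` overload propagate to the enclosing scope, together with the child scope's refinements. A small Luau sketch of the shape of code this affects (illustrative only, under the same flag):

```lua
local function classify(tag: string?)
    do
        if tag == nil then
            return "missing"
        end
        -- the if-statement above only sometimes returns, so this block's
        -- firstFlow stays ControlFlow::None and checking continues past it
    end
    return tag
end
```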
@ -717,19 +745,45 @@ static std::optional<Predicate> tryGetTypeGuardPredicate(const AstExprBinary& ex
return predicate;
}

void TypeChecker::check(const ScopePtr& scope, const AstStatIf& statement)
ControlFlow TypeChecker::check(const ScopePtr& scope, const AstStatIf& statement)
{
WithPredicate<TypeId> result = checkExpr(scope, *statement.condition);

ScopePtr ifScope = childScope(scope, statement.thenbody->location);
resolve(result.predicates, ifScope, true);
check(ifScope, *statement.thenbody);
ScopePtr thenScope = childScope(scope, statement.thenbody->location);
resolve(result.predicates, thenScope, true);

if (statement.elsebody)
if (FFlag::LuauTinyControlFlowAnalysis)
{
ScopePtr elseScope = childScope(scope, statement.elsebody->location);
ScopePtr elseScope = childScope(scope, statement.elsebody ? statement.elsebody->location : statement.location);
resolve(result.predicates, elseScope, false);
check(elseScope, *statement.elsebody);

ControlFlow thencf = check(thenScope, *statement.thenbody);
ControlFlow elsecf = ControlFlow::None;
if (statement.elsebody)
elsecf = check(elseScope, *statement.elsebody);

if (matches(thencf, ControlFlow::Returns | ControlFlow::Throws) && elsecf == ControlFlow::None)
scope->inheritRefinements(elseScope);
else if (thencf == ControlFlow::None && matches(elsecf, ControlFlow::Returns | ControlFlow::Throws))
scope->inheritRefinements(thenScope);

if (matches(thencf, ControlFlow::Returns | ControlFlow::Throws) && matches(elsecf, ControlFlow::Returns | ControlFlow::Throws))
return ControlFlow::Returns;
else
return ControlFlow::None;
}
else
{
check(thenScope, *statement.thenbody);

if (statement.elsebody)
{
ScopePtr elseScope = childScope(scope, statement.elsebody->location);
resolve(result.predicates, elseScope, false);
check(elseScope, *statement.elsebody);
}

return ControlFlow::None;
}
}

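Under `LuauTinyControlFlowAnalysis`, the `if` handler compares the two branches: when exactly one of them is known to return or throw, the enclosing scope inherits the refinements of the branch that falls through, and when both diverge the whole statement reports `Returns`. A Luau sketch of the falling-through case (function name and types are illustrative):

```lua
local function width(value: string | number)
    if type(value) == "string" then
        -- this branch falls through, so the enclosing scope inherits its
        -- refinement of value
    else
        return value
    end
    return #value
end
```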
@ -750,22 +804,26 @@ ErrorVec TypeChecker::canUnify(TypePackId subTy, TypePackId superTy, const Scope
return canUnify_(subTy, superTy, scope, location);
}

void TypeChecker::check(const ScopePtr& scope, const AstStatWhile& statement)
ControlFlow TypeChecker::check(const ScopePtr& scope, const AstStatWhile& statement)
{
WithPredicate<TypeId> result = checkExpr(scope, *statement.condition);

ScopePtr whileScope = childScope(scope, statement.body->location);
resolve(result.predicates, whileScope, true);
check(whileScope, *statement.body);

return ControlFlow::None;
}

void TypeChecker::check(const ScopePtr& scope, const AstStatRepeat& statement)
ControlFlow TypeChecker::check(const ScopePtr& scope, const AstStatRepeat& statement)
{
ScopePtr repScope = childScope(scope, statement.location);

checkBlock(repScope, *statement.body);

checkExpr(repScope, *statement.condition);

return ControlFlow::None;
}

struct Demoter : Substitution
@ -822,7 +880,7 @@ struct Demoter : Substitution
}
};

void TypeChecker::check(const ScopePtr& scope, const AstStatReturn& return_)
ControlFlow TypeChecker::check(const ScopePtr& scope, const AstStatReturn& return_)
{
std::vector<std::optional<TypeId>> expectedTypes;
expectedTypes.reserve(return_.list.size);
@ -858,10 +916,12 @@ void TypeChecker::check(const ScopePtr& scope, const AstStatReturn& return_)
if (!errors.empty())
currentModule->getModuleScope()->returnType = addTypePack({anyType});

return;
return FFlag::LuauTinyControlFlowAnalysis ? ControlFlow::Returns : ControlFlow::None;
}

unify(retPack, scope->returnType, scope, return_.location, CountMismatch::Context::Return);

return FFlag::LuauTinyControlFlowAnalysis ? ControlFlow::Returns : ControlFlow::None;
}

template<typename Id>
|
||||
@ -893,7 +953,7 @@ ErrorVec TypeChecker::tryUnify(TypePackId subTy, TypePackId superTy, const Scope
|
||||
return tryUnify_(subTy, superTy, scope, location);
|
||||
}
|
||||
|
||||
void TypeChecker::check(const ScopePtr& scope, const AstStatAssign& assign)
|
||||
ControlFlow TypeChecker::check(const ScopePtr& scope, const AstStatAssign& assign)
|
||||
{
|
||||
std::vector<std::optional<TypeId>> expectedTypes;
|
||||
expectedTypes.reserve(assign.vars.size);
|
||||
@ -993,9 +1053,11 @@ void TypeChecker::check(const ScopePtr& scope, const AstStatAssign& assign)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return ControlFlow::None;
|
||||
}
|
||||
|
||||
void TypeChecker::check(const ScopePtr& scope, const AstStatCompoundAssign& assign)
|
||||
ControlFlow TypeChecker::check(const ScopePtr& scope, const AstStatCompoundAssign& assign)
|
||||
{
|
||||
AstExprBinary expr(assign.location, assign.op, assign.var, assign.value);
|
||||
|
||||
@ -1005,9 +1067,11 @@ void TypeChecker::check(const ScopePtr& scope, const AstStatCompoundAssign& assi
|
||||
TypeId result = checkBinaryOperation(scope, expr, left, right);
|
||||
|
||||
unify(result, left, scope, assign.location);
|
||||
|
||||
return ControlFlow::None;
|
||||
}
|
||||
|
||||
void TypeChecker::check(const ScopePtr& scope, const AstStatLocal& local)
|
||||
ControlFlow TypeChecker::check(const ScopePtr& scope, const AstStatLocal& local)
|
||||
{
|
||||
// Important subtlety: A local variable is not in scope while its initializer is being evaluated.
|
||||
// For instance, you cannot do this:
|
||||
@ -1144,9 +1208,11 @@ void TypeChecker::check(const ScopePtr& scope, const AstStatLocal& local)
|
||||
|
||||
for (const auto& [local, binding] : varBindings)
|
||||
scope->bindings[local] = binding;
|
||||
|
||||
return ControlFlow::None;
|
||||
}
|
||||
|
||||
void TypeChecker::check(const ScopePtr& scope, const AstStatFor& expr)
|
||||
ControlFlow TypeChecker::check(const ScopePtr& scope, const AstStatFor& expr)
|
||||
{
|
||||
ScopePtr loopScope = childScope(scope, expr.location);
|
||||
|
||||
@ -1169,9 +1235,11 @@ void TypeChecker::check(const ScopePtr& scope, const AstStatFor& expr)
|
||||
unify(checkExpr(loopScope, *expr.step).type, loopVarType, scope, expr.step->location);
|
||||
|
||||
check(loopScope, *expr.body);
|
||||
|
||||
return ControlFlow::None;
|
||||
}
|
||||
|
||||
void TypeChecker::check(const ScopePtr& scope, const AstStatForIn& forin)
|
||||
ControlFlow TypeChecker::check(const ScopePtr& scope, const AstStatForIn& forin)
|
||||
{
|
||||
ScopePtr loopScope = childScope(scope, forin.location);
|
||||
|
||||
@ -1360,9 +1428,11 @@ void TypeChecker::check(const ScopePtr& scope, const AstStatForIn& forin)
|
||||
unify(retPack, varPack, scope, forin.location);
|
||||
|
||||
check(loopScope, *forin.body);
|
||||
|
||||
return ControlFlow::None;
|
||||
}
|
||||
|
||||
void TypeChecker::check(const ScopePtr& scope, TypeId ty, const ScopePtr& funScope, const AstStatFunction& function)
|
||||
ControlFlow TypeChecker::check(const ScopePtr& scope, TypeId ty, const ScopePtr& funScope, const AstStatFunction& function)
|
||||
{
|
||||
if (auto exprName = function.name->as<AstExprGlobal>())
|
||||
{
|
||||
@ -1387,8 +1457,6 @@ void TypeChecker::check(const ScopePtr& scope, TypeId ty, const ScopePtr& funSco
|
||||
globalBindings[name] = oldBinding;
|
||||
else
|
||||
globalBindings[name] = {quantify(funScope, ty, exprName->location), exprName->location};
|
||||
|
||||
return;
|
||||
}
|
||||
else if (auto name = function.name->as<AstExprLocal>())
|
||||
{
|
||||
@ -1397,7 +1465,6 @@ void TypeChecker::check(const ScopePtr& scope, TypeId ty, const ScopePtr& funSco
|
||||
checkFunctionBody(funScope, ty, *function.func);
|
||||
|
||||
scope->bindings[name->local] = {anyIfNonstrict(quantify(funScope, ty, name->local->location)), name->local->location};
|
||||
return;
|
||||
}
|
||||
else if (auto name = function.name->as<AstExprIndexName>())
|
||||
{
|
||||
@ -1444,9 +1511,11 @@ void TypeChecker::check(const ScopePtr& scope, TypeId ty, const ScopePtr& funSco
|
||||
|
||||
checkFunctionBody(funScope, ty, *function.func);
|
||||
}
|
||||
|
||||
return ControlFlow::None;
|
||||
}
|
||||
|
||||
void TypeChecker::check(const ScopePtr& scope, TypeId ty, const ScopePtr& funScope, const AstStatLocalFunction& function)
|
||||
ControlFlow TypeChecker::check(const ScopePtr& scope, TypeId ty, const ScopePtr& funScope, const AstStatLocalFunction& function)
|
||||
{
|
||||
Name name = function.name->name.value;
|
||||
|
||||
@ -1455,15 +1524,17 @@ void TypeChecker::check(const ScopePtr& scope, TypeId ty, const ScopePtr& funSco
|
||||
checkFunctionBody(funScope, ty, *function.func);
|
||||
|
||||
scope->bindings[function.name] = {quantify(funScope, ty, function.name->location), function.name->location};
|
||||
|
||||
return ControlFlow::None;
|
||||
}
|
||||
|
||||
void TypeChecker::check(const ScopePtr& scope, const AstStatTypeAlias& typealias)
|
||||
ControlFlow TypeChecker::check(const ScopePtr& scope, const AstStatTypeAlias& typealias)
|
||||
{
|
||||
Name name = typealias.name.value;
|
||||
|
||||
// If the alias is missing a name, we can't do anything with it. Ignore it.
|
||||
if (name == kParseNameError)
|
||||
return;
|
||||
return ControlFlow::None;
|
||||
|
||||
std::optional<TypeFun> binding;
|
||||
if (auto it = scope->exportedTypeBindings.find(name); it != scope->exportedTypeBindings.end())
|
||||
@ -1476,7 +1547,7 @@ void TypeChecker::check(const ScopePtr& scope, const AstStatTypeAlias& typealias
|
||||
// If the first pass failed (this should mean a duplicate definition), the second pass isn't going to be
|
||||
// interesting.
|
||||
if (duplicateTypeAliases.find({typealias.exported, name}))
|
||||
return;
|
||||
return ControlFlow::None;
|
||||
|
||||
// By now this alias must have been `prototype()`d first.
|
||||
if (!binding)
|
||||
@ -1557,6 +1628,8 @@ void TypeChecker::check(const ScopePtr& scope, const AstStatTypeAlias& typealias
|
||||
|
||||
if (unify(ty, bindingType, aliasScope, typealias.location))
|
||||
bindingType = ty;
|
||||
|
||||
return ControlFlow::None;
|
||||
}
|
||||
|
||||
void TypeChecker::prototype(const ScopePtr& scope, const AstStatTypeAlias& typealias, int subLevel)
|
||||
@ -1648,13 +1721,13 @@ void TypeChecker::prototype(const ScopePtr& scope, const AstStatDeclareClass& de
|
||||
scope->exportedTypeBindings[className] = TypeFun{{}, classTy};
|
||||
}
|
||||
|
||||
void TypeChecker::check(const ScopePtr& scope, const AstStatDeclareClass& declaredClass)
|
||||
ControlFlow TypeChecker::check(const ScopePtr& scope, const AstStatDeclareClass& declaredClass)
|
||||
{
|
||||
Name className(declaredClass.name.value);
|
||||
|
||||
// Don't bother checking if the class definition was incorrect
|
||||
if (incorrectClassDefinitions.find(&declaredClass))
|
||||
return;
|
||||
return ControlFlow::None;
|
||||
|
||||
std::optional<TypeFun> binding;
|
||||
if (auto it = scope->exportedTypeBindings.find(className); it != scope->exportedTypeBindings.end())
|
||||
@ -1721,9 +1794,11 @@ void TypeChecker::check(const ScopePtr& scope, const AstStatDeclareClass& declar
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return ControlFlow::None;
|
||||
}
|
||||
|
||||
void TypeChecker::check(const ScopePtr& scope, const AstStatDeclareFunction& global)
|
||||
ControlFlow TypeChecker::check(const ScopePtr& scope, const AstStatDeclareFunction& global)
|
||||
{
|
||||
ScopePtr funScope = childFunctionScope(scope, global.location);
|
||||
|
||||
@ -1754,6 +1829,8 @@ void TypeChecker::check(const ScopePtr& scope, const AstStatDeclareFunction& glo
|
||||
|
||||
currentModule->declaredGlobals[fnName] = fnType;
|
||||
currentModule->getModuleScope()->bindings[global.name] = Binding{fnType, global.location};
|
||||
|
||||
return ControlFlow::None;
|
||||
}
|
||||
|
||||
WithPredicate<TypeId> TypeChecker::checkExpr(const ScopePtr& scope, const AstExpr& expr, std::optional<TypeId> expectedType, bool forceSingleton)
|
||||
@ -2785,6 +2862,16 @@ TypeId TypeChecker::checkRelationalOperation(
if (notNever)
{
LUAU_ASSERT(oty);

if (FFlag::LuauReducingAndOr)
{
// Perform a limited form of type reduction for booleans
if (isPrim(*oty, PrimitiveType::Boolean) && get<BooleanSingleton>(get<SingletonType>(follow(rhsType))))
return booleanType;
if (isPrim(rhsType, PrimitiveType::Boolean) && get<BooleanSingleton>(get<SingletonType>(follow(*oty))))
return booleanType;
}

return unionOfTypes(*oty, rhsType, scope, expr.location, false);
}
else
@ -2808,6 +2895,16 @@ TypeId TypeChecker::checkRelationalOperation(
if (notNever)
{
LUAU_ASSERT(oty);

if (FFlag::LuauReducingAndOr)
{
// Perform a limited form of type reduction for booleans
if (isPrim(*oty, PrimitiveType::Boolean) && get<BooleanSingleton>(get<SingletonType>(follow(rhsType))))
return booleanType;
if (isPrim(rhsType, PrimitiveType::Boolean) && get<BooleanSingleton>(get<SingletonType>(follow(*oty))))
return booleanType;
}

return unionOfTypes(*oty, rhsType, scope, expr.location);
}
else

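Both hunks above apply the same `LuauReducingAndOr` reduction at the two `and`/`or` result sites in `checkRelationalOperation`: when one operand contributes plain `boolean` and the other a boolean singleton, the result collapses to `boolean` instead of a union such as `boolean | true`. A hedged Luau sketch of an expression in that shape (the exact inferred type depends on surrounding refinements and enabled flags):

```lua
--!strict
local function orDefault(flag: boolean)
    -- one operand is typed boolean and the other is the singleton true;
    -- with LuauReducingAndOr the result type reduces to plain boolean
    return flag or true
end
```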
@ -8,6 +8,11 @@ namespace Unifiable

static int nextIndex = 0;

int freshIndex()
{
return ++nextIndex;
}

Free::Free(TypeLevel level)
: index(++nextIndex)
, level(level)

@ -20,9 +20,11 @@ LUAU_FASTFLAG(LuauErrorRecoveryType)
LUAU_FASTFLAGVARIABLE(LuauInstantiateInSubtyping, false)
LUAU_FASTFLAGVARIABLE(LuauUninhabitedSubAnything2, false)
LUAU_FASTFLAGVARIABLE(LuauMaintainScopesInUnifier, false)
LUAU_FASTFLAGVARIABLE(LuauTransitiveSubtyping, false)
LUAU_FASTFLAGVARIABLE(LuauTinyUnifyNormalsFix, false)
LUAU_FASTFLAG(LuauClassTypeVarsInSubstitution)
LUAU_FASTFLAG(DebugLuauDeferredConstraintResolution)
LUAU_FASTFLAG(LuauNormalizeBlockedTypes)
LUAU_FASTFLAG(LuauNegatedFunctionTypes)
LUAU_FASTFLAG(LuauNegatedClassTypes)
LUAU_FASTFLAG(LuauNegatedTableTypes)
@ -475,16 +477,27 @@ void Unifier::tryUnify_(TypeId subTy, TypeId superTy, bool isFunctionCall, bool
if (log.get<AnyType>(superTy))
return tryUnifyWithAny(subTy, builtinTypes->anyType);

if (log.get<ErrorType>(superTy))
if (!FFlag::LuauTransitiveSubtyping && log.get<ErrorType>(superTy))
return tryUnifyWithAny(subTy, builtinTypes->errorType);

if (log.get<UnknownType>(superTy))
if (!FFlag::LuauTransitiveSubtyping && log.get<UnknownType>(superTy))
return tryUnifyWithAny(subTy, builtinTypes->unknownType);

if (log.get<AnyType>(subTy))
{
if (FFlag::LuauTransitiveSubtyping && normalize)
{
// TODO: there are probably cheaper ways to check if any <: T.
const NormalizedType* superNorm = normalizer->normalize(superTy);
if (!log.get<AnyType>(superNorm->tops))
failure = true;
}
else
failure = true;
return tryUnifyWithAny(superTy, builtinTypes->anyType);
}

if (log.get<ErrorType>(subTy))
if (!FFlag::LuauTransitiveSubtyping && log.get<ErrorType>(subTy))
return tryUnifyWithAny(superTy, builtinTypes->errorType);

if (log.get<NeverType>(subTy))
@ -539,6 +552,35 @@ void Unifier::tryUnify_(TypeId subTy, TypeId superTy, bool isFunctionCall, bool
{
tryUnifyIntersectionWithType(subTy, uv, superTy, cacheEnabled, isFunctionCall);
}
else if (FFlag::LuauTransitiveSubtyping && log.get<AnyType>(subTy))
{
tryUnifyWithAny(superTy, builtinTypes->unknownType);
failure = true;
}
else if (FFlag::LuauTransitiveSubtyping && log.get<ErrorType>(subTy) && log.get<ErrorType>(superTy))
{
// error <: error
}
else if (FFlag::LuauTransitiveSubtyping && log.get<ErrorType>(superTy))
{
tryUnifyWithAny(subTy, builtinTypes->errorType);
failure = true;
}
else if (FFlag::LuauTransitiveSubtyping && log.get<ErrorType>(subTy))
{
tryUnifyWithAny(superTy, builtinTypes->errorType);
failure = true;
}
else if (FFlag::LuauTransitiveSubtyping && log.get<UnknownType>(superTy))
{
// At this point, all the supertypes of `error` have been handled,
// and if `error </: T` then `T <: unknown`.
tryUnifyWithAny(subTy, builtinTypes->unknownType);
}
else if (FFlag::LuauTransitiveSubtyping && log.get<UnknownType>(superTy))
{
tryUnifyWithAny(subTy, builtinTypes->unknownType);
}
else if (log.getMutable<PrimitiveType>(superTy) && log.getMutable<PrimitiveType>(subTy))
tryUnifyPrimitives(subTy, superTy);

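The `tryUnify_` changes above are the core of the transitive treatment of `any` and `error`: unifying such a type against an incompatible bound now records `failure = true` while suppressing the error report itself, instead of silently succeeding. A hedged Luau sketch of the situation being tracked (the function name is illustrative; concrete diagnostics depend on which flags are enabled):

```lua
--!strict
local function toNumber(x: any): number
    -- any is not a strict subtype of number, so the unifier marks a
    -- suppressed failure here rather than reporting a hard error; the
    -- failure flag is what keeps subtyping with `any` transitive
    return x
end
```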
@ -611,6 +653,7 @@ void Unifier::tryUnifyUnionWithType(TypeId subTy, const UnionType* subUnion, Typ
|
||||
{
|
||||
// A | B <: T if and only if A <: T and B <: T
|
||||
bool failed = false;
|
||||
bool errorsSuppressed = true;
|
||||
std::optional<TypeError> unificationTooComplex;
|
||||
std::optional<TypeError> firstFailedOption;
|
||||
|
||||
@ -626,13 +669,17 @@ void Unifier::tryUnifyUnionWithType(TypeId subTy, const UnionType* subUnion, Typ
|
||||
|
||||
if (auto e = hasUnificationTooComplex(innerState.errors))
|
||||
unificationTooComplex = e;
|
||||
else if (!innerState.errors.empty())
|
||||
else if (FFlag::LuauTransitiveSubtyping ? innerState.failure : !innerState.errors.empty())
|
||||
{
|
||||
// If errors were suppressed, we store the log up, so we can commit it if no other option succeeds.
|
||||
if (FFlag::LuauTransitiveSubtyping && innerState.errors.empty())
|
||||
logs.push_back(std::move(innerState.log));
|
||||
// 'nil' option is skipped from extended report because we present the type in a special way - 'T?'
|
||||
if (!firstFailedOption && !isNil(type))
|
||||
else if (!firstFailedOption && !isNil(type))
|
||||
firstFailedOption = {innerState.errors.front()};
|
||||
|
||||
failed = true;
|
||||
errorsSuppressed &= innerState.errors.empty();
|
||||
}
|
||||
}
|
||||
|
||||
@ -684,12 +731,13 @@ void Unifier::tryUnifyUnionWithType(TypeId subTy, const UnionType* subUnion, Typ
|
||||
{
|
||||
if (firstFailedOption)
|
||||
reportError(location, TypeMismatch{superTy, subTy, "Not all union options are compatible.", *firstFailedOption, mismatchContext()});
|
||||
else
|
||||
else if (!FFlag::LuauTransitiveSubtyping || !errorsSuppressed)
|
||||
reportError(location, TypeMismatch{superTy, subTy, mismatchContext()});
|
||||
failure = true;
|
||||
}
|
||||
}
|
||||
|
||||
struct BlockedTypeFinder : TypeOnceVisitor
|
||||
struct DEPRECATED_BlockedTypeFinder : TypeOnceVisitor
|
||||
{
|
||||
std::unordered_set<TypeId> blockedTypes;
|
||||
|
||||
@ -700,9 +748,10 @@ struct BlockedTypeFinder : TypeOnceVisitor
|
||||
}
|
||||
};
|
||||
|
||||
bool Unifier::blockOnBlockedTypes(TypeId subTy, TypeId superTy)
|
||||
bool Unifier::DEPRECATED_blockOnBlockedTypes(TypeId subTy, TypeId superTy)
|
||||
{
|
||||
BlockedTypeFinder blockedTypeFinder;
|
||||
LUAU_ASSERT(!FFlag::LuauNormalizeBlockedTypes);
|
||||
DEPRECATED_BlockedTypeFinder blockedTypeFinder;
|
||||
blockedTypeFinder.traverse(subTy);
|
||||
blockedTypeFinder.traverse(superTy);
|
||||
if (!blockedTypeFinder.blockedTypes.empty())
|
||||
@ -718,6 +767,7 @@ void Unifier::tryUnifyTypeWithUnion(TypeId subTy, TypeId superTy, const UnionTyp
|
||||
{
|
||||
// T <: A | B if T <: A or T <: B
|
||||
bool found = false;
|
||||
bool errorsSuppressed = false;
|
||||
std::optional<TypeError> unificationTooComplex;
|
||||
|
||||
size_t failedOptionCount = 0;
|
||||
@ -754,6 +804,21 @@ void Unifier::tryUnifyTypeWithUnion(TypeId subTy, TypeId superTy, const UnionTyp
|
||||
}
|
||||
}
|
||||
|
||||
if (FFlag::LuauTransitiveSubtyping && !foundHeuristic)
|
||||
{
|
||||
for (size_t i = 0; i < uv->options.size(); ++i)
|
||||
{
|
||||
TypeId type = uv->options[i];
|
||||
|
||||
if (subTy == type)
|
||||
{
|
||||
foundHeuristic = true;
|
||||
startIndex = i;
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (!foundHeuristic && cacheEnabled)
|
||||
{
|
||||
auto& cache = sharedState.cachedUnify;
|
||||
@ -779,7 +844,7 @@ void Unifier::tryUnifyTypeWithUnion(TypeId subTy, TypeId superTy, const UnionTyp
|
||||
innerState.normalize = false;
|
||||
innerState.tryUnify_(subTy, type, isFunctionCall);
|
||||
|
||||
if (innerState.errors.empty())
|
||||
if (FFlag::LuauTransitiveSubtyping ? !innerState.failure : innerState.errors.empty())
|
||||
{
|
||||
found = true;
|
||||
if (FFlag::DebugLuauDeferredConstraintResolution)
|
||||
@ -790,6 +855,10 @@ void Unifier::tryUnifyTypeWithUnion(TypeId subTy, TypeId superTy, const UnionTyp
|
||||
break;
|
||||
}
|
||||
}
|
||||
else if (FFlag::LuauTransitiveSubtyping && innerState.errors.empty())
|
||||
{
|
||||
errorsSuppressed = true;
|
||||
}
|
||||
else if (auto e = hasUnificationTooComplex(innerState.errors))
|
||||
{
|
||||
unificationTooComplex = e;
|
||||
@ -810,11 +879,32 @@ void Unifier::tryUnifyTypeWithUnion(TypeId subTy, TypeId superTy, const UnionTyp
|
||||
{
|
||||
reportError(*unificationTooComplex);
|
||||
}
|
||||
else if (FFlag::LuauTransitiveSubtyping && !found && normalize)
|
||||
{
|
||||
// It is possible that T <: A | B even though T </: A and T </:B
|
||||
// for example boolean <: true | false.
|
||||
// We deal with this by type normalization.
|
||||
const NormalizedType* subNorm = normalizer->normalize(subTy);
|
||||
const NormalizedType* superNorm = normalizer->normalize(superTy);
|
||||
Unifier innerState = makeChildUnifier();
|
||||
if (!subNorm || !superNorm)
|
||||
return reportError(location, UnificationTooComplex{});
|
||||
else if ((failedOptionCount == 1 || foundHeuristic) && failedOption)
|
||||
innerState.tryUnifyNormalizedTypes(subTy, superTy, *subNorm, *superNorm, "None of the union options are compatible. For example:", *failedOption);
|
||||
else
|
||||
innerState.tryUnifyNormalizedTypes(subTy, superTy, *subNorm, *superNorm, "none of the union options are compatible");
|
||||
if (!innerState.failure)
|
||||
log.concat(std::move(innerState.log));
|
||||
else if (errorsSuppressed || innerState.errors.empty())
|
||||
failure = true;
|
||||
else
|
||||
reportError(std::move(innerState.errors.front()));
|
||||
}
|
||||
else if (!found && normalize)
|
||||
{
|
||||
// We cannot normalize a type that contains blocked types. We have to
|
||||
// stop for now if we find any.
|
||||
if (blockOnBlockedTypes(subTy, superTy))
|
||||
if (!FFlag::LuauNormalizeBlockedTypes && DEPRECATED_blockOnBlockedTypes(subTy, superTy))
|
||||
return;
|
||||
|
||||
// It is possible that T <: A | B even though T </: A and T </:B
|
||||
@ -831,7 +921,9 @@ void Unifier::tryUnifyTypeWithUnion(TypeId subTy, TypeId superTy, const UnionTyp
|
||||
}
|
||||
else if (!found)
|
||||
{
|
||||
if ((failedOptionCount == 1 || foundHeuristic) && failedOption)
|
||||
if (FFlag::LuauTransitiveSubtyping && errorsSuppressed)
|
||||
failure = true;
|
||||
else if ((failedOptionCount == 1 || foundHeuristic) && failedOption)
|
||||
reportError(
|
||||
location, TypeMismatch{superTy, subTy, "None of the union options are compatible. For example:", *failedOption, mismatchContext()});
|
||||
else
|
||||
@ -864,6 +956,7 @@ void Unifier::tryUnifyTypeWithIntersection(TypeId subTy, TypeId superTy, const I
|
||||
logs.push_back(std::move(innerState.log));
|
||||
else
|
||||
log.concat(std::move(innerState.log));
|
||||
failure |= innerState.failure;
|
||||
}
|
||||
|
||||
if (FFlag::DebugLuauDeferredConstraintResolution)
|
||||
@ -895,6 +988,7 @@ void Unifier::tryUnifyIntersectionWithType(TypeId subTy, const IntersectionType*
|
||||
{
|
||||
// A & B <: T if A <: T or B <: T
|
||||
bool found = false;
|
||||
bool errorsSuppressed = false;
|
||||
std::optional<TypeError> unificationTooComplex;
|
||||
|
||||
size_t startIndex = 0;
|
||||
@ -919,7 +1013,7 @@ void Unifier::tryUnifyIntersectionWithType(TypeId subTy, const IntersectionType*
|
||||
{
|
||||
// We cannot normalize a type that contains blocked types. We have to
|
||||
// stop for now if we find any.
|
||||
if (blockOnBlockedTypes(subTy, superTy))
|
||||
if (!FFlag::LuauNormalizeBlockedTypes && DEPRECATED_blockOnBlockedTypes(subTy, superTy))
|
||||
return;
|
||||
|
||||
// Sometimes a negation type is inside one of the types, e.g. { p: number } & { p: ~number }.
|
||||
@ -951,13 +1045,18 @@ void Unifier::tryUnifyIntersectionWithType(TypeId subTy, const IntersectionType*
|
||||
innerState.normalize = false;
|
||||
innerState.tryUnify_(type, superTy, isFunctionCall);
|
||||
|
||||
// TODO: This sets errorSuppressed to true if any of the parts is error-suppressing,
|
||||
// in paricular any & T is error-suppressing. Really, errorSuppressed should be true if
|
||||
// all of the parts are error-suppressing, but that fails to typecheck lua-apps.
|
||||
if (innerState.errors.empty())
|
||||
{
|
||||
found = true;
|
||||
if (FFlag::DebugLuauDeferredConstraintResolution)
|
||||
errorsSuppressed = innerState.failure;
|
||||
if (FFlag::DebugLuauDeferredConstraintResolution || (FFlag::LuauTransitiveSubtyping && innerState.failure))
|
||||
logs.push_back(std::move(innerState.log));
|
||||
else
|
||||
{
|
||||
errorsSuppressed = false;
|
||||
log.concat(std::move(innerState.log));
|
||||
break;
|
||||
}
|
||||
@ -970,6 +1069,8 @@ void Unifier::tryUnifyIntersectionWithType(TypeId subTy, const IntersectionType*
|
||||
|
||||
if (FFlag::DebugLuauDeferredConstraintResolution)
|
||||
log.concat(combineLogsIntoIntersection(std::move(logs)));
|
||||
else if (FFlag::LuauTransitiveSubtyping && errorsSuppressed)
|
||||
log.concat(std::move(logs.front()));
|
||||
|
||||
if (unificationTooComplex)
|
||||
reportError(*unificationTooComplex);
|
||||
@ -977,7 +1078,7 @@ void Unifier::tryUnifyIntersectionWithType(TypeId subTy, const IntersectionType*
|
||||
{
|
||||
// We cannot normalize a type that contains blocked types. We have to
|
||||
// stop for now if we find any.
|
||||
if (blockOnBlockedTypes(subTy, superTy))
|
||||
if (!FFlag::LuauNormalizeBlockedTypes && DEPRECATED_blockOnBlockedTypes(subTy, superTy))
|
||||
return;
|
||||
|
||||
// It is possible that A & B <: T even though A </: T and B </: T
|
||||
@ -994,19 +1095,39 @@ void Unifier::tryUnifyIntersectionWithType(TypeId subTy, const IntersectionType*
|
||||
{
|
||||
reportError(location, TypeMismatch{superTy, subTy, "none of the intersection parts are compatible", mismatchContext()});
|
||||
}
|
||||
else if (errorsSuppressed)
|
||||
failure = true;
|
||||
}
|
||||
|
||||
void Unifier::tryUnifyNormalizedTypes(
|
||||
TypeId subTy, TypeId superTy, const NormalizedType& subNorm, const NormalizedType& superNorm, std::string reason, std::optional<TypeError> error)
|
||||
{
|
||||
if (get<UnknownType>(superNorm.tops) || get<AnyType>(superNorm.tops) || get<AnyType>(subNorm.tops))
|
||||
if (!FFlag::LuauTransitiveSubtyping && get<UnknownType>(superNorm.tops))
|
||||
return;
|
||||
else if (get<UnknownType>(subNorm.tops))
|
||||
else if (get<AnyType>(superNorm.tops))
|
||||
return;
|
||||
else if (get<AnyType>(subNorm.tops))
|
||||
{
|
||||
failure = true;
|
||||
return;
|
||||
}
|
||||
else if (!FFlag::LuauTransitiveSubtyping && get<UnknownType>(subNorm.tops))
|
||||
return reportError(location, TypeMismatch{superTy, subTy, reason, error, mismatchContext()});
|
||||
|
||||
if (get<ErrorType>(subNorm.errors))
|
||||
if (!get<ErrorType>(superNorm.errors))
|
||||
return reportError(location, TypeMismatch{superTy, subTy, reason, error, mismatchContext()});
|
||||
{
|
||||
failure = true;
|
||||
if (!FFlag::LuauTransitiveSubtyping)
|
||||
reportError(location, TypeMismatch{superTy, subTy, reason, error, mismatchContext()});
|
||||
return;
|
||||
}
|
||||
|
||||
if (FFlag::LuauTransitiveSubtyping && get<UnknownType>(superNorm.tops))
|
||||
return;
|
||||
|
||||
if (FFlag::LuauTransitiveSubtyping && get<UnknownType>(subNorm.tops))
|
||||
return reportError(location, TypeMismatch{superTy, subTy, reason, error, mismatchContext()});
|
||||
|
||||
if (get<PrimitiveType>(subNorm.booleans))
|
||||
{
|
||||
@ -1911,6 +2032,7 @@ void Unifier::tryUnifyTables(TypeId subTy, TypeId superTy, bool isIntersection)
|
||||
|
||||
if (innerState.errors.empty())
|
||||
log.concat(std::move(innerState.log));
|
||||
failure |= innerState.failure;
|
||||
}
|
||||
else if (subTable->indexer && maybeString(subTable->indexer->indexType))
|
||||
{
|
||||
@ -1926,6 +2048,7 @@ void Unifier::tryUnifyTables(TypeId subTy, TypeId superTy, bool isIntersection)
|
||||
|
||||
if (innerState.errors.empty())
|
||||
log.concat(std::move(innerState.log));
|
||||
failure |= innerState.failure;
|
||||
}
|
||||
else if (subTable->state == TableState::Unsealed && isOptional(prop.type))
|
||||
// This is sound because unsealed table types are precise, so `{ p : T } <: { p : T, q : U? }`
|
||||
@ -1988,6 +2111,7 @@ void Unifier::tryUnifyTables(TypeId subTy, TypeId superTy, bool isIntersection)
|
||||
|
||||
if (innerState.errors.empty())
|
||||
log.concat(std::move(innerState.log));
|
||||
failure |= innerState.failure;
|
||||
}
|
||||
else if (superTable->state == TableState::Unsealed)
|
||||
{
|
||||
@ -2059,6 +2183,7 @@ void Unifier::tryUnifyTables(TypeId subTy, TypeId superTy, bool isIntersection)
|
||||
|
||||
if (innerState.errors.empty())
|
||||
log.concat(std::move(innerState.log));
|
||||
failure |= innerState.failure;
|
||||
}
|
||||
else if (superTable->indexer)
|
||||
{
|
||||
@ -2234,6 +2359,7 @@ void Unifier::tryUnifyWithMetatable(TypeId subTy, TypeId superTy, bool reversed)
|
||||
location, TypeMismatch{reversed ? subTy : superTy, reversed ? superTy : subTy, "", innerState.errors.front(), mismatchContext()});
|
||||
|
||||
log.concat(std::move(innerState.log));
|
||||
failure |= innerState.failure;
|
||||
}
|
||||
else if (TableType* subTable = log.getMutable<TableType>(subTy))
|
||||
{
|
||||
@ -2274,6 +2400,7 @@ void Unifier::tryUnifyWithMetatable(TypeId subTy, TypeId superTy, bool reversed)
|
||||
{
|
||||
log.concat(std::move(innerState.log));
|
||||
log.bindTable(subTy, superTy);
|
||||
failure |= innerState.failure;
|
||||
}
|
||||
}
|
||||
else
|
||||
@ -2367,6 +2494,7 @@ void Unifier::tryUnifyWithClass(TypeId subTy, TypeId superTy, bool reversed)
|
||||
if (innerState.errors.empty())
|
||||
{
|
||||
log.concat(std::move(innerState.log));
|
||||
failure |= innerState.failure;
|
||||
}
|
||||
else
|
||||
{
|
||||
@ -2398,7 +2526,7 @@ void Unifier::tryUnifyNegations(TypeId subTy, TypeId superTy)
|
||||
|
||||
// We cannot normalize a type that contains blocked types. We have to
|
||||
// stop for now if we find any.
|
||||
if (blockOnBlockedTypes(subTy, superTy))
|
||||
if (!FFlag::LuauNormalizeBlockedTypes && DEPRECATED_blockOnBlockedTypes(subTy, superTy))
|
||||
return;
|
||||
|
||||
const NormalizedType* subNorm = normalizer->normalize(subTy);
|
||||
@ -2726,6 +2854,7 @@ Unifier Unifier::makeChildUnifier()
|
||||
void Unifier::reportError(Location location, TypeErrorData data)
|
||||
{
|
||||
errors.emplace_back(std::move(location), std::move(data));
|
||||
failure = true;
|
||||
}
|
||||
|
||||
// A utility function that appends the given error to the unifier's error log.
|
||||
@ -2736,6 +2865,7 @@ void Unifier::reportError(Location location, TypeErrorData data)
|
||||
void Unifier::reportError(TypeError err)
|
||||
{
|
||||
errors.push_back(std::move(err));
|
||||
failure = true;
|
||||
}
|
||||
|
||||
|
||||
|
@ -123,13 +123,13 @@ private:
|
||||
// return [explist]
|
||||
AstStat* parseReturn();
|
||||
|
||||
// type Name `=' typeannotation
|
||||
// type Name `=' Type
|
||||
AstStat* parseTypeAlias(const Location& start, bool exported);
|
||||
|
||||
AstDeclaredClassProp parseDeclaredClassMethod();
|
||||
|
||||
// `declare global' Name: typeannotation |
|
||||
// `declare function' Name`(' [parlist] `)' [`:` TypeAnnotation]
|
||||
// `declare global' Name: Type |
|
||||
// `declare function' Name`(' [parlist] `)' [`:` Type]
|
||||
AstStat* parseDeclaration(const Location& start);
|
||||
|
||||
// varlist `=' explist
|
||||
@ -140,7 +140,7 @@ private:
|
||||
|
||||
std::pair<AstLocal*, AstArray<AstLocal*>> prepareFunctionArguments(const Location& start, bool hasself, const TempVector<Binding>& args);
|
||||
|
||||
// funcbodyhead ::= `(' [namelist [`,' `...'] | `...'] `)' [`:` TypeAnnotation]
|
||||
// funcbodyhead ::= `(' [namelist [`,' `...'] | `...'] `)' [`:` Type]
|
||||
// funcbody ::= funcbodyhead block end
|
||||
std::pair<AstExprFunction*, AstLocal*> parseFunctionBody(
|
||||
bool hasself, const Lexeme& matchFunction, const AstName& debugname, const Name* localName);
|
||||
@ -148,21 +148,21 @@ private:
|
||||
// explist ::= {exp `,'} exp
|
||||
void parseExprList(TempVector<AstExpr*>& result);
|
||||
|
||||
// binding ::= Name [`:` TypeAnnotation]
|
||||
// binding ::= Name [`:` Type]
|
||||
Binding parseBinding();
|
||||
|
||||
// bindinglist ::= (binding | `...') {`,' bindinglist}
|
||||
// Returns the location of the vararg ..., or std::nullopt if the function is not vararg.
|
||||
std::tuple<bool, Location, AstTypePack*> parseBindingList(TempVector<Binding>& result, bool allowDot3 = false);
|
||||
|
||||
AstType* parseOptionalTypeAnnotation();
|
||||
AstType* parseOptionalType();
|
||||
|
||||
// TypeList ::= TypeAnnotation [`,' TypeList]
|
||||
// ReturnType ::= TypeAnnotation | `(' TypeList `)'
|
||||
// TableProp ::= Name `:' TypeAnnotation
|
||||
// TableIndexer ::= `[' TypeAnnotation `]' `:' TypeAnnotation
|
||||
// TypeList ::= Type [`,' TypeList]
|
||||
// ReturnType ::= Type | `(' TypeList `)'
|
||||
// TableProp ::= Name `:' Type
|
||||
// TableIndexer ::= `[' Type `]' `:' Type
|
||||
// PropList ::= (TableProp | TableIndexer) [`,' PropList]
|
||||
// TypeAnnotation
|
||||
// Type
|
||||
// ::= Name
|
||||
// | `nil`
|
||||
// | `{' [PropList] `}'
|
||||
@ -171,24 +171,25 @@ private:
|
||||
// Returns the variadic annotation, if it exists.
|
||||
AstTypePack* parseTypeList(TempVector<AstType*>& result, TempVector<std::optional<AstArgumentName>>& resultNames);
|
||||
|
||||
std::optional<AstTypeList> parseOptionalReturnTypeAnnotation();
|
||||
std::pair<Location, AstTypeList> parseReturnTypeAnnotation();
|
||||
std::optional<AstTypeList> parseOptionalReturnType();
|
||||
std::pair<Location, AstTypeList> parseReturnType();
|
||||
|
||||
AstTableIndexer* parseTableIndexerAnnotation();
|
||||
AstTableIndexer* parseTableIndexer();
|
||||
|
||||
AstTypeOrPack parseFunctionTypeAnnotation(bool allowPack);
|
||||
AstType* parseFunctionTypeAnnotationTail(const Lexeme& begin, AstArray<AstGenericType> generics, AstArray<AstGenericTypePack> genericPacks,
|
||||
AstArray<AstType*>& params, AstArray<std::optional<AstArgumentName>>& paramNames, AstTypePack* varargAnnotation);
|
||||
AstTypeOrPack parseFunctionType(bool allowPack);
|
||||
AstType* parseFunctionTypeTail(const Lexeme& begin, AstArray<AstGenericType> generics, AstArray<AstGenericTypePack> genericPacks,
|
||||
AstArray<AstType*> params, AstArray<std::optional<AstArgumentName>> paramNames, AstTypePack* varargAnnotation);
|
||||
|
||||
AstType* parseTableTypeAnnotation();
|
||||
AstTypeOrPack parseSimpleTypeAnnotation(bool allowPack);
|
||||
AstType* parseTableType();
|
||||
AstTypeOrPack parseSimpleType(bool allowPack);
|
||||
|
||||
AstTypeOrPack parseTypeOrPackAnnotation();
|
||||
AstType* parseTypeAnnotation(TempVector<AstType*>& parts, const Location& begin);
|
||||
AstType* parseTypeAnnotation();
|
||||
AstTypeOrPack parseTypeOrPack();
|
||||
AstType* parseType();
|
||||
|
||||
AstTypePack* parseTypePackAnnotation();
|
||||
AstTypePack* parseVariadicArgumentAnnotation();
|
||||
AstTypePack* parseTypePack();
|
||||
AstTypePack* parseVariadicArgumentTypePack();
|
||||
|
||||
AstType* parseTypeSuffix(AstType* type, const Location& begin);
|
||||
|
||||
static std::optional<AstExprUnary::Op> parseUnaryOp(const Lexeme& l);
|
||||
static std::optional<AstExprBinary::Op> parseBinaryOp(const Lexeme& l);
|
||||
@ -215,7 +216,7 @@ private:
|
||||
// primaryexp -> prefixexp { `.' NAME | `[' exp `]' | `:' NAME funcargs | funcargs }
|
||||
AstExpr* parsePrimaryExpr(bool asStatement);
|
||||
|
||||
// asexp -> simpleexp [`::' typeAnnotation]
|
||||
// asexp -> simpleexp [`::' Type]
|
||||
AstExpr* parseAssertionExpr();
|
||||
|
||||
// simpleexp -> NUMBER | STRING | NIL | true | false | ... | constructor | FUNCTION body | primaryexp
|
||||
@ -244,7 +245,7 @@ private:
|
||||
// `<' namelist `>'
|
||||
std::pair<AstArray<AstGenericType>, AstArray<AstGenericTypePack>> parseGenericTypeList(bool withDefaultValues);
|
||||
|
||||
// `<' typeAnnotation[, ...] `>'
|
||||
// `<' Type[, ...] `>'
|
||||
AstArray<AstTypeOrPack> parseTypeParams();
|
||||
|
||||
std::optional<AstArray<char>> parseCharArray();
|
||||
@ -302,13 +303,12 @@ private:
|
||||
AstStatError* reportStatError(const Location& location, const AstArray<AstExpr*>& expressions, const AstArray<AstStat*>& statements,
|
||||
const char* format, ...) LUAU_PRINTF_ATTR(5, 6);
|
||||
AstExprError* reportExprError(const Location& location, const AstArray<AstExpr*>& expressions, const char* format, ...) LUAU_PRINTF_ATTR(4, 5);
|
||||
AstTypeError* reportTypeAnnotationError(const Location& location, const AstArray<AstType*>& types, const char* format, ...)
|
||||
LUAU_PRINTF_ATTR(4, 5);
|
||||
AstTypeError* reportTypeError(const Location& location, const AstArray<AstType*>& types, const char* format, ...) LUAU_PRINTF_ATTR(4, 5);
|
||||
// `parseErrorLocation` is associated with the parser error
|
||||
// `astErrorLocation` is associated with the AstTypeError created
|
||||
// It can be useful to have different error locations so that the parse error can include the next lexeme, while the AstTypeError can precisely
|
||||
// define the location (possibly of zero size) where a type annotation is expected.
|
||||
AstTypeError* reportMissingTypeAnnotationError(const Location& parseErrorLocation, const Location& astErrorLocation, const char* format, ...)
|
||||
AstTypeError* reportMissingTypeError(const Location& parseErrorLocation, const Location& astErrorLocation, const char* format, ...)
|
||||
LUAU_PRINTF_ATTR(4, 5);
|
||||
|
||||
AstExpr* reportFunctionArgsError(AstExpr* func, bool self);
|
||||
@ -401,8 +401,8 @@ private:
|
||||
std::vector<Binding> scratchBinding;
|
||||
std::vector<AstLocal*> scratchLocal;
|
||||
std::vector<AstTableProp> scratchTableTypeProps;
|
||||
std::vector<AstType*> scratchAnnotation;
|
||||
std::vector<AstTypeOrPack> scratchTypeOrPackAnnotation;
|
||||
std::vector<AstType*> scratchType;
|
||||
std::vector<AstTypeOrPack> scratchTypeOrPack;
|
||||
std::vector<AstDeclaredClassProp> scratchDeclaredClassProps;
|
||||
std::vector<AstExprTable::Item> scratchItem;
|
||||
std::vector<AstArgumentName> scratchArgName;
|
||||
|
@ -130,7 +130,7 @@ void TempVector<T>::push_back(const T& item)
|
||||
size_++;
|
||||
}
|
||||
|
||||
static bool shouldParseTypePackAnnotation(Lexer& lexer)
|
||||
static bool shouldParseTypePack(Lexer& lexer)
|
||||
{
|
||||
if (lexer.current().type == Lexeme::Dot3)
|
||||
return true;
|
||||
@ -330,11 +330,12 @@ AstStat* Parser::parseStat()
|
||||
if (options.allowTypeAnnotations)
|
||||
{
|
||||
if (ident == "type")
|
||||
return parseTypeAlias(expr->location, /* exported =*/false);
|
||||
return parseTypeAlias(expr->location, /* exported= */ false);
|
||||
|
||||
if (ident == "export" && lexer.current().type == Lexeme::Name && AstName(lexer.current().name) == "type")
|
||||
{
|
||||
nextLexeme();
|
||||
return parseTypeAlias(expr->location, /* exported =*/true);
|
||||
return parseTypeAlias(expr->location, /* exported= */ true);
|
||||
}
|
||||
}
|
||||
|
||||
@ -742,7 +743,7 @@ AstStat* Parser::parseReturn()
|
||||
return allocator.alloc<AstStatReturn>(Location(start, end), copy(list));
|
||||
}
|
||||
|
||||
// type Name [`<' varlist `>'] `=' typeannotation
|
||||
// type Name [`<' varlist `>'] `=' Type
|
||||
AstStat* Parser::parseTypeAlias(const Location& start, bool exported)
|
||||
{
|
||||
// note: `type` token is already parsed for us, so we just need to parse the rest
|
||||
@ -757,7 +758,7 @@ AstStat* Parser::parseTypeAlias(const Location& start, bool exported)
|
||||
|
||||
expectAndConsume('=', "type alias");
|
||||
|
||||
AstType* type = parseTypeAnnotation();
|
||||
AstType* type = parseType();
|
||||
|
||||
return allocator.alloc<AstStatTypeAlias>(Location(start, type->location), name->name, name->location, generics, genericPacks, type, exported);
|
||||
}
|
||||
@ -789,16 +790,16 @@ AstDeclaredClassProp Parser::parseDeclaredClassMethod()
|
||||
|
||||
expectMatchAndConsume(')', matchParen);
|
||||
|
||||
AstTypeList retTypes = parseOptionalReturnTypeAnnotation().value_or(AstTypeList{copy<AstType*>(nullptr, 0), nullptr});
|
||||
AstTypeList retTypes = parseOptionalReturnType().value_or(AstTypeList{copy<AstType*>(nullptr, 0), nullptr});
|
||||
Location end = lexer.current().location;
|
||||
|
||||
TempVector<AstType*> vars(scratchAnnotation);
|
||||
TempVector<AstType*> vars(scratchType);
|
||||
TempVector<std::optional<AstArgumentName>> varNames(scratchOptArgName);
|
||||
|
||||
if (args.size() == 0 || args[0].name.name != "self" || args[0].annotation != nullptr)
|
||||
{
|
||||
return AstDeclaredClassProp{
|
||||
fnName.name, reportTypeAnnotationError(Location(start, end), {}, "'self' must be present as the unannotated first parameter"), true};
|
||||
fnName.name, reportTypeError(Location(start, end), {}, "'self' must be present as the unannotated first parameter"), true};
|
||||
}
|
||||
|
||||
// Skip the first index.
|
||||
@ -809,7 +810,7 @@ AstDeclaredClassProp Parser::parseDeclaredClassMethod()
|
||||
if (args[i].annotation)
|
||||
vars.push_back(args[i].annotation);
|
||||
else
|
||||
vars.push_back(reportTypeAnnotationError(Location(start, end), {}, "All declaration parameters aside from 'self' must be annotated"));
|
||||
vars.push_back(reportTypeError(Location(start, end), {}, "All declaration parameters aside from 'self' must be annotated"));
|
||||
}
|
||||
|
||||
if (vararg && !varargAnnotation)
|
||||
@ -846,10 +847,10 @@ AstStat* Parser::parseDeclaration(const Location& start)
|
||||
|
||||
expectMatchAndConsume(')', matchParen);
|
||||
|
||||
AstTypeList retTypes = parseOptionalReturnTypeAnnotation().value_or(AstTypeList{copy<AstType*>(nullptr, 0)});
|
||||
AstTypeList retTypes = parseOptionalReturnType().value_or(AstTypeList{copy<AstType*>(nullptr, 0)});
|
||||
Location end = lexer.current().location;
|
||||
|
||||
TempVector<AstType*> vars(scratchAnnotation);
|
||||
TempVector<AstType*> vars(scratchType);
|
||||
TempVector<AstArgumentName> varNames(scratchArgName);
|
||||
|
||||
for (size_t i = 0; i < args.size(); ++i)
|
||||
@ -898,7 +899,7 @@ AstStat* Parser::parseDeclaration(const Location& start)
|
||||
|
||||
expectMatchAndConsume(']', begin);
|
||||
expectAndConsume(':', "property type annotation");
|
||||
AstType* type = parseTypeAnnotation();
|
||||
AstType* type = parseType();
|
||||
|
||||
// TODO: since AstName conains a char*, it can't contain null
|
||||
bool containsNull = chars && (strnlen(chars->data, chars->size) < chars->size);
|
||||
@ -912,7 +913,7 @@ AstStat* Parser::parseDeclaration(const Location& start)
|
||||
{
|
||||
Name propName = parseName("property name");
|
||||
expectAndConsume(':', "property type annotation");
|
||||
AstType* propType = parseTypeAnnotation();
|
||||
AstType* propType = parseType();
|
||||
props.push_back(AstDeclaredClassProp{propName.name, propType, false});
|
||||
}
|
||||
}
|
||||
@ -926,7 +927,7 @@ AstStat* Parser::parseDeclaration(const Location& start)
|
||||
{
|
||||
expectAndConsume(':', "global variable declaration");
|
||||
|
||||
AstType* type = parseTypeAnnotation();
|
||||
AstType* type = parseType();
|
||||
return allocator.alloc<AstStatDeclareGlobal>(Location(start, type->location), globalName->name, type);
|
||||
}
|
||||
else
|
||||
@ -1027,7 +1028,7 @@ std::pair<AstExprFunction*, AstLocal*> Parser::parseFunctionBody(
|
||||
|
||||
expectMatchAndConsume(')', matchParen, true);
|
||||
|
||||
std::optional<AstTypeList> typelist = parseOptionalReturnTypeAnnotation();
|
||||
std::optional<AstTypeList> typelist = parseOptionalReturnType();
|
||||
|
||||
AstLocal* funLocal = nullptr;
|
||||
|
||||
@ -1085,7 +1086,7 @@ Parser::Binding Parser::parseBinding()
|
||||
if (!name)
|
||||
name = Name(nameError, lexer.current().location);
|
||||
|
||||
AstType* annotation = parseOptionalTypeAnnotation();
|
||||
AstType* annotation = parseOptionalType();
|
||||
|
||||
return Binding(*name, annotation);
|
||||
}
|
||||
@ -1104,7 +1105,7 @@ std::tuple<bool, Location, AstTypePack*> Parser::parseBindingList(TempVector<Bin
|
||||
if (lexer.current().type == ':')
|
||||
{
|
||||
nextLexeme();
|
||||
tailAnnotation = parseVariadicArgumentAnnotation();
|
||||
tailAnnotation = parseVariadicArgumentTypePack();
|
||||
}
|
||||
|
||||
return {true, varargLocation, tailAnnotation};
|
||||
@ -1120,24 +1121,24 @@ std::tuple<bool, Location, AstTypePack*> Parser::parseBindingList(TempVector<Bin
|
||||
return {false, Location(), nullptr};
|
||||
}
|
||||
|
||||
AstType* Parser::parseOptionalTypeAnnotation()
|
||||
AstType* Parser::parseOptionalType()
|
||||
{
|
||||
if (options.allowTypeAnnotations && lexer.current().type == ':')
|
||||
{
|
||||
nextLexeme();
|
||||
return parseTypeAnnotation();
|
||||
return parseType();
|
||||
}
|
||||
else
|
||||
return nullptr;
|
||||
}
|
||||
|
||||
// TypeList ::= TypeAnnotation [`,' TypeList] | ...TypeAnnotation
|
||||
// TypeList ::= Type [`,' TypeList] | ...Type
|
||||
AstTypePack* Parser::parseTypeList(TempVector<AstType*>& result, TempVector<std::optional<AstArgumentName>>& resultNames)
|
||||
{
|
||||
while (true)
|
||||
{
|
||||
if (shouldParseTypePackAnnotation(lexer))
|
||||
return parseTypePackAnnotation();
|
||||
if (shouldParseTypePack(lexer))
|
||||
return parseTypePack();
|
||||
|
||||
if (lexer.current().type == Lexeme::Name && lexer.lookahead().type == ':')
|
||||
{
|
||||
@ -1156,7 +1157,7 @@ AstTypePack* Parser::parseTypeList(TempVector<AstType*>& result, TempVector<std:
|
||||
resultNames.push_back({});
|
||||
}
|
||||
|
||||
result.push_back(parseTypeAnnotation());
|
||||
result.push_back(parseType());
|
||||
if (lexer.current().type != ',')
|
||||
break;
|
||||
|
||||
@ -1172,7 +1173,7 @@ AstTypePack* Parser::parseTypeList(TempVector<AstType*>& result, TempVector<std:
|
||||
return nullptr;
|
||||
}
|
||||
|
||||
std::optional<AstTypeList> Parser::parseOptionalReturnTypeAnnotation()
|
||||
std::optional<AstTypeList> Parser::parseOptionalReturnType()
|
||||
{
|
||||
if (options.allowTypeAnnotations && (lexer.current().type == ':' || lexer.current().type == Lexeme::SkinnyArrow))
|
||||
{
|
||||
@ -1183,7 +1184,7 @@ std::optional<AstTypeList> Parser::parseOptionalReturnTypeAnnotation()
|
||||
|
||||
unsigned int oldRecursionCount = recursionCounter;
|
||||
|
||||
auto [_location, result] = parseReturnTypeAnnotation();
|
||||
auto [_location, result] = parseReturnType();
|
||||
|
||||
// At this point, if we find a , character, it indicates that there are multiple return types
|
||||
// in this type annotation, but the list wasn't wrapped in parentheses.
|
||||
@ -1202,27 +1203,27 @@ std::optional<AstTypeList> Parser::parseOptionalReturnTypeAnnotation()
|
||||
return std::nullopt;
|
||||
}
|
||||
|
||||
// ReturnType ::= TypeAnnotation | `(' TypeList `)'
|
||||
std::pair<Location, AstTypeList> Parser::parseReturnTypeAnnotation()
|
||||
// ReturnType ::= Type | `(' TypeList `)'
|
||||
std::pair<Location, AstTypeList> Parser::parseReturnType()
|
||||
{
|
||||
incrementRecursionCounter("type annotation");
|
||||
|
||||
TempVector<AstType*> result(scratchAnnotation);
|
||||
TempVector<std::optional<AstArgumentName>> resultNames(scratchOptArgName);
|
||||
AstTypePack* varargAnnotation = nullptr;
|
||||
|
||||
Lexeme begin = lexer.current();
|
||||
|
||||
if (lexer.current().type != '(')
|
||||
{
|
||||
if (shouldParseTypePackAnnotation(lexer))
|
||||
varargAnnotation = parseTypePackAnnotation();
|
||||
if (shouldParseTypePack(lexer))
|
||||
{
|
||||
AstTypePack* typePack = parseTypePack();
|
||||
|
||||
return {typePack->location, AstTypeList{{}, typePack}};
|
||||
}
|
||||
else
|
||||
result.push_back(parseTypeAnnotation());
|
||||
{
|
||||
AstType* type = parseType();
|
||||
|
||||
Location resultLocation = result.size() == 0 ? varargAnnotation->location : result[0]->location;
|
||||
|
||||
return {resultLocation, AstTypeList{copy(result), varargAnnotation}};
|
||||
return {type->location, AstTypeList{copy(&type, 1), nullptr}};
|
||||
}
|
||||
}
|
||||
|
||||
nextLexeme();
|
||||
@ -1231,6 +1232,10 @@ std::pair<Location, AstTypeList> Parser::parseReturnTypeAnnotation()
|
||||
|
||||
matchRecoveryStopOnToken[Lexeme::SkinnyArrow]++;
|
||||
|
||||
TempVector<AstType*> result(scratchType);
|
||||
TempVector<std::optional<AstArgumentName>> resultNames(scratchOptArgName);
|
||||
AstTypePack* varargAnnotation = nullptr;
|
||||
|
||||
// possibly () -> ReturnType
|
||||
if (lexer.current().type != ')')
|
||||
varargAnnotation = parseTypeList(result, resultNames);
|
||||
@ -1246,9 +1251,9 @@ std::pair<Location, AstTypeList> Parser::parseReturnTypeAnnotation()
|
||||
// If it turns out that it's just '(A)', it's possible that there are unions/intersections to follow, so fold over it.
|
||||
if (result.size() == 1)
|
||||
{
|
||||
AstType* returnType = parseTypeAnnotation(result, innerBegin);
|
||||
AstType* returnType = parseTypeSuffix(result[0], innerBegin);
|
||||
|
||||
// If parseTypeAnnotation parses nothing, then returnType->location.end only points at the last non-type-pack
|
||||
// If parseType parses nothing, then returnType->location.end only points at the last non-type-pack
|
||||
// type to successfully parse. We need the span of the whole annotation.
|
||||
Position endPos = result.size() == 1 ? location.end : returnType->location.end;
|
||||
|
||||
@ -1258,39 +1263,33 @@ std::pair<Location, AstTypeList> Parser::parseReturnTypeAnnotation()
|
||||
return {location, AstTypeList{copy(result), varargAnnotation}};
|
||||
}
|
||||
|
||||
AstArray<AstGenericType> generics{nullptr, 0};
|
||||
AstArray<AstGenericTypePack> genericPacks{nullptr, 0};
|
||||
AstArray<AstType*> types = copy(result);
|
||||
AstArray<std::optional<AstArgumentName>> names = copy(resultNames);
|
||||
AstType* tail = parseFunctionTypeTail(begin, {}, {}, copy(result), copy(resultNames), varargAnnotation);
|
||||
|
||||
TempVector<AstType*> fallbackReturnTypes(scratchAnnotation);
|
||||
fallbackReturnTypes.push_back(parseFunctionTypeAnnotationTail(begin, generics, genericPacks, types, names, varargAnnotation));
|
||||
|
||||
return {Location{location, fallbackReturnTypes[0]->location}, AstTypeList{copy(fallbackReturnTypes), varargAnnotation}};
|
||||
return {Location{location, tail->location}, AstTypeList{copy(&tail, 1), varargAnnotation}};
|
||||
}
|
||||
|
||||
// TableIndexer ::= `[' TypeAnnotation `]' `:' TypeAnnotation
|
||||
AstTableIndexer* Parser::parseTableIndexerAnnotation()
|
||||
// TableIndexer ::= `[' Type `]' `:' Type
|
||||
AstTableIndexer* Parser::parseTableIndexer()
|
||||
{
|
||||
const Lexeme begin = lexer.current();
|
||||
nextLexeme(); // [
|
||||
|
||||
AstType* index = parseTypeAnnotation();
|
||||
AstType* index = parseType();
|
||||
|
||||
expectMatchAndConsume(']', begin);
|
||||
|
||||
expectAndConsume(':', "table field");
|
||||
|
||||
AstType* result = parseTypeAnnotation();
|
||||
AstType* result = parseType();
|
||||
|
||||
return allocator.alloc<AstTableIndexer>(AstTableIndexer{index, result, Location(begin.location, result->location)});
|
||||
}
|
||||
|
||||
// TableProp ::= Name `:' TypeAnnotation
|
||||
// TableProp ::= Name `:' Type
|
||||
// TablePropOrIndexer ::= TableProp | TableIndexer
|
||||
// PropList ::= TablePropOrIndexer {fieldsep TablePropOrIndexer} [fieldsep]
|
||||
// TableTypeAnnotation ::= `{' PropList `}'
|
||||
AstType* Parser::parseTableTypeAnnotation()
|
||||
// TableType ::= `{' PropList `}'
|
||||
AstType* Parser::parseTableType()
|
||||
{
|
||||
incrementRecursionCounter("type annotation");
|
||||
|
||||
@ -1313,7 +1312,7 @@ AstType* Parser::parseTableTypeAnnotation()
|
||||
expectMatchAndConsume(']', begin);
|
||||
expectAndConsume(':', "table field");
|
||||
|
||||
AstType* type = parseTypeAnnotation();
|
||||
AstType* type = parseType();
|
||||
|
||||
// TODO: since AstName contains a char*, it can't contain null
|
||||
bool containsNull = chars && (strnlen(chars->data, chars->size) < chars->size);
|
||||
@ -1329,19 +1328,19 @@ AstType* Parser::parseTableTypeAnnotation()
|
||||
{
|
||||
// maybe we don't need to parse the entire badIndexer...
|
||||
// however, we either have { or [ to lint, not the entire table type or the bad indexer.
|
||||
AstTableIndexer* badIndexer = parseTableIndexerAnnotation();
|
||||
AstTableIndexer* badIndexer = parseTableIndexer();
|
||||
|
||||
// we lose all additional indexer expressions from the AST after error recovery here
|
||||
report(badIndexer->location, "Cannot have more than one table indexer");
|
||||
}
|
||||
else
|
||||
{
|
||||
indexer = parseTableIndexerAnnotation();
|
||||
indexer = parseTableIndexer();
|
||||
}
|
||||
}
|
||||
else if (props.empty() && !indexer && !(lexer.current().type == Lexeme::Name && lexer.lookahead().type == ':'))
|
||||
{
|
||||
AstType* type = parseTypeAnnotation();
|
||||
AstType* type = parseType();
|
||||
|
||||
// array-like table type: {T} desugars into {[number]: T}
|
||||
AstType* index = allocator.alloc<AstTypeReference>(type->location, std::nullopt, nameNumber);
|
||||
@ -1358,7 +1357,7 @@ AstType* Parser::parseTableTypeAnnotation()
|
||||
|
||||
expectAndConsume(':', "table field");
|
||||
|
||||
AstType* type = parseTypeAnnotation();
|
||||
AstType* type = parseType();
|
||||
|
||||
props.push_back({name->name, name->location, type});
|
||||
}
|
||||
@ -1382,9 +1381,9 @@ AstType* Parser::parseTableTypeAnnotation()
|
||||
return allocator.alloc<AstTypeTable>(Location(start, end), copy(props), indexer);
|
||||
}
|
||||
|
||||
// ReturnType ::= TypeAnnotation | `(' TypeList `)'
|
||||
// FunctionTypeAnnotation ::= [`<' varlist `>'] `(' [TypeList] `)' `->` ReturnType
|
||||
AstTypeOrPack Parser::parseFunctionTypeAnnotation(bool allowPack)
|
||||
// ReturnType ::= Type | `(' TypeList `)'
|
||||
// FunctionType ::= [`<' varlist `>'] `(' [TypeList] `)' `->` ReturnType
|
||||
AstTypeOrPack Parser::parseFunctionType(bool allowPack)
|
||||
{
|
||||
incrementRecursionCounter("type annotation");
|
||||
|
||||
@ -1400,7 +1399,7 @@ AstTypeOrPack Parser::parseFunctionTypeAnnotation(bool allowPack)
|
||||
|
||||
matchRecoveryStopOnToken[Lexeme::SkinnyArrow]++;
|
||||
|
||||
TempVector<AstType*> params(scratchAnnotation);
|
||||
TempVector<AstType*> params(scratchType);
|
||||
TempVector<std::optional<AstArgumentName>> names(scratchOptArgName);
|
||||
AstTypePack* varargAnnotation = nullptr;
|
||||
|
||||
@ -1432,12 +1431,11 @@ AstTypeOrPack Parser::parseFunctionTypeAnnotation(bool allowPack)
|
||||
|
||||
AstArray<std::optional<AstArgumentName>> paramNames = copy(names);
|
||||
|
||||
return {parseFunctionTypeAnnotationTail(begin, generics, genericPacks, paramTypes, paramNames, varargAnnotation), {}};
|
||||
return {parseFunctionTypeTail(begin, generics, genericPacks, paramTypes, paramNames, varargAnnotation), {}};
|
||||
}
|
||||
|
||||
AstType* Parser::parseFunctionTypeAnnotationTail(const Lexeme& begin, AstArray<AstGenericType> generics, AstArray<AstGenericTypePack> genericPacks,
|
||||
AstArray<AstType*>& params, AstArray<std::optional<AstArgumentName>>& paramNames, AstTypePack* varargAnnotation)
|
||||
|
||||
AstType* Parser::parseFunctionTypeTail(const Lexeme& begin, AstArray<AstGenericType> generics, AstArray<AstGenericTypePack> genericPacks,
|
||||
AstArray<AstType*> params, AstArray<std::optional<AstArgumentName>> paramNames, AstTypePack* varargAnnotation)
|
||||
{
|
||||
incrementRecursionCounter("type annotation");
|
||||
|
||||
@ -1458,21 +1456,22 @@ AstType* Parser::parseFunctionTypeAnnotationTail(const Lexeme& begin, AstArray<A
|
||||
expectAndConsume(Lexeme::SkinnyArrow, "function type");
|
||||
}
|
||||
|
||||
auto [endLocation, returnTypeList] = parseReturnTypeAnnotation();
|
||||
auto [endLocation, returnTypeList] = parseReturnType();
|
||||
|
||||
AstTypeList paramTypes = AstTypeList{params, varargAnnotation};
|
||||
return allocator.alloc<AstTypeFunction>(Location(begin.location, endLocation), generics, genericPacks, paramTypes, paramNames, returnTypeList);
|
||||
}
|
||||
|
||||
// typeannotation ::=
|
||||
// Type ::=
|
||||
// nil |
|
||||
// Name[`.' Name] [`<' namelist `>'] |
|
||||
// `{' [PropList] `}' |
|
||||
// `(' [TypeList] `)' `->` ReturnType
|
||||
// `typeof` typeannotation
|
||||
AstType* Parser::parseTypeAnnotation(TempVector<AstType*>& parts, const Location& begin)
|
||||
// `typeof` Type
|
||||
AstType* Parser::parseTypeSuffix(AstType* type, const Location& begin)
|
||||
{
|
||||
LUAU_ASSERT(!parts.empty());
|
||||
TempVector<AstType*> parts(scratchType);
|
||||
parts.push_back(type);
|
||||
|
||||
incrementRecursionCounter("type annotation");
|
||||
|
||||
@ -1487,7 +1486,7 @@ AstType* Parser::parseTypeAnnotation(TempVector<AstType*>& parts, const Location
|
||||
if (c == '|')
|
||||
{
|
||||
nextLexeme();
|
||||
parts.push_back(parseSimpleTypeAnnotation(/* allowPack= */ false).type);
|
||||
parts.push_back(parseSimpleType(/* allowPack= */ false).type);
|
||||
isUnion = true;
|
||||
}
|
||||
else if (c == '?')
|
||||
@ -1500,7 +1499,7 @@ AstType* Parser::parseTypeAnnotation(TempVector<AstType*>& parts, const Location
|
||||
else if (c == '&')
|
||||
{
|
||||
nextLexeme();
|
||||
parts.push_back(parseSimpleTypeAnnotation(/* allowPack= */ false).type);
|
||||
parts.push_back(parseSimpleType(/* allowPack= */ false).type);
|
||||
isIntersection = true;
|
||||
}
|
||||
else if (c == Lexeme::Dot3)
|
||||
@ -1513,11 +1512,11 @@ AstType* Parser::parseTypeAnnotation(TempVector<AstType*>& parts, const Location
|
||||
}
|
||||
|
||||
if (parts.size() == 1)
|
||||
return parts[0];
|
||||
return type;
|
||||
|
||||
if (isUnion && isIntersection)
|
||||
{
|
||||
return reportTypeAnnotationError(Location(begin, parts.back()->location), copy(parts),
|
||||
return reportTypeError(Location(begin, parts.back()->location), copy(parts),
|
||||
"Mixing union and intersection types is not allowed; consider wrapping in parentheses.");
|
||||
}
|
||||
|
||||
@ -1533,16 +1532,14 @@ AstType* Parser::parseTypeAnnotation(TempVector<AstType*>& parts, const Location
|
||||
ParseError::raise(begin, "Composite type was not an intersection or union.");
|
||||
}
|
||||
|
||||
AstTypeOrPack Parser::parseTypeOrPackAnnotation()
|
||||
AstTypeOrPack Parser::parseTypeOrPack()
|
||||
{
|
||||
unsigned int oldRecursionCount = recursionCounter;
|
||||
incrementRecursionCounter("type annotation");
|
||||
|
||||
Location begin = lexer.current().location;
|
||||
|
||||
TempVector<AstType*> parts(scratchAnnotation);
|
||||
|
||||
auto [type, typePack] = parseSimpleTypeAnnotation(/* allowPack= */ true);
|
||||
auto [type, typePack] = parseSimpleType(/* allowPack= */ true);
|
||||
|
||||
if (typePack)
|
||||
{
|
||||
@ -1550,31 +1547,28 @@ AstTypeOrPack Parser::parseTypeOrPackAnnotation()
|
||||
return {{}, typePack};
|
||||
}
|
||||
|
||||
parts.push_back(type);
|
||||
|
||||
recursionCounter = oldRecursionCount;
|
||||
|
||||
return {parseTypeAnnotation(parts, begin), {}};
|
||||
return {parseTypeSuffix(type, begin), {}};
|
||||
}
|
||||
|
||||
AstType* Parser::parseTypeAnnotation()
|
||||
AstType* Parser::parseType()
|
||||
{
|
||||
unsigned int oldRecursionCount = recursionCounter;
|
||||
incrementRecursionCounter("type annotation");
|
||||
|
||||
Location begin = lexer.current().location;
|
||||
|
||||
TempVector<AstType*> parts(scratchAnnotation);
|
||||
parts.push_back(parseSimpleTypeAnnotation(/* allowPack= */ false).type);
|
||||
AstType* type = parseSimpleType(/* allowPack= */ false).type;
|
||||
|
||||
recursionCounter = oldRecursionCount;
|
||||
|
||||
return parseTypeAnnotation(parts, begin);
|
||||
return parseTypeSuffix(type, begin);
|
||||
}
|
||||
|
||||
// typeannotation ::= nil | Name[`.' Name] [ `<' typeannotation [`,' ...] `>' ] | `typeof' `(' expr `)' | `{' [PropList] `}'
|
||||
// Type ::= nil | Name[`.' Name] [ `<' Type [`,' ...] `>' ] | `typeof' `(' expr `)' | `{' [PropList] `}'
|
||||
// | [`<' varlist `>'] `(' [TypeList] `)' `->` ReturnType
|
||||
AstTypeOrPack Parser::parseSimpleTypeAnnotation(bool allowPack)
|
||||
AstTypeOrPack Parser::parseSimpleType(bool allowPack)
|
||||
{
|
||||
incrementRecursionCounter("type annotation");
|
||||
|
||||
@ -1603,18 +1597,18 @@ AstTypeOrPack Parser::parseSimpleTypeAnnotation(bool allowPack)
|
||||
return {allocator.alloc<AstTypeSingletonString>(start, svalue)};
|
||||
}
|
||||
else
|
||||
return {reportTypeAnnotationError(start, {}, "String literal contains malformed escape sequence")};
|
||||
return {reportTypeError(start, {}, "String literal contains malformed escape sequence")};
|
||||
}
|
||||
else if (lexer.current().type == Lexeme::InterpStringBegin || lexer.current().type == Lexeme::InterpStringSimple)
|
||||
{
|
||||
parseInterpString();
|
||||
|
||||
return {reportTypeAnnotationError(start, {}, "Interpolated string literals cannot be used as types")};
|
||||
return {reportTypeError(start, {}, "Interpolated string literals cannot be used as types")};
|
||||
}
|
||||
else if (lexer.current().type == Lexeme::BrokenString)
|
||||
{
|
||||
nextLexeme();
|
||||
return {reportTypeAnnotationError(start, {}, "Malformed string")};
|
||||
return {reportTypeError(start, {}, "Malformed string")};
|
||||
}
|
||||
else if (lexer.current().type == Lexeme::Name)
|
||||
{
|
||||
@ -1663,17 +1657,17 @@ AstTypeOrPack Parser::parseSimpleTypeAnnotation(bool allowPack)
|
||||
}
|
||||
else if (lexer.current().type == '{')
|
||||
{
|
||||
return {parseTableTypeAnnotation(), {}};
|
||||
return {parseTableType(), {}};
|
||||
}
|
||||
else if (lexer.current().type == '(' || lexer.current().type == '<')
|
||||
{
|
||||
return parseFunctionTypeAnnotation(allowPack);
|
||||
return parseFunctionType(allowPack);
|
||||
}
|
||||
else if (lexer.current().type == Lexeme::ReservedFunction)
|
||||
{
|
||||
nextLexeme();
|
||||
|
||||
return {reportTypeAnnotationError(start, {},
|
||||
return {reportTypeError(start, {},
|
||||
"Using 'function' as a type annotation is not supported, consider replacing with a function type annotation e.g. '(...any) -> "
|
||||
"...any'"),
|
||||
{}};
|
||||
@ -1685,12 +1679,11 @@ AstTypeOrPack Parser::parseSimpleTypeAnnotation(bool allowPack)
|
||||
// The parse error includes the next lexeme to make it easier to display where the error is (e.g. in an IDE or a CLI error message).
|
||||
// Including the current lexeme also makes the parse error consistent with other parse errors returned by Luau.
|
||||
Location parseErrorLocation(lexer.previousLocation().end, start.end);
|
||||
return {
|
||||
reportMissingTypeAnnotationError(parseErrorLocation, astErrorlocation, "Expected type, got %s", lexer.current().toString().c_str()), {}};
|
||||
return {reportMissingTypeError(parseErrorLocation, astErrorlocation, "Expected type, got %s", lexer.current().toString().c_str()), {}};
|
||||
}
|
||||
}
|
||||
|
||||
AstTypePack* Parser::parseVariadicArgumentAnnotation()
|
||||
AstTypePack* Parser::parseVariadicArgumentTypePack()
|
||||
{
|
||||
// Generic: a...
|
||||
if (lexer.current().type == Lexeme::Name && lexer.lookahead().type == Lexeme::Dot3)
|
||||
@ -1705,19 +1698,19 @@ AstTypePack* Parser::parseVariadicArgumentAnnotation()
|
||||
// Variadic: T
|
||||
else
|
||||
{
|
||||
AstType* variadicAnnotation = parseTypeAnnotation();
|
||||
AstType* variadicAnnotation = parseType();
|
||||
return allocator.alloc<AstTypePackVariadic>(variadicAnnotation->location, variadicAnnotation);
|
||||
}
|
||||
}
|
||||
|
||||
AstTypePack* Parser::parseTypePackAnnotation()
|
||||
AstTypePack* Parser::parseTypePack()
|
||||
{
|
||||
// Variadic: ...T
|
||||
if (lexer.current().type == Lexeme::Dot3)
|
||||
{
|
||||
Location start = lexer.current().location;
|
||||
nextLexeme();
|
||||
AstType* varargTy = parseTypeAnnotation();
|
||||
AstType* varargTy = parseType();
|
||||
return allocator.alloc<AstTypePackVariadic>(Location(start, varargTy->location), varargTy);
|
||||
}
|
||||
// Generic: a...
|
||||
@ -2054,7 +2047,7 @@ AstExpr* Parser::parsePrimaryExpr(bool asStatement)
|
||||
return expr;
|
||||
}
|
||||
|
||||
// asexp -> simpleexp [`::' typeannotation]
|
||||
// asexp -> simpleexp [`::' Type]
|
||||
AstExpr* Parser::parseAssertionExpr()
|
||||
{
|
||||
Location start = lexer.current().location;
|
||||
@ -2063,7 +2056,7 @@ AstExpr* Parser::parseAssertionExpr()
|
||||
if (options.allowTypeAnnotations && lexer.current().type == Lexeme::DoubleColon)
|
||||
{
|
||||
nextLexeme();
|
||||
AstType* annotation = parseTypeAnnotation();
|
||||
AstType* annotation = parseType();
|
||||
return allocator.alloc<AstExprTypeAssertion>(Location(start, annotation->location), expr, annotation);
|
||||
}
|
||||
else
|
||||
@ -2455,15 +2448,15 @@ std::pair<AstArray<AstGenericType>, AstArray<AstGenericTypePack>> Parser::parseG
|
||||
|
||||
Lexeme packBegin = lexer.current();
|
||||
|
||||
if (shouldParseTypePackAnnotation(lexer))
|
||||
if (shouldParseTypePack(lexer))
|
||||
{
|
||||
AstTypePack* typePack = parseTypePackAnnotation();
|
||||
AstTypePack* typePack = parseTypePack();
|
||||
|
||||
namePacks.push_back({name, nameLocation, typePack});
|
||||
}
|
||||
else if (!FFlag::LuauParserErrorsOnMissingDefaultTypePackArgument && lexer.current().type == '(')
|
||||
{
|
||||
auto [type, typePack] = parseTypeOrPackAnnotation();
|
||||
auto [type, typePack] = parseTypeOrPack();
|
||||
|
||||
if (type)
|
||||
report(Location(packBegin.location.begin, lexer.previousLocation().end), "Expected type pack after '=', got type");
|
||||
@ -2472,7 +2465,7 @@ std::pair<AstArray<AstGenericType>, AstArray<AstGenericTypePack>> Parser::parseG
|
||||
}
|
||||
else if (FFlag::LuauParserErrorsOnMissingDefaultTypePackArgument)
|
||||
{
|
||||
auto [type, typePack] = parseTypeOrPackAnnotation();
|
||||
auto [type, typePack] = parseTypeOrPack();
|
||||
|
||||
if (type)
|
||||
report(type->location, "Expected type pack after '=', got type");
|
||||
@ -2495,7 +2488,7 @@ std::pair<AstArray<AstGenericType>, AstArray<AstGenericTypePack>> Parser::parseG
|
||||
seenDefault = true;
|
||||
nextLexeme();
|
||||
|
||||
AstType* defaultType = parseTypeAnnotation();
|
||||
AstType* defaultType = parseType();
|
||||
|
||||
names.push_back({name, nameLocation, defaultType});
|
||||
}
|
||||
@ -2532,7 +2525,7 @@ std::pair<AstArray<AstGenericType>, AstArray<AstGenericTypePack>> Parser::parseG
|
||||
|
||||
AstArray<AstTypeOrPack> Parser::parseTypeParams()
|
||||
{
|
||||
TempVector<AstTypeOrPack> parameters{scratchTypeOrPackAnnotation};
|
||||
TempVector<AstTypeOrPack> parameters{scratchTypeOrPack};
|
||||
|
||||
if (lexer.current().type == '<')
|
||||
{
|
||||
@ -2541,15 +2534,15 @@ AstArray<AstTypeOrPack> Parser::parseTypeParams()
|
||||
|
||||
while (true)
|
||||
{
|
||||
if (shouldParseTypePackAnnotation(lexer))
|
||||
if (shouldParseTypePack(lexer))
|
||||
{
|
||||
AstTypePack* typePack = parseTypePackAnnotation();
|
||||
AstTypePack* typePack = parseTypePack();
|
||||
|
||||
parameters.push_back({{}, typePack});
|
||||
}
|
||||
else if (lexer.current().type == '(')
|
||||
{
|
||||
auto [type, typePack] = parseTypeOrPackAnnotation();
|
||||
auto [type, typePack] = parseTypeOrPack();
|
||||
|
||||
if (typePack)
|
||||
parameters.push_back({{}, typePack});
|
||||
@ -2562,7 +2555,7 @@ AstArray<AstTypeOrPack> Parser::parseTypeParams()
|
||||
}
|
||||
else
|
||||
{
|
||||
parameters.push_back({parseTypeAnnotation(), {}});
|
||||
parameters.push_back({parseType(), {}});
|
||||
}
|
||||
|
||||
if (lexer.current().type == ',')
|
||||
@ -3018,7 +3011,7 @@ AstExprError* Parser::reportExprError(const Location& location, const AstArray<A
|
||||
return allocator.alloc<AstExprError>(location, expressions, unsigned(parseErrors.size() - 1));
|
||||
}
|
||||
|
||||
AstTypeError* Parser::reportTypeAnnotationError(const Location& location, const AstArray<AstType*>& types, const char* format, ...)
|
||||
AstTypeError* Parser::reportTypeError(const Location& location, const AstArray<AstType*>& types, const char* format, ...)
|
||||
{
|
||||
va_list args;
|
||||
va_start(args, format);
|
||||
@ -3028,7 +3021,7 @@ AstTypeError* Parser::reportTypeAnnotationError(const Location& location, const
|
||||
return allocator.alloc<AstTypeError>(location, types, false, unsigned(parseErrors.size() - 1));
|
||||
}
|
||||
|
||||
AstTypeError* Parser::reportMissingTypeAnnotationError(const Location& parseErrorLocation, const Location& astErrorLocation, const char* format, ...)
|
||||
AstTypeError* Parser::reportMissingTypeError(const Location& parseErrorLocation, const Location& astErrorLocation, const char* format, ...)
|
||||
{
|
||||
va_list args;
|
||||
va_start(args, format);
|
||||
|
@ -14,6 +14,7 @@
|
||||
#endif
|
||||
|
||||
LUAU_FASTFLAG(DebugLuauTimeTracing)
|
||||
LUAU_FASTFLAG(LuauLintInTypecheck)
|
||||
|
||||
enum class ReportFormat
|
||||
{
|
||||
@ -80,7 +81,7 @@ static bool analyzeFile(Luau::Frontend& frontend, const char* name, ReportFormat
|
||||
for (auto& error : cr.errors)
|
||||
reportError(frontend, format, error);
|
||||
|
||||
Luau::LintResult lr = frontend.lint(name);
|
||||
Luau::LintResult lr = FFlag::LuauLintInTypecheck ? cr.lintResult : frontend.lint_DEPRECATED(name);
|
||||
|
||||
std::string humanReadableName = frontend.fileResolver->getHumanReadableModuleName(name);
|
||||
for (auto& error : lr.errors)
|
||||
@ -263,6 +264,7 @@ int main(int argc, char** argv)
|
||||
|
||||
Luau::FrontendOptions frontendOptions;
|
||||
frontendOptions.retainFullTypeGraphs = annotate;
|
||||
frontendOptions.runLintChecks = FFlag::LuauLintInTypecheck;
|
||||
|
||||
CliFileResolver fileResolver;
|
||||
CliConfigResolver configResolver(mode);
|
||||
|
@ -93,6 +93,13 @@ public:
|
||||
// Assigns label position to the current location
|
||||
void setLabel(Label& label);
|
||||
|
||||
// Extracts code offset (in bytes) from label
|
||||
uint32_t getLabelOffset(const Label& label)
|
||||
{
|
||||
LUAU_ASSERT(label.location != ~0u);
|
||||
return label.location * 4;
|
||||
}
|
||||
|
||||
void logAppend(const char* fmt, ...) LUAU_PRINTF_ATTR(2, 3);
|
||||
|
||||
uint32_t getCodeSize() const;
|
||||
|
@ -155,6 +155,13 @@ public:
|
||||
// Assigns label position to the current location
|
||||
void setLabel(Label& label);
|
||||
|
||||
// Extracts code offset (in bytes) from label
|
||||
uint32_t getLabelOffset(const Label& label)
|
||||
{
|
||||
LUAU_ASSERT(label.location != ~0u);
|
||||
return label.location;
|
||||
}
|
||||
|
||||
// Constant allocation (uses rip-relative addressing)
|
||||
OperandX64 i64(int64_t value);
|
||||
OperandX64 f32(float value);
|
||||
|
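The two getLabelOffset additions above differ only in scaling: the A64 builder evidently stores label locations in instruction units (AArch64 instructions are a fixed 4 bytes), while the X64 builder already stores byte offsets. A minimal standalone sketch of that distinction, using toy stand-ins rather than the real builder types:

```cpp
#include <cassert>
#include <cstdint>

struct Label { uint32_t location = ~0u; };

// A64: label locations count instructions, so converting to bytes multiplies by 4
uint32_t getLabelOffsetA64(const Label& l) { return l.location * 4; }
// X64: label locations are already byte offsets
uint32_t getLabelOffsetX64(const Label& l) { return l.location; }

int main()
{
    Label l{3}; // label placed at the fourth instruction
    assert(getLabelOffsetA64(l) == 12);
    assert(getLabelOffsetX64(l) == 3);
}
```

Either way the public contract matches the shared comment: the returned value is a code offset in bytes.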
@ -21,7 +21,8 @@ struct CodeAllocator
|
||||
// Places data and code into the executable page area
|
||||
// To allow allocation while previously allocated code is already running, allocation has page granularity
|
||||
// It's important to group functions together so that page alignment won't result in a lot of wasted space
|
||||
bool allocate(uint8_t* data, size_t dataSize, uint8_t* code, size_t codeSize, uint8_t*& result, size_t& resultSize, uint8_t*& resultCodeStart);
|
||||
bool allocate(
|
||||
const uint8_t* data, size_t dataSize, const uint8_t* code, size_t codeSize, uint8_t*& result, size_t& resultSize, uint8_t*& resultCodeStart);
|
||||
|
||||
// Provided to callbacks
|
||||
void* context = nullptr;
|
||||
|
@ -42,6 +42,7 @@ struct CfgInfo
std::vector<uint32_t> successorsOffsets;

std::vector<RegisterSet> in;
std::vector<RegisterSet> def;
std::vector<RegisterSet> out;

RegisterSet captured;
@ -1,8 +1,9 @@
|
||||
// This file is part of the Luau programming language and is licensed under MIT License; see LICENSE.txt for details
|
||||
#pragma once
|
||||
|
||||
#include "Luau/Common.h"
|
||||
#include "Luau/Bytecode.h"
|
||||
#include "Luau/Common.h"
|
||||
#include "Luau/DenseHash.h"
|
||||
#include "Luau/IrData.h"
|
||||
|
||||
#include <vector>
|
||||
@ -19,6 +20,8 @@ struct AssemblyOptions;
|
||||
|
||||
struct IrBuilder
|
||||
{
|
||||
IrBuilder();
|
||||
|
||||
void buildFunctionIr(Proto* proto);
|
||||
|
||||
void rebuildBytecodeBasicBlocks(Proto* proto);
|
||||
@ -38,7 +41,7 @@ struct IrBuilder
|
||||
IrOp constUint(unsigned value);
|
||||
IrOp constDouble(double value);
|
||||
IrOp constTag(uint8_t value);
|
||||
IrOp constAny(IrConst constant);
|
||||
IrOp constAny(IrConst constant, uint64_t asCommonKey);
|
||||
|
||||
IrOp cond(IrCondition cond);
|
||||
|
||||
@ -67,6 +70,45 @@ struct IrBuilder
|
||||
uint32_t activeBlockIdx = ~0u;
|
||||
|
||||
std::vector<uint32_t> instIndexToBlock; // Block index at the bytecode instruction
|
||||
|
||||
// Similar to BytecodeBuilder, duplicate constants are removed using the same method
|
||||
struct ConstantKey
|
||||
{
|
||||
IrConstKind kind;
|
||||
// Note: this stores value* from IrConst; when kind is Double, this stores the same bits as double does but in uint64_t.
|
||||
uint64_t value;
|
||||
|
||||
bool operator==(const ConstantKey& key) const
|
||||
{
|
||||
return kind == key.kind && value == key.value;
|
||||
}
|
||||
};
|
||||
|
||||
struct ConstantKeyHash
|
||||
{
|
||||
size_t operator()(const ConstantKey& key) const
|
||||
{
|
||||
// finalizer from MurmurHash64B
|
||||
const uint32_t m = 0x5bd1e995;
|
||||
|
||||
uint32_t h1 = uint32_t(key.value);
|
||||
uint32_t h2 = uint32_t(key.value >> 32) ^ (int(key.kind) * m);
|
||||
|
||||
h1 ^= h2 >> 18;
|
||||
h1 *= m;
|
||||
h2 ^= h1 >> 22;
|
||||
h2 *= m;
|
||||
h1 ^= h2 >> 17;
|
||||
h1 *= m;
|
||||
h2 ^= h1 >> 19;
|
||||
h2 *= m;
|
||||
|
||||
// ... truncated to 32-bit output (normally hash is equal to (uint64_t(h1) << 32) | h2, but we only really need the lower 32-bit half)
|
||||
return size_t(h2);
|
||||
}
|
||||
};
|
||||
|
||||
DenseHashMap<ConstantKey, uint32_t, ConstantKeyHash> constantMap;
|
||||
};
|
||||
|
||||
} // namespace CodeGen
|
||||
|
@ -22,7 +22,7 @@ namespace CodeGen
|
||||
// In the command description, following abbreviations are used:
|
||||
// * Rn - VM stack register slot, n in 0..254
|
||||
// * Kn - VM proto constant slot, n in 0..2^23-1
|
||||
// * UPn - VM function upvalue slot, n in 0..254
|
||||
// * UPn - VM function upvalue slot, n in 0..199
|
||||
// * A, B, C, D, E are instruction arguments
|
||||
enum class IrCmd : uint8_t
|
||||
{
|
||||
@ -64,6 +64,11 @@ enum class IrCmd : uint8_t
|
||||
// A: pointer (Table)
|
||||
GET_SLOT_NODE_ADDR,
|
||||
|
||||
// Get pointer (LuaNode) to table node element at the main position of the specified key hash
|
||||
// A: pointer (Table)
|
||||
// B: unsigned int
|
||||
GET_HASH_NODE_ADDR,
|
||||
|
||||
// Store a tag into TValue
|
||||
// A: Rn
|
||||
// B: tag
|
||||
@ -173,6 +178,13 @@ enum class IrCmd : uint8_t
|
||||
// E: block (if false)
|
||||
JUMP_CMP_ANY,
|
||||
|
||||
// Perform a conditional jump based on cached table node slot matching the actual table node slot for a key
|
||||
// A: pointer (LuaNode)
|
||||
// B: Kn
|
||||
// C: block (if matches)
|
||||
// D: block (if it doesn't)
|
||||
JUMP_SLOT_MATCH,
|
||||
|
||||
// Get table length
|
||||
// A: pointer (Table)
|
||||
TABLE_LEN,
|
||||
@ -189,7 +201,13 @@ enum class IrCmd : uint8_t
|
||||
// Try to convert a double number into a table index (int) or jump if it's not an integer
|
||||
// A: double
|
||||
// B: block
|
||||
NUM_TO_INDEX,
|
||||
TRY_NUM_TO_INDEX,
|
||||
|
||||
// Try to get pointer to tag method TValue inside the table's metatable or jump if there is no such value or metatable
|
||||
// A: table
|
||||
// B: int
|
||||
// C: block
|
||||
TRY_CALL_FASTGETTM,
|
||||
|
||||
// Convert integer into a double number
|
||||
// A: int
|
||||
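TRY_NUM_TO_INDEX above is documented as converting a double into a table index or jumping when it is not an integer. A standalone sketch of one way such a check can work (a simple round-trip comparison, with no range handling); this is an illustration, not the actual lowering:

```cpp
#include <cassert>

// Returns false when the double is not an exact integer index,
// which is the case where the IR instruction takes its jump target.
bool tryNumToIndex(double value, int& result)
{
    int asInt = int(value);
    if (double(asInt) != value)
        return false;
    result = asInt;
    return true;
}

int main()
{
    int index = 0;
    assert(tryNumToIndex(4.0, index) && index == 4);
    assert(!tryNumToIndex(4.5, index));
}
```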
@ -315,6 +333,11 @@ enum class IrCmd : uint8_t
|
||||
// C: block
|
||||
CHECK_SLOT_MATCH,
|
||||
|
||||
// Guard against table node with a linked next node to ensure that our lookup hits the main position of the key
|
||||
// A: pointer (LuaNode)
|
||||
// B: block
|
||||
CHECK_NODE_NO_NEXT,
|
||||
|
||||
// Special operations
|
||||
|
||||
// Check interrupt handler
|
||||
@ -361,14 +384,6 @@ enum class IrCmd : uint8_t
|
||||
// E: unsigned int (table index to start from)
|
||||
LOP_SETLIST,
|
||||
|
||||
// Load function from source register using name into target register and copying source register into target register + 1
|
||||
// A: unsigned int (bytecode instruction index)
|
||||
// B: Rn (target)
|
||||
// C: Rn (source)
|
||||
// D: block (next)
|
||||
// E: block (fallback)
|
||||
LOP_NAMECALL,
|
||||
|
||||
// Call specified function
|
||||
// A: unsigned int (bytecode instruction index)
|
||||
// B: Rn (function, followed by arguments)
|
||||
@ -576,6 +591,16 @@ struct IrOp
|
||||
, index(index)
|
||||
{
|
||||
}
|
||||
|
||||
bool operator==(const IrOp& rhs) const
|
||||
{
|
||||
return kind == rhs.kind && index == rhs.index;
|
||||
}
|
||||
|
||||
bool operator!=(const IrOp& rhs) const
|
||||
{
|
||||
return !(*this == rhs);
|
||||
}
|
||||
};
|
||||
|
||||
static_assert(sizeof(IrOp) == 4);
|
||||
|
@ -37,5 +37,9 @@ std::string toString(IrFunction& function, bool includeUseInfo);

std::string dump(IrFunction& function);

std::string toDot(IrFunction& function, bool includeInst);

std::string dumpDot(IrFunction& function, bool includeInst);

} // namespace CodeGen
} // namespace Luau
|
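toDot and dumpDot are new entry points for CFG visualization; the implementation later in this diff emits a Graphviz digraph with record-shaped nodes, green bytecode blocks and salmon fallback blocks. A small self-contained sketch of that output format (block names and registers below are invented), renderable with `dot -Tsvg cfg.dot -o cfg.svg`:

```cpp
#include <fstream>

int main()
{
    std::ofstream out("cfg.dot");
    out << "digraph CFG {\n";
    out << "node[shape=record]\n";
    // Bytecode blocks are filled green, fallback blocks salmon, mirroring the emitter in this diff
    out << "b0 [style=filled;fillcolor=palegreen;label=\"{bb_0|{in|R0|R1}}\"];\n";
    out << "b1 [style=filled;fillcolor=salmon;label=\"{bb_fallback_1}\"];\n";
    out << "b0 -> b1;\n";
    out << "}\n";
}
```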
@ -98,7 +98,7 @@ inline bool isBlockTerminator(IrCmd cmd)
|
||||
case IrCmd::JUMP_EQ_POINTER:
|
||||
case IrCmd::JUMP_CMP_NUM:
|
||||
case IrCmd::JUMP_CMP_ANY:
|
||||
case IrCmd::LOP_NAMECALL:
|
||||
case IrCmd::JUMP_SLOT_MATCH:
|
||||
case IrCmd::LOP_RETURN:
|
||||
case IrCmd::LOP_FORGLOOP:
|
||||
case IrCmd::LOP_FORGLOOP_FALLBACK:
|
||||
@ -125,6 +125,7 @@ inline bool hasResult(IrCmd cmd)
|
||||
case IrCmd::LOAD_ENV:
|
||||
case IrCmd::GET_ARR_ADDR:
|
||||
case IrCmd::GET_SLOT_NODE_ADDR:
|
||||
case IrCmd::GET_HASH_NODE_ADDR:
|
||||
case IrCmd::ADD_INT:
|
||||
case IrCmd::SUB_INT:
|
||||
case IrCmd::ADD_NUM:
|
||||
@ -140,7 +141,8 @@ inline bool hasResult(IrCmd cmd)
|
||||
case IrCmd::TABLE_LEN:
|
||||
case IrCmd::NEW_TABLE:
|
||||
case IrCmd::DUP_TABLE:
|
||||
case IrCmd::NUM_TO_INDEX:
|
||||
case IrCmd::TRY_NUM_TO_INDEX:
|
||||
case IrCmd::TRY_CALL_FASTGETTM:
|
||||
case IrCmd::INT_TO_NUM:
|
||||
case IrCmd::SUBSTITUTE:
|
||||
case IrCmd::INVOKE_FASTCALL:
|
||||
|
@ -112,7 +112,7 @@ CodeAllocator::~CodeAllocator()
|
||||
}
|
||||
|
||||
bool CodeAllocator::allocate(
|
||||
uint8_t* data, size_t dataSize, uint8_t* code, size_t codeSize, uint8_t*& result, size_t& resultSize, uint8_t*& resultCodeStart)
|
||||
const uint8_t* data, size_t dataSize, const uint8_t* code, size_t codeSize, uint8_t*& result, size_t& resultSize, uint8_t*& resultCodeStart)
|
||||
{
|
||||
// 'Round up' to preserve code alignment
|
||||
size_t alignedDataSize = (dataSize + (kCodeAlignment - 1)) & ~(kCodeAlignment - 1);
|
||||
|
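The const-correct allocate above rounds the data size up to kCodeAlignment so the code placed after it stays aligned. The expression is the standard power-of-two align-up idiom, shown standalone here (assuming, as its use implies, that the alignment constant is a power of two):

```cpp
#include <cassert>
#include <cstddef>

// Rounds n up to the next multiple of alignment; valid only for power-of-two alignments.
constexpr size_t alignUp(size_t n, size_t alignment)
{
    return (n + (alignment - 1)) & ~(alignment - 1);
}

int main()
{
    assert(alignUp(13, 16) == 16);
    assert(alignUp(32, 16) == 32); // already-aligned sizes are unchanged
}
```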
@ -1,7 +1,6 @@
|
||||
// This file is part of the Luau programming language and is licensed under MIT License; see LICENSE.txt for details
|
||||
#include "Luau/CodeGen.h"
|
||||
|
||||
#include "Luau/AssemblyBuilderX64.h"
|
||||
#include "Luau/Common.h"
|
||||
#include "Luau/CodeAllocator.h"
|
||||
#include "Luau/CodeBlockUnwind.h"
|
||||
@ -9,12 +8,17 @@
|
||||
#include "Luau/IrBuilder.h"
|
||||
#include "Luau/OptimizeConstProp.h"
|
||||
#include "Luau/OptimizeFinalX64.h"
|
||||
|
||||
#include "Luau/UnwindBuilder.h"
|
||||
#include "Luau/UnwindBuilderDwarf2.h"
|
||||
#include "Luau/UnwindBuilderWin.h"
|
||||
|
||||
#include "Luau/AssemblyBuilderX64.h"
|
||||
#include "Luau/AssemblyBuilderA64.h"
|
||||
|
||||
#include "CustomExecUtils.h"
|
||||
#include "CodeGenX64.h"
|
||||
#include "CodeGenA64.h"
|
||||
#include "EmitCommonX64.h"
|
||||
#include "EmitInstructionX64.h"
|
||||
#include "IrLoweringX64.h"
|
||||
@ -39,32 +43,55 @@ namespace Luau
|
||||
namespace CodeGen
|
||||
{
|
||||
|
||||
constexpr uint32_t kFunctionAlignment = 32;
|
||||
|
||||
static void assembleHelpers(X64::AssemblyBuilderX64& build, ModuleHelpers& helpers)
|
||||
{
|
||||
if (build.logText)
|
||||
build.logAppend("; exitContinueVm\n");
|
||||
helpers.exitContinueVm = build.setLabel();
|
||||
emitExit(build, /* continueInVm */ true);
|
||||
|
||||
if (build.logText)
|
||||
build.logAppend("; exitNoContinueVm\n");
|
||||
helpers.exitNoContinueVm = build.setLabel();
|
||||
emitExit(build, /* continueInVm */ false);
|
||||
|
||||
if (build.logText)
|
||||
build.logAppend("; continueCallInVm\n");
|
||||
helpers.continueCallInVm = build.setLabel();
|
||||
emitContinueCallInVm(build);
|
||||
}
|
||||
|
||||
static NativeProto* assembleFunction(X64::AssemblyBuilderX64& build, NativeState& data, ModuleHelpers& helpers, Proto* proto, AssemblyOptions options)
|
||||
static NativeProto* createNativeProto(Proto* proto, const IrBuilder& ir)
|
||||
{
|
||||
NativeProto* result = new NativeProto();
|
||||
|
||||
result->proto = proto;
|
||||
result->instTargets = new uintptr_t[proto->sizecode];
|
||||
|
||||
for (int i = 0; i < proto->sizecode; i++)
|
||||
{
|
||||
auto [irLocation, asmLocation] = ir.function.bcMapping[i];
|
||||
|
||||
result->instTargets[i] = irLocation == ~0u ? 0 : asmLocation;
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
[[maybe_unused]] static void lowerIr(
|
||||
X64::AssemblyBuilderX64& build, IrBuilder& ir, NativeState& data, ModuleHelpers& helpers, Proto* proto, AssemblyOptions options)
|
||||
{
|
||||
constexpr uint32_t kFunctionAlignment = 32;
|
||||
|
||||
optimizeMemoryOperandsX64(ir.function);
|
||||
|
||||
build.align(kFunctionAlignment, X64::AlignmentDataX64::Ud2);
|
||||
|
||||
X64::IrLoweringX64 lowering(build, helpers, data, proto, ir.function);
|
||||
|
||||
lowering.lower(options);
|
||||
}
|
||||
|
||||
[[maybe_unused]] static void lowerIr(
|
||||
A64::AssemblyBuilderA64& build, IrBuilder& ir, NativeState& data, ModuleHelpers& helpers, Proto* proto, AssemblyOptions options)
|
||||
{
|
||||
Label start = build.setLabel();
|
||||
|
||||
build.mov(A64::x0, 1); // finish function in VM
|
||||
build.ret();
|
||||
|
||||
// TODO: This is only needed while we don't support all IR opcodes
|
||||
// When we can't translate some parts of the function, we instead encode a dummy assembly sequence that hands off control to VM
|
||||
// In the future we could return nullptr from assembleFunction and handle it because there may be other reasons for why we refuse to assemble.
|
||||
for (int i = 0; i < proto->sizecode; i++)
|
||||
ir.function.bcMapping[i].asmLocation = build.getLabelOffset(start);
|
||||
}
|
||||
|
||||
template<typename AssemblyBuilder>
|
||||
static NativeProto* assembleFunction(AssemblyBuilder& build, NativeState& data, ModuleHelpers& helpers, Proto* proto, AssemblyOptions options)
|
||||
{
|
||||
if (options.includeAssembly || options.includeIr)
|
||||
{
|
||||
if (proto->debugname)
|
||||
@ -93,43 +120,24 @@ static NativeProto* assembleFunction(X64::AssemblyBuilderX64& build, NativeState
|
||||
build.logAppend("\n");
|
||||
}
|
||||
|
||||
build.align(kFunctionAlignment, X64::AlignmentDataX64::Ud2);
|
||||
|
||||
Label start = build.setLabel();
|
||||
|
||||
IrBuilder builder;
|
||||
builder.buildFunctionIr(proto);
|
||||
IrBuilder ir;
|
||||
ir.buildFunctionIr(proto);
|
||||
|
||||
if (!FFlag::DebugCodegenNoOpt)
|
||||
{
|
||||
constPropInBlockChains(builder);
|
||||
constPropInBlockChains(ir);
|
||||
}
|
||||
|
||||
// TODO: cfg info has to be computed earlier to use in optimizations
|
||||
// It's done here to appear in text output and to measure performance impact on code generation
|
||||
computeCfgInfo(builder.function);
|
||||
computeCfgInfo(ir.function);
|
||||
|
||||
optimizeMemoryOperandsX64(builder.function);
|
||||
|
||||
X64::IrLoweringX64 lowering(build, helpers, data, proto, builder.function);
|
||||
|
||||
lowering.lower(options);
|
||||
|
||||
result->instTargets = new uintptr_t[proto->sizecode];
|
||||
|
||||
for (int i = 0; i < proto->sizecode; i++)
|
||||
{
|
||||
auto [irLocation, asmLocation] = builder.function.bcMapping[i];
|
||||
|
||||
result->instTargets[i] = irLocation == ~0u ? 0 : asmLocation - start.location;
|
||||
}
|
||||
|
||||
result->location = start.location;
|
||||
lowerIr(build, ir, data, helpers, proto, options);
|
||||
|
||||
if (build.logText)
|
||||
build.logAppend("\n");
|
||||
|
||||
return result;
|
||||
return createNativeProto(proto, ir);
|
||||
}
|
||||
|
||||
static void destroyNativeProto(NativeProto* nativeProto)
|
||||
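assembleFunction is now templated on the builder type, and the two [[maybe_unused]] lowerIr overloads above give it a full x64 path and a placeholder arm64 path that hands control back to the VM; ordinary overload resolution picks the right one. A toy sketch of that dispatch pattern (the builder types here are stand-ins, not the real assembly builders):

```cpp
#include <cstdio>

struct AssemblyBuilderX64Stub {};
struct AssemblyBuilderA64Stub {};

static void lowerIr(AssemblyBuilderX64Stub&) { std::puts("full x64 lowering"); }
static void lowerIr(AssemblyBuilderA64Stub&) { std::puts("arm64 stub: return to the VM"); }

template<typename AssemblyBuilder>
static void assembleFunction(AssemblyBuilder& build)
{
    // ...build IR, run optimizations...
    lowerIr(build); // the builder type selects the per-architecture overload
}

int main()
{
    AssemblyBuilderX64Stub x64;
    AssemblyBuilderA64Stub a64;
    assembleFunction(x64);
    assembleFunction(a64);
}
```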
@ -207,6 +215,8 @@ bool isSupported()
|
||||
if ((cpuinfo[2] & (1 << 28)) == 0)
|
||||
return false;
|
||||
|
||||
return true;
|
||||
#elif defined(__aarch64__)
|
||||
return true;
|
||||
#else
|
||||
return false;
|
||||
@ -232,11 +242,19 @@ void create(lua_State* L)
|
||||
initFallbackTable(data);
|
||||
initHelperFunctions(data);
|
||||
|
||||
#if defined(__x86_64__) || defined(_M_X64)
|
||||
if (!X64::initEntryFunction(data))
|
||||
{
|
||||
destroyNativeState(L);
|
||||
return;
|
||||
}
|
||||
#elif defined(__aarch64__)
|
||||
if (!A64::initEntryFunction(data))
|
||||
{
|
||||
destroyNativeState(L);
|
||||
return;
|
||||
}
|
||||
#endif
|
||||
|
||||
lua_ExecutionCallbacks* ecb = getExecutionCallbacks(L);
|
||||
|
||||
@ -270,14 +288,21 @@ void compile(lua_State* L, int idx)
|
||||
if (!getNativeState(L))
|
||||
return;
|
||||
|
||||
#if defined(__aarch64__)
|
||||
A64::AssemblyBuilderA64 build(/* logText= */ false);
|
||||
#else
|
||||
X64::AssemblyBuilderX64 build(/* logText= */ false);
|
||||
#endif
|
||||
|
||||
NativeState* data = getNativeState(L);
|
||||
|
||||
std::vector<Proto*> protos;
|
||||
gatherFunctions(protos, clvalue(func)->l.p);
|
||||
|
||||
ModuleHelpers helpers;
|
||||
assembleHelpers(build, helpers);
|
||||
#if !defined(__aarch64__)
|
||||
X64::assembleHelpers(build, helpers);
|
||||
#endif
|
||||
|
||||
std::vector<NativeProto*> results;
|
||||
results.reserve(protos.size());
|
||||
@ -292,8 +317,8 @@ void compile(lua_State* L, int idx)
|
||||
uint8_t* nativeData = nullptr;
|
||||
size_t sizeNativeData = 0;
|
||||
uint8_t* codeStart = nullptr;
|
||||
if (!data->codeAllocator.allocate(
|
||||
build.data.data(), int(build.data.size()), build.code.data(), int(build.code.size()), nativeData, sizeNativeData, codeStart))
|
||||
if (!data->codeAllocator.allocate(build.data.data(), int(build.data.size()), reinterpret_cast<const uint8_t*>(build.code.data()),
|
||||
int(build.code.size() * sizeof(build.code[0])), nativeData, sizeNativeData, codeStart))
|
||||
{
|
||||
for (NativeProto* result : results)
|
||||
destroyNativeProto(result);
|
||||
@ -305,7 +330,7 @@ void compile(lua_State* L, int idx)
|
||||
for (NativeProto* result : results)
|
||||
{
|
||||
for (int i = 0; i < result->proto->sizecode; i++)
|
||||
result->instTargets[i] += uintptr_t(codeStart + result->location);
|
||||
result->instTargets[i] += uintptr_t(codeStart);
|
||||
|
||||
LUAU_ASSERT(result->proto->sizecode);
|
||||
result->entryTarget = result->instTargets[0];
|
||||
@ -321,7 +346,11 @@ std::string getAssembly(lua_State* L, int idx, AssemblyOptions options)
|
||||
LUAU_ASSERT(lua_isLfunction(L, idx));
|
||||
const TValue* func = luaA_toobject(L, idx);
|
||||
|
||||
#if defined(__aarch64__)
|
||||
A64::AssemblyBuilderA64 build(/* logText= */ options.includeAssembly);
|
||||
#else
|
||||
X64::AssemblyBuilderX64 build(/* logText= */ options.includeAssembly);
|
||||
#endif
|
||||
|
||||
NativeState data;
|
||||
initFallbackTable(data);
|
||||
@ -330,7 +359,9 @@ std::string getAssembly(lua_State* L, int idx, AssemblyOptions options)
|
||||
gatherFunctions(protos, clvalue(func)->l.p);
|
||||
|
||||
ModuleHelpers helpers;
|
||||
assembleHelpers(build, helpers);
|
||||
#if !defined(__aarch64__)
|
||||
X64::assembleHelpers(build, helpers);
|
||||
#endif
|
||||
|
||||
for (Proto* p : protos)
|
||||
if (p)
|
||||
@ -342,7 +373,9 @@ std::string getAssembly(lua_State* L, int idx, AssemblyOptions options)
|
||||
build.finalize();
|
||||
|
||||
if (options.outputBinary)
|
||||
return std::string(build.code.begin(), build.code.end()) + std::string(build.data.begin(), build.data.end());
|
||||
return std::string(
|
||||
reinterpret_cast<const char*>(build.code.data()), reinterpret_cast<const char*>(build.code.data() + build.code.size())) +
|
||||
std::string(build.data.begin(), build.data.end());
|
||||
else
|
||||
return build.text;
|
||||
}
CodeGen/src/CodeGenA64.cpp (new file, 69 lines)
@ -0,0 +1,69 @@
|
||||
// This file is part of the Luau programming language and is licensed under MIT License; see LICENSE.txt for details
|
||||
#include "CodeGenA64.h"
|
||||
|
||||
#include "Luau/AssemblyBuilderA64.h"
|
||||
#include "Luau/UnwindBuilder.h"
|
||||
|
||||
#include "CustomExecUtils.h"
|
||||
#include "NativeState.h"
|
||||
|
||||
#include "lstate.h"
|
||||
|
||||
namespace Luau
|
||||
{
|
||||
namespace CodeGen
|
||||
{
|
||||
namespace A64
|
||||
{
|
||||
|
||||
bool initEntryFunction(NativeState& data)
|
||||
{
|
||||
AssemblyBuilderA64 build(/* logText= */ false);
|
||||
UnwindBuilder& unwind = *data.unwindBuilder.get();
|
||||
|
||||
unwind.start();
|
||||
unwind.allocStack(8); // TODO: this is only necessary to align stack by 16 bytes, as start() allocates 8b return pointer
|
||||
|
||||
// TODO: prologue goes here
|
||||
|
||||
unwind.finish();
|
||||
|
||||
size_t prologueSize = build.setLabel().location;
|
||||
|
||||
// Setup native execution environment
|
||||
// TODO: figure out state layout
|
||||
|
||||
// Jump to the specified instruction; further control flow will be handled with custom ABI with register setup from EmitCommonX64.h
|
||||
build.br(x2);
|
||||
|
||||
// Even though we jumped away, we will return here in the end
|
||||
Label returnOff = build.setLabel();
|
||||
|
||||
// Cleanup and exit
|
||||
// TODO: epilogue
|
||||
|
||||
build.ret();
|
||||
|
||||
build.finalize();
|
||||
|
||||
LUAU_ASSERT(build.data.empty());
|
||||
|
||||
if (!data.codeAllocator.allocate(build.data.data(), int(build.data.size()), reinterpret_cast<const uint8_t*>(build.code.data()),
|
||||
int(build.code.size() * sizeof(build.code[0])), data.gateData, data.gateDataSize, data.context.gateEntry))
|
||||
{
|
||||
LUAU_ASSERT(!"failed to create entry function");
|
||||
return false;
|
||||
}
|
||||
|
||||
// Set the offset at the beginning so that functions in new blocks will not overlay the locations
|
||||
// specified by the unwind information of the entry function
|
||||
unwind.setBeginOffset(prologueSize);
|
||||
|
||||
data.context.gateExit = data.context.gateEntry + returnOff.location;
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
} // namespace A64
|
||||
} // namespace CodeGen
|
||||
} // namespace Luau
CodeGen/src/CodeGenA64.h (new file, 18 lines)
@ -0,0 +1,18 @@
|
||||
// This file is part of the Luau programming language and is licensed under MIT License; see LICENSE.txt for details
|
||||
#pragma once
|
||||
|
||||
namespace Luau
|
||||
{
|
||||
namespace CodeGen
|
||||
{
|
||||
|
||||
struct NativeState;
|
||||
|
||||
namespace A64
|
||||
{
|
||||
|
||||
bool initEntryFunction(NativeState& data);
|
||||
|
||||
} // namespace A64
|
||||
} // namespace CodeGen
|
||||
} // namespace Luau
|
@ -143,6 +143,24 @@ bool initEntryFunction(NativeState& data)
|
||||
return true;
|
||||
}
|
||||
|
||||
void assembleHelpers(X64::AssemblyBuilderX64& build, ModuleHelpers& helpers)
|
||||
{
|
||||
if (build.logText)
|
||||
build.logAppend("; exitContinueVm\n");
|
||||
helpers.exitContinueVm = build.setLabel();
|
||||
emitExit(build, /* continueInVm */ true);
|
||||
|
||||
if (build.logText)
|
||||
build.logAppend("; exitNoContinueVm\n");
|
||||
helpers.exitNoContinueVm = build.setLabel();
|
||||
emitExit(build, /* continueInVm */ false);
|
||||
|
||||
if (build.logText)
|
||||
build.logAppend("; continueCallInVm\n");
|
||||
helpers.continueCallInVm = build.setLabel();
|
||||
emitContinueCallInVm(build);
|
||||
}
|
||||
|
||||
} // namespace X64
|
||||
} // namespace CodeGen
|
||||
} // namespace Luau
|
||||
|
@ -7,11 +7,15 @@ namespace CodeGen
|
||||
{
|
||||
|
||||
struct NativeState;
|
||||
struct ModuleHelpers;
|
||||
|
||||
namespace X64
|
||||
{
|
||||
|
||||
class AssemblyBuilderX64;
|
||||
|
||||
bool initEntryFunction(NativeState& data);
|
||||
void assembleHelpers(AssemblyBuilderX64& build, ModuleHelpers& helpers);
|
||||
|
||||
} // namespace X64
|
||||
} // namespace CodeGen
|
||||
|
@ -286,6 +286,31 @@ void emitBuiltinMathSign(IrRegAllocX64& regs, AssemblyBuilderX64& build, int npa
|
||||
build.vmovsd(luauRegValue(ra), tmp0.reg);
|
||||
}
|
||||
|
||||
void emitBuiltinType(IrRegAllocX64& regs, AssemblyBuilderX64& build, int nparams, int ra, int arg, OperandX64 args, int nresults)
|
||||
{
|
||||
ScopedRegX64 tmp0{regs, SizeX64::qword};
|
||||
ScopedRegX64 tag{regs, SizeX64::dword};
|
||||
|
||||
build.mov(tag.reg, luauRegTag(arg));
|
||||
|
||||
build.mov(tmp0.reg, qword[rState + offsetof(lua_State, global)]);
|
||||
build.mov(tmp0.reg, qword[tmp0.reg + qwordReg(tag.reg) * sizeof(TString*) + offsetof(global_State, ttname)]);
|
||||
|
||||
build.mov(luauRegValue(ra), tmp0.reg);
|
||||
}
|
||||
|
||||
void emitBuiltinTypeof(IrRegAllocX64& regs, AssemblyBuilderX64& build, int nparams, int ra, int arg, OperandX64 args, int nresults)
|
||||
{
|
||||
regs.assertAllFree();
|
||||
|
||||
build.mov(rArg1, rState);
|
||||
build.lea(rArg2, luauRegAddress(arg));
|
||||
|
||||
build.call(qword[rNativeContext + offsetof(NativeContext, luaT_objtypenamestr)]);
|
||||
|
||||
build.mov(luauRegValue(ra), rax);
|
||||
}
|
||||
|
||||
void emitBuiltin(IrRegAllocX64& regs, AssemblyBuilderX64& build, int bfid, int ra, int arg, IrOp args, int nparams, int nresults)
|
||||
{
|
||||
OperandX64 argsOp = 0;
|
||||
@ -353,6 +378,10 @@ void emitBuiltin(IrRegAllocX64& regs, AssemblyBuilderX64& build, int bfid, int r
|
||||
return emitBuiltinMathModf(regs, build, nparams, ra, arg, argsOp, nresults);
|
||||
case LBF_MATH_SIGN:
|
||||
return emitBuiltinMathSign(regs, build, nparams, ra, arg, argsOp, nresults);
|
||||
case LBF_TYPE:
|
||||
return emitBuiltinType(regs, build, nparams, ra, arg, argsOp, nresults);
|
||||
case LBF_TYPEOF:
|
||||
return emitBuiltinTypeof(regs, build, nparams, ra, arg, argsOp, nresults);
|
||||
default:
|
||||
LUAU_ASSERT(!"missing x64 lowering");
|
||||
break;
|
||||
|
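emitBuiltinType above resolves type(x) with a single indexed load: the argument's tag selects an interned name from global_State::ttname, while emitBuiltinTypeof calls the runtime helper luaT_objtypenamestr instead. A standalone sketch of the tag-to-name lookup idea (tags and names below are made up, not the VM's):

```cpp
#include <cassert>
#include <string>

enum Tag { TagNil, TagBoolean, TagNumber, TagString, TagCount };

const std::string typeNames[TagCount] = {"nil", "boolean", "number", "string"};

// What the emitted fast path computes for type(x): one load indexed by the value's tag.
const std::string& builtinType(Tag tag)
{
    return typeNames[tag];
}

int main()
{
    assert(builtinType(TagNumber) == "number");
    assert(builtinType(TagNil) == "nil");
}
```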
@ -18,51 +18,6 @@ namespace CodeGen
|
||||
namespace X64
|
||||
{
|
||||
|
||||
void emitInstNameCall(AssemblyBuilderX64& build, const Instruction* pc, int pcpos, const TValue* k, Label& next, Label& fallback)
|
||||
{
|
||||
int ra = LUAU_INSN_A(*pc);
|
||||
int rb = LUAU_INSN_B(*pc);
|
||||
uint32_t aux = pc[1];
|
||||
|
||||
Label secondfpath;
|
||||
|
||||
jumpIfTagIsNot(build, rb, LUA_TTABLE, fallback);
|
||||
|
||||
RegisterX64 table = r8;
|
||||
build.mov(table, luauRegValue(rb));
|
||||
|
||||
// &h->node[tsvalue(kv)->hash & (sizenode(h) - 1)];
|
||||
RegisterX64 node = rdx;
|
||||
build.mov(node, qword[table + offsetof(Table, node)]);
|
||||
build.mov(eax, 1);
|
||||
build.mov(cl, byte[table + offsetof(Table, lsizenode)]);
|
||||
build.shl(eax, cl);
|
||||
build.dec(eax);
|
||||
build.and_(eax, tsvalue(&k[aux])->hash);
|
||||
build.shl(rax, kLuaNodeSizeLog2);
|
||||
build.add(node, rax);
|
||||
|
||||
jumpIfNodeKeyNotInExpectedSlot(build, rax, node, luauConstantValue(aux), secondfpath);
|
||||
|
||||
setLuauReg(build, xmm0, ra + 1, luauReg(rb));
|
||||
setLuauReg(build, xmm0, ra, luauNodeValue(node));
|
||||
build.jmp(next);
|
||||
|
||||
build.setLabel(secondfpath);
|
||||
|
||||
jumpIfNodeHasNext(build, node, fallback);
|
||||
callGetFastTmOrFallback(build, table, TM_INDEX, fallback);
|
||||
jumpIfTagIsNot(build, rax, LUA_TTABLE, fallback);
|
||||
|
||||
build.mov(table, qword[rax + offsetof(TValue, value)]);
|
||||
|
||||
getTableNodeAtCachedSlot(build, rax, node, table, pcpos);
|
||||
jumpIfNodeKeyNotInExpectedSlot(build, rax, node, luauConstantValue(aux), fallback);
|
||||
|
||||
setLuauReg(build, xmm0, ra + 1, luauReg(rb));
|
||||
setLuauReg(build, xmm0, ra, luauNodeValue(node));
|
||||
}
|
||||
|
||||
void emitInstCall(AssemblyBuilderX64& build, ModuleHelpers& helpers, const Instruction* pc, int pcpos)
|
||||
{
|
||||
int ra = LUAU_INSN_A(*pc);
|
||||
|
@ -21,7 +21,6 @@ namespace X64
|
||||
|
||||
class AssemblyBuilderX64;
|
||||
|
||||
void emitInstNameCall(AssemblyBuilderX64& build, const Instruction* pc, int pcpos, const TValue* k, Label& next, Label& fallback);
|
||||
void emitInstCall(AssemblyBuilderX64& build, ModuleHelpers& helpers, const Instruction* pc, int pcpos);
|
||||
void emitInstReturn(AssemblyBuilderX64& build, ModuleHelpers& helpers, const Instruction* pc, int pcpos);
|
||||
void emitInstSetList(AssemblyBuilderX64& build, const Instruction* pc, Label& next);
|
||||
|
@ -124,6 +124,10 @@ static void requireVariadicSequence(RegisterSet& sourceRs, const RegisterSet& de
|
||||
{
|
||||
if (!defRs.varargSeq)
|
||||
{
|
||||
// Peel away registers from variadic sequence that we define
|
||||
while (defRs.regs.test(varargStart))
|
||||
varargStart++;
|
||||
|
||||
LUAU_ASSERT(!sourceRs.varargSeq || sourceRs.varargStart == varargStart);
|
||||
|
||||
sourceRs.varargSeq = true;
|
||||
@ -296,11 +300,6 @@ static RegisterSet computeBlockLiveInRegSet(IrFunction& function, const IrBlock&
|
||||
use(inst.b);
|
||||
useRange(inst.c.index, function.intOp(inst.d));
|
||||
break;
|
||||
case IrCmd::LOP_NAMECALL:
|
||||
use(inst.c);
|
||||
|
||||
defRange(inst.b.index, 2);
|
||||
break;
|
||||
case IrCmd::LOP_CALL:
|
||||
use(inst.b);
|
||||
useRange(inst.b.index + 1, function.intOp(inst.c));
|
||||
@ -411,6 +410,13 @@ static RegisterSet computeBlockLiveInRegSet(IrFunction& function, const IrBlock&
|
||||
break;
|
||||
|
||||
default:
|
||||
// All instructions which reference registers have to be handled explicitly
|
||||
LUAU_ASSERT(inst.a.kind != IrOpKind::VmReg);
|
||||
LUAU_ASSERT(inst.b.kind != IrOpKind::VmReg);
|
||||
LUAU_ASSERT(inst.c.kind != IrOpKind::VmReg);
|
||||
LUAU_ASSERT(inst.d.kind != IrOpKind::VmReg);
|
||||
LUAU_ASSERT(inst.e.kind != IrOpKind::VmReg);
|
||||
LUAU_ASSERT(inst.f.kind != IrOpKind::VmReg);
|
||||
break;
|
||||
}
|
||||
}
|
||||
@ -430,17 +436,20 @@ static void computeCfgLiveInOutRegSets(IrFunction& function)
|
||||
{
|
||||
CfgInfo& info = function.cfg;
|
||||
|
||||
// Clear existing data
|
||||
// 'in' and 'captured' data is not cleared because it will be overwritten below
|
||||
info.def.clear();
|
||||
info.out.clear();
|
||||
|
||||
// Try to compute Luau VM register use-def info
|
||||
info.in.resize(function.blocks.size());
|
||||
info.def.resize(function.blocks.size());
|
||||
info.out.resize(function.blocks.size());
|
||||
|
||||
// Captured registers are tracked for the whole function
|
||||
// It should be possible to have a more precise analysis for them in the future
|
||||
std::bitset<256> capturedRegs;
|
||||
|
||||
std::vector<RegisterSet> defRss;
|
||||
defRss.resize(function.blocks.size());
|
||||
|
||||
// First we compute live-in set of each block
|
||||
for (size_t blockIdx = 0; blockIdx < function.blocks.size(); blockIdx++)
|
||||
{
|
||||
@ -449,7 +458,7 @@ static void computeCfgLiveInOutRegSets(IrFunction& function)
|
||||
if (block.kind == IrBlockKind::Dead)
|
||||
continue;
|
||||
|
||||
info.in[blockIdx] = computeBlockLiveInRegSet(function, block, defRss[blockIdx], capturedRegs);
|
||||
info.in[blockIdx] = computeBlockLiveInRegSet(function, block, info.def[blockIdx], capturedRegs);
|
||||
}
|
||||
|
||||
info.captured.regs = capturedRegs;
|
||||
@ -480,8 +489,8 @@ static void computeCfgLiveInOutRegSets(IrFunction& function)
|
||||
|
||||
IrBlock& curr = function.blocks[blockIdx];
|
||||
RegisterSet& inRs = info.in[blockIdx];
|
||||
RegisterSet& defRs = info.def[blockIdx];
|
||||
RegisterSet& outRs = info.out[blockIdx];
|
||||
RegisterSet& defRs = defRss[blockIdx];
|
||||
|
||||
// Current block has to provide all registers in successor blocks
|
||||
for (uint32_t succIdx : successors(info, blockIdx))
|
||||
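The liveness changes above move per-block def sets into CfgInfo and propagate in/out register sets across the CFG; the successor loop matches the usual backwards liveness formulation (out is the union of successor live-ins, and in is whatever the block uses plus whatever flows out without being defined locally). A minimal standalone sketch of that propagation over a 256-register universe like RegisterSet (block shapes and register numbers invented):

```cpp
#include <bitset>
#include <cassert>
#include <vector>

struct BlockInfo
{
    std::bitset<256> use; // registers read before any write in the block
    std::bitset<256> def; // registers written by the block
    std::vector<size_t> successors;
};

// Iterate to a fixed point: out = union of successor ins, in = use | (out & ~def).
void computeLiveness(const std::vector<BlockInfo>& blocks,
    std::vector<std::bitset<256>>& in, std::vector<std::bitset<256>>& out)
{
    bool changed = true;
    while (changed)
    {
        changed = false;
        for (size_t i = blocks.size(); i-- > 0;)
        {
            std::bitset<256> newOut;
            for (size_t succ : blocks[i].successors)
                newOut |= in[succ];

            std::bitset<256> newIn = blocks[i].use | (newOut & ~blocks[i].def);

            if (newIn != in[i] || newOut != out[i])
            {
                in[i] = newIn;
                out[i] = newOut;
                changed = true;
            }
        }
    }
}

int main()
{
    std::vector<BlockInfo> blocks(2);
    blocks[0].successors = {1};
    blocks[1].use.set(3); // R3 is read in block 1 but never defined there

    std::vector<std::bitset<256>> in(2), out(2);
    computeLiveness(blocks, in, out);

    assert(out[0].test(3)); // block 0 must provide R3 to its successor
    assert(in[0].test(3));  // and, since it does not define R3, demands it on entry
}
```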
@ -547,6 +556,10 @@ static void computeCfgBlockEdges(IrFunction& function)
|
||||
{
|
||||
CfgInfo& info = function.cfg;
|
||||
|
||||
// Clear existing data
|
||||
info.predecessorsOffsets.clear();
|
||||
info.successorsOffsets.clear();
|
||||
|
||||
// Compute predecessors block edges
|
||||
info.predecessorsOffsets.reserve(function.blocks.size());
|
||||
info.successorsOffsets.reserve(function.blocks.size());
|
||||
|
@ -1,8 +1,6 @@
|
||||
// This file is part of the Luau programming language and is licensed under MIT License; see LICENSE.txt for details
|
||||
#include "Luau/IrBuilder.h"
|
||||
|
||||
#include "Luau/Common.h"
|
||||
#include "Luau/DenseHash.h"
|
||||
#include "Luau/IrAnalysis.h"
|
||||
#include "Luau/IrUtils.h"
|
||||
|
||||
@ -11,6 +9,8 @@
|
||||
|
||||
#include "lapi.h"
|
||||
|
||||
#include <string.h>
|
||||
|
||||
namespace Luau
|
||||
{
|
||||
namespace CodeGen
|
||||
@ -18,6 +18,11 @@ namespace CodeGen
|
||||
|
||||
constexpr unsigned kNoAssociatedBlockIndex = ~0u;
|
||||
|
||||
IrBuilder::IrBuilder()
|
||||
: constantMap({IrConstKind::Bool, ~0ull})
|
||||
{
|
||||
}
|
||||
|
||||
void IrBuilder::buildFunctionIr(Proto* proto)
|
||||
{
|
||||
function.proto = proto;
|
||||
@ -377,19 +382,8 @@ void IrBuilder::translateInst(LuauOpcode op, const Instruction* pc, int i)
|
||||
translateInstCapture(*this, pc, i);
|
||||
break;
|
||||
case LOP_NAMECALL:
|
||||
{
|
||||
IrOp next = blockAtInst(i + getOpLength(LOP_NAMECALL));
|
||||
IrOp fallback = block(IrBlockKind::Fallback);
|
||||
|
||||
inst(IrCmd::LOP_NAMECALL, constUint(i), vmReg(LUAU_INSN_A(*pc)), vmReg(LUAU_INSN_B(*pc)), next, fallback);
|
||||
|
||||
beginBlock(fallback);
|
||||
inst(IrCmd::FALLBACK_NAMECALL, constUint(i), vmReg(LUAU_INSN_A(*pc)), vmReg(LUAU_INSN_B(*pc)), vmConst(pc[1]));
|
||||
inst(IrCmd::JUMP, next);
|
||||
|
||||
beginBlock(next);
|
||||
translateInstNamecall(*this, pc, i);
|
||||
break;
|
||||
}
|
||||
case LOP_PREPVARARGS:
|
||||
inst(IrCmd::FALLBACK_PREPVARARGS, constUint(i), constInt(LUAU_INSN_A(*pc)));
|
||||
break;
|
||||
@ -501,7 +495,7 @@ IrOp IrBuilder::constBool(bool value)
|
||||
IrConst constant;
|
||||
constant.kind = IrConstKind::Bool;
|
||||
constant.valueBool = value;
|
||||
return constAny(constant);
|
||||
return constAny(constant, uint64_t(value));
|
||||
}
|
||||
|
||||
IrOp IrBuilder::constInt(int value)
|
||||
@ -509,7 +503,7 @@ IrOp IrBuilder::constInt(int value)
|
||||
IrConst constant;
|
||||
constant.kind = IrConstKind::Int;
|
||||
constant.valueInt = value;
|
||||
return constAny(constant);
|
||||
return constAny(constant, uint64_t(value));
|
||||
}
|
||||
|
||||
IrOp IrBuilder::constUint(unsigned value)
|
||||
@ -517,7 +511,7 @@ IrOp IrBuilder::constUint(unsigned value)
|
||||
IrConst constant;
|
||||
constant.kind = IrConstKind::Uint;
|
||||
constant.valueUint = value;
|
||||
return constAny(constant);
|
||||
return constAny(constant, uint64_t(value));
|
||||
}
|
||||
|
||||
IrOp IrBuilder::constDouble(double value)
|
||||
@ -525,7 +519,12 @@ IrOp IrBuilder::constDouble(double value)
|
||||
IrConst constant;
|
||||
constant.kind = IrConstKind::Double;
|
||||
constant.valueDouble = value;
|
||||
return constAny(constant);
|
||||
|
||||
uint64_t asCommonKey;
|
||||
static_assert(sizeof(asCommonKey) == sizeof(value), "Expecting double to be 64-bit");
|
||||
memcpy(&asCommonKey, &value, sizeof(value));
|
||||
|
||||
return constAny(constant, asCommonKey);
|
||||
}
|
||||
|
||||
IrOp IrBuilder::constTag(uint8_t value)
|
||||
@ -533,13 +532,21 @@ IrOp IrBuilder::constTag(uint8_t value)
|
||||
IrConst constant;
|
||||
constant.kind = IrConstKind::Tag;
|
||||
constant.valueTag = value;
|
||||
return constAny(constant);
|
||||
return constAny(constant, uint64_t(value));
|
||||
}
|
||||
|
||||
IrOp IrBuilder::constAny(IrConst constant)
|
||||
IrOp IrBuilder::constAny(IrConst constant, uint64_t asCommonKey)
|
||||
{
|
||||
ConstantKey key{constant.kind, asCommonKey};
|
||||
|
||||
if (uint32_t* cache = constantMap.find(key))
|
||||
return {IrOpKind::Constant, *cache};
|
||||
|
||||
uint32_t index = uint32_t(function.constants.size());
|
||||
function.constants.push_back(constant);
|
||||
|
||||
constantMap[key] = index;
|
||||
|
||||
return {IrOpKind::Constant, index};
|
||||
}
|
||||
|
||||
|
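constAny now takes a 64-bit common key alongside the constant so that duplicate constants collapse to a single index, with constDouble producing the key by copying the double's bit pattern. A simplified standalone sketch of the same deduplication idea (std::unordered_map instead of DenseHashMap, doubles only):

```cpp
#include <cassert>
#include <cstdint>
#include <cstring>
#include <unordered_map>
#include <vector>

struct ConstantPool
{
    std::unordered_map<uint64_t, uint32_t> map; // bit pattern -> index of an existing constant
    std::vector<double> constants;

    uint32_t constDouble(double value)
    {
        uint64_t asCommonKey;
        static_assert(sizeof(asCommonKey) == sizeof(value), "Expecting double to be 64-bit");
        std::memcpy(&asCommonKey, &value, sizeof(value)); // same bit-cast used by IrBuilder::constDouble

        if (auto it = map.find(asCommonKey); it != map.end())
            return it->second;

        uint32_t index = uint32_t(constants.size());
        constants.push_back(value);
        map[asCommonKey] = index;
        return index;
    }
};

int main()
{
    ConstantPool pool;
    uint32_t a = pool.constDouble(1.5);
    uint32_t b = pool.constDouble(1.5);
    uint32_t c = pool.constDouble(2.5);

    assert(a == b); // the duplicate reuses the existing entry
    assert(c != a);
    assert(pool.constants.size() == 2);
}
```

One property of keying on the raw bit pattern, also visible in this sketch, is that values such as +0.0 and -0.0 stay as separate constants, whereas a value-based comparison would merge them.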
@ -90,6 +90,8 @@ const char* getCmdName(IrCmd cmd)
|
||||
return "GET_ARR_ADDR";
|
||||
case IrCmd::GET_SLOT_NODE_ADDR:
|
||||
return "GET_SLOT_NODE_ADDR";
|
||||
case IrCmd::GET_HASH_NODE_ADDR:
|
||||
return "GET_HASH_NODE_ADDR";
|
||||
case IrCmd::STORE_TAG:
|
||||
return "STORE_TAG";
|
||||
case IrCmd::STORE_POINTER:
|
||||
@ -142,14 +144,18 @@ const char* getCmdName(IrCmd cmd)
|
||||
return "JUMP_CMP_NUM";
|
||||
case IrCmd::JUMP_CMP_ANY:
|
||||
return "JUMP_CMP_ANY";
|
||||
case IrCmd::JUMP_SLOT_MATCH:
|
||||
return "JUMP_SLOT_MATCH";
|
||||
case IrCmd::TABLE_LEN:
|
||||
return "TABLE_LEN";
|
||||
case IrCmd::NEW_TABLE:
|
||||
return "NEW_TABLE";
|
||||
case IrCmd::DUP_TABLE:
|
||||
return "DUP_TABLE";
|
||||
case IrCmd::NUM_TO_INDEX:
|
||||
return "NUM_TO_INDEX";
|
||||
case IrCmd::TRY_NUM_TO_INDEX:
|
||||
return "TRY_NUM_TO_INDEX";
|
||||
case IrCmd::TRY_CALL_FASTGETTM:
|
||||
return "TRY_CALL_FASTGETTM";
|
||||
case IrCmd::INT_TO_NUM:
|
||||
return "INT_TO_NUM";
|
||||
case IrCmd::ADJUST_STACK_TO_REG:
|
||||
@ -192,6 +198,8 @@ const char* getCmdName(IrCmd cmd)
|
||||
return "CHECK_ARRAY_SIZE";
|
||||
case IrCmd::CHECK_SLOT_MATCH:
|
||||
return "CHECK_SLOT_MATCH";
|
||||
case IrCmd::CHECK_NODE_NO_NEXT:
|
||||
return "CHECK_NODE_NO_NEXT";
|
||||
case IrCmd::INTERRUPT:
|
||||
return "INTERRUPT";
|
||||
case IrCmd::CHECK_GC:
|
||||
@ -210,8 +218,6 @@ const char* getCmdName(IrCmd cmd)
|
||||
return "CAPTURE";
|
||||
case IrCmd::LOP_SETLIST:
|
||||
return "LOP_SETLIST";
|
||||
case IrCmd::LOP_NAMECALL:
|
||||
return "LOP_NAMECALL";
|
||||
case IrCmd::LOP_CALL:
|
||||
return "LOP_CALL";
|
||||
case IrCmd::LOP_RETURN:
|
||||
@ -397,7 +403,7 @@ static void appendBlockSet(IrToStringContext& ctx, BlockIteratorWrapper blocks)
|
||||
}
|
||||
}
|
||||
|
||||
static void appendRegisterSet(IrToStringContext& ctx, const RegisterSet& rs)
|
||||
static void appendRegisterSet(IrToStringContext& ctx, const RegisterSet& rs, const char* separator)
|
||||
{
|
||||
bool comma = false;
|
||||
|
||||
@ -406,7 +412,7 @@ static void appendRegisterSet(IrToStringContext& ctx, const RegisterSet& rs)
|
||||
if (rs.regs.test(i))
|
||||
{
|
||||
if (comma)
|
||||
append(ctx.result, ", ");
|
||||
ctx.result.append(separator);
|
||||
comma = true;
|
||||
|
||||
append(ctx.result, "R%d", int(i));
|
||||
@ -416,7 +422,7 @@ static void appendRegisterSet(IrToStringContext& ctx, const RegisterSet& rs)
|
||||
if (rs.varargSeq)
|
||||
{
|
||||
if (comma)
|
||||
append(ctx.result, ", ");
|
||||
ctx.result.append(separator);
|
||||
|
||||
append(ctx.result, "R%d...", rs.varargStart);
|
||||
}
|
||||
@ -428,7 +434,7 @@ void toStringDetailed(IrToStringContext& ctx, const IrBlock& block, uint32_t ind
|
||||
if (block.useCount == 0 && block.kind != IrBlockKind::Dead && ctx.cfg.captured.regs.any())
|
||||
{
|
||||
append(ctx.result, "; captured regs: ");
|
||||
appendRegisterSet(ctx, ctx.cfg.captured);
|
||||
appendRegisterSet(ctx, ctx.cfg.captured, ", ");
|
||||
append(ctx.result, "\n\n");
|
||||
}
|
||||
|
||||
@ -484,7 +490,7 @@ void toStringDetailed(IrToStringContext& ctx, const IrBlock& block, uint32_t ind
|
||||
if (in.regs.any() || in.varargSeq)
|
||||
{
|
||||
append(ctx.result, "; in regs: ");
|
||||
appendRegisterSet(ctx, in);
|
||||
appendRegisterSet(ctx, in, ", ");
|
||||
append(ctx.result, "\n");
|
||||
}
|
||||
}
|
||||
@ -497,7 +503,7 @@ void toStringDetailed(IrToStringContext& ctx, const IrBlock& block, uint32_t ind
|
||||
if (out.regs.any() || out.varargSeq)
|
||||
{
|
||||
append(ctx.result, "; out regs: ");
|
||||
appendRegisterSet(ctx, out);
|
||||
appendRegisterSet(ctx, out, ", ");
|
||||
append(ctx.result, "\n");
|
||||
}
|
||||
}
|
||||
@ -551,5 +557,108 @@ std::string dump(IrFunction& function)
|
||||
return result;
|
||||
}
|
||||
|
||||
std::string toDot(IrFunction& function, bool includeInst)
|
||||
{
|
||||
std::string result;
|
||||
IrToStringContext ctx{result, function.blocks, function.constants, function.cfg};
|
||||
|
||||
auto appendLabelRegset = [&ctx](std::vector<RegisterSet>& regSets, size_t blockIdx, const char* name) {
|
||||
if (blockIdx < regSets.size())
|
||||
{
|
||||
const RegisterSet& rs = regSets[blockIdx];
|
||||
|
||||
if (rs.regs.any() || rs.varargSeq)
|
||||
{
|
||||
append(ctx.result, "|{%s|", name);
|
||||
appendRegisterSet(ctx, rs, "|");
|
||||
append(ctx.result, "}");
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
append(ctx.result, "digraph CFG {\n");
|
||||
append(ctx.result, "node[shape=record]\n");
|
||||
|
||||
for (size_t i = 0; i < function.blocks.size(); i++)
|
||||
{
|
||||
IrBlock& block = function.blocks[i];
|
||||
|
||||
append(ctx.result, "b%u [", unsigned(i));
|
||||
|
||||
if (block.kind == IrBlockKind::Fallback)
|
||||
append(ctx.result, "style=filled;fillcolor=salmon;");
|
||||
else if (block.kind == IrBlockKind::Bytecode)
|
||||
append(ctx.result, "style=filled;fillcolor=palegreen;");
|
||||
|
||||
append(ctx.result, "label=\"{");
|
||||
toString(ctx, block, uint32_t(i));
|
||||
|
||||
appendLabelRegset(ctx.cfg.in, i, "in");
|
||||
|
||||
if (includeInst && block.start != ~0u)
|
||||
{
|
||||
for (uint32_t instIdx = block.start; instIdx <= block.finish; instIdx++)
|
||||
{
|
||||
IrInst& inst = function.instructions[instIdx];
|
||||
|
||||
// Skip pseudo instructions unless they are still referenced
|
||||
if (isPseudo(inst.cmd) && inst.useCount == 0)
|
||||
continue;
|
||||
|
||||
append(ctx.result, "|");
|
||||
toString(ctx, inst, instIdx);
|
||||
}
|
||||
}
|
||||
|
||||
appendLabelRegset(ctx.cfg.def, i, "def");
|
||||
appendLabelRegset(ctx.cfg.out, i, "out");
|
||||
|
||||
append(ctx.result, "}\"];\n");
|
||||
}
|
||||
|
||||
for (size_t i = 0; i < function.blocks.size(); i++)
|
||||
{
|
||||
IrBlock& block = function.blocks[i];
|
||||
|
||||
if (block.start == ~0u)
|
||||
continue;
|
||||
|
||||
for (uint32_t instIdx = block.start; instIdx != ~0u && instIdx <= block.finish; instIdx++)
|
||||
{
|
||||
IrInst& inst = function.instructions[instIdx];
|
||||
|
||||
auto checkOp = [&](IrOp op) {
|
||||
if (op.kind == IrOpKind::Block)
|
||||
{
|
||||
if (function.blocks[op.index].kind != IrBlockKind::Fallback)
|
||||
append(ctx.result, "b%u -> b%u [weight=10];\n", unsigned(i), op.index);
|
||||
else
|
||||
append(ctx.result, "b%u -> b%u;\n", unsigned(i), op.index);
|
||||
}
|
||||
};
|
||||
|
||||
checkOp(inst.a);
|
||||
checkOp(inst.b);
|
||||
checkOp(inst.c);
|
||||
checkOp(inst.d);
|
||||
checkOp(inst.e);
|
||||
checkOp(inst.f);
|
||||
}
|
||||
}
|
||||
|
||||
append(ctx.result, "}\n");
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
std::string dumpDot(IrFunction& function, bool includeInst)
|
||||
{
|
||||
std::string result = toDot(function, includeInst);
|
||||
|
||||
printf("%s\n", result.c_str());
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
} // namespace CodeGen
|
||||
} // namespace Luau
|
||||
|
@ -200,6 +200,10 @@ void IrLoweringX64::lowerInst(IrInst& inst, uint32_t index, IrBlock& next)
|
||||
build.mov(inst.regX64, luauRegValue(inst.a.index));
|
||||
else if (inst.a.kind == IrOpKind::VmConst)
|
||||
build.mov(inst.regX64, luauConstantValue(inst.a.index));
|
||||
// If we have a register, we assume it's a pointer to TValue
|
||||
// We might introduce explicit operand types in the future to make this more robust
|
||||
else if (inst.a.kind == IrOpKind::Inst)
|
||||
build.mov(inst.regX64, qword[regOp(inst.a) + offsetof(TValue, value)]);
|
||||
else
|
||||
LUAU_ASSERT(!"Unsupported instruction form");
|
||||
break;
|
||||
@ -277,6 +281,25 @@ void IrLoweringX64::lowerInst(IrInst& inst, uint32_t index, IrBlock& next)
|
||||
getTableNodeAtCachedSlot(build, tmp.reg, inst.regX64, regOp(inst.a), uintOp(inst.b));
|
||||
break;
|
||||
}
|
||||
case IrCmd::GET_HASH_NODE_ADDR:
|
||||
{
|
||||
inst.regX64 = regs.allocGprReg(SizeX64::qword);
|
||||
|
||||
// Custom bit shift value can only be placed in cl
|
||||
ScopedRegX64 shiftTmp{regs, regs.takeGprReg(rcx)};
|
||||
|
||||
ScopedRegX64 tmp{regs, SizeX64::qword};
|
||||
|
||||
build.mov(inst.regX64, qword[regOp(inst.a) + offsetof(Table, node)]);
|
||||
build.mov(dwordReg(tmp.reg), 1);
|
||||
build.mov(byteReg(shiftTmp.reg), byte[regOp(inst.a) + offsetof(Table, lsizenode)]);
|
||||
build.shl(dwordReg(tmp.reg), byteReg(shiftTmp.reg));
|
||||
build.dec(dwordReg(tmp.reg));
|
||||
build.and_(dwordReg(tmp.reg), uintOp(inst.b));
|
||||
build.shl(tmp.reg, kLuaNodeSizeLog2);
|
||||
build.add(inst.regX64, tmp.reg);
|
||||
break;
|
||||
};
|
||||
case IrCmd::STORE_TAG:
|
||||
LUAU_ASSERT(inst.a.kind == IrOpKind::VmReg);
|
||||
|
||||
@ -686,6 +709,16 @@ void IrLoweringX64::lowerInst(IrInst& inst, uint32_t index, IrBlock& next)
|
||||
jumpOrFallthrough(blockOp(inst.e), next);
|
||||
break;
|
||||
}
|
||||
case IrCmd::JUMP_SLOT_MATCH:
|
||||
{
|
||||
LUAU_ASSERT(inst.b.kind == IrOpKind::VmConst);
|
||||
|
||||
ScopedRegX64 tmp{regs, SizeX64::qword};
|
||||
|
||||
jumpIfNodeKeyNotInExpectedSlot(build, tmp.reg, regOp(inst.a), luauConstantValue(inst.b.index), labelOp(inst.d));
|
||||
jumpOrFallthrough(blockOp(inst.c), next);
|
||||
break;
|
||||
}
|
||||
case IrCmd::TABLE_LEN:
|
||||
inst.regX64 = regs.allocXmmReg();
|
||||
|
||||
@ -715,7 +748,7 @@ void IrLoweringX64::lowerInst(IrInst& inst, uint32_t index, IrBlock& next)
|
||||
if (inst.regX64 != rax)
|
||||
build.mov(inst.regX64, rax);
|
||||
break;
|
||||
case IrCmd::NUM_TO_INDEX:
|
||||
case IrCmd::TRY_NUM_TO_INDEX:
|
||||
{
|
||||
inst.regX64 = regs.allocGprReg(SizeX64::dword);
|
||||
|
||||
@ -724,6 +757,16 @@ void IrLoweringX64::lowerInst(IrInst& inst, uint32_t index, IrBlock& next)
|
||||
convertNumberToIndexOrJump(build, tmp.reg, regOp(inst.a), inst.regX64, labelOp(inst.b));
|
||||
break;
|
||||
}
|
||||
case IrCmd::TRY_CALL_FASTGETTM:
|
||||
{
|
||||
inst.regX64 = regs.allocGprReg(SizeX64::qword);
|
||||
|
||||
callGetFastTmOrFallback(build, regOp(inst.a), TMS(intOp(inst.b)), labelOp(inst.c));
|
||||
|
||||
if (inst.regX64 != rax)
|
||||
build.mov(inst.regX64, rax);
|
||||
break;
|
||||
}
|
||||
case IrCmd::INT_TO_NUM:
|
||||
inst.regX64 = regs.allocXmmReg();
|
||||
|
||||
@ -1017,6 +1060,9 @@ void IrLoweringX64::lowerInst(IrInst& inst, uint32_t index, IrBlock& next)
|
||||
jumpIfNodeKeyNotInExpectedSlot(build, tmp.reg, regOp(inst.a), luauConstantValue(inst.b.index), labelOp(inst.c));
|
||||
break;
|
||||
}
|
||||
case IrCmd::CHECK_NODE_NO_NEXT:
|
||||
jumpIfNodeHasNext(build, regOp(inst.a), labelOp(inst.b));
|
||||
break;
|
||||
case IrCmd::INTERRUPT:
|
||||
emitInterrupt(build, uintOp(inst.a));
|
||||
break;
|
||||
@ -1114,16 +1160,6 @@ void IrLoweringX64::lowerInst(IrInst& inst, uint32_t index, IrBlock& next)
|
||||
build.setLabel(next);
|
||||
break;
|
||||
}
|
||||
case IrCmd::LOP_NAMECALL:
|
||||
{
|
||||
const Instruction* pc = proto->code + uintOp(inst.a);
|
||||
LUAU_ASSERT(inst.b.kind == IrOpKind::VmReg);
|
||||
LUAU_ASSERT(inst.c.kind == IrOpKind::VmReg);
|
||||
|
||||
emitInstNameCall(build, pc, uintOp(inst.a), proto->k, blockOp(inst.d).label, blockOp(inst.e).label);
|
||||
jumpOrFallthrough(blockOp(inst.d), next);
|
||||
break;
|
||||
}
|
||||
case IrCmd::LOP_CALL:
|
||||
{
|
||||
const Instruction* pc = proto->code + uintOp(inst.a);
|
||||
|
@ -210,6 +210,34 @@ BuiltinImplResult translateBuiltinMathClamp(IrBuilder& build, int nparams, int r
|
||||
return {BuiltinImplType::UsesFallback, 1};
|
||||
}
|
||||
|
||||
BuiltinImplResult translateBuiltinType(IrBuilder& build, int nparams, int ra, int arg, IrOp args, int nresults, IrOp fallback)
|
||||
{
|
||||
if (nparams < 1 || nresults > 1)
|
||||
return {BuiltinImplType::None, -1};
|
||||
|
||||
build.inst(
|
||||
IrCmd::FASTCALL, build.constUint(LBF_TYPE), build.vmReg(ra), build.vmReg(arg), args, build.constInt(nparams), build.constInt(nresults));
|
||||
|
||||
// TODO: tag update might not be required, we place it here now because FASTCALL is not modeled in constant propagation yet
|
||||
build.inst(IrCmd::STORE_TAG, build.vmReg(ra), build.constTag(LUA_TSTRING));
|
||||
|
||||
return {BuiltinImplType::UsesFallback, 1};
|
||||
}
|
||||
|
||||
BuiltinImplResult translateBuiltinTypeof(IrBuilder& build, int nparams, int ra, int arg, IrOp args, int nresults, IrOp fallback)
|
||||
{
|
||||
if (nparams < 1 || nresults > 1)
|
||||
return {BuiltinImplType::None, -1};
|
||||
|
||||
build.inst(
|
||||
IrCmd::FASTCALL, build.constUint(LBF_TYPEOF), build.vmReg(ra), build.vmReg(arg), args, build.constInt(nparams), build.constInt(nresults));
|
||||
|
||||
// TODO: tag update might not be required, we place it here now because FASTCALL is not modeled in constant propagation yet
|
||||
build.inst(IrCmd::STORE_TAG, build.vmReg(ra), build.constTag(LUA_TSTRING));
|
||||
|
||||
return {BuiltinImplType::UsesFallback, 1};
|
||||
}
|
||||
|
||||
BuiltinImplResult translateBuiltin(IrBuilder& build, int bfid, int ra, int arg, IrOp args, int nparams, int nresults, IrOp fallback)
|
||||
{
|
||||
switch (bfid)
|
||||
@ -254,6 +282,10 @@ BuiltinImplResult translateBuiltin(IrBuilder& build, int bfid, int ra, int arg,
|
||||
case LBF_MATH_FREXP:
|
||||
case LBF_MATH_MODF:
|
||||
return translateBuiltinNumberTo2Number(build, LuauBuiltinFunction(bfid), nparams, ra, arg, args, nresults, fallback);
|
||||
case LBF_TYPE:
|
||||
return translateBuiltinType(build, nparams, ra, arg, args, nresults, fallback);
|
||||
case LBF_TYPEOF:
|
||||
return translateBuiltinTypeof(build, nparams, ra, arg, args, nresults, fallback);
|
||||
default:
|
||||
return {BuiltinImplType::None, -1};
|
||||
}
|
||||
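Not part of the diff — just an illustrative Lua sketch of the call shape the two new builtin translations above handle: single-argument, single-result `type`/`typeof` calls, which can now stay on the FASTCALL path with their string result tag stored directly.

```lua
-- Illustrative only: these single-argument, single-result calls match the
-- shape accepted by translateBuiltinType/translateBuiltinTypeof above.
local function describe(v)
    if type(v) == "number" then
        return "number: " .. tostring(v)
    end
    return typeof(v) -- typeof also reports host-defined userdata type names
end

print(describe(42))   --> number: 42
print(describe("hi")) --> string
```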
|
@ -806,7 +806,7 @@ void translateInstGetTable(IrBuilder& build, const Instruction* pc, int pcpos)
|
||||
IrOp vb = build.inst(IrCmd::LOAD_POINTER, build.vmReg(rb));
|
||||
IrOp vc = build.inst(IrCmd::LOAD_DOUBLE, build.vmReg(rc));
|
||||
|
||||
IrOp index = build.inst(IrCmd::NUM_TO_INDEX, vc, fallback);
|
||||
IrOp index = build.inst(IrCmd::TRY_NUM_TO_INDEX, vc, fallback);
|
||||
|
||||
index = build.inst(IrCmd::SUB_INT, index, build.constInt(1));
|
||||
|
||||
@ -843,7 +843,7 @@ void translateInstSetTable(IrBuilder& build, const Instruction* pc, int pcpos)
|
||||
IrOp vb = build.inst(IrCmd::LOAD_POINTER, build.vmReg(rb));
|
||||
IrOp vc = build.inst(IrCmd::LOAD_DOUBLE, build.vmReg(rc));
|
||||
|
||||
IrOp index = build.inst(IrCmd::NUM_TO_INDEX, vc, fallback);
|
||||
IrOp index = build.inst(IrCmd::TRY_NUM_TO_INDEX, vc, fallback);
|
||||
|
||||
index = build.inst(IrCmd::SUB_INT, index, build.constInt(1));
|
||||
|
||||
@ -1035,5 +1035,63 @@ void translateInstCapture(IrBuilder& build, const Instruction* pc, int pcpos)
|
||||
}
|
||||
}
|
||||
|
||||
void translateInstNamecall(IrBuilder& build, const Instruction* pc, int pcpos)
|
||||
{
|
||||
int ra = LUAU_INSN_A(*pc);
|
||||
int rb = LUAU_INSN_B(*pc);
|
||||
uint32_t aux = pc[1];
|
||||
|
||||
IrOp next = build.blockAtInst(pcpos + getOpLength(LOP_NAMECALL));
|
||||
IrOp fallback = build.block(IrBlockKind::Fallback);
|
||||
IrOp firstFastPathSuccess = build.block(IrBlockKind::Internal);
|
||||
IrOp secondFastPath = build.block(IrBlockKind::Internal);
|
||||
|
||||
build.loadAndCheckTag(build.vmReg(rb), LUA_TTABLE, fallback);
|
||||
IrOp table = build.inst(IrCmd::LOAD_POINTER, build.vmReg(rb));
|
||||
|
||||
LUAU_ASSERT(build.function.proto);
|
||||
IrOp addrNodeEl = build.inst(IrCmd::GET_HASH_NODE_ADDR, table, build.constUint(tsvalue(&build.function.proto->k[aux])->hash));
|
||||
|
||||
// We use 'jump' version instead of 'check' guard because we are jumping away into a non-fallback block
|
||||
// This is required by CFG live range analysis because both non-fallback blocks define the same registers
|
||||
build.inst(IrCmd::JUMP_SLOT_MATCH, addrNodeEl, build.vmConst(aux), firstFastPathSuccess, secondFastPath);
|
||||
|
||||
build.beginBlock(firstFastPathSuccess);
|
||||
build.inst(IrCmd::STORE_POINTER, build.vmReg(ra + 1), table);
|
||||
build.inst(IrCmd::STORE_TAG, build.vmReg(ra + 1), build.constTag(LUA_TTABLE));
|
||||
|
||||
IrOp nodeEl = build.inst(IrCmd::LOAD_NODE_VALUE_TV, addrNodeEl);
|
||||
build.inst(IrCmd::STORE_TVALUE, build.vmReg(ra), nodeEl);
|
||||
build.inst(IrCmd::JUMP, next);
|
||||
|
||||
build.beginBlock(secondFastPath);
|
||||
|
||||
build.inst(IrCmd::CHECK_NODE_NO_NEXT, addrNodeEl, fallback);
|
||||
|
||||
IrOp indexPtr = build.inst(IrCmd::TRY_CALL_FASTGETTM, table, build.constInt(TM_INDEX), fallback);
|
||||
|
||||
build.loadAndCheckTag(indexPtr, LUA_TTABLE, fallback);
|
||||
IrOp index = build.inst(IrCmd::LOAD_POINTER, indexPtr);
|
||||
|
||||
IrOp addrIndexNodeEl = build.inst(IrCmd::GET_SLOT_NODE_ADDR, index, build.constUint(pcpos));
|
||||
build.inst(IrCmd::CHECK_SLOT_MATCH, addrIndexNodeEl, build.vmConst(aux), fallback);
|
||||
|
||||
// TODO: original 'table' was clobbered by a call inside 'FASTGETTM'
|
||||
// Ideally, such calls should have no effect on SSA IR values, but the simple register allocator doesn't support it
|
||||
IrOp table2 = build.inst(IrCmd::LOAD_POINTER, build.vmReg(rb));
|
||||
build.inst(IrCmd::STORE_POINTER, build.vmReg(ra + 1), table2);
|
||||
build.inst(IrCmd::STORE_TAG, build.vmReg(ra + 1), build.constTag(LUA_TTABLE));
|
||||
|
||||
IrOp indexNodeEl = build.inst(IrCmd::LOAD_NODE_VALUE_TV, addrIndexNodeEl);
|
||||
build.inst(IrCmd::STORE_TVALUE, build.vmReg(ra), indexNodeEl);
|
||||
build.inst(IrCmd::JUMP, next);
|
||||
|
||||
build.beginBlock(fallback);
|
||||
build.inst(IrCmd::FALLBACK_NAMECALL, build.constUint(pcpos), build.vmReg(ra), build.vmReg(rb), build.vmConst(aux));
|
||||
build.inst(IrCmd::JUMP, next);
|
||||
|
||||
build.beginBlock(next);
|
||||
}
|
||||
|
||||
} // namespace CodeGen
|
||||
} // namespace Luau
|
||||
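As a rough sketch (not from the commit itself), the three code paths built in `translateInstNamecall` correspond to method calls like the ones below: a method stored directly in the receiver's hash part takes the first fast path, a method reached through a plain-table `__index` metatable takes the second, and anything else (such as an `__index` function) goes through FALLBACK_NAMECALL.

```lua
-- Hypothetical example of the three NAMECALL paths described above.
local Base = {}
function Base.greet(self)
    return "hello, " .. self.name
end

-- Method found directly in the object's hash part: first fast path.
local direct = { name = "direct", greet = Base.greet }
print(direct:greet())

-- Method found through a plain-table __index metatable: second fast path.
local indirect = setmetatable({ name = "indirect" }, { __index = Base })
print(indirect:greet())

-- An __index *function* cannot use either fast path and takes the fallback.
local dynamic = setmetatable({ name = "dynamic" }, {
    __index = function(t, k) return Base[k] end,
})
print(dynamic:greet())
```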
|
@ -60,6 +60,7 @@ void translateInstGetGlobal(IrBuilder& build, const Instruction* pc, int pcpos);
|
||||
void translateInstSetGlobal(IrBuilder& build, const Instruction* pc, int pcpos);
|
||||
void translateInstConcat(IrBuilder& build, const Instruction* pc, int pcpos);
|
||||
void translateInstCapture(IrBuilder& build, const Instruction* pc, int pcpos);
|
||||
void translateInstNamecall(IrBuilder& build, const Instruction* pc, int pcpos);
|
||||
|
||||
} // namespace CodeGen
|
||||
} // namespace Luau
|
||||
|
@ -360,7 +360,7 @@ void foldConstants(IrBuilder& build, IrFunction& function, IrBlock& block, uint3
|
||||
replace(function, block, index, {IrCmd::JUMP, inst.e});
|
||||
}
|
||||
break;
|
||||
case IrCmd::NUM_TO_INDEX:
|
||||
case IrCmd::TRY_NUM_TO_INDEX:
|
||||
if (inst.a.kind == IrOpKind::Constant)
|
||||
{
|
||||
double value = function.doubleOp(inst.a);
|
||||
|
@ -80,6 +80,7 @@ void initHelperFunctions(NativeState& data)
|
||||
data.context.luaF_close = luaF_close;
|
||||
|
||||
data.context.luaT_gettm = luaT_gettm;
|
||||
data.context.luaT_objtypenamestr = luaT_objtypenamestr;
|
||||
|
||||
data.context.libm_exp = exp;
|
||||
data.context.libm_pow = pow;
|
||||
|
@ -39,7 +39,6 @@ struct NativeProto
|
||||
uintptr_t* instTargets = nullptr; // TODO: NativeProto should be variable-size with all target embedded
|
||||
|
||||
Proto* proto = nullptr;
|
||||
uint32_t location = 0;
|
||||
};
|
||||
|
||||
struct NativeContext
|
||||
@ -79,6 +78,7 @@ struct NativeContext
|
||||
void (*luaF_close)(lua_State* L, StkId level) = nullptr;
|
||||
|
||||
const TValue* (*luaT_gettm)(Table* events, TMS event, TString* ename) = nullptr;
|
||||
const TString* (*luaT_objtypenamestr)(lua_State* L, const TValue* o) = nullptr;
|
||||
|
||||
double (*libm_exp)(double) = nullptr;
|
||||
double (*libm_pow)(double, double) = nullptr;
|
||||
|
@ -319,10 +319,7 @@ static void constPropInInst(ConstPropState& state, IrBuilder& build, IrFunction&
|
||||
{
|
||||
if (inst.b.kind == IrOpKind::Constant)
|
||||
{
|
||||
std::optional<double> oldValue = function.asDoubleOp(state.tryGetValue(inst.a));
|
||||
double newValue = function.doubleOp(inst.b);
|
||||
|
||||
if (oldValue && *oldValue == newValue)
|
||||
if (state.tryGetValue(inst.a) == inst.b)
|
||||
kill(function, inst);
|
||||
else
|
||||
state.saveValue(inst.a, inst.b);
|
||||
@ -338,10 +335,7 @@ static void constPropInInst(ConstPropState& state, IrBuilder& build, IrFunction&
|
||||
{
|
||||
if (inst.b.kind == IrOpKind::Constant)
|
||||
{
|
||||
std::optional<int> oldValue = function.asIntOp(state.tryGetValue(inst.a));
|
||||
int newValue = function.intOp(inst.b);
|
||||
|
||||
if (oldValue && *oldValue == newValue)
|
||||
if (state.tryGetValue(inst.a) == inst.b)
|
||||
kill(function, inst);
|
||||
else
|
||||
state.saveValue(inst.a, inst.b);
|
||||
@ -504,6 +498,7 @@ static void constPropInInst(ConstPropState& state, IrBuilder& build, IrFunction&
|
||||
case IrCmd::LOAD_ENV:
|
||||
case IrCmd::GET_ARR_ADDR:
|
||||
case IrCmd::GET_SLOT_NODE_ADDR:
|
||||
case IrCmd::GET_HASH_NODE_ADDR:
|
||||
case IrCmd::STORE_NODE_VALUE_TV:
|
||||
case IrCmd::ADD_INT:
|
||||
case IrCmd::SUB_INT:
|
||||
@ -519,13 +514,16 @@ static void constPropInInst(ConstPropState& state, IrBuilder& build, IrFunction&
|
||||
case IrCmd::NOT_ANY:
|
||||
case IrCmd::JUMP:
|
||||
case IrCmd::JUMP_EQ_POINTER:
|
||||
case IrCmd::JUMP_SLOT_MATCH:
|
||||
case IrCmd::TABLE_LEN:
|
||||
case IrCmd::NEW_TABLE:
|
||||
case IrCmd::DUP_TABLE:
|
||||
case IrCmd::NUM_TO_INDEX:
|
||||
case IrCmd::TRY_NUM_TO_INDEX:
|
||||
case IrCmd::TRY_CALL_FASTGETTM:
|
||||
case IrCmd::INT_TO_NUM:
|
||||
case IrCmd::CHECK_ARRAY_SIZE:
|
||||
case IrCmd::CHECK_SLOT_MATCH:
|
||||
case IrCmd::CHECK_NODE_NO_NEXT:
|
||||
case IrCmd::BARRIER_TABLE_BACK:
|
||||
case IrCmd::LOP_RETURN:
|
||||
case IrCmd::LOP_COVERAGE:
|
||||
@ -552,7 +550,6 @@ static void constPropInInst(ConstPropState& state, IrBuilder& build, IrFunction&
|
||||
case IrCmd::CONCAT:
|
||||
case IrCmd::PREPARE_FORN:
|
||||
case IrCmd::INTERRUPT: // TODO: it will be important to keep tag/value state, but we have to track register capture
|
||||
case IrCmd::LOP_NAMECALL:
|
||||
case IrCmd::LOP_CALL:
|
||||
case IrCmd::LOP_FORGLOOP:
|
||||
case IrCmd::LOP_FORGLOOP_FALLBACK:
|
||||
@ -633,7 +630,7 @@ static std::vector<uint32_t> collectDirectBlockJumpPath(IrFunction& function, st
|
||||
// * if the successor has multiple uses, it can't have such 'live in' values without phi nodes that we don't have yet
|
||||
// Another possibility is to have two paths from 'block' into the target through two intermediate blocks
|
||||
// Usually that would mean that we would have a conditional jump at the end of 'block'
|
||||
// But using check guards and fallback clocks it becomes a possible setup
|
||||
// But using check guards and fallback blocks it becomes a possible setup
|
||||
// We avoid this by making sure fallbacks rejoin the other immediate successor of 'block'
|
||||
LUAU_ASSERT(getLiveOutValueCount(function, *block) == 0);
|
||||
|
||||
|
@ -201,6 +201,7 @@ void UnwindBuilderDwarf2::setupFrameReg(X64::RegisterX64 reg, int espOffset)
|
||||
void UnwindBuilderDwarf2::finish()
|
||||
{
|
||||
LUAU_ASSERT(stackOffset % 16 == 0 && "stack has to be aligned to 16 bytes after prologue");
|
||||
LUAU_ASSERT(fdeEntryStart != nullptr);
|
||||
|
||||
pos = alignPosition(fdeEntryStart, pos);
|
||||
writeu32(fdeEntryStart, unsigned(pos - fdeEntryStart - 4)); // Length field itself is excluded from length
|
||||
@ -220,7 +221,9 @@ void UnwindBuilderDwarf2::finalize(char* target, void* funcAddress, size_t funcS
|
||||
{
|
||||
memcpy(target, rawData, getSize());
|
||||
|
||||
LUAU_ASSERT(fdeEntryStart != nullptr);
|
||||
unsigned fdeEntryStartPos = unsigned(fdeEntryStart - rawData);
|
||||
|
||||
writeu64((uint8_t*)target + fdeEntryStartPos + kFdeInitialLocationOffset, uintptr_t(funcAddress));
|
||||
writeu64((uint8_t*)target + fdeEntryStartPos + kFdeAddressRangeOffset, funcSize);
|
||||
}
|
||||
|
@ -25,7 +25,7 @@
|
||||
// Additionally, in some specific instructions such as ANDK, the limit on the encoded value is smaller; this means that if a value is larger, a different instruction must be selected.
|
||||
//
|
||||
// Registers: 0-254. Registers refer to the values on the function's stack frame, including arguments.
|
||||
// Upvalues: 0-254. Upvalues refer to the values stored in the closure object.
|
||||
// Upvalues: 0-199. Upvalues refer to the values stored in the closure object.
|
||||
// Constants: 0-2^23-1. Constants are stored in a table allocated with each proto; to allow for future bytecode tweaks the encodable value is limited to 23 bits.
|
||||
// Closures: 0-2^15-1. Closures are created from child protos via a child index; the limit is for the number of closures immediately referenced in each function.
|
||||
// Jumps: -2^23..2^23. Jump offsets are specified in word increments, so jumping over an instruction may sometimes require an offset of 2 or more. Note that for jump instructions with AUX, the AUX word is included as part of the jump offset.
|
||||
@ -93,12 +93,12 @@ enum LuauOpcode
|
||||
|
||||
// GETUPVAL: load upvalue from the upvalue table for the current function
|
||||
// A: target register
|
||||
// B: upvalue index (0..255)
|
||||
// B: upvalue index
|
||||
LOP_GETUPVAL,
|
||||
|
||||
// SETUPVAL: store value into the upvalue table for the current function
|
||||
// A: target register
|
||||
// B: upvalue index (0..255)
|
||||
// B: upvalue index
|
||||
LOP_SETUPVAL,
|
||||
|
||||
// CLOSEUPVALS: close (migrate to heap) all upvalues that were captured for registers >= target
|
||||
|
@ -11,8 +11,9 @@ inline bool isFlagExperimental(const char* flag)
|
||||
// Flags in this list are disabled by default in various command-line tools. They may have behavior that is not fully final,
|
||||
// or critical bugs that are found after the code has been submitted.
|
||||
static const char* const kList[] = {
|
||||
"LuauInstantiateInSubtyping", // requires some fixes to lua-apps code
|
||||
"LuauTypecheckTypeguards", // requires some fixes to lua-apps code (CLI-67030)
|
||||
"LuauInstantiateInSubtyping", // requires some fixes to lua-apps code
|
||||
"LuauTypecheckTypeguards", // requires some fixes to lua-apps code (CLI-67030)
|
||||
"LuauTinyControlFlowAnalysis", // waiting for updates to packages depended by internal builtin plugins
|
||||
// makes sure we always have at least one entry
|
||||
nullptr,
|
||||
};
|
||||
|
@ -25,7 +25,6 @@ LUAU_FASTINTVARIABLE(LuauCompileInlineThreshold, 25)
|
||||
LUAU_FASTINTVARIABLE(LuauCompileInlineThresholdMaxBoost, 300)
|
||||
LUAU_FASTINTVARIABLE(LuauCompileInlineDepth, 5)
|
||||
|
||||
LUAU_FASTFLAGVARIABLE(LuauCompileTerminateBC, false)
|
||||
LUAU_FASTFLAGVARIABLE(LuauCompileBuiltinArity, false)
|
||||
|
||||
namespace Luau
|
||||
@ -143,7 +142,7 @@ struct Compiler
|
||||
return stat->body.size > 0 && alwaysTerminates(stat->body.data[stat->body.size - 1]);
|
||||
else if (node->is<AstStatReturn>())
|
||||
return true;
|
||||
else if (FFlag::LuauCompileTerminateBC && (node->is<AstStatBreak>() || node->is<AstStatContinue>()))
|
||||
else if (node->is<AstStatBreak>() || node->is<AstStatContinue>())
|
||||
return true;
|
||||
else if (AstStatIf* stat = node->as<AstStatIf>())
|
||||
return stat->elsebody && alwaysTerminates(stat->thenbody) && alwaysTerminates(stat->elsebody);
|
||||
|
3
Makefile
@ -143,6 +143,9 @@ aliases: $(EXECUTABLE_ALIASES)
|
||||
test: $(TESTS_TARGET)
|
||||
$(TESTS_TARGET) $(TESTS_ARGS)
|
||||
|
||||
conformance: $(TESTS_TARGET)
|
||||
$(TESTS_TARGET) $(TESTS_ARGS) -ts=Conformance
|
||||
|
||||
clean:
|
||||
rm -rf $(BUILD)
|
||||
rm -rf $(EXECUTABLE_ALIASES)
|
||||
|
@ -135,6 +135,7 @@ target_sources(Luau.Analysis PRIVATE
|
||||
Analysis/include/Luau/Constraint.h
|
||||
Analysis/include/Luau/ConstraintGraphBuilder.h
|
||||
Analysis/include/Luau/ConstraintSolver.h
|
||||
Analysis/include/Luau/ControlFlow.h
|
||||
Analysis/include/Luau/DataFlowGraph.h
|
||||
Analysis/include/Luau/DcrLogger.h
|
||||
Analysis/include/Luau/Def.h
|
||||
@ -370,6 +371,7 @@ if(TARGET Luau.UnitTest)
|
||||
tests/TypeInfer.annotations.test.cpp
|
||||
tests/TypeInfer.anyerror.test.cpp
|
||||
tests/TypeInfer.builtins.test.cpp
|
||||
tests/TypeInfer.cfa.test.cpp
|
||||
tests/TypeInfer.classes.test.cpp
|
||||
tests/TypeInfer.definitions.test.cpp
|
||||
tests/TypeInfer.functions.test.cpp
|
||||
|
@ -33,6 +33,8 @@
|
||||
|
||||
#include <string.h>
|
||||
|
||||
LUAU_FASTFLAGVARIABLE(LuauArrBoundResizeFix, false)
|
||||
|
||||
// max size of both array and hash part is 2^MAXBITS
|
||||
#define MAXBITS 26
|
||||
#define MAXSIZE (1 << MAXBITS)
|
||||
@ -454,15 +456,43 @@ static void rehash(lua_State* L, Table* t, const TValue* ek)
|
||||
int nasize = numusearray(t, nums); // count keys in array part
|
||||
int totaluse = nasize; // all those keys are integer keys
|
||||
totaluse += numusehash(t, nums, &nasize); // count keys in hash part
|
||||
|
||||
// count extra key
|
||||
if (ttisnumber(ek))
|
||||
nasize += countint(nvalue(ek), nums);
|
||||
totaluse++;
|
||||
|
||||
// compute new size for array part
|
||||
int na = computesizes(nums, &nasize);
|
||||
int nh = totaluse - na;
|
||||
// enforce the boundary invariant; for performance, only do hash lookups if we must
|
||||
nasize = adjustasize(t, nasize, ek);
|
||||
|
||||
if (FFlag::LuauArrBoundResizeFix)
|
||||
{
|
||||
// enforce the boundary invariant; for performance, only do hash lookups if we must
|
||||
int nadjusted = adjustasize(t, nasize, ek);
|
||||
|
||||
// count how many extra elements belong to array part instead of hash part
|
||||
int aextra = nadjusted - nasize;
|
||||
|
||||
if (aextra != 0)
|
||||
{
|
||||
// we no longer need to store those extra array elements in hash part
|
||||
nh -= aextra;
|
||||
|
||||
// because hash nodes are twice as large as array nodes, the memory we saved for hash parts can be used by array part
|
||||
// this follows the general sparse array part optimization where array is allocated when 50% occupation is reached
|
||||
nasize = nadjusted + aextra;
|
||||
|
||||
// since the size was changed, it's again important to enforce the boundary invariant at the new size
|
||||
nasize = adjustasize(t, nasize, ek);
|
||||
}
|
||||
}
|
||||
else
|
||||
{
|
||||
// enforce the boundary invariant; for performance, only do hash lookups if we must
|
||||
nasize = adjustasize(t, nasize, ek);
|
||||
}
|
||||
|
||||
// resize the table to new computed sizes
|
||||
resize(L, t, nasize, nh);
|
||||
}
|
||||
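For orientation only, a hypothetical Lua workload of the kind this rehash change targets: when a table's array part is filled starting from an offset, keys near the boundary could previously be budgeted to the hash part even though `adjustasize` later pulls them into the array part, leaving an oversized hash allocation.

```lua
-- Hypothetical repro shape (an assumption, not the exact issue repro):
-- the array part is grown by appending keys starting from an offset
-- rather than from index 1.
local t = {}
for i = 2, 100000 do
    t[i] = true
end
```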
|
@ -1677,8 +1677,6 @@ RETURN R0 0
|
||||
|
||||
TEST_CASE("LoopBreak")
|
||||
{
|
||||
ScopedFastFlag sff("LuauCompileTerminateBC", true);
|
||||
|
||||
// default codegen: compile breaks as unconditional jumps
|
||||
CHECK_EQ("\n" + compileFunction0("while true do if math.random() < 0.5 then break else end end"), R"(
|
||||
L0: GETIMPORT R0 2 [math.random]
|
||||
@ -1703,8 +1701,6 @@ L1: RETURN R0 0
|
||||
|
||||
TEST_CASE("LoopContinue")
|
||||
{
|
||||
ScopedFastFlag sff("LuauCompileTerminateBC", true);
|
||||
|
||||
// default codegen: compile continue as unconditional jumps
|
||||
CHECK_EQ("\n" + compileFunction0("repeat if math.random() < 0.5 then continue else end break until false error()"), R"(
|
||||
L0: GETIMPORT R0 2 [math.random]
|
||||
@ -2214,6 +2210,46 @@ TEST_CASE("RecursionParse")
|
||||
{
|
||||
CHECK_EQ(std::string(e.what()), "Exceeded allowed recursion depth; simplify your block to make the code compile");
|
||||
}
|
||||
|
||||
try
|
||||
{
|
||||
Luau::compileOrThrow(bcb, "local f: " + rep("(", 1500) + "nil" + rep(")", 1500));
|
||||
CHECK(!"Expected exception");
|
||||
}
|
||||
catch (std::exception& e)
|
||||
{
|
||||
CHECK_EQ(std::string(e.what()), "Exceeded allowed recursion depth; simplify your type annotation to make the code compile");
|
||||
}
|
||||
|
||||
try
|
||||
{
|
||||
Luau::compileOrThrow(bcb, "local f: () " + rep("-> ()", 1500));
|
||||
CHECK(!"Expected exception");
|
||||
}
|
||||
catch (std::exception& e)
|
||||
{
|
||||
CHECK_EQ(std::string(e.what()), "Exceeded allowed recursion depth; simplify your type annotation to make the code compile");
|
||||
}
|
||||
|
||||
try
|
||||
{
|
||||
Luau::compileOrThrow(bcb, "local f: " + rep("{x:", 1500) + "nil" + rep("}", 1500));
|
||||
CHECK(!"Expected exception");
|
||||
}
|
||||
catch (std::exception& e)
|
||||
{
|
||||
CHECK_EQ(std::string(e.what()), "Exceeded allowed recursion depth; simplify your type annotation to make the code compile");
|
||||
}
|
||||
|
||||
try
|
||||
{
|
||||
Luau::compileOrThrow(bcb, "local f: " + rep("(nil & ", 1500) + "nil" + rep(")", 1500));
|
||||
CHECK(!"Expected exception");
|
||||
}
|
||||
catch (std::exception& e)
|
||||
{
|
||||
CHECK_EQ(std::string(e.what()), "Exceeded allowed recursion depth; simplify your type annotation to make the code compile");
|
||||
}
|
||||
}
|
||||
|
||||
TEST_CASE("ArrayIndexLiteral")
|
||||
@ -6816,8 +6852,6 @@ RETURN R0 0
|
||||
|
||||
TEST_CASE("ElideJumpAfterIf")
|
||||
{
|
||||
ScopedFastFlag sff("LuauCompileTerminateBC", true);
|
||||
|
||||
// break refers to outer loop => we can elide unconditional branches
|
||||
CHECK_EQ("\n" + compileFunction0(R"(
|
||||
local foo, bar = ...
|
||||
|
@ -13,7 +13,7 @@ ConstraintGraphBuilderFixture::ConstraintGraphBuilderFixture()
|
||||
{
|
||||
mainModule->reduction = std::make_unique<TypeReduction>(NotNull{&mainModule->internalTypes}, builtinTypes, NotNull{&ice});
|
||||
|
||||
BlockedType::nextIndex = 0;
|
||||
BlockedType::DEPRECATED_nextIndex = 0;
|
||||
BlockedTypePack::nextIndex = 0;
|
||||
}
|
||||
|
||||
|
@ -137,7 +137,7 @@ const Config& TestConfigResolver::getConfig(const ModuleName& name) const
|
||||
Fixture::Fixture(bool freeze, bool prepareAutocomplete)
|
||||
: sff_DebugLuauFreezeArena("DebugLuauFreezeArena", freeze)
|
||||
, frontend(&fileResolver, &configResolver,
|
||||
{/* retainFullTypeGraphs= */ true, /* forAutocomplete */ false, /* randomConstraintResolutionSeed */ randomSeed})
|
||||
{/* retainFullTypeGraphs= */ true, /* forAutocomplete */ false, /* runLintChecks */ false, /* randomConstraintResolutionSeed */ randomSeed})
|
||||
, builtinTypes(frontend.builtinTypes)
|
||||
{
|
||||
configResolver.defaultConfig.mode = Mode::Strict;
|
||||
@ -173,15 +173,19 @@ AstStatBlock* Fixture::parse(const std::string& source, const ParseOptions& pars
|
||||
// if AST is available, check how lint and typecheck handle error nodes
|
||||
if (result.root)
|
||||
{
|
||||
frontend.lint(*sourceModule);
|
||||
|
||||
if (FFlag::DebugLuauDeferredConstraintResolution)
|
||||
{
|
||||
Luau::check(*sourceModule, {}, builtinTypes, NotNull{&ice}, NotNull{&moduleResolver}, NotNull{&fileResolver},
|
||||
ModulePtr module = Luau::check(*sourceModule, {}, builtinTypes, NotNull{&ice}, NotNull{&moduleResolver}, NotNull{&fileResolver},
|
||||
frontend.globals.globalScope, frontend.options);
|
||||
|
||||
Luau::lint(sourceModule->root, *sourceModule->names, frontend.globals.globalScope, module.get(), sourceModule->hotcomments, {});
|
||||
}
|
||||
else
|
||||
frontend.typeChecker.check(*sourceModule, sourceModule->mode.value_or(Luau::Mode::Nonstrict));
|
||||
{
|
||||
ModulePtr module = frontend.typeChecker.check(*sourceModule, sourceModule->mode.value_or(Luau::Mode::Nonstrict));
|
||||
|
||||
Luau::lint(sourceModule->root, *sourceModule->names, frontend.globals.globalScope, module.get(), sourceModule->hotcomments, {});
|
||||
}
|
||||
}
|
||||
|
||||
throw ParseErrors(result.errors);
|
||||
@ -209,20 +213,23 @@ CheckResult Fixture::check(const std::string& source)
|
||||
|
||||
LintResult Fixture::lint(const std::string& source, const std::optional<LintOptions>& lintOptions)
|
||||
{
|
||||
ParseOptions parseOptions;
|
||||
parseOptions.captureComments = true;
|
||||
configResolver.defaultConfig.mode = Mode::Nonstrict;
|
||||
parse(source, parseOptions);
|
||||
ModuleName mm = fromString(mainModuleName);
|
||||
configResolver.defaultConfig.mode = Mode::Strict;
|
||||
fileResolver.source[mm] = std::move(source);
|
||||
frontend.markDirty(mm);
|
||||
|
||||
return frontend.lint(*sourceModule, lintOptions);
|
||||
return lintModule(mm);
|
||||
}
|
||||
|
||||
LintResult Fixture::lintTyped(const std::string& source, const std::optional<LintOptions>& lintOptions)
|
||||
LintResult Fixture::lintModule(const ModuleName& moduleName, const std::optional<LintOptions>& lintOptions)
|
||||
{
|
||||
check(source);
|
||||
ModuleName mm = fromString(mainModuleName);
|
||||
FrontendOptions options = frontend.options;
|
||||
options.runLintChecks = true;
|
||||
options.enabledLintWarnings = lintOptions;
|
||||
|
||||
return frontend.lint(mm, lintOptions);
|
||||
CheckResult result = frontend.check(moduleName, options);
|
||||
|
||||
return result.lintResult;
|
||||
}
|
||||
|
||||
ParseResult Fixture::parseEx(const std::string& source, const ParseOptions& options)
|
||||
|
@ -66,7 +66,7 @@ struct Fixture
|
||||
CheckResult check(const std::string& source);
|
||||
|
||||
LintResult lint(const std::string& source, const std::optional<LintOptions>& lintOptions = {});
|
||||
LintResult lintTyped(const std::string& source, const std::optional<LintOptions>& lintOptions = {});
|
||||
LintResult lintModule(const ModuleName& moduleName, const std::optional<LintOptions>& lintOptions = {});
|
||||
|
||||
/// Parse with all language extensions enabled
|
||||
ParseResult parseEx(const std::string& source, const ParseOptions& parseOptions = {});
|
||||
@ -94,6 +94,7 @@ struct Fixture
|
||||
TypeId requireTypeAlias(const std::string& name);
|
||||
|
||||
ScopedFastFlag sff_DebugLuauFreezeArena;
|
||||
ScopedFastFlag luauLintInTypecheck{"LuauLintInTypecheck", true};
|
||||
|
||||
TestFileResolver fileResolver;
|
||||
TestConfigResolver configResolver;
|
||||
|
@ -456,16 +456,16 @@ TEST_CASE_FIXTURE(FrontendFixture, "dont_reparse_clean_file_when_linting")
|
||||
end
|
||||
)";
|
||||
|
||||
frontend.check("Modules/A");
|
||||
configResolver.defaultConfig.enabledLint.enableWarning(LintWarning::Code_ForRange);
|
||||
|
||||
lintModule("Modules/A");
|
||||
|
||||
fileResolver.source["Modules/A"] = R"(
|
||||
-- We have fixed the lint error, but we did not tell the Frontend that the file is changed!
|
||||
-- Therefore, we expect Frontend to reuse the parse tree.
|
||||
-- Therefore, we expect Frontend to reuse the results from previous lint.
|
||||
)";
|
||||
|
||||
configResolver.defaultConfig.enabledLint.enableWarning(LintWarning::Code_ForRange);
|
||||
|
||||
LintResult lintResult = frontend.lint("Modules/A");
|
||||
LintResult lintResult = lintModule("Modules/A");
|
||||
|
||||
CHECK_EQ(1, lintResult.warnings.size());
|
||||
}
|
||||
@ -760,25 +760,49 @@ TEST_CASE_FIXTURE(FrontendFixture, "test_lint_uses_correct_config")
|
||||
|
||||
configResolver.configFiles["Module/A"].enabledLint.enableWarning(LintWarning::Code_ForRange);
|
||||
|
||||
auto result = frontend.lint("Module/A");
|
||||
auto result = lintModule("Module/A");
|
||||
CHECK_EQ(1, result.warnings.size());
|
||||
|
||||
configResolver.configFiles["Module/A"].enabledLint.disableWarning(LintWarning::Code_ForRange);
|
||||
frontend.markDirty("Module/A");
|
||||
|
||||
auto result2 = frontend.lint("Module/A");
|
||||
auto result2 = lintModule("Module/A");
|
||||
CHECK_EQ(0, result2.warnings.size());
|
||||
|
||||
LintOptions overrideOptions;
|
||||
|
||||
overrideOptions.enableWarning(LintWarning::Code_ForRange);
|
||||
auto result3 = frontend.lint("Module/A", overrideOptions);
|
||||
frontend.markDirty("Module/A");
|
||||
|
||||
auto result3 = lintModule("Module/A", overrideOptions);
|
||||
CHECK_EQ(1, result3.warnings.size());
|
||||
|
||||
overrideOptions.disableWarning(LintWarning::Code_ForRange);
|
||||
auto result4 = frontend.lint("Module/A", overrideOptions);
|
||||
frontend.markDirty("Module/A");
|
||||
|
||||
auto result4 = lintModule("Module/A", overrideOptions);
|
||||
CHECK_EQ(0, result4.warnings.size());
|
||||
}
|
||||
|
||||
TEST_CASE_FIXTURE(FrontendFixture, "lint_results_are_only_for_checked_module")
|
||||
{
|
||||
fileResolver.source["Module/A"] = R"(
|
||||
local _ = 0b10000000000000000000000000000000000000000000000000000000000000000
|
||||
)";
|
||||
|
||||
fileResolver.source["Module/B"] = R"(
|
||||
require(script.Parent.A)
|
||||
local _ = 0x10000000000000000
|
||||
)";
|
||||
|
||||
LintResult lintResult = lintModule("Module/B");
|
||||
CHECK_EQ(1, lintResult.warnings.size());
|
||||
|
||||
// Check cached result
|
||||
lintResult = lintModule("Module/B");
|
||||
CHECK_EQ(1, lintResult.warnings.size());
|
||||
}
|
||||
|
||||
TEST_CASE_FIXTURE(FrontendFixture, "discard_type_graphs")
|
||||
{
|
||||
Frontend fe{&fileResolver, &configResolver, {false}};
|
||||
|
@ -62,22 +62,16 @@ public:
|
||||
build.inst(IrCmd::LOP_RETURN, build.constUint(2));
|
||||
};
|
||||
|
||||
void checkEq(IrOp lhs, IrOp rhs)
|
||||
{
|
||||
CHECK_EQ(lhs.kind, rhs.kind);
|
||||
LUAU_ASSERT(lhs.kind != IrOpKind::Constant && "can't compare constants, each ref is unique");
|
||||
CHECK_EQ(lhs.index, rhs.index);
|
||||
}
|
||||
|
||||
void checkEq(IrOp instOp, const IrInst& inst)
|
||||
{
|
||||
const IrInst& target = build.function.instOp(instOp);
|
||||
CHECK(target.cmd == inst.cmd);
|
||||
checkEq(target.a, inst.a);
|
||||
checkEq(target.b, inst.b);
|
||||
checkEq(target.c, inst.c);
|
||||
checkEq(target.d, inst.d);
|
||||
checkEq(target.e, inst.e);
|
||||
CHECK(target.a == inst.a);
|
||||
CHECK(target.b == inst.b);
|
||||
CHECK(target.c == inst.c);
|
||||
CHECK(target.d == inst.d);
|
||||
CHECK(target.e == inst.e);
|
||||
CHECK(target.f == inst.f);
|
||||
}
|
||||
|
||||
IrBuilder build;
|
||||
@ -405,18 +399,18 @@ bb_11:
|
||||
TEST_CASE_FIXTURE(IrBuilderFixture, "NumToIndex")
|
||||
{
|
||||
withOneBlock([this](IrOp a) {
|
||||
build.inst(IrCmd::STORE_INT, build.vmReg(0), build.inst(IrCmd::NUM_TO_INDEX, build.constDouble(4), a));
|
||||
build.inst(IrCmd::STORE_INT, build.vmReg(0), build.inst(IrCmd::TRY_NUM_TO_INDEX, build.constDouble(4), a));
|
||||
build.inst(IrCmd::LOP_RETURN, build.constUint(0));
|
||||
});
|
||||
|
||||
withOneBlock([this](IrOp a) {
|
||||
build.inst(IrCmd::STORE_INT, build.vmReg(0), build.inst(IrCmd::NUM_TO_INDEX, build.constDouble(1.2), a));
|
||||
build.inst(IrCmd::STORE_INT, build.vmReg(0), build.inst(IrCmd::TRY_NUM_TO_INDEX, build.constDouble(1.2), a));
|
||||
build.inst(IrCmd::LOP_RETURN, build.constUint(0));
|
||||
});
|
||||
|
||||
withOneBlock([this](IrOp a) {
|
||||
IrOp nan = build.inst(IrCmd::DIV_NUM, build.constDouble(0.0), build.constDouble(0.0));
|
||||
build.inst(IrCmd::STORE_INT, build.vmReg(0), build.inst(IrCmd::NUM_TO_INDEX, nan, a));
|
||||
build.inst(IrCmd::STORE_INT, build.vmReg(0), build.inst(IrCmd::TRY_NUM_TO_INDEX, nan, a));
|
||||
build.inst(IrCmd::LOP_RETURN, build.constUint(0));
|
||||
});
|
||||
|
||||
@ -1676,4 +1670,64 @@ bb_2:
|
||||
)");
|
||||
}
|
||||
|
||||
TEST_CASE_FIXTURE(IrBuilderFixture, "VariadicSequencePeeling")
|
||||
{
|
||||
IrOp entry = build.block(IrBlockKind::Internal);
|
||||
IrOp a = build.block(IrBlockKind::Internal);
|
||||
IrOp b = build.block(IrBlockKind::Internal);
|
||||
IrOp exit = build.block(IrBlockKind::Internal);
|
||||
|
||||
build.beginBlock(entry);
|
||||
build.inst(IrCmd::FALLBACK_GETVARARGS, build.constUint(0), build.vmReg(3), build.constInt(-1));
|
||||
build.inst(IrCmd::JUMP_EQ_TAG, build.inst(IrCmd::LOAD_TAG, build.vmReg(0)), build.constTag(tnumber), a, b);
|
||||
|
||||
build.beginBlock(a);
|
||||
build.inst(IrCmd::STORE_TVALUE, build.vmReg(2), build.inst(IrCmd::LOAD_TVALUE, build.vmReg(0)));
|
||||
build.inst(IrCmd::JUMP, exit);
|
||||
|
||||
build.beginBlock(b);
|
||||
build.inst(IrCmd::STORE_TVALUE, build.vmReg(2), build.inst(IrCmd::LOAD_TVALUE, build.vmReg(1)));
|
||||
build.inst(IrCmd::JUMP, exit);
|
||||
|
||||
build.beginBlock(exit);
|
||||
build.inst(IrCmd::LOP_RETURN, build.constUint(0), build.vmReg(2), build.constInt(-1));
|
||||
|
||||
updateUseCounts(build.function);
|
||||
computeCfgInfo(build.function);
|
||||
|
||||
CHECK("\n" + toString(build.function, /* includeUseInfo */ false) == R"(
|
||||
bb_0:
|
||||
; successors: bb_1, bb_2
|
||||
; in regs: R0, R1
|
||||
; out regs: R0, R1, R3...
|
||||
FALLBACK_GETVARARGS 0u, R3, -1i
|
||||
%1 = LOAD_TAG R0
|
||||
JUMP_EQ_TAG %1, tnumber, bb_1, bb_2
|
||||
|
||||
bb_1:
|
||||
; predecessors: bb_0
|
||||
; successors: bb_3
|
||||
; in regs: R0, R3...
|
||||
; out regs: R2...
|
||||
%3 = LOAD_TVALUE R0
|
||||
STORE_TVALUE R2, %3
|
||||
JUMP bb_3
|
||||
|
||||
bb_2:
|
||||
; predecessors: bb_0
|
||||
; successors: bb_3
|
||||
; in regs: R1, R3...
|
||||
; out regs: R2...
|
||||
%6 = LOAD_TVALUE R1
|
||||
STORE_TVALUE R2, %6
|
||||
JUMP bb_3
|
||||
|
||||
bb_3:
|
||||
; predecessors: bb_1, bb_2
|
||||
; in regs: R2...
|
||||
LOP_RETURN 0u, R2, -1i
|
||||
|
||||
)");
|
||||
}
|
||||
|
||||
TEST_SUITE_END();
|
||||
|
@ -733,6 +733,7 @@ end
|
||||
TEST_CASE_FIXTURE(Fixture, "ImplicitReturn")
|
||||
{
|
||||
LintResult result = lint(R"(
|
||||
--!nonstrict
|
||||
function f1(a)
|
||||
if not a then
|
||||
return 5
|
||||
@ -789,20 +790,21 @@ return f1,f2,f3,f4,f5,f6,f7
|
||||
)");
|
||||
|
||||
REQUIRE(3 == result.warnings.size());
|
||||
CHECK_EQ(result.warnings[0].location.begin.line, 4);
|
||||
CHECK_EQ(result.warnings[0].location.begin.line, 5);
|
||||
CHECK_EQ(result.warnings[0].text,
|
||||
"Function 'f1' can implicitly return no values even though there's an explicit return at line 4; add explicit return to silence");
|
||||
CHECK_EQ(result.warnings[1].location.begin.line, 28);
|
||||
"Function 'f1' can implicitly return no values even though there's an explicit return at line 5; add explicit return to silence");
|
||||
CHECK_EQ(result.warnings[1].location.begin.line, 29);
|
||||
CHECK_EQ(result.warnings[1].text,
|
||||
"Function 'f4' can implicitly return no values even though there's an explicit return at line 25; add explicit return to silence");
|
||||
CHECK_EQ(result.warnings[2].location.begin.line, 44);
|
||||
"Function 'f4' can implicitly return no values even though there's an explicit return at line 26; add explicit return to silence");
|
||||
CHECK_EQ(result.warnings[2].location.begin.line, 45);
|
||||
CHECK_EQ(result.warnings[2].text,
|
||||
"Function can implicitly return no values even though there's an explicit return at line 44; add explicit return to silence");
|
||||
"Function can implicitly return no values even though there's an explicit return at line 45; add explicit return to silence");
|
||||
}
|
||||
|
||||
TEST_CASE_FIXTURE(Fixture, "ImplicitReturnInfiniteLoop")
|
||||
{
|
||||
LintResult result = lint(R"(
|
||||
--!nonstrict
|
||||
function f1(a)
|
||||
while true do
|
||||
if math.random() > 0.5 then
|
||||
@ -845,12 +847,12 @@ return f1,f2,f3,f4
|
||||
)");
|
||||
|
||||
REQUIRE(2 == result.warnings.size());
|
||||
CHECK_EQ(result.warnings[0].location.begin.line, 25);
|
||||
CHECK_EQ(result.warnings[0].location.begin.line, 26);
|
||||
CHECK_EQ(result.warnings[0].text,
|
||||
"Function 'f3' can implicitly return no values even though there's an explicit return at line 21; add explicit return to silence");
|
||||
CHECK_EQ(result.warnings[1].location.begin.line, 36);
|
||||
"Function 'f3' can implicitly return no values even though there's an explicit return at line 22; add explicit return to silence");
|
||||
CHECK_EQ(result.warnings[1].location.begin.line, 37);
|
||||
CHECK_EQ(result.warnings[1].text,
|
||||
"Function 'f4' can implicitly return no values even though there's an explicit return at line 32; add explicit return to silence");
|
||||
"Function 'f4' can implicitly return no values even though there's an explicit return at line 33; add explicit return to silence");
|
||||
}
|
||||
|
||||
TEST_CASE_FIXTURE(Fixture, "TypeAnnotationsShouldNotProduceWarnings")
|
||||
@ -1164,7 +1166,7 @@ os.date("!*t")
|
||||
|
||||
TEST_CASE_FIXTURE(Fixture, "FormatStringTyped")
|
||||
{
|
||||
LintResult result = lintTyped(R"~(
|
||||
LintResult result = lint(R"~(
|
||||
local s: string, nons = ...
|
||||
|
||||
string.match(s, "[]")
|
||||
@ -1285,7 +1287,7 @@ TEST_CASE_FIXTURE(Fixture, "use_all_parent_scopes_for_globals")
|
||||
local _bar: typeof(os.clock) = os.clock
|
||||
)";
|
||||
|
||||
LintResult result = frontend.lint("A");
|
||||
LintResult result = lintModule("A");
|
||||
|
||||
REQUIRE(0 == result.warnings.size());
|
||||
}
|
||||
@ -1471,7 +1473,7 @@ TEST_CASE_FIXTURE(BuiltinsFixture, "DeprecatedApiTyped")
|
||||
|
||||
freeze(frontend.globals.globalTypes);
|
||||
|
||||
LintResult result = lintTyped(R"(
|
||||
LintResult result = lint(R"(
|
||||
return function (i: Instance)
|
||||
i:Wait(1.0)
|
||||
print(i.Name)
|
||||
@ -1518,7 +1520,7 @@ end
|
||||
|
||||
TEST_CASE_FIXTURE(BuiltinsFixture, "TableOperations")
|
||||
{
|
||||
LintResult result = lintTyped(R"(
|
||||
LintResult result = lint(R"(
|
||||
local t = {}
|
||||
local tt = {}
|
||||
|
||||
|
@ -28,6 +28,18 @@ struct IsSubtypeFixture : Fixture
|
||||
|
||||
return ::Luau::isSubtype(a, b, NotNull{module->getModuleScope().get()}, builtinTypes, ice);
|
||||
}
|
||||
|
||||
bool isConsistentSubtype(TypeId a, TypeId b)
|
||||
{
|
||||
Location location;
|
||||
ModulePtr module = getMainModule();
|
||||
REQUIRE(module);
|
||||
|
||||
if (!module->hasModuleScope())
|
||||
FAIL("isSubtype: module scope data is not available");
|
||||
|
||||
return ::Luau::isConsistentSubtype(a, b, NotNull{module->getModuleScope().get()}, builtinTypes, ice);
|
||||
}
|
||||
};
|
||||
} // namespace
|
||||
|
||||
@ -86,8 +98,8 @@ TEST_CASE_FIXTURE(IsSubtypeFixture, "functions_and_any")
|
||||
|
||||
// any makes things work even when it makes no sense.
|
||||
|
||||
CHECK(isSubtype(b, a));
|
||||
CHECK(isSubtype(a, b));
|
||||
CHECK(isConsistentSubtype(b, a));
|
||||
CHECK(isConsistentSubtype(a, b));
|
||||
}
|
||||
|
||||
TEST_CASE_FIXTURE(IsSubtypeFixture, "variadic_functions_with_no_head")
|
||||
@ -163,6 +175,10 @@ TEST_CASE_FIXTURE(IsSubtypeFixture, "table_with_union_prop")
|
||||
|
||||
TEST_CASE_FIXTURE(IsSubtypeFixture, "table_with_any_prop")
|
||||
{
|
||||
ScopedFastFlag sffs[] = {
|
||||
{"LuauTransitiveSubtyping", true},
|
||||
};
|
||||
|
||||
check(R"(
|
||||
local a: {x: number}
|
||||
local b: {x: any}
|
||||
@ -172,7 +188,8 @@ TEST_CASE_FIXTURE(IsSubtypeFixture, "table_with_any_prop")
|
||||
TypeId b = requireType("b");
|
||||
|
||||
CHECK(isSubtype(a, b));
|
||||
CHECK(isSubtype(b, a));
|
||||
CHECK(!isSubtype(b, a));
|
||||
CHECK(isConsistentSubtype(b, a));
|
||||
}
|
||||
|
||||
TEST_CASE_FIXTURE(IsSubtypeFixture, "intersection")
|
||||
@ -216,6 +233,10 @@ TEST_CASE_FIXTURE(IsSubtypeFixture, "union_and_intersection")
|
||||
|
||||
TEST_CASE_FIXTURE(IsSubtypeFixture, "tables")
|
||||
{
|
||||
ScopedFastFlag sffs[] = {
|
||||
{"LuauTransitiveSubtyping", true},
|
||||
};
|
||||
|
||||
check(R"(
|
||||
local a: {x: number}
|
||||
local b: {x: any}
|
||||
@ -229,7 +250,8 @@ TEST_CASE_FIXTURE(IsSubtypeFixture, "tables")
|
||||
TypeId d = requireType("d");
|
||||
|
||||
CHECK(isSubtype(a, b));
|
||||
CHECK(isSubtype(b, a));
|
||||
CHECK(!isSubtype(b, a));
|
||||
CHECK(isConsistentSubtype(b, a));
|
||||
|
||||
CHECK(!isSubtype(c, a));
|
||||
CHECK(!isSubtype(a, c));
|
||||
@ -358,6 +380,92 @@ TEST_CASE_FIXTURE(IsSubtypeFixture, "metatable" * doctest::expected_failures{1})
|
||||
}
|
||||
#endif
|
||||
|
||||
TEST_CASE_FIXTURE(IsSubtypeFixture, "any_is_unknown_union_error")
|
||||
{
|
||||
ScopedFastFlag sffs[] = {
|
||||
{"LuauTransitiveSubtyping", true},
|
||||
};
|
||||
|
||||
check(R"(
|
||||
local err = 5.nope.nope -- err is now an error type
|
||||
local a : any
|
||||
local b : (unknown | typeof(err))
|
||||
)");
|
||||
|
||||
TypeId a = requireType("a");
|
||||
TypeId b = requireType("b");
|
||||
|
||||
CHECK(isSubtype(a, b));
|
||||
CHECK(isSubtype(b, a));
|
||||
CHECK_EQ("*error-type*", toString(requireType("err")));
|
||||
}
|
||||
|
||||
TEST_CASE_FIXTURE(IsSubtypeFixture, "any_intersect_T_is_T")
|
||||
{
|
||||
ScopedFastFlag sffs[] = {
|
||||
{"LuauTransitiveSubtyping", true},
|
||||
};
|
||||
|
||||
check(R"(
|
||||
local a : (any & string)
|
||||
local b : string
|
||||
local c : number
|
||||
)");
|
||||
|
||||
TypeId a = requireType("a");
|
||||
TypeId b = requireType("b");
|
||||
TypeId c = requireType("c");
|
||||
|
||||
CHECK(isSubtype(a, b));
|
||||
CHECK(isSubtype(b, a));
|
||||
CHECK(!isSubtype(a, c));
|
||||
CHECK(!isSubtype(c, a));
|
||||
}
|
||||
|
||||
TEST_CASE_FIXTURE(IsSubtypeFixture, "error_suppression")
|
||||
{
|
||||
ScopedFastFlag sffs[] = {
|
||||
{"LuauTransitiveSubtyping", true},
|
||||
};
|
||||
|
||||
check("");
|
||||
|
||||
TypeId any = builtinTypes->anyType;
|
||||
TypeId err = builtinTypes->errorType;
|
||||
TypeId str = builtinTypes->stringType;
|
||||
TypeId unk = builtinTypes->unknownType;
|
||||
|
||||
CHECK(!isSubtype(any, err));
|
||||
CHECK(isSubtype(err, any));
|
||||
CHECK(isConsistentSubtype(any, err));
|
||||
CHECK(isConsistentSubtype(err, any));
|
||||
|
||||
CHECK(!isSubtype(any, str));
|
||||
CHECK(isSubtype(str, any));
|
||||
CHECK(isConsistentSubtype(any, str));
|
||||
CHECK(isConsistentSubtype(str, any));
|
||||
|
||||
CHECK(!isSubtype(any, unk));
|
||||
CHECK(isSubtype(unk, any));
|
||||
CHECK(isConsistentSubtype(any, unk));
|
||||
CHECK(isConsistentSubtype(unk, any));
|
||||
|
||||
CHECK(!isSubtype(err, str));
|
||||
CHECK(!isSubtype(str, err));
|
||||
CHECK(isConsistentSubtype(err, str));
|
||||
CHECK(isConsistentSubtype(str, err));
|
||||
|
||||
CHECK(!isSubtype(err, unk));
|
||||
CHECK(!isSubtype(unk, err));
|
||||
CHECK(isConsistentSubtype(err, unk));
|
||||
CHECK(isConsistentSubtype(unk, err));
|
||||
|
||||
CHECK(isSubtype(str, unk));
|
||||
CHECK(!isSubtype(unk, str));
|
||||
CHECK(isConsistentSubtype(str, unk));
|
||||
CHECK(!isConsistentSubtype(unk, str));
|
||||
}
|
||||
|
||||
TEST_SUITE_END();
|
||||
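To make the new expectations concrete, a small Lua sketch (an assumption on my part, not a test from this diff) of what consistent subtyping with `any` permits at the source level, versus the strict treatment of `unknown`:

```lua
--!strict
local a: any = 5
local s: string = a   -- ok: consistent subtyping with 'any' suppresses the error
local u: unknown = s  -- ok: string is a subtype of unknown
local s2: string = u  -- reported as an error: unknown is not a subtype of string
```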
|
||||
struct NormalizeFixture : Fixture
|
||||
@ -692,4 +800,17 @@ TEST_CASE_FIXTURE(NormalizeFixture, "negations_of_tables")
|
||||
CHECK("table" == toString(normal("Not<Not<tbl>>")));
|
||||
}
|
||||
|
||||
TEST_CASE_FIXTURE(NormalizeFixture, "normalize_blocked_types")
|
||||
{
|
||||
ScopedFastFlag sff[] {
|
||||
{"LuauNormalizeBlockedTypes", true},
|
||||
};
|
||||
|
||||
Type blocked{BlockedType{}};
|
||||
|
||||
const NormalizedType* norm = normalizer.normalize(&blocked);
|
||||
|
||||
CHECK_EQ(normalizer.typeFromNormal(*norm), &blocked);
|
||||
}
|
||||
|
||||
TEST_SUITE_END();
|
||||
|
@ -263,9 +263,8 @@ TEST_CASE_FIXTURE(LimitFixture, "typescript_port_of_Result_type")
|
||||
)LUA";
|
||||
|
||||
CheckResult result = check(src);
|
||||
CodeTooComplex ctc;
|
||||
|
||||
CHECK(hasError(result, &ctc));
|
||||
CHECK(hasError<CodeTooComplex>(result));
|
||||
}
|
||||
|
||||
TEST_SUITE_END();
|
||||
|
@ -225,7 +225,10 @@ TEST_CASE_FIXTURE(Fixture, "calling_error_type_yields_error")
|
||||
|
||||
CHECK_EQ("unknown", err->name);
|
||||
|
||||
CHECK_EQ("*error-type*", toString(requireType("a")));
|
||||
if (FFlag::DebugLuauDeferredConstraintResolution)
|
||||
CHECK_EQ("any", toString(requireType("a")));
|
||||
else
|
||||
CHECK_EQ("*error-type*", toString(requireType("a")));
|
||||
}
|
||||
|
||||
TEST_CASE_FIXTURE(Fixture, "chain_calling_error_type_yields_error")
|
||||
@ -234,7 +237,10 @@ TEST_CASE_FIXTURE(Fixture, "chain_calling_error_type_yields_error")
|
||||
local a = Utility.Create "Foo" {}
|
||||
)");
|
||||
|
||||
CHECK_EQ("*error-type*", toString(requireType("a")));
|
||||
if (FFlag::DebugLuauDeferredConstraintResolution)
|
||||
CHECK_EQ("any", toString(requireType("a")));
|
||||
else
|
||||
CHECK_EQ("*error-type*", toString(requireType("a")));
|
||||
}
|
||||
|
||||
TEST_CASE_FIXTURE(BuiltinsFixture, "replace_every_free_type_when_unifying_a_complex_function_with_any")
|
||||
@ -343,4 +349,19 @@ stat = stat and tonumber(stat) or stat
|
||||
LUAU_REQUIRE_NO_ERRORS(result);
|
||||
}
|
||||
|
||||
TEST_CASE_FIXTURE(Fixture, "intersection_of_any_can_have_props")
|
||||
{
|
||||
// *blocked-130* ~ hasProp any & ~(false?), "_status"
|
||||
CheckResult result = check(R"(
|
||||
function foo(x: any, y)
|
||||
if x then
|
||||
return x._status
|
||||
end
|
||||
return y
|
||||
end
|
||||
)");
|
||||
|
||||
CHECK("(any, any) -> any" == toString(requireType("foo")));
|
||||
}
|
||||
|
||||
TEST_SUITE_END();
|
||||
|
@ -704,11 +704,7 @@ TEST_CASE_FIXTURE(BuiltinsFixture, "select_with_variadic_typepack_tail_and_strin
|
||||
|
||||
LUAU_REQUIRE_NO_ERRORS(result);
|
||||
|
||||
if (FFlag::DebugLuauDeferredConstraintResolution)
|
||||
CHECK_EQ("string", toString(requireType("foo")));
|
||||
else
|
||||
CHECK_EQ("any", toString(requireType("foo")));
|
||||
|
||||
CHECK_EQ("any", toString(requireType("foo")));
|
||||
CHECK_EQ("any", toString(requireType("bar")));
|
||||
CHECK_EQ("any", toString(requireType("baz")));
|
||||
CHECK_EQ("any", toString(requireType("quux")));
|
||||
@ -996,11 +992,16 @@ TEST_CASE_FIXTURE(BuiltinsFixture, "table_freeze_is_generic")
|
||||
|
||||
LUAU_REQUIRE_ERROR_COUNT(1, result);
|
||||
CHECK_EQ("Key 'b' not found in table '{| a: number |}'", toString(result.errors[0]));
|
||||
CHECK(Location({13, 18}, {13, 23}) == result.errors[0].location);
|
||||
|
||||
CHECK_EQ("number", toString(requireType("a")));
|
||||
CHECK_EQ("string", toString(requireType("b")));
|
||||
CHECK_EQ("boolean", toString(requireType("c")));
|
||||
CHECK_EQ("*error-type*", toString(requireType("d")));
|
||||
|
||||
if (FFlag::DebugLuauDeferredConstraintResolution)
|
||||
CHECK_EQ("any", toString(requireType("d")));
|
||||
else
|
||||
CHECK_EQ("*error-type*", toString(requireType("d")));
|
||||
}
|
||||
|
||||
TEST_CASE_FIXTURE(BuiltinsFixture, "set_metatable_needs_arguments")
|
||||
|
380
tests/TypeInfer.cfa.test.cpp
Normal file
@ -0,0 +1,380 @@
|
||||
// This file is part of the Luau programming language and is licensed under MIT License; see LICENSE.txt for details
|
||||
#include "Fixture.h"
|
||||
|
||||
#include "Luau/Symbol.h"
|
||||
#include "doctest.h"
|
||||
|
||||
using namespace Luau;
|
||||
|
||||
TEST_SUITE_BEGIN("ControlFlowAnalysis");
|
||||
|
||||
TEST_CASE_FIXTURE(BuiltinsFixture, "if_not_x_return")
|
||||
{
|
||||
ScopedFastFlag sff{"LuauTinyControlFlowAnalysis", true};
|
||||
|
||||
CheckResult result = check(R"(
|
||||
local function f(x: string?)
|
||||
if not x then
|
||||
return
|
||||
end
|
||||
|
||||
local foo = x
|
||||
end
|
||||
)");
|
||||
|
||||
LUAU_REQUIRE_NO_ERRORS(result);
|
||||
CHECK_EQ("string", toString(requireTypeAtPosition({6, 24})));
|
||||
}
|
||||
|
||||
TEST_CASE_FIXTURE(BuiltinsFixture, "if_not_x_return_elif_not_y_return")
|
||||
{
|
||||
ScopedFastFlag sff{"LuauTinyControlFlowAnalysis", true};
|
||||
|
||||
CheckResult result = check(R"(
|
||||
local function f(x: string?, y: string?)
|
||||
if not x then
|
||||
return
|
||||
elseif not y then
|
||||
return
|
||||
end
|
||||
|
||||
local foo = x
|
||||
local bar = y
|
||||
end
|
||||
)");
|
||||
|
||||
LUAU_REQUIRE_NO_ERRORS(result);
|
||||
CHECK_EQ("string", toString(requireTypeAtPosition({8, 24})));
|
||||
CHECK_EQ("string", toString(requireTypeAtPosition({9, 24})));
|
||||
}
|
||||
|
||||
TEST_CASE_FIXTURE(BuiltinsFixture, "if_not_x_return_elif_rand_return_elif_not_y_return")
|
||||
{
|
||||
ScopedFastFlag sff{"LuauTinyControlFlowAnalysis", true};
|
||||
|
||||
CheckResult result = check(R"(
|
||||
local function f(x: string?, y: string?)
|
||||
if not x then
|
||||
return
|
||||
elseif math.random() > 0.5 then
|
||||
return
|
||||
elseif not y then
|
||||
return
|
||||
end
|
||||
|
||||
local foo = x
|
||||
local bar = y
|
||||
end
|
||||
)");
|
||||
|
||||
LUAU_REQUIRE_NO_ERRORS(result);
|
||||
CHECK_EQ("string", toString(requireTypeAtPosition({10, 24})));
|
||||
CHECK_EQ("string", toString(requireTypeAtPosition({11, 24})));
|
||||
}
|
||||
|
||||
TEST_CASE_FIXTURE(BuiltinsFixture, "if_not_x_return_elif_not_rand_return_elif_not_y_fallthrough")
|
||||
{
|
||||
ScopedFastFlag sff{"LuauTinyControlFlowAnalysis", true};
|
||||
|
||||
CheckResult result = check(R"(
|
||||
local function f(x: string?, y: string?)
|
||||
if not x then
|
||||
return
|
||||
elseif math.random() > 0.5 then
|
||||
return
|
||||
elseif not y then
|
||||
|
||||
end
|
||||
|
||||
local foo = x
|
||||
local bar = y
|
||||
end
|
||||
)");
|
||||
|
||||
LUAU_REQUIRE_NO_ERRORS(result);
|
||||
CHECK_EQ("string", toString(requireTypeAtPosition({10, 24})));
|
||||
CHECK_EQ("string?", toString(requireTypeAtPosition({11, 24})));
|
||||
}
|
||||
|
||||
TEST_CASE_FIXTURE(BuiltinsFixture, "if_not_x_return_elif_not_y_fallthrough_elif_not_z_return")
|
||||
{
|
||||
ScopedFastFlag sff{"LuauTinyControlFlowAnalysis", true};
|
||||
|
||||
CheckResult result = check(R"(
|
||||
local function f(x: string?, y: string?, z: string?)
|
||||
if not x then
|
||||
return
|
||||
elseif not y then
|
||||
|
||||
elseif not z then
|
||||
return
|
||||
end
|
||||
|
||||
local foo = x
|
||||
local bar = y
|
||||
local baz = z
|
||||
end
|
||||
)");
|
||||
|
||||
LUAU_REQUIRE_NO_ERRORS(result);
|
||||
CHECK_EQ("string", toString(requireTypeAtPosition({10, 24})));
|
||||
CHECK_EQ("string?", toString(requireTypeAtPosition({11, 24})));
|
||||
CHECK_EQ("string?", toString(requireTypeAtPosition({12, 24})));
|
||||
}
|
||||
|
||||
TEST_CASE_FIXTURE(BuiltinsFixture, "do_if_not_x_return")
|
||||
{
|
||||
ScopedFastFlag sff{"LuauTinyControlFlowAnalysis", true};
|
||||
|
||||
CheckResult result = check(R"(
|
||||
local function f(x: string?)
|
||||
do
|
||||
if not x then
|
||||
return
|
||||
end
|
||||
end
|
||||
|
||||
local foo = x
|
||||
end
|
||||
)");
|
||||
|
||||
LUAU_REQUIRE_NO_ERRORS(result);
|
||||
CHECK_EQ("string", toString(requireTypeAtPosition({8, 24})));
|
||||
}
|
||||
|
||||
TEST_CASE_FIXTURE(BuiltinsFixture, "early_return_in_a_loop_which_isnt_guaranteed_to_run_first")
|
||||
{
|
||||
ScopedFastFlag sff{"LuauTinyControlFlowAnalysis", true};
|
||||
|
||||
CheckResult result = check(R"(
|
||||
local function f(x: string?)
|
||||
while math.random() > 0.5 do
|
||||
if not x then
|
||||
return
|
||||
end
|
||||
|
||||
local foo = x
|
||||
end
|
||||
|
||||
local bar = x
|
||||
end
|
||||
)");
|
||||
|
||||
LUAU_REQUIRE_NO_ERRORS(result);
|
||||
CHECK_EQ("string", toString(requireTypeAtPosition({7, 28})));
|
||||
CHECK_EQ("string?", toString(requireTypeAtPosition({10, 24})));
|
||||
}
|
||||
|
||||
TEST_CASE_FIXTURE(BuiltinsFixture, "early_return_in_a_loop_which_is_guaranteed_to_run_first")
|
||||
{
|
||||
ScopedFastFlag sff{"LuauTinyControlFlowAnalysis", true};
|
||||
|
||||
CheckResult result = check(R"(
|
||||
local function f(x: string?)
|
||||
repeat
|
||||
if not x then
|
||||
return
|
||||
end
|
||||
|
||||
local foo = x
|
||||
until math.random() > 0.5
|
||||
|
||||
local bar = x
|
||||
end
|
||||
)");
|
||||
|
||||
LUAU_REQUIRE_NO_ERRORS(result);
|
||||
CHECK_EQ("string", toString(requireTypeAtPosition({7, 28})));
|
||||
CHECK_EQ("string?", toString(requireTypeAtPosition({10, 24}))); // TODO: This is wrong, should be `string`.
|
||||
}
|
||||
|
||||
TEST_CASE_FIXTURE(BuiltinsFixture, "early_return_in_a_loop_which_is_guaranteed_to_run_first_2")
|
||||
{
|
||||
ScopedFastFlag sff{"LuauTinyControlFlowAnalysis", true};
|
||||
|
||||
CheckResult result = check(R"(
|
||||
local function f(x: string?)
|
||||
for i = 1, 10 do
|
||||
if not x then
|
||||
return
|
||||
end
|
||||
|
||||
local foo = x
|
||||
end
|
||||
|
||||
local bar = x
|
||||
end
|
||||
)");
|
||||
|
||||
LUAU_REQUIRE_NO_ERRORS(result);
|
||||
CHECK_EQ("string", toString(requireTypeAtPosition({7, 28})));
|
||||
CHECK_EQ("string?", toString(requireTypeAtPosition({10, 24}))); // TODO: This is wrong, should be `string`.
|
||||
}
|
||||
|
||||
TEST_CASE_FIXTURE(BuiltinsFixture, "if_not_x_then_error")
|
||||
{
|
||||
ScopedFastFlag sff{"LuauTinyControlFlowAnalysis", true};
|
||||
|
||||
CheckResult result = check(R"(
|
||||
local function f(x: string?)
|
||||
if not x then
|
||||
error("oops")
|
||||
end
|
||||
|
||||
local foo = x
|
||||
end
|
||||
)");
|
||||
|
||||
LUAU_REQUIRE_NO_ERRORS(result);
|
||||
CHECK_EQ("string", toString(requireTypeAtPosition({6, 24})));
|
||||
}
|
||||
|
||||
TEST_CASE_FIXTURE(BuiltinsFixture, "if_not_x_then_assert_false")
|
||||
{
|
||||
ScopedFastFlag sff{"LuauTinyControlFlowAnalysis", true};
|
||||
|
||||
CheckResult result = check(R"(
|
||||
local function f(x: string?)
|
||||
if not x then
|
||||
assert(false)
|
||||
end
|
||||
|
||||
local foo = x
|
||||
end
|
||||
)");
|
||||
|
||||
LUAU_REQUIRE_NO_ERRORS(result);
|
||||
CHECK_EQ("string", toString(requireTypeAtPosition({6, 24})));
|
||||
}
|
||||
|
||||
TEST_CASE_FIXTURE(BuiltinsFixture, "if_not_x_return_if_not_y_return")
|
||||
{
|
||||
ScopedFastFlag sff{"LuauTinyControlFlowAnalysis", true};
|
||||
|
||||
CheckResult result = check(R"(
|
||||
local function f(x: string?, y: string?)
|
||||
if not x then
|
||||
return
|
||||
end
|
||||
|
||||
if not y then
|
||||
return
|
||||
end
|
||||
|
||||
local foo = x
|
||||
local bar = y
|
||||
end
|
||||
)");
|
||||
|
||||
LUAU_REQUIRE_NO_ERRORS(result);
|
||||
CHECK_EQ("string", toString(requireTypeAtPosition({10, 24})));
|
||||
CHECK_EQ("string", toString(requireTypeAtPosition({11, 24})));
|
||||
}
|
||||
|
||||
TEST_CASE_FIXTURE(BuiltinsFixture, "type_alias_does_not_leak_out")
|
||||
{
|
||||
ScopedFastFlag sff{"LuauTinyControlFlowAnalysis", true};
|
||||
|
||||
CheckResult result = check(R"(
|
||||
local function f(x: string?)
|
||||
if typeof(x) == "string" then
|
||||
return
|
||||
else
|
||||
type Foo = number
|
||||
end
|
||||
|
||||
local foo: Foo = x
|
||||
end
|
||||
)");
|
||||
|
||||
LUAU_REQUIRE_ERROR_COUNT(1, result);
|
||||
|
||||
CHECK_EQ("Unknown type 'Foo'", toString(result.errors[0]));
|
||||
|
||||
CHECK_EQ("nil", toString(requireTypeAtPosition({8, 29})));
|
||||
}
|
||||
|
||||
TEST_CASE_FIXTURE(BuiltinsFixture, "prototyping_and_visiting_alias_has_the_same_scope")
|
||||
{
|
||||
ScopedFastFlag sff{"LuauTinyControlFlowAnalysis", true};
|
||||
|
||||
// In CGB, we walk the block to prototype aliases. We then visit the block in-order, which will resolve the prototype to a real type.
|
||||
// That second walk assumes that the name occurs in the same `Scope` that the prototype walk had. If we arbitrarily change scope midway
|
||||
// through, we'd invoke UB.
|
||||
CheckResult result = check(R"(
|
||||
local function f(x: string?)
|
||||
type Foo = number
|
||||
|
||||
if typeof(x) == "string" then
|
||||
return
|
||||
end
|
||||
|
||||
local foo: Foo = x
|
||||
end
|
||||
)");
|
||||
|
||||
LUAU_REQUIRE_ERROR_COUNT(1, result);
|
||||
|
||||
CHECK_EQ("Type 'nil' could not be converted into 'number'", toString(result.errors[0]));
|
||||
|
||||
CHECK_EQ("nil", toString(requireTypeAtPosition({8, 29})));
|
||||
}
|
||||
|
||||
TEST_CASE_FIXTURE(BuiltinsFixture, "tagged_unions")
|
||||
{
|
||||
ScopedFastFlag sff{"LuauTinyControlFlowAnalysis", true};
|
||||
|
||||
CheckResult result = check(R"(
|
||||
type Ok<T> = { tag: "ok", value: T }
|
||||
type Err<E> = { tag: "err", error: E }
|
||||
type Result<T, E> = Ok<T> | Err<E>
|
||||
|
||||
local function map<T, U, E>(result: Result<T, E>, f: (T) -> U): Result<U, E>
|
||||
if result.tag == "ok" then
|
||||
local tag = result.tag
|
||||
local val = result.value
|
||||
|
||||
return { tag = "ok", value = f(result.value) }
|
||||
end
|
||||
|
||||
local tag = result.tag
|
||||
local err = result.error
|
||||
|
||||
return result
|
||||
end
|
||||
)");
|
||||
|
||||
LUAU_REQUIRE_NO_ERRORS(result);
|
||||
|
||||
CHECK_EQ("\"ok\"", toString(requireTypeAtPosition({7, 35})));
|
||||
CHECK_EQ("T", toString(requireTypeAtPosition({8, 35})));
|
||||
|
||||
CHECK_EQ("\"err\"", toString(requireTypeAtPosition({13, 31})));
|
||||
CHECK_EQ("E", toString(requireTypeAtPosition({14, 31})));
|
||||
|
||||
if (FFlag::DebugLuauDeferredConstraintResolution)
|
||||
CHECK_EQ("{| error: E, tag: \"err\" |}", toString(requireTypeAtPosition({16, 19})));
|
||||
else
|
||||
CHECK_EQ("Err<E>", toString(requireTypeAtPosition({16, 19})));
|
||||
}
|
||||
|
||||
TEST_CASE_FIXTURE(BuiltinsFixture, "do_assert_x")
|
||||
{
|
||||
ScopedFastFlag sff{"LuauTinyControlFlowAnalysis", true};
|
||||
|
||||
CheckResult result = check(R"(
|
||||
local function f(x: string?)
|
||||
do
|
||||
assert(x)
|
||||
end
|
||||
|
||||
local foo = x
|
||||
end
|
||||
)");
|
||||
|
||||
LUAU_REQUIRE_NO_ERRORS(result);
|
||||
|
||||
CHECK_EQ("string", toString(requireTypeAtPosition({6, 24})));
|
||||
}
|
||||
|
||||
TEST_SUITE_END();
|
@ -689,4 +689,22 @@ TEST_CASE_FIXTURE(Fixture, "for_loop_lower_bound_is_string_3")
    LUAU_REQUIRE_NO_ERRORS(result);
}

TEST_CASE_FIXTURE(BuiltinsFixture, "cli_68448_iterators_need_not_accept_nil")
{
    CheckResult result = check(R"(
        local function makeEnum(members)
            local enum = {}
            for _, memberName in ipairs(members) do
                enum[memberName] = memberName
            end
            return enum
        end
    )");

    LUAU_REQUIRE_NO_ERRORS(result);
    // HACK (CLI-68453): We name this inner table `enum`. For now, use the
    // exhaustive switch to see past it.
    CHECK(toString(requireType("makeEnum"), {true}) == "<a>({a}) -> {| [a]: a |}");
}

TEST_SUITE_END();
@ -485,6 +485,8 @@ return unpack(l0[_])

TEST_CASE_FIXTURE(BuiltinsFixture, "check_imported_module_names")
{
    ScopedFastFlag sff{"LuauTinyControlFlowAnalysis", true};

    fileResolver.source["game/A"] = R"(
        return function(...) end
    )";
@ -506,19 +508,10 @@ return l0

    ModulePtr mod = getMainModule();
    REQUIRE(mod);
    if (FFlag::DebugLuauDeferredConstraintResolution)
    {
        REQUIRE(mod->scopes.size() >= 4);
        CHECK(mod->scopes[0].second->importedModules["l0"] == "game/B");
        CHECK(mod->scopes[3].second->importedModules["l1"] == "game/A");
    }
    else
    {

        REQUIRE(mod->scopes.size() >= 3);
        CHECK(mod->scopes[0].second->importedModules["l0"] == "game/B");
        CHECK(mod->scopes[2].second->importedModules["l1"] == "game/A");
    }
    REQUIRE(mod->scopes.size() == 4);
    CHECK(mod->scopes[0].second->importedModules["l0"] == "game/B");
    CHECK(mod->scopes[3].second->importedModules["l1"] == "game/A");
}

TEST_SUITE_END();
@ -309,4 +309,21 @@ TEST_CASE_FIXTURE(Fixture, "dont_bind_free_tables_to_themselves")
    )");
}

// We should probably flag an error on this. See CLI-68672
TEST_CASE_FIXTURE(BuiltinsFixture, "flag_when_index_metamethod_returns_0_values")
{
    CheckResult result = check(R"(
        local T = {}
        function T.__index()
        end

        local a = setmetatable({}, T)
        local p = a.prop
    )");

    LUAU_REQUIRE_NO_ERRORS(result);

    CHECK("nil" == toString(requireType("p")));
}

TEST_SUITE_END();
@ -1109,4 +1109,28 @@ local f1 = f or 'f'
    CHECK("string" == toString(requireType("f1")));
}

TEST_CASE_FIXTURE(BuiltinsFixture, "reducing_and")
{
    ScopedFastFlag sff[]{
        {"LuauTryhardAnd", true},
        {"LuauReducingAndOr", true},
    };

    CheckResult result = check(R"(
        type Foo = { name: string?, flag: boolean? }
        local arr: {Foo} = {}

        local function foo(arg: {name: string}?)
            local name = if arg and arg.name then arg.name else nil

            table.insert(arr, {
                name = name or "",
                flag = name ~= nil and name ~= "",
            })
        end
    )");

    LUAU_REQUIRE_NO_ERRORS(result);
}

TEST_SUITE_END();
@ -470,6 +470,10 @@ TEST_CASE_FIXTURE(Fixture, "dcr_can_partially_dispatch_a_constraint")

TEST_CASE_FIXTURE(Fixture, "free_options_cannot_be_unified_together")
{
    ScopedFastFlag sff[] = {
        {"LuauTransitiveSubtyping", true},
    };

    TypeArena arena;
    TypeId nilType = builtinTypes->nilType;

@ -488,7 +492,7 @@ TEST_CASE_FIXTURE(Fixture, "free_options_cannot_be_unified_together")

    u.tryUnify(option1, option2);

    CHECK(u.errors.empty());
    CHECK(!u.failure);

    u.log.commit();

@ -548,7 +552,10 @@ return wrapStrictTable(Constants, "Constants")

    std::optional<TypeId> result = first(m->returnType);
    REQUIRE(result);
    CHECK(get<AnyType>(*result));
    if (FFlag::DebugLuauDeferredConstraintResolution)
        CHECK_EQ("(any?) & ~table", toString(*result));
    else
        CHECK_MESSAGE(get<AnyType>(*result), *result);
}

TEST_CASE_FIXTURE(BuiltinsFixture, "generic_type_leak_to_module_interface_variadic")
@ -1615,7 +1615,8 @@ TEST_CASE_FIXTURE(Fixture, "refine_a_property_of_some_global")
    )");

    LUAU_REQUIRE_ERROR_COUNT(3, result);
    CHECK_EQ("*error-type*", toString(requireTypeAtPosition({4, 30})));

    CHECK_EQ("~false & ~nil", toString(requireTypeAtPosition({4, 30})));
}

TEST_CASE_FIXTURE(BuiltinsFixture, "dataflow_analysis_can_tell_refinements_when_its_appropriate_to_refine_into_nil_or_never")
@ -1590,8 +1590,16 @@ TEST_CASE_FIXTURE(Fixture, "casting_tables_with_props_into_table_with_indexer4")
        local hi: number = foo({ a = "hi" }, "a") -- shouldn't typecheck since at runtime hi is "hi"
    )");

    // This typechecks but shouldn't
    LUAU_REQUIRE_NO_ERRORS(result);
    if (FFlag::DebugLuauDeferredConstraintResolution)
    {
        LUAU_REQUIRE_ERROR_COUNT(1, result);
        CHECK(toString(result.errors[0]) == "Type 'string' could not be converted into 'number'");
    }
    else
    {
        // This typechecks but shouldn't
        LUAU_REQUIRE_NO_ERRORS(result);
    }
}

TEST_CASE_FIXTURE(Fixture, "table_subtyping_with_missing_props_dont_report_multiple_errors")
@ -103,6 +103,16 @@ TEST_CASE_FIXTURE(Fixture, "infer_in_nocheck_mode")
    LUAU_REQUIRE_NO_ERRORS(result);
}

TEST_CASE_FIXTURE(Fixture, "obvious_type_error_in_nocheck_mode")
{
    CheckResult result = check(R"(
        --!nocheck
        local x: string = 5
    )");

    LUAU_REQUIRE_NO_ERRORS(result);
}

TEST_CASE_FIXTURE(Fixture, "expr_statement")
{
    CheckResult result = check("local foo = 5 foo()");
@ -1185,6 +1195,9 @@ TEST_CASE_FIXTURE(Fixture, "dcr_delays_expansion_of_function_containing_blocked_
    ScopedFastFlag sff[] = {
        {"DebugLuauDeferredConstraintResolution", true},
        {"LuauTinyUnifyNormalsFix", true},
        // If we run this with error-suppression, it triggers an assertion.
        // FATAL ERROR: Assertion failed: !"Internal error: Trying to normalize a BlockedType"
        {"LuauTransitiveSubtyping", false},
    };

    CheckResult result = check(R"(
@ -27,16 +27,25 @@ TEST_SUITE_BEGIN("TryUnifyTests");

TEST_CASE_FIXTURE(TryUnifyFixture, "primitives_unify")
{
    ScopedFastFlag sff[] = {
        {"LuauTransitiveSubtyping", true},
    };

    Type numberOne{TypeVariant{PrimitiveType{PrimitiveType::Number}}};
    Type numberTwo = numberOne;

    state.tryUnify(&numberTwo, &numberOne);

    CHECK(!state.failure);
    CHECK(state.errors.empty());
}

TEST_CASE_FIXTURE(TryUnifyFixture, "compatible_functions_are_unified")
{
    ScopedFastFlag sff[] = {
        {"LuauTransitiveSubtyping", true},
    };

    Type functionOne{
        TypeVariant{FunctionType(arena.addTypePack({arena.freshType(globalScope->level)}), arena.addTypePack({builtinTypes->numberType}))}};

@ -44,6 +53,7 @@ TEST_CASE_FIXTURE(TryUnifyFixture, "compatible_functions_are_unified")
        FunctionType(arena.addTypePack({arena.freshType(globalScope->level)}), arena.addTypePack({arena.freshType(globalScope->level)}))}};

    state.tryUnify(&functionTwo, &functionOne);
    CHECK(!state.failure);
    CHECK(state.errors.empty());

    state.log.commit();
@ -53,6 +63,10 @@ TEST_CASE_FIXTURE(TryUnifyFixture, "compatible_functions_are_unified")

TEST_CASE_FIXTURE(TryUnifyFixture, "incompatible_functions_are_preserved")
{
    ScopedFastFlag sff[] = {
        {"LuauTransitiveSubtyping", true},
    };

    TypePackVar argPackOne{TypePack{{arena.freshType(globalScope->level)}, std::nullopt}};
    Type functionOne{
        TypeVariant{FunctionType(arena.addTypePack({arena.freshType(globalScope->level)}), arena.addTypePack({builtinTypes->numberType}))}};
@ -66,6 +80,7 @@ TEST_CASE_FIXTURE(TryUnifyFixture, "incompatible_functions_are_preserved")
    Type functionTwoSaved = functionTwo;

    state.tryUnify(&functionTwo, &functionOne);
    CHECK(state.failure);
    CHECK(!state.errors.empty());

    CHECK_EQ(functionOne, functionOneSaved);
@ -74,6 +89,10 @@ TEST_CASE_FIXTURE(TryUnifyFixture, "incompatible_functions_are_preserved")

TEST_CASE_FIXTURE(TryUnifyFixture, "tables_can_be_unified")
{
    ScopedFastFlag sff[] = {
        {"LuauTransitiveSubtyping", true},
    };

    Type tableOne{TypeVariant{
        TableType{{{"foo", {arena.freshType(globalScope->level)}}}, std::nullopt, globalScope->level, TableState::Unsealed},
    }};
@ -86,6 +105,7 @@ TEST_CASE_FIXTURE(TryUnifyFixture, "tables_can_be_unified")

    state.tryUnify(&tableTwo, &tableOne);

    CHECK(!state.failure);
    CHECK(state.errors.empty());

    state.log.commit();
@ -95,6 +115,10 @@ TEST_CASE_FIXTURE(TryUnifyFixture, "tables_can_be_unified")

TEST_CASE_FIXTURE(TryUnifyFixture, "incompatible_tables_are_preserved")
{
    ScopedFastFlag sff[] = {
        {"LuauTransitiveSubtyping", true},
    };

    Type tableOne{TypeVariant{
        TableType{{{"foo", {arena.freshType(globalScope->level)}}, {"bar", {builtinTypes->numberType}}}, std::nullopt, globalScope->level,
            TableState::Unsealed},
@ -109,6 +133,7 @@ TEST_CASE_FIXTURE(TryUnifyFixture, "incompatible_tables_are_preserved")

    state.tryUnify(&tableTwo, &tableOne);

    CHECK(state.failure);
    CHECK_EQ(1, state.errors.size());

    CHECK_NE(*getMutable<TableType>(&tableOne)->props["foo"].type, *getMutable<TableType>(&tableTwo)->props["foo"].type);
@ -218,6 +243,7 @@ TEST_CASE_FIXTURE(TryUnifyFixture, "variadic_type_pack_unification")
    TypePackVar variadicPack{VariadicTypePack{builtinTypes->numberType}};

    state.tryUnify(&testPack, &variadicPack);
    CHECK(state.failure);
    CHECK(!state.errors.empty());
}

@ -228,6 +254,7 @@ TEST_CASE_FIXTURE(TryUnifyFixture, "variadic_tails_respect_progress")
    TypePackVar b{TypePack{{builtinTypes->numberType, builtinTypes->stringType}, &variadicPack}};

    state.tryUnify(&b, &a);
    CHECK(!state.failure);
    CHECK(state.errors.empty());
}

@ -270,8 +297,7 @@ TEST_CASE_FIXTURE(TryUnifyFixture, "free_tail_is_grown_properly")
        arena.addTypePack(TypePack{{builtinTypes->numberType, builtinTypes->numberType, builtinTypes->numberType}, std::nullopt});
    TypePackId numberAndFreeTail = arena.addTypePack(TypePack{{builtinTypes->numberType}, arena.addTypePack(TypePackVar{FreeTypePack{TypeLevel{}}})});

    ErrorVec unifyErrors = state.canUnify(numberAndFreeTail, threeNumbers);
    CHECK(unifyErrors.size() == 0);
    CHECK(state.canUnify(numberAndFreeTail, threeNumbers).empty());
}

TEST_CASE_FIXTURE(TryUnifyFixture, "recursive_metatable_getmatchtag")
@ -321,7 +347,10 @@ TEST_CASE_FIXTURE(TryUnifyFixture, "txnlog_preserves_pack_owner")

TEST_CASE_FIXTURE(TryUnifyFixture, "metatables_unify_against_shape_of_free_table")
{
    ScopedFastFlag sff("DebugLuauDeferredConstraintResolution", true);
    ScopedFastFlag sff[] = {
        {"LuauTransitiveSubtyping", true},
        {"DebugLuauDeferredConstraintResolution", true},
    };

    TableType::Props freeProps{
        {"foo", {builtinTypes->numberType}},
@ -715,4 +715,62 @@ TEST_CASE_FIXTURE(Fixture, "less_greedy_unification_with_union_types_2")
    CHECK_EQ("({| x: number |} | {| x: string |}) -> number | string", toString(requireType("f")));
}

TEST_CASE_FIXTURE(Fixture, "union_table_any_property")
{
    CheckResult result = check(R"(
        function f(x)
            -- x : X
            -- sup : { p : { q : X } }?
            local sup = if true then { p = { q = x } } else nil
            local sub : { p : any }
            sup = nil
            sup = sub
        end
    )");

    LUAU_REQUIRE_NO_ERRORS(result);
}

TEST_CASE_FIXTURE(Fixture, "union_function_any_args")
{
    CheckResult result = check(R"(
        local sup : ((...any) -> (...any))?
        local sub : ((number) -> (...any))
        sup = sub
    )");

    LUAU_REQUIRE_NO_ERRORS(result);
}

TEST_CASE_FIXTURE(Fixture, "optional_any")
{
    CheckResult result = check(R"(
        local sup : any?
        local sub : number
        sup = sub
    )");

    LUAU_REQUIRE_NO_ERRORS(result);
}

TEST_CASE_FIXTURE(Fixture, "generic_function_with_optional_arg")
{
    ScopedFastFlag sff[] = {
        {"LuauTransitiveSubtyping", true},
    };

    CheckResult result = check(R"(
        function f<T>(x : T?) : {T}
            local result = {}
            if x then
                result[1] = x
            end
            return result
        end
        local t : {string} = f(nil)
    )");

    LUAU_REQUIRE_NO_ERRORS(result);
}

TEST_SUITE_END();
@ -303,6 +303,7 @@ TEST_CASE_FIXTURE(Fixture, "dont_unify_operands_if_one_of_the_operand_is_never_i
{
    ScopedFastFlag sff[]{
        {"LuauTryhardAnd", true},
        {"LuauReducingAndOr", true},
    };

    CheckResult result = check(R"(
@ -313,13 +314,7 @@ TEST_CASE_FIXTURE(Fixture, "dont_unify_operands_if_one_of_the_operand_is_never_i

    LUAU_REQUIRE_NO_ERRORS(result);

    if (FFlag::DebugLuauDeferredConstraintResolution)
        CHECK_EQ("<a>(nil, a) -> boolean", toString(requireType("ord")));
    else
    {
        // Widening doesn't normalize yet, so the result is a bit strange
        CHECK_EQ("<a>(nil, a) -> boolean | boolean", toString(requireType("ord")));
    }
    CHECK_EQ("<a>(nil, a) -> boolean", toString(requireType("ord")));
}

TEST_CASE_FIXTURE(Fixture, "math_operators_and_never")
@ -578,6 +578,21 @@ do
    assert(#t2 == 6)
end

-- test boundary invariant in sparse arrays of various kinds
do
    local function obscuredalloc() return {} end

    local bits = 16

    for i = 1, 2^bits - 1 do
        local t1 = obscuredalloc() -- to avoid NEWTABLE guessing correct size

        for k = 1, bits do
            t1[k] = if bit32.extract(i, k - 1) == 1 then true else nil
        end
    end
end

-- test table.unpack fastcall for rejecting large unpacks
do
    local ok, res = pcall(function()
@ -31,8 +31,6 @@ BuiltinTests.table_pack_reduce
BuiltinTests.table_pack_variadic
DefinitionTests.class_definition_overload_metamethods
DefinitionTests.class_definition_string_props
FrontendTest.environments
FrontendTest.nocheck_cycle_used_by_checked
GenericsTests.apply_type_function_nested_generics2
GenericsTests.better_mismatch_error_messages
GenericsTests.bound_tables_do_not_clone_original_fields
@ -54,19 +52,6 @@ GenericsTests.self_recursive_instantiated_param
IntersectionTypes.table_intersection_write_sealed
IntersectionTypes.table_intersection_write_sealed_indirect
IntersectionTypes.table_write_sealed_indirect
ModuleTests.clone_self_property
NonstrictModeTests.for_in_iterator_variables_are_any
NonstrictModeTests.function_parameters_are_any
NonstrictModeTests.inconsistent_module_return_types_are_ok
NonstrictModeTests.inconsistent_return_types_are_ok
NonstrictModeTests.infer_nullary_function
NonstrictModeTests.infer_the_maximum_number_of_values_the_function_could_return
NonstrictModeTests.inline_table_props_are_also_any
NonstrictModeTests.local_tables_are_not_any
NonstrictModeTests.locals_are_any_by_default
NonstrictModeTests.offer_a_hint_if_you_use_a_dot_instead_of_a_colon
NonstrictModeTests.parameters_having_type_any_are_optional
NonstrictModeTests.table_props_are_any
ProvisionalTests.assign_table_with_refined_property_with_a_similar_type_is_illegal
ProvisionalTests.bail_early_if_unification_is_too_complicated
ProvisionalTests.do_not_ice_when_trying_to_pick_first_of_generic_type_pack
@ -85,9 +70,7 @@ RefinementTest.typeguard_in_assert_position
RefinementTest.x_as_any_if_x_is_instance_elseif_x_is_table
RuntimeLimits.typescript_port_of_Result_type
TableTests.a_free_shape_cannot_turn_into_a_scalar_if_it_is_not_compatible
TableTests.any_when_indexing_into_an_unsealed_table_with_no_indexer_in_nonstrict_mode
TableTests.casting_tables_with_props_into_table_with_indexer3
TableTests.casting_tables_with_props_into_table_with_indexer4
TableTests.checked_prop_too_early
TableTests.disallow_indexing_into_an_unsealed_table_with_no_indexer_in_strict_mode
TableTests.dont_crash_when_setmetatable_does_not_produce_a_metatabletypevar
@ -117,7 +100,6 @@ TableTests.missing_metatable_for_sealed_tables_do_not_get_inferred
TableTests.mixed_tables_with_implicit_numbered_keys
TableTests.nil_assign_doesnt_hit_indexer
TableTests.ok_to_set_nil_even_on_non_lvalue_base_expr
TableTests.only_ascribe_synthetic_names_at_module_scope
TableTests.oop_polymorphic
TableTests.quantify_even_that_table_was_never_exported_at_all
TableTests.quantify_metatables_of_metatables_of_table
@ -138,7 +120,6 @@ ToString.named_metatable_toStringNamedFunction
ToString.toStringDetailed2
ToString.toStringErrorPack
ToString.toStringNamedFunction_generic_pack
ToString.toStringNamedFunction_map
TryUnifyTests.members_of_failed_typepack_unification_are_unified_with_errorType
TryUnifyTests.result_of_failed_typepack_unification_is_constrained
TryUnifyTests.typepack_unification_should_trim_free_tails
@ -154,15 +135,11 @@ TypeAliases.type_alias_local_rename
TypeAliases.type_alias_locations
TypeAliases.type_alias_of_an_imported_recursive_generic_type
TypeInfer.check_type_infer_recursion_count
TypeInfer.checking_should_not_ice
TypeInfer.cli_50041_committing_txnlog_in_apollo_client_error
TypeInfer.dont_report_type_errors_within_an_AstExprError
TypeInfer.dont_report_type_errors_within_an_AstStatError
TypeInfer.fuzz_free_table_type_change_during_index_check
TypeInfer.globals
TypeInfer.globals2
TypeInfer.infer_assignment_value_types_mutable_lval
TypeInfer.it_is_ok_to_have_inconsistent_number_of_return_values_in_nonstrict
TypeInfer.no_stack_overflow_from_isoptional
TypeInfer.no_stack_overflow_from_isoptional2
TypeInfer.tc_after_error_recovery_no_replacement_name_in_error
@ -173,17 +150,13 @@ TypeInferClasses.classes_without_overloaded_operators_cannot_be_added
TypeInferClasses.index_instance_property
TypeInferClasses.table_class_unification_reports_sane_errors_for_missing_properties
TypeInferClasses.warn_when_prop_almost_matches
TypeInferFunctions.calling_function_with_anytypepack_doesnt_leak_free_types
TypeInferFunctions.cannot_hoist_interior_defns_into_signature
TypeInferFunctions.check_function_before_lambda_that_uses_it
TypeInferFunctions.dont_give_other_overloads_message_if_only_one_argument_matching_overload_exists
TypeInferFunctions.duplicate_functions_with_different_signatures_not_allowed_in_nonstrict
TypeInferFunctions.function_cast_error_uses_correct_language
TypeInferFunctions.function_decl_non_self_sealed_overwrite_2
TypeInferFunctions.function_decl_non_self_unsealed_overwrite
TypeInferFunctions.function_does_not_return_enough_values
TypeInferFunctions.function_statement_sealed_table_assignment_through_indexer
TypeInferFunctions.improved_function_arg_mismatch_error_nonstrict
TypeInferFunctions.improved_function_arg_mismatch_errors
TypeInferFunctions.infer_anonymous_function_arguments
TypeInferFunctions.infer_that_function_does_not_return_a_table
@ -191,7 +164,6 @@ TypeInferFunctions.luau_subtyping_is_np_hard
TypeInferFunctions.no_lossy_function_type
TypeInferFunctions.occurs_check_failure_in_function_return_type
TypeInferFunctions.record_matching_overload
TypeInferFunctions.report_exiting_without_return_nonstrict
TypeInferFunctions.report_exiting_without_return_strict
TypeInferFunctions.return_type_by_overload
TypeInferFunctions.too_few_arguments_variadic
@ -204,11 +176,9 @@ TypeInferLoops.for_in_loop_error_on_factory_not_returning_the_right_amount_of_va
TypeInferLoops.for_in_loop_with_next
TypeInferLoops.for_in_with_generic_next
TypeInferLoops.loop_iter_metamethod_ok_with_inference
TypeInferLoops.loop_iter_no_indexer_nonstrict
TypeInferLoops.loop_iter_trailing_nil
TypeInferLoops.properly_infer_iteratee_is_a_free_table
TypeInferLoops.unreachable_code_after_infinite_loop
TypeInferModules.custom_require_global
TypeInferModules.do_not_modify_imported_types_5
TypeInferModules.module_type_conflict
TypeInferModules.module_type_conflict_instantiated
@ -220,18 +190,14 @@ TypeInferOperators.cli_38355_recursive_union
TypeInferOperators.compound_assign_metatable
TypeInferOperators.compound_assign_mismatch_metatable
TypeInferOperators.disallow_string_and_types_without_metatables_from_arithmetic_binary_ops
TypeInferOperators.in_nonstrict_mode_strip_nil_from_intersections_when_considering_relational_operators
TypeInferOperators.infer_any_in_all_modes_when_lhs_is_unknown
TypeInferOperators.operator_eq_completely_incompatible
TypeInferOperators.typecheck_overloaded_multiply_that_is_an_intersection
TypeInferOperators.typecheck_overloaded_multiply_that_is_an_intersection_on_rhs
TypeInferOperators.typecheck_unary_len_error
TypeInferOperators.UnknownGlobalCompoundAssign
TypeInferOperators.unrelated_classes_cannot_be_compared
TypeInferOperators.unrelated_primitives_cannot_be_compared
TypeInferPrimitives.CheckMethodsOfNumber
TypeInferPrimitives.string_index
TypeInferUnknownNever.assign_to_global_which_is_never
TypeInferUnknownNever.dont_unify_operands_if_one_of_the_operand_is_never_in_any_ordering_operators
TypeInferUnknownNever.math_operators_and_never
TypePackTests.detect_cyclic_typepacks2
@ -250,6 +216,7 @@ TypeSingletons.table_properties_type_error_escapes
TypeSingletons.taking_the_length_of_union_of_string_singleton
TypeSingletons.widen_the_supertype_if_it_is_free_and_subtype_has_singleton
TypeSingletons.widening_happens_almost_everywhere
UnionTypes.generic_function_with_optional_arg
UnionTypes.index_on_a_union_type_with_missing_property
UnionTypes.optional_assignment_errors
UnionTypes.optional_call_error