Mirror of https://github.com/luau-lang/luau.git
Synced 2024-11-15 06:15:44 +08:00
Sync to upstream/release/603 (#1097)
# What's changed?

- Record the location of properties for table types (closes #802)
- Implement stricter UTF-8 validations as per the RFC (https://github.com/luau-lang/rfcs/pull/1)
- Implement `buffer` as a new type in both the old and new solvers.
- Changed errors produced by some `buffer` builtins to be a bit more generic to avoid platform-dependent error messages.
- Fixed a bug where `Unifier` would copy some persistent types, tripping some internal assertions.
- Type checking rules on relational operators are now a little bit more lax.
- Improve dead code elimination for some `if` statements with complex always-false conditions.

## New type solver

- Dataflow analysis now generates phi nodes on exit of branches.
- Dataflow analysis avoids producing a new definition for locals or properties that are not owned by that loop.
- If a function parameter has been constrained to `never`, report errors at all uses of that parameter within that function.
- Switch to the new `Luau::Set` in place of `std::unordered_set` to alleviate poor allocation characteristics that were negatively affecting overall performance.
- Subtyping can now report many failing reasons instead of just the first one found during the test.
- Subtyping now also reports reasons for type pack mismatches.
- When visiting `if` statements or expressions, the resulting context is the set of terms common to both branches.

## Native codegen

- Implement support for `buffer` builtins in the IR for x64 and A64.
- Optimized `table.insert` by not inserting a table barrier when it is fastcalled with a constant.

## Internal Contributors

Co-authored-by: Aaron Weiss <aaronweiss@roblox.com>
Co-authored-by: Alexander McCord <amccord@roblox.com>
Co-authored-by: Andy Friesen <afriesen@roblox.com>
Co-authored-by: Arseny Kapoulkine <arseny@roblox.com>
Co-authored-by: Aviral Goel <agoel@roblox.com>
Co-authored-by: Lily Brown <lbrown@roblox.com>
Co-authored-by: Vyacheslav Egorov <vegorov@roblox.com>
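To make the `buffer` items above concrete, here is a minimal C++ sketch (not part of this commit) of how the new primitive surfaces through the analysis APIs touched in the diff below: `PrimitiveType::Buffer`, `BuiltinTypes::bufferType`, and the `isBuffer(TypeId)` predicate. The helper name is hypothetical.

// Minimal sketch, assuming only the declarations added by this commit plus the existing
// follow()/get<>() helpers from Luau/Type.h. Not part of the diff.
#include "Luau/Type.h"

namespace Luau
{

// Hypothetical helper: true when `ty` resolves to the new buffer primitive.
bool resolvesToBuffer(TypeId ty)
{
    ty = follow(ty);

    if (const PrimitiveType* prim = get<PrimitiveType>(ty))
        return prim->type == PrimitiveType::Buffer;

    return false;
}

} // namespace Luau

Inside the checker itself the same check is exposed as `isBuffer(TypeId)`, and `type(x) == "buffer"` typeguards now refine to `builtinTypes->bufferType`, as the ConstraintGenerator change further down shows.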
This commit is contained in:
parent 7105c81579
commit c2ba1058c3
@@ -148,6 +148,8 @@ private:
     */
    ScopePtr childScope(AstNode* node, const ScopePtr& parent);

    std::optional<TypeId> lookup(Scope* scope, DefId def);

    /**
     * Adds a new constraint with no dependencies to a given scope.
     * @param scope the scope to add the constraint to.
@@ -3,14 +3,18 @@
#pragma once

#include "Luau/Constraint.h"
#include "Luau/DenseHash.h"
#include "Luau/Error.h"
#include "Luau/Location.h"
#include "Luau/Module.h"
#include "Luau/Normalize.h"
#include "Luau/ToString.h"
#include "Luau/Type.h"
#include "Luau/TypeCheckLimits.h"
#include "Luau/TypeFwd.h"
#include "Luau/Variant.h"

#include <utility>
#include <vector>

namespace Luau
@@ -74,6 +78,10 @@ struct ConstraintSolver
    std::unordered_map<BlockedConstraintId, std::vector<NotNull<const Constraint>>, HashBlockedConstraintId> blocked;
    // Memoized instantiations of type aliases.
    DenseHashMap<InstantiationSignature, TypeId, HashInstantiationSignature> instantiatedAliases{{}};
    // Breadcrumbs for where a free type's upper bound was expanded. We use
    // these to provide more helpful error messages when a free type is solved
    // as never unexpectedly.
    DenseHashMap<TypeId, std::vector<std::pair<Location, TypeId>>> upperBoundContributors{nullptr};

    // A mapping from free types to the number of unresolved constraints that mention them.
    DenseHashMap<TypeId, size_t> unresolvedConstraints{{}};
@@ -140,7 +148,7 @@ struct ConstraintSolver
    std::pair<std::vector<TypeId>, std::optional<TypeId>> lookupTableProp(
        TypeId subjectType, const std::string& propName, bool suppressSimplification = false);
    std::pair<std::vector<TypeId>, std::optional<TypeId>> lookupTableProp(
        TypeId subjectType, const std::string& propName, bool suppressSimplification, std::unordered_set<TypeId>& seen);
        TypeId subjectType, const std::string& propName, bool suppressSimplification, DenseHashSet<TypeId>& seen);

    void block(NotNull<const Constraint> target, NotNull<const Constraint> constraint);
    /**
@ -3,6 +3,7 @@
|
||||
|
||||
// Do not include LValue. It should never be used here.
|
||||
#include "Luau/Ast.h"
|
||||
#include "Luau/ControlFlow.h"
|
||||
#include "Luau/DenseHash.h"
|
||||
#include "Luau/Def.h"
|
||||
#include "Luau/Symbol.h"
|
||||
@ -74,11 +75,18 @@ private:
|
||||
struct DfgScope
|
||||
{
|
||||
DfgScope* parent;
|
||||
bool isLoopScope;
|
||||
|
||||
DenseHashMap<Symbol, const Def*> bindings{Symbol{}};
|
||||
DenseHashMap<const Def*, std::unordered_map<std::string, const Def*>> props{nullptr};
|
||||
|
||||
std::optional<DefId> lookup(Symbol symbol) const;
|
||||
std::optional<DefId> lookup(DefId def, const std::string& key) const;
|
||||
|
||||
void inherit(const DfgScope* childScope);
|
||||
|
||||
bool canUpdateDefinition(Symbol symbol) const;
|
||||
bool canUpdateDefinition(DefId def, const std::string& key) const;
|
||||
};
|
||||
|
||||
struct DataFlowResult
|
||||
@ -106,31 +114,32 @@ private:
|
||||
|
||||
std::vector<std::unique_ptr<DfgScope>> scopes;
|
||||
|
||||
DfgScope* childScope(DfgScope* scope);
|
||||
DfgScope* childScope(DfgScope* scope, bool isLoopScope = false);
|
||||
void join(DfgScope* parent, DfgScope* a, DfgScope* b);
|
||||
|
||||
void visit(DfgScope* scope, AstStatBlock* b);
|
||||
void visitBlockWithoutChildScope(DfgScope* scope, AstStatBlock* b);
|
||||
ControlFlow visit(DfgScope* scope, AstStatBlock* b);
|
||||
ControlFlow visitBlockWithoutChildScope(DfgScope* scope, AstStatBlock* b);
|
||||
|
||||
void visit(DfgScope* scope, AstStat* s);
|
||||
void visit(DfgScope* scope, AstStatIf* i);
|
||||
void visit(DfgScope* scope, AstStatWhile* w);
|
||||
void visit(DfgScope* scope, AstStatRepeat* r);
|
||||
void visit(DfgScope* scope, AstStatBreak* b);
|
||||
void visit(DfgScope* scope, AstStatContinue* c);
|
||||
void visit(DfgScope* scope, AstStatReturn* r);
|
||||
void visit(DfgScope* scope, AstStatExpr* e);
|
||||
void visit(DfgScope* scope, AstStatLocal* l);
|
||||
void visit(DfgScope* scope, AstStatFor* f);
|
||||
void visit(DfgScope* scope, AstStatForIn* f);
|
||||
void visit(DfgScope* scope, AstStatAssign* a);
|
||||
void visit(DfgScope* scope, AstStatCompoundAssign* c);
|
||||
void visit(DfgScope* scope, AstStatFunction* f);
|
||||
void visit(DfgScope* scope, AstStatLocalFunction* l);
|
||||
void visit(DfgScope* scope, AstStatTypeAlias* t);
|
||||
void visit(DfgScope* scope, AstStatDeclareGlobal* d);
|
||||
void visit(DfgScope* scope, AstStatDeclareFunction* d);
|
||||
void visit(DfgScope* scope, AstStatDeclareClass* d);
|
||||
void visit(DfgScope* scope, AstStatError* error);
|
||||
ControlFlow visit(DfgScope* scope, AstStat* s);
|
||||
ControlFlow visit(DfgScope* scope, AstStatIf* i);
|
||||
ControlFlow visit(DfgScope* scope, AstStatWhile* w);
|
||||
ControlFlow visit(DfgScope* scope, AstStatRepeat* r);
|
||||
ControlFlow visit(DfgScope* scope, AstStatBreak* b);
|
||||
ControlFlow visit(DfgScope* scope, AstStatContinue* c);
|
||||
ControlFlow visit(DfgScope* scope, AstStatReturn* r);
|
||||
ControlFlow visit(DfgScope* scope, AstStatExpr* e);
|
||||
ControlFlow visit(DfgScope* scope, AstStatLocal* l);
|
||||
ControlFlow visit(DfgScope* scope, AstStatFor* f);
|
||||
ControlFlow visit(DfgScope* scope, AstStatForIn* f);
|
||||
ControlFlow visit(DfgScope* scope, AstStatAssign* a);
|
||||
ControlFlow visit(DfgScope* scope, AstStatCompoundAssign* c);
|
||||
ControlFlow visit(DfgScope* scope, AstStatFunction* f);
|
||||
ControlFlow visit(DfgScope* scope, AstStatLocalFunction* l);
|
||||
ControlFlow visit(DfgScope* scope, AstStatTypeAlias* t);
|
||||
ControlFlow visit(DfgScope* scope, AstStatDeclareGlobal* d);
|
||||
ControlFlow visit(DfgScope* scope, AstStatDeclareFunction* d);
|
||||
ControlFlow visit(DfgScope* scope, AstStatDeclareClass* d);
|
||||
ControlFlow visit(DfgScope* scope, AstStatError* error);
|
||||
|
||||
DataFlowResult visitExpr(DfgScope* scope, AstExpr* e);
|
||||
DataFlowResult visitExpr(DfgScope* scope, AstExprGroup* group);
|
||||
|
@ -79,8 +79,7 @@ struct DefArena
|
||||
TypedAllocator<Def> allocator;
|
||||
|
||||
DefId freshCell(bool subscripted = false);
|
||||
// TODO: implement once we have cases where we need to merge in definitions
|
||||
// DefId phi(const std::vector<DefId>& defs);
|
||||
DefId phi(DefId a, DefId b);
|
||||
};
|
||||
|
||||
} // namespace Luau
|
||||
|
@ -322,6 +322,7 @@ struct TypePackMismatch
|
||||
{
|
||||
TypePackId wantedTp;
|
||||
TypePackId givenTp;
|
||||
std::string reason;
|
||||
|
||||
bool operator==(const TypePackMismatch& rhs) const;
|
||||
};
|
||||
|
@ -71,7 +71,7 @@ struct SourceNode
|
||||
|
||||
ModuleName name;
|
||||
std::string humanReadableName;
|
||||
std::unordered_set<ModuleName> requireSet;
|
||||
DenseHashSet<ModuleName> requireSet{{}};
|
||||
std::vector<std::pair<ModuleName, Location>> requireLocations;
|
||||
bool dirtySourceModule = true;
|
||||
bool dirtyModule = true;
|
||||
@ -206,7 +206,7 @@ private:
|
||||
std::vector<ModuleName>& buildQueue, const ModuleName& root, bool forAutocomplete, std::function<bool(const ModuleName&)> canSkip = {});
|
||||
|
||||
void addBuildQueueItems(std::vector<BuildQueueItem>& items, std::vector<ModuleName>& buildQueue, bool cycleDetected,
|
||||
std::unordered_set<Luau::ModuleName>& seen, const FrontendOptions& frontendOptions);
|
||||
DenseHashSet<Luau::ModuleName>& seen, const FrontendOptions& frontendOptions);
|
||||
void checkBuildQueueItem(BuildQueueItem& item);
|
||||
void checkBuildQueueItems(std::vector<BuildQueueItem>& items);
|
||||
void recordItemResult(const BuildQueueItem& item);
|
||||
|
@ -102,6 +102,8 @@ struct Module
|
||||
DenseHashMap<const AstType*, TypeId> astResolvedTypes{nullptr};
|
||||
DenseHashMap<const AstTypePack*, TypePackId> astResolvedTypePacks{nullptr};
|
||||
|
||||
DenseHashMap<TypeId, std::vector<std::pair<Location, TypeId>>> upperBoundContributors{nullptr};
|
||||
|
||||
// Map AST nodes to the scope they create. Cannot be NotNull<Scope> because
|
||||
// we need a sentinel value for the map.
|
||||
DenseHashMap<const AstNode*, Scope*> astScopes{nullptr};
|
||||
|
@ -2,6 +2,7 @@
|
||||
#pragma once
|
||||
|
||||
#include "Luau/NotNull.h"
|
||||
#include "Luau/Set.h"
|
||||
#include "Luau/TypeFwd.h"
|
||||
#include "Luau/UnifierSharedState.h"
|
||||
|
||||
@ -9,7 +10,6 @@
|
||||
#include <map>
|
||||
#include <memory>
|
||||
#include <unordered_map>
|
||||
#include <unordered_set>
|
||||
#include <vector>
|
||||
|
||||
namespace Luau
|
||||
@ -254,6 +254,10 @@ struct NormalizedType
|
||||
// This type is either never or thread.
|
||||
TypeId threads;
|
||||
|
||||
// The buffer part of the type.
|
||||
// This type is either never or buffer.
|
||||
TypeId buffers;
|
||||
|
||||
// The (meta)table part of the type.
|
||||
// Each element of this set is a (meta)table type, or the top `table` type.
|
||||
// An empty set denotes never.
|
||||
@ -299,6 +303,7 @@ struct NormalizedType
|
||||
bool hasNumbers() const;
|
||||
bool hasStrings() const;
|
||||
bool hasThreads() const;
|
||||
bool hasBuffers() const;
|
||||
bool hasTables() const;
|
||||
bool hasFunctions() const;
|
||||
bool hasTyvars() const;
|
||||
@ -359,7 +364,7 @@ public:
|
||||
void unionTablesWithTable(TypeIds& heres, TypeId there);
|
||||
void unionTables(TypeIds& heres, const TypeIds& theres);
|
||||
bool unionNormals(NormalizedType& here, const NormalizedType& there, int ignoreSmallerTyvars = -1);
|
||||
bool unionNormalWithTy(NormalizedType& here, TypeId there, std::unordered_set<TypeId>& seenSetTypes, int ignoreSmallerTyvars = -1);
|
||||
bool unionNormalWithTy(NormalizedType& here, TypeId there, Set<TypeId>& seenSetTypes, int ignoreSmallerTyvars = -1);
|
||||
|
||||
// ------- Negations
|
||||
std::optional<NormalizedType> negateNormal(const NormalizedType& here);
|
||||
@ -381,15 +386,15 @@ public:
|
||||
std::optional<TypeId> intersectionOfFunctions(TypeId here, TypeId there);
|
||||
void intersectFunctionsWithFunction(NormalizedFunctionType& heress, TypeId there);
|
||||
void intersectFunctions(NormalizedFunctionType& heress, const NormalizedFunctionType& theress);
|
||||
bool intersectTyvarsWithTy(NormalizedTyvars& here, TypeId there, std::unordered_set<TypeId>& seenSetTypes);
|
||||
bool intersectTyvarsWithTy(NormalizedTyvars& here, TypeId there, Set<TypeId>& seenSetTypes);
|
||||
bool intersectNormals(NormalizedType& here, const NormalizedType& there, int ignoreSmallerTyvars = -1);
|
||||
bool intersectNormalWithTy(NormalizedType& here, TypeId there, std::unordered_set<TypeId>& seenSetTypes);
|
||||
bool intersectNormalWithTy(NormalizedType& here, TypeId there, Set<TypeId>& seenSetTypes);
|
||||
bool normalizeIntersections(const std::vector<TypeId>& intersections, NormalizedType& outType);
|
||||
|
||||
// Check for inhabitance
|
||||
bool isInhabited(TypeId ty);
|
||||
bool isInhabited(TypeId ty, std::unordered_set<TypeId> seen);
|
||||
bool isInhabited(const NormalizedType* norm, std::unordered_set<TypeId> seen = {});
|
||||
bool isInhabited(TypeId ty, Set<TypeId> seen);
|
||||
bool isInhabited(const NormalizedType* norm, Set<TypeId> seen = {nullptr});
|
||||
|
||||
// Check for intersections being inhabited
|
||||
bool isIntersectionInhabited(TypeId left, TypeId right);
|
||||
|
@ -56,7 +56,6 @@ struct Scope
|
||||
void addBuiltinTypeBinding(const Name& name, const TypeFun& tyFun);
|
||||
|
||||
std::optional<TypeId> lookup(Symbol sym) const;
|
||||
std::optional<TypeId> lookupLValue(DefId def) const;
|
||||
std::optional<TypeId> lookup(DefId def) const;
|
||||
std::optional<std::pair<TypeId, Scope*>> lookupEx(DefId def);
|
||||
std::optional<std::pair<Binding*, Scope*>> lookupEx(Symbol sym);
|
||||
@ -80,6 +79,7 @@ struct Scope
|
||||
// types here.
|
||||
DenseHashMap<const Def*, TypeId> rvalueRefinements{nullptr};
|
||||
|
||||
void inheritAssignments(const ScopePtr& childScope);
|
||||
void inheritRefinements(const ScopePtr& childScope);
|
||||
|
||||
// For mutually recursive type aliases, it's important that
|
||||
|
Analysis/include/Luau/Set.h (new file, 105 lines)
@@ -0,0 +1,105 @@
// This file is part of the Luau programming language and is licensed under MIT License; see LICENSE.txt for details
#pragma once

#include "Luau/DenseHash.h"

namespace Luau
{

template<typename T>
using SetHashDefault = std::conditional_t<std::is_pointer_v<T>, DenseHashPointer, std::hash<T>>;

// This is an implementation of `unordered_set` using `DenseHashMap<T, bool>` to support erasure.
// This lets us work around `DenseHashSet` limitations and get a more traditional set interface.
template<typename T, typename Hash = SetHashDefault<T>>
class Set
{
private:
    DenseHashMap<T, bool, Hash> mapping;
    size_t entryCount = 0;

public:
    Set(const T& empty_key)
        : mapping{empty_key}
    {
    }

    bool insert(const T& element)
    {
        bool& entry = mapping[element];
        bool fresh = !entry;

        if (fresh)
        {
            entry = true;
            entryCount++;
        }

        return fresh;
    }

    template<class Iterator>
    void insert(Iterator begin, Iterator end)
    {
        for (Iterator it = begin; it != end; ++it)
            insert(*it);
    }

    void erase(const T& element)
    {
        bool& entry = mapping[element];

        if (entry)
        {
            entry = false;
            entryCount--;
        }
    }

    void clear()
    {
        mapping.clear();
        entryCount = 0;
    }

    size_t size() const
    {
        return entryCount;
    }

    bool empty() const
    {
        return entryCount == 0;
    }

    size_t count(const T& element) const
    {
        const bool* entry = mapping.find(element);
        return (entry && *entry) ? 1 : 0;
    }

    bool contains(const T& element) const
    {
        return count(element) != 0;
    }

    bool operator==(const Set<T>& there) const
    {
        // if the sets are unequal sizes, then they cannot possibly be equal.
        if (size() != there.size())
            return false;

        // otherwise, we'll need to check that every element we have here is in `there`.
        for (auto [elem, present] : mapping)
        {
            // if it's not, we'll return `false`
            if (present && !there.contains(elem))
                return false;
        }

        // otherwise, we've proven the two equal!
        return true;
    }
};

} // namespace Luau
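A brief usage sketch for the `Set` class above (illustrative only, not part of the diff); the element type and the values are arbitrary.

// Minimal sketch, assuming only Luau/Set.h as defined above.
#include "Luau/Set.h"

void setExample()
{
    Luau::Set<int> seen{0}; // 0 is reserved as the empty-key sentinel and must not be inserted

    bool fresh = seen.insert(42); // true: 42 was not present before
    bool again = seen.insert(42); // false: duplicate insertions are no-ops
    (void)fresh;
    (void)again;

    // count() reports 0 or 1; contains() is the boolean convenience wrapper.
    if (seen.contains(42) && seen.count(42) == 1)
        seen.erase(42); // logical erasure: size() drops back to 0

    bool nowEmpty = seen.empty(); // true again after erasing the only element
    (void)nowEmpty;
}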
@@ -2,11 +2,10 @@

#pragma once

#include "Luau/DenseHash.h"
#include "Luau/NotNull.h"
#include "Luau/TypeFwd.h"

#include <set>

namespace Luau
{

@@ -16,7 +15,7 @@ struct SimplifyResult
{
    TypeId result;

    std::set<TypeId> blockedTypes;
    DenseHashSet<TypeId> blockedTypes;
};

SimplifyResult simplifyIntersection(NotNull<BuiltinTypes> builtinTypes, NotNull<TypeArena> arena, TypeId ty, TypeId discriminant);
@ -1,10 +1,11 @@
|
||||
// This file is part of the Luau programming language and is licensed under MIT License; see LICENSE.txt for details
|
||||
#pragma once
|
||||
|
||||
#include "Luau/Set.h"
|
||||
#include "Luau/TypeFwd.h"
|
||||
#include "Luau/TypePairHash.h"
|
||||
#include "Luau/UnifierSharedState.h"
|
||||
#include "Luau/TypePath.h"
|
||||
#include "Luau/DenseHash.h"
|
||||
|
||||
#include <vector>
|
||||
#include <optional>
|
||||
@ -22,6 +23,9 @@ struct NormalizedType;
|
||||
struct NormalizedClassType;
|
||||
struct NormalizedStringType;
|
||||
struct NormalizedFunctionType;
|
||||
struct TypeArena;
|
||||
struct Scope;
|
||||
struct TableIndexer;
|
||||
|
||||
struct SubtypingReasoning
|
||||
{
|
||||
@ -31,6 +35,11 @@ struct SubtypingReasoning
|
||||
bool operator==(const SubtypingReasoning& other) const;
|
||||
};
|
||||
|
||||
struct SubtypingReasoningHash
|
||||
{
|
||||
size_t operator()(const SubtypingReasoning& r) const;
|
||||
};
|
||||
|
||||
struct SubtypingResult
|
||||
{
|
||||
bool isSubtype = false;
|
||||
@ -40,7 +49,7 @@ struct SubtypingResult
|
||||
|
||||
/// The reason for isSubtype to be false. May not be present even if
|
||||
/// isSubtype is false, depending on the input types.
|
||||
std::optional<SubtypingReasoning> reasoning;
|
||||
DenseHashSet<SubtypingReasoning, SubtypingReasoningHash> reasoning{SubtypingReasoning{}};
|
||||
|
||||
SubtypingResult& andAlso(const SubtypingResult& other);
|
||||
SubtypingResult& orElse(const SubtypingResult& other);
|
||||
@ -92,9 +101,9 @@ struct Subtyping
|
||||
|
||||
Variance variance = Variance::Covariant;
|
||||
|
||||
using SeenSet = std::unordered_set<std::pair<TypeId, TypeId>, TypeIdPairHash>;
|
||||
using SeenSet = Set<std::pair<TypeId, TypeId>, TypePairHash>;
|
||||
|
||||
SeenSet seenTypes;
|
||||
SeenSet seenTypes{{}};
|
||||
|
||||
Subtyping(NotNull<BuiltinTypes> builtinTypes, NotNull<TypeArena> typeArena, NotNull<Normalizer> normalizer,
|
||||
NotNull<InternalErrorReporter> iceReporter, NotNull<Scope> scope);
|
||||
|
@ -21,7 +21,6 @@
|
||||
#include <set>
|
||||
#include <string>
|
||||
#include <unordered_map>
|
||||
#include <unordered_set>
|
||||
#include <vector>
|
||||
|
||||
LUAU_FASTINT(LuauTableTypeMaximumStringifierLength)
|
||||
@ -141,6 +140,7 @@ struct PrimitiveType
|
||||
Thread,
|
||||
Function,
|
||||
Table,
|
||||
Buffer,
|
||||
};
|
||||
|
||||
Type type;
|
||||
@ -373,8 +373,15 @@ struct Property
|
||||
|
||||
bool deprecated = false;
|
||||
std::string deprecatedSuggestion;
|
||||
|
||||
// If this property was inferred from an expression, this field will be
|
||||
// populated with the source location of the corresponding table property.
|
||||
std::optional<Location> location = std::nullopt;
|
||||
|
||||
// If this property was built from an explicit type annotation, this field
|
||||
// will be populated with the source location of that table property.
|
||||
std::optional<Location> typeLocation = std::nullopt;
|
||||
|
||||
Tags tags;
|
||||
std::optional<std::string> documentationSymbol;
|
||||
|
||||
@ -740,6 +747,7 @@ bool isBoolean(TypeId ty);
|
||||
bool isNumber(TypeId ty);
|
||||
bool isString(TypeId ty);
|
||||
bool isThread(TypeId ty);
|
||||
bool isBuffer(TypeId ty);
|
||||
bool isOptional(TypeId ty);
|
||||
bool isTableIntersection(TypeId ty);
|
||||
bool isOverloadedFunction(TypeId ty);
|
||||
@ -798,6 +806,7 @@ public:
|
||||
const TypeId stringType;
|
||||
const TypeId booleanType;
|
||||
const TypeId threadType;
|
||||
const TypeId bufferType;
|
||||
const TypeId functionType;
|
||||
const TypeId classType;
|
||||
const TypeId tableType;
|
||||
@ -966,7 +975,7 @@ private:
|
||||
using SavedIterInfo = std::pair<const T*, size_t>;
|
||||
|
||||
std::deque<SavedIterInfo> stack;
|
||||
std::unordered_set<const T*> seen; // Only needed to protect the iterator from hanging the thread.
|
||||
DenseHashSet<const T*> seen{nullptr}; // Only needed to protect the iterator from hanging the thread.
|
||||
|
||||
void advance()
|
||||
{
|
||||
@ -993,7 +1002,7 @@ private:
|
||||
{
|
||||
// If we're about to descend into a cyclic type, we should skip over this.
|
||||
// Ideally this should never happen, but alas it does from time to time. :(
|
||||
if (seen.find(inner) != seen.end())
|
||||
if (seen.contains(inner))
|
||||
advance();
|
||||
else
|
||||
{
|
||||
|
@ -377,6 +377,7 @@ public:
|
||||
const TypeId stringType;
|
||||
const TypeId booleanType;
|
||||
const TypeId threadType;
|
||||
const TypeId bufferType;
|
||||
const TypeId anyType;
|
||||
const TypeId unknownType;
|
||||
const TypeId neverType;
|
||||
|
@ -4,7 +4,6 @@
|
||||
#include "Luau/TypeFwd.h"
|
||||
#include "Luau/Variant.h"
|
||||
#include "Luau/NotNull.h"
|
||||
#include "Luau/TypeOrPack.h"
|
||||
|
||||
#include <optional>
|
||||
#include <string>
|
||||
@ -153,6 +152,16 @@ struct Path
|
||||
}
|
||||
};
|
||||
|
||||
struct PathHash
|
||||
{
|
||||
size_t operator()(const Property& prop) const;
|
||||
size_t operator()(const Index& idx) const;
|
||||
size_t operator()(const TypeField& field) const;
|
||||
size_t operator()(const PackField& field) const;
|
||||
size_t operator()(const Component& component) const;
|
||||
size_t operator()(const Path& path) const;
|
||||
};
|
||||
|
||||
/// The canonical "empty" Path, meaning a Path with no components.
|
||||
static const Path kEmpty{};
|
||||
|
||||
@ -184,7 +193,7 @@ using Path = TypePath::Path;
|
||||
|
||||
/// Converts a Path to a string for debugging purposes. This output may not be
|
||||
/// terribly clear to end users of the Luau type system.
|
||||
std::string toString(const TypePath::Path& path);
|
||||
std::string toString(const TypePath::Path& path, bool prefixDot = false);
|
||||
|
||||
std::optional<TypeOrPack> traverse(TypeId root, const Path& path, NotNull<BuiltinTypes> builtinTypes);
|
||||
std::optional<TypeOrPack> traverse(TypePackId root, const Path& path, NotNull<BuiltinTypes> builtinTypes);
|
||||
|
@ -6,7 +6,6 @@
|
||||
#include "Luau/NotNull.h"
|
||||
#include "Luau/TypePairHash.h"
|
||||
#include "Luau/TypeCheckLimits.h"
|
||||
#include "Luau/TypeChecker2.h"
|
||||
#include "Luau/TypeFwd.h"
|
||||
|
||||
#include <optional>
|
||||
@ -37,6 +36,8 @@ struct Unifier2
|
||||
DenseHashSet<std::pair<TypeId, TypeId>, TypePairHash> seenTypePairings{{nullptr, nullptr}};
|
||||
DenseHashSet<std::pair<TypePackId, TypePackId>, TypePairHash> seenTypePackPairings{{nullptr, nullptr}};
|
||||
|
||||
DenseHashMap<TypeId, std::vector<TypeId>> expandedFreeTypes{nullptr};
|
||||
|
||||
int recursionCount = 0;
|
||||
int recursionLimit = 0;
|
||||
|
||||
|
@ -8,7 +8,6 @@
|
||||
|
||||
#include <math.h>
|
||||
|
||||
LUAU_FASTFLAG(LuauFloorDivision);
|
||||
LUAU_FASTFLAG(LuauClipExtraHasEndProps);
|
||||
|
||||
namespace Luau
|
||||
@ -519,7 +518,6 @@ struct AstJsonEncoder : public AstVisitor
|
||||
case AstExprBinary::Div:
|
||||
return writeString("Div");
|
||||
case AstExprBinary::FloorDiv:
|
||||
LUAU_ASSERT(FFlag::LuauFloorDivision);
|
||||
return writeString("FloorDiv");
|
||||
case AstExprBinary::Mod:
|
||||
return writeString("Mod");
|
||||
|
@ -12,7 +12,6 @@ LUAU_FASTFLAG(DebugLuauReadWriteProperties)
|
||||
|
||||
LUAU_FASTFLAG(DebugLuauDeferredConstraintResolution)
|
||||
LUAU_FASTINTVARIABLE(LuauTypeCloneRecursionLimit, 300)
|
||||
LUAU_FASTFLAGVARIABLE(LuauCloneCyclicUnions, false)
|
||||
|
||||
LUAU_FASTFLAGVARIABLE(LuauStacklessTypeClone3, false)
|
||||
LUAU_FASTINTVARIABLE(LuauTypeCloneIterationLimit, 100'000)
|
||||
@ -782,33 +781,19 @@ void TypeCloner::operator()(const AnyType& t)
|
||||
|
||||
void TypeCloner::operator()(const UnionType& t)
|
||||
{
|
||||
if (FFlag::LuauCloneCyclicUnions)
|
||||
{
|
||||
// We're just using this FreeType as a placeholder until we've finished
|
||||
// cloning the parts of this union so it is okay that its bounds are
|
||||
// nullptr. We'll never indirect them.
|
||||
TypeId result = dest.addType(FreeType{nullptr, /*lowerBound*/ nullptr, /*upperBound*/ nullptr});
|
||||
seenTypes[typeId] = result;
|
||||
// We're just using this FreeType as a placeholder until we've finished
|
||||
// cloning the parts of this union so it is okay that its bounds are
|
||||
// nullptr. We'll never indirect them.
|
||||
TypeId result = dest.addType(FreeType{nullptr, /*lowerBound*/ nullptr, /*upperBound*/ nullptr});
|
||||
seenTypes[typeId] = result;
|
||||
|
||||
std::vector<TypeId> options;
|
||||
options.reserve(t.options.size());
|
||||
std::vector<TypeId> options;
|
||||
options.reserve(t.options.size());
|
||||
|
||||
for (TypeId ty : t.options)
|
||||
options.push_back(clone(ty, dest, cloneState));
|
||||
for (TypeId ty : t.options)
|
||||
options.push_back(clone(ty, dest, cloneState));
|
||||
|
||||
asMutable(result)->ty.emplace<UnionType>(std::move(options));
|
||||
}
|
||||
else
|
||||
{
|
||||
std::vector<TypeId> options;
|
||||
options.reserve(t.options.size());
|
||||
|
||||
for (TypeId ty : t.options)
|
||||
options.push_back(clone(ty, dest, cloneState));
|
||||
|
||||
TypeId result = dest.addType(UnionType{std::move(options)});
|
||||
seenTypes[typeId] = result;
|
||||
}
|
||||
asMutable(result)->ty.emplace<UnionType>(std::move(options));
|
||||
}
|
||||
|
||||
void TypeCloner::operator()(const IntersectionType& t)
|
||||
|
@ -7,6 +7,7 @@
|
||||
#include "Luau/Constraint.h"
|
||||
#include "Luau/ControlFlow.h"
|
||||
#include "Luau/DcrLogger.h"
|
||||
#include "Luau/DenseHash.h"
|
||||
#include "Luau/ModuleResolver.h"
|
||||
#include "Luau/RecursionCounter.h"
|
||||
#include "Luau/Refinement.h"
|
||||
@ -23,9 +24,7 @@
|
||||
LUAU_FASTINT(LuauCheckRecursionLimit);
|
||||
LUAU_FASTFLAG(DebugLuauLogSolverToJson);
|
||||
LUAU_FASTFLAG(DebugLuauMagicTypes);
|
||||
LUAU_FASTFLAG(LuauParseDeclareClassIndexer);
|
||||
LUAU_FASTFLAG(LuauLoopControlFlowAnalysis);
|
||||
LUAU_FASTFLAG(LuauFloorDivision);
|
||||
|
||||
namespace Luau
|
||||
{
|
||||
@ -206,6 +205,66 @@ ScopePtr ConstraintGenerator::childScope(AstNode* node, const ScopePtr& parent)
|
||||
return scope;
|
||||
}
|
||||
|
||||
static std::vector<DefId> flatten(const Phi* phi)
|
||||
{
|
||||
std::vector<DefId> result;
|
||||
|
||||
std::deque<DefId> queue{phi->operands.begin(), phi->operands.end()};
|
||||
DenseHashSet<const Def*> seen{nullptr};
|
||||
|
||||
while (!queue.empty())
|
||||
{
|
||||
DefId next = queue.front();
|
||||
queue.pop_front();
|
||||
|
||||
// Phi nodes should never be cyclic.
|
||||
LUAU_ASSERT(!seen.find(next));
|
||||
if (seen.find(next))
|
||||
continue;
|
||||
seen.insert(next);
|
||||
|
||||
if (get<Cell>(next))
|
||||
result.push_back(next);
|
||||
else if (auto phi = get<Phi>(next))
|
||||
queue.insert(queue.end(), phi->operands.begin(), phi->operands.end());
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
std::optional<TypeId> ConstraintGenerator::lookup(Scope* scope, DefId def)
|
||||
{
|
||||
if (get<Cell>(def))
|
||||
return scope->lookup(def);
|
||||
if (auto phi = get<Phi>(def))
|
||||
{
|
||||
if (auto found = scope->lookup(def))
|
||||
return *found;
|
||||
|
||||
TypeId res = builtinTypes->neverType;
|
||||
|
||||
for (DefId operand : flatten(phi))
|
||||
{
|
||||
// `scope->lookup(operand)` may return nothing because it could be a phi node of globals, but one of
|
||||
// the operand of that global has never been assigned a type, and so it should be an error.
|
||||
// e.g.
|
||||
// ```
|
||||
// if foo() then
|
||||
// g = 5
|
||||
// end
|
||||
// -- `g` here is a phi node of the assignment to `g`, or the original revision of `g` before the branch.
|
||||
// ```
|
||||
TypeId ty = scope->lookup(operand).value_or(builtinTypes->errorRecoveryType());
|
||||
res = simplifyUnion(builtinTypes, arena, res, ty).result;
|
||||
}
|
||||
|
||||
scope->lvalueTypes[def] = res;
|
||||
return res;
|
||||
}
|
||||
else
|
||||
ice->ice("ConstraintGenerator::lookup is inexhaustive?");
|
||||
}
|
||||
|
||||
NotNull<Constraint> ConstraintGenerator::addConstraint(const ScopePtr& scope, const Location& location, ConstraintV cv)
|
||||
{
|
||||
return NotNull{constraints.emplace_back(new Constraint{NotNull{scope.get()}, location, std::move(cv)}).get()};
|
||||
@ -393,7 +452,7 @@ void ConstraintGenerator::applyRefinements(const ScopePtr& scope, Location locat
|
||||
|
||||
for (auto& [def, partition] : refinements)
|
||||
{
|
||||
if (std::optional<TypeId> defTy = scope->lookup(def))
|
||||
if (std::optional<TypeId> defTy = lookup(scope.get(), def))
|
||||
{
|
||||
TypeId ty = *defTy;
|
||||
if (partition.shouldAppendNilType)
|
||||
@ -811,10 +870,10 @@ ControlFlow ConstraintGenerator::visit(const ScopePtr& scope, AstStatFunction* f
|
||||
Checkpoint start = checkpoint(this);
|
||||
FunctionSignature sig = checkFunctionSignature(scope, function->func, /* expectedType */ std::nullopt, function->name->location);
|
||||
|
||||
std::unordered_set<Constraint*> excludeList;
|
||||
DenseHashSet<Constraint*> excludeList{nullptr};
|
||||
|
||||
DefId def = dfg->getDef(function->name);
|
||||
std::optional<TypeId> existingFunctionTy = scope->lookupLValue(def);
|
||||
std::optional<TypeId> existingFunctionTy = scope->lookup(def);
|
||||
|
||||
if (AstExprLocal* localName = function->name->as<AstExprLocal>())
|
||||
{
|
||||
@ -880,7 +939,7 @@ ControlFlow ConstraintGenerator::visit(const ScopePtr& scope, AstStatFunction* f
|
||||
|
||||
Constraint* previous = nullptr;
|
||||
forEachConstraint(start, end, this, [&c, &excludeList, &previous](const ConstraintPtr& constraint) {
|
||||
if (!excludeList.count(constraint.get()))
|
||||
if (!excludeList.contains(constraint.get()))
|
||||
c->dependencies.push_back(NotNull{constraint.get()});
|
||||
|
||||
if (auto psc = get<PackSubtypeConstraint>(*constraint); psc && psc->returns)
|
||||
@ -918,7 +977,11 @@ ControlFlow ConstraintGenerator::visit(const ScopePtr& scope, AstStatBlock* bloc
|
||||
ScopePtr innerScope = childScope(block, scope);
|
||||
|
||||
ControlFlow flow = visitBlockWithoutChildScope(innerScope, block);
|
||||
|
||||
// An AstStatBlock has linear control flow, i.e. one entry and one exit, so we can inherit
|
||||
// all the changes to the environment occurred by the statements in that block.
|
||||
scope->inheritRefinements(innerScope);
|
||||
scope->inheritAssignments(innerScope);
|
||||
|
||||
return flow;
|
||||
}
|
||||
@ -1000,6 +1063,11 @@ ControlFlow ConstraintGenerator::visit(const ScopePtr& scope, AstStatIf* ifState
|
||||
else if (thencf == ControlFlow::None && elsecf != ControlFlow::None)
|
||||
scope->inheritRefinements(thenScope);
|
||||
|
||||
if (thencf == ControlFlow::None)
|
||||
scope->inheritAssignments(thenScope);
|
||||
if (elsecf == ControlFlow::None)
|
||||
scope->inheritAssignments(elseScope);
|
||||
|
||||
if (FFlag::LuauLoopControlFlowAnalysis && thencf == elsecf)
|
||||
return thencf;
|
||||
else if (matches(thencf, ControlFlow::Returns | ControlFlow::Throws) && matches(elsecf, ControlFlow::Returns | ControlFlow::Throws))
|
||||
@ -1098,7 +1166,7 @@ static bool isMetamethod(const Name& name)
|
||||
return name == "__index" || name == "__newindex" || name == "__call" || name == "__concat" || name == "__unm" || name == "__add" ||
|
||||
name == "__sub" || name == "__mul" || name == "__div" || name == "__mod" || name == "__pow" || name == "__tostring" ||
|
||||
name == "__metatable" || name == "__eq" || name == "__lt" || name == "__le" || name == "__mode" || name == "__iter" || name == "__len" ||
|
||||
(FFlag::LuauFloorDivision && name == "__idiv");
|
||||
name == "__idiv";
|
||||
}
|
||||
|
||||
ControlFlow ConstraintGenerator::visit(const ScopePtr& scope, AstStatDeclareClass* declaredClass)
|
||||
@ -1140,7 +1208,7 @@ ControlFlow ConstraintGenerator::visit(const ScopePtr& scope, AstStatDeclareClas
|
||||
|
||||
scope->exportedTypeBindings[className] = TypeFun{{}, classTy};
|
||||
|
||||
if (FFlag::LuauParseDeclareClassIndexer && declaredClass->indexer)
|
||||
if (declaredClass->indexer)
|
||||
{
|
||||
RecursionCounter counter{&recursionCount};
|
||||
|
||||
@ -1645,12 +1713,12 @@ Inference ConstraintGenerator::check(const ScopePtr& scope, AstExprLocal* local)
|
||||
|
||||
// if we have a refinement key, we can look up its type.
|
||||
if (key)
|
||||
maybeTy = scope->lookup(key->def);
|
||||
maybeTy = lookup(scope.get(), key->def);
|
||||
|
||||
// if the current def doesn't have a type, we might be doing a compound assignment
|
||||
// and therefore might need to look at the rvalue def instead.
|
||||
if (!maybeTy && rvalueDef)
|
||||
maybeTy = scope->lookup(*rvalueDef);
|
||||
maybeTy = lookup(scope.get(), *rvalueDef);
|
||||
|
||||
if (maybeTy)
|
||||
{
|
||||
@ -1676,11 +1744,11 @@ Inference ConstraintGenerator::check(const ScopePtr& scope, AstExprGlobal* globa
|
||||
/* prepopulateGlobalScope() has already added all global functions to the environment by this point, so any
|
||||
* global that is not already in-scope is definitely an unknown symbol.
|
||||
*/
|
||||
if (auto ty = scope->lookup(def))
|
||||
if (auto ty = lookup(scope.get(), def))
|
||||
return Inference{*ty, refinementArena.proposition(key, builtinTypes->truthyType)};
|
||||
else if (auto ty = scope->lookup(global->name))
|
||||
{
|
||||
rootScope->rvalueRefinements[key->def] = *ty;
|
||||
rootScope->lvalueTypes[def] = *ty;
|
||||
return Inference{*ty, refinementArena.proposition(key, builtinTypes->truthyType)};
|
||||
}
|
||||
else
|
||||
@ -1698,7 +1766,7 @@ Inference ConstraintGenerator::check(const ScopePtr& scope, AstExprIndexName* in
|
||||
const RefinementKey* key = dfg->getRefinementKey(indexName);
|
||||
if (key)
|
||||
{
|
||||
if (auto ty = scope->lookup(key->def))
|
||||
if (auto ty = lookup(scope.get(), key->def))
|
||||
return Inference{*ty, refinementArena.proposition(key, builtinTypes->truthyType)};
|
||||
|
||||
scope->rvalueRefinements[key->def] = result;
|
||||
@ -1721,7 +1789,7 @@ Inference ConstraintGenerator::check(const ScopePtr& scope, AstExprIndexExpr* in
|
||||
const RefinementKey* key = dfg->getRefinementKey(indexExpr);
|
||||
if (key)
|
||||
{
|
||||
if (auto ty = scope->lookup(key->def))
|
||||
if (auto ty = lookup(scope.get(), key->def))
|
||||
return Inference{*ty, refinementArena.proposition(key, builtinTypes->truthyType)};
|
||||
|
||||
scope->rvalueRefinements[key->def] = result;
|
||||
@ -2063,6 +2131,8 @@ std::tuple<TypeId, TypeId, RefinementId> ConstraintGenerator::checkBinary(
|
||||
discriminantTy = builtinTypes->booleanType;
|
||||
else if (typeguard->type == "thread")
|
||||
discriminantTy = builtinTypes->threadType;
|
||||
else if (typeguard->type == "buffer")
|
||||
discriminantTy = builtinTypes->bufferType;
|
||||
else if (typeguard->type == "table")
|
||||
discriminantTy = augmentForErrorSupression(builtinTypes->tableType);
|
||||
else if (typeguard->type == "function")
|
||||
@ -2152,18 +2222,11 @@ std::optional<TypeId> ConstraintGenerator::checkLValue(const ScopePtr& scope, As
|
||||
*/
|
||||
std::optional<TypeId> annotatedTy = scope->lookup(local->local);
|
||||
if (annotatedTy)
|
||||
{
|
||||
addConstraint(scope, local->location, SubtypeConstraint{assignedTy, *annotatedTy});
|
||||
return annotatedTy;
|
||||
}
|
||||
else if (auto it = inferredBindings.find(local->local); it == inferredBindings.end())
|
||||
ice->ice("Cannot find AstLocal* in either Scope::bindings or inferredBindings?");
|
||||
|
||||
/*
|
||||
* As a safety measure, we'll assert that no type has yet been ascribed to
|
||||
* the corresponding def. We'll populate this when we generate
|
||||
* constraints for assignment and compound assignment statements.
|
||||
*/
|
||||
LUAU_ASSERT(!scope->lookupLValue(dfg->getDef(local)));
|
||||
return std::nullopt;
|
||||
return annotatedTy;
|
||||
}
|
||||
|
||||
std::optional<TypeId> ConstraintGenerator::checkLValue(const ScopePtr& scope, AstExprGlobal* global, TypeId assignedTy)
|
||||
|
@ -2,13 +2,11 @@
|
||||
|
||||
#include "Luau/Anyification.h"
|
||||
#include "Luau/ApplyTypeFunction.h"
|
||||
#include "Luau/Clone.h"
|
||||
#include "Luau/Common.h"
|
||||
#include "Luau/ConstraintSolver.h"
|
||||
#include "Luau/DcrLogger.h"
|
||||
#include "Luau/Instantiation.h"
|
||||
#include "Luau/Location.h"
|
||||
#include "Luau/Metamethods.h"
|
||||
#include "Luau/ModuleResolver.h"
|
||||
#include "Luau/Quantify.h"
|
||||
#include "Luau/Simplify.h"
|
||||
@ -17,12 +15,11 @@
|
||||
#include "Luau/Type.h"
|
||||
#include "Luau/TypeFamily.h"
|
||||
#include "Luau/TypeUtils.h"
|
||||
#include "Luau/Unifier.h"
|
||||
#include "Luau/Unifier2.h"
|
||||
#include "Luau/VisitType.h"
|
||||
#include <utility>
|
||||
|
||||
LUAU_FASTFLAGVARIABLE(DebugLuauLogSolver, false);
|
||||
LUAU_FASTFLAG(LuauFloorDivision);
|
||||
|
||||
namespace Luau
|
||||
{
|
||||
@ -1103,6 +1100,12 @@ bool ConstraintSolver::tryDispatch(const FunctionCallConstraint& c, NotNull<cons
|
||||
|
||||
const bool occursCheckPassed = u2.unify(fn, inferredTy);
|
||||
|
||||
for (const auto& [expanded, additions] : u2.expandedFreeTypes)
|
||||
{
|
||||
for (TypeId addition : additions)
|
||||
upperBoundContributors[expanded].push_back(std::make_pair(constraint->location, addition));
|
||||
}
|
||||
|
||||
if (occursCheckPassed && c.callSite)
|
||||
(*c.astOverloadResolvedTypes)[c.callSite] = inferredTy;
|
||||
|
||||
@ -1490,7 +1493,7 @@ namespace
|
||||
*/
|
||||
struct FindRefineConstraintBlockers : TypeOnceVisitor
|
||||
{
|
||||
std::unordered_set<TypeId> found;
|
||||
DenseHashSet<TypeId> found{nullptr};
|
||||
bool visit(TypeId ty, const BlockedType&) override
|
||||
{
|
||||
found.insert(ty);
|
||||
@ -1905,15 +1908,16 @@ bool ConstraintSolver::tryDispatchIterableFunction(
|
||||
std::pair<std::vector<TypeId>, std::optional<TypeId>> ConstraintSolver::lookupTableProp(
|
||||
TypeId subjectType, const std::string& propName, bool suppressSimplification)
|
||||
{
|
||||
std::unordered_set<TypeId> seen;
|
||||
DenseHashSet<TypeId> seen{nullptr};
|
||||
return lookupTableProp(subjectType, propName, suppressSimplification, seen);
|
||||
}
|
||||
|
||||
std::pair<std::vector<TypeId>, std::optional<TypeId>> ConstraintSolver::lookupTableProp(
|
||||
TypeId subjectType, const std::string& propName, bool suppressSimplification, std::unordered_set<TypeId>& seen)
|
||||
TypeId subjectType, const std::string& propName, bool suppressSimplification, DenseHashSet<TypeId>& seen)
|
||||
{
|
||||
if (!seen.insert(subjectType).second)
|
||||
if (seen.contains(subjectType))
|
||||
return {};
|
||||
seen.insert(subjectType);
|
||||
|
||||
subjectType = follow(subjectType);
|
||||
|
||||
@ -2073,7 +2077,15 @@ bool ConstraintSolver::tryUnify(NotNull<const Constraint> constraint, TID subTy,
|
||||
|
||||
bool success = u2.unify(subTy, superTy);
|
||||
|
||||
if (!success)
|
||||
if (success)
|
||||
{
|
||||
for (const auto& [expanded, additions] : u2.expandedFreeTypes)
|
||||
{
|
||||
for (TypeId addition : additions)
|
||||
upperBoundContributors[expanded].push_back(std::make_pair(constraint->location, addition));
|
||||
}
|
||||
}
|
||||
else
|
||||
{
|
||||
// Unification only fails when doing so would fail the occurs check.
|
||||
// ie create a self-bound type or a cyclic type pack
|
||||
@ -2320,6 +2332,12 @@ ErrorVec ConstraintSolver::unify(NotNull<Scope> scope, Location location, TypePa
|
||||
|
||||
u.unify(subPack, superPack);
|
||||
|
||||
for (const auto& [expanded, additions] : u.expandedFreeTypes)
|
||||
{
|
||||
for (TypeId addition : additions)
|
||||
upperBoundContributors[expanded].push_back(std::make_pair(location, addition));
|
||||
}
|
||||
|
||||
unblock(subPack, Location{});
|
||||
unblock(superPack, Location{});
|
||||
|
||||
|
@ -11,10 +11,13 @@
|
||||
|
||||
LUAU_FASTFLAG(DebugLuauFreezeArena)
|
||||
LUAU_FASTFLAG(DebugLuauDeferredConstraintResolution)
|
||||
LUAU_FASTFLAG(LuauLoopControlFlowAnalysis)
|
||||
|
||||
namespace Luau
|
||||
{
|
||||
|
||||
bool doesCallError(const AstExprCall* call); // TypeInfer.cpp
|
||||
|
||||
const RefinementKey* RefinementKeyArena::leaf(DefId def)
|
||||
{
|
||||
return allocator.allocate(RefinementKey{nullptr, def, std::nullopt});
|
||||
@ -82,9 +85,9 @@ std::optional<DefId> DfgScope::lookup(DefId def, const std::string& key) const
|
||||
{
|
||||
for (const DfgScope* current = this; current; current = current->parent)
|
||||
{
|
||||
if (auto map = props.find(def))
|
||||
if (auto props = current->props.find(def))
|
||||
{
|
||||
if (auto it = map->find(key); it != map->end())
|
||||
if (auto it = props->find(key); it != props->end())
|
||||
return NotNull{it->second};
|
||||
}
|
||||
}
|
||||
@ -92,6 +95,47 @@ std::optional<DefId> DfgScope::lookup(DefId def, const std::string& key) const
|
||||
return std::nullopt;
|
||||
}
|
||||
|
||||
void DfgScope::inherit(const DfgScope* childScope)
|
||||
{
|
||||
for (const auto& [k, a] : childScope->bindings)
|
||||
{
|
||||
if (lookup(k))
|
||||
bindings[k] = a;
|
||||
}
|
||||
|
||||
for (const auto& [k1, a1] : childScope->props)
|
||||
{
|
||||
for (const auto& [k2, a2] : a1)
|
||||
props[k1][k2] = a2;
|
||||
}
|
||||
}
|
||||
|
||||
bool DfgScope::canUpdateDefinition(Symbol symbol) const
|
||||
{
|
||||
for (const DfgScope* current = this; current; current = current->parent)
|
||||
{
|
||||
if (current->bindings.find(symbol))
|
||||
return true;
|
||||
else if (current->isLoopScope)
|
||||
return false;
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
bool DfgScope::canUpdateDefinition(DefId def, const std::string& key) const
|
||||
{
|
||||
for (const DfgScope* current = this; current; current = current->parent)
|
||||
{
|
||||
if (auto props = current->props.find(def))
|
||||
return true;
|
||||
else if (current->isLoopScope)
|
||||
return false;
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
DataFlowGraph DataFlowGraphBuilder::build(AstStatBlock* block, NotNull<InternalErrorReporter> handle)
|
||||
{
|
||||
LUAU_ASSERT(FFlag::DebugLuauDeferredConstraintResolution);
|
||||
@ -110,24 +154,54 @@ DataFlowGraph DataFlowGraphBuilder::build(AstStatBlock* block, NotNull<InternalE
|
||||
return std::move(builder.graph);
|
||||
}
|
||||
|
||||
DfgScope* DataFlowGraphBuilder::childScope(DfgScope* scope)
|
||||
DfgScope* DataFlowGraphBuilder::childScope(DfgScope* scope, bool isLoopScope)
|
||||
{
|
||||
return scopes.emplace_back(new DfgScope{scope}).get();
|
||||
return scopes.emplace_back(new DfgScope{scope, isLoopScope}).get();
|
||||
}
|
||||
|
||||
void DataFlowGraphBuilder::visit(DfgScope* scope, AstStatBlock* b)
|
||||
void DataFlowGraphBuilder::join(DfgScope* p, DfgScope* a, DfgScope* b)
|
||||
{
|
||||
// TODO TODO FIXME IMPLEMENT JOIN LOGIC FOR PROPERTIES
|
||||
|
||||
for (const auto& [sym, def1] : a->bindings)
|
||||
{
|
||||
if (auto def2 = b->bindings.find(sym))
|
||||
p->bindings[sym] = defArena->phi(NotNull{def1}, NotNull{*def2});
|
||||
else if (auto def2 = p->bindings.find(sym))
|
||||
p->bindings[sym] = defArena->phi(NotNull{def1}, NotNull{*def2});
|
||||
}
|
||||
|
||||
for (const auto& [sym, def1] : b->bindings)
|
||||
{
|
||||
if (a->bindings.find(sym))
|
||||
continue;
|
||||
else if (auto def2 = p->bindings.find(sym))
|
||||
p->bindings[sym] = defArena->phi(NotNull{def1}, NotNull{*def2});
|
||||
}
|
||||
}
|
||||
|
||||
ControlFlow DataFlowGraphBuilder::visit(DfgScope* scope, AstStatBlock* b)
|
||||
{
|
||||
DfgScope* child = childScope(scope);
|
||||
return visitBlockWithoutChildScope(child, b);
|
||||
ControlFlow cf = visitBlockWithoutChildScope(child, b);
|
||||
scope->inherit(child);
|
||||
return cf;
|
||||
}
|
||||
|
||||
void DataFlowGraphBuilder::visitBlockWithoutChildScope(DfgScope* scope, AstStatBlock* b)
|
||||
ControlFlow DataFlowGraphBuilder::visitBlockWithoutChildScope(DfgScope* scope, AstStatBlock* b)
|
||||
{
|
||||
for (AstStat* s : b->body)
|
||||
visit(scope, s);
|
||||
std::optional<ControlFlow> firstControlFlow;
|
||||
for (AstStat* stat : b->body)
|
||||
{
|
||||
ControlFlow cf = visit(scope, stat);
|
||||
if (cf != ControlFlow::None && !firstControlFlow)
|
||||
firstControlFlow = cf;
|
||||
}
|
||||
|
||||
return firstControlFlow.value_or(ControlFlow::None);
|
||||
}
|
||||
|
||||
void DataFlowGraphBuilder::visit(DfgScope* scope, AstStat* s)
|
||||
ControlFlow DataFlowGraphBuilder::visit(DfgScope* scope, AstStat* s)
|
||||
{
|
||||
if (auto b = s->as<AstStatBlock>())
|
||||
return visit(scope, b);
|
||||
@ -173,56 +247,85 @@ void DataFlowGraphBuilder::visit(DfgScope* scope, AstStat* s)
|
||||
handle->ice("Unknown AstStat in DataFlowGraphBuilder::visit");
|
||||
}
|
||||
|
||||
void DataFlowGraphBuilder::visit(DfgScope* scope, AstStatIf* i)
|
||||
ControlFlow DataFlowGraphBuilder::visit(DfgScope* scope, AstStatIf* i)
|
||||
{
|
||||
// TODO: type states and control flow analysis
|
||||
visitExpr(scope, i->condition);
|
||||
visit(scope, i->thenbody);
|
||||
|
||||
DfgScope* thenScope = childScope(scope);
|
||||
DfgScope* elseScope = childScope(scope);
|
||||
|
||||
ControlFlow thencf = visit(thenScope, i->thenbody);
|
||||
ControlFlow elsecf = ControlFlow::None;
|
||||
if (i->elsebody)
|
||||
visit(scope, i->elsebody);
|
||||
elsecf = visit(elseScope, i->elsebody);
|
||||
|
||||
if (thencf != ControlFlow::None && elsecf == ControlFlow::None)
|
||||
join(scope, scope, elseScope);
|
||||
else if (thencf == ControlFlow::None && elsecf != ControlFlow::None)
|
||||
join(scope, thenScope, scope);
|
||||
else if ((thencf | elsecf) == ControlFlow::None)
|
||||
join(scope, thenScope, elseScope);
|
||||
|
||||
if (FFlag::LuauLoopControlFlowAnalysis && thencf == elsecf)
|
||||
return thencf;
|
||||
else if (matches(thencf, ControlFlow::Returns | ControlFlow::Throws) && matches(elsecf, ControlFlow::Returns | ControlFlow::Throws))
|
||||
return ControlFlow::Returns;
|
||||
else
|
||||
return ControlFlow::None;
|
||||
}
|
||||
|
||||
void DataFlowGraphBuilder::visit(DfgScope* scope, AstStatWhile* w)
|
||||
ControlFlow DataFlowGraphBuilder::visit(DfgScope* scope, AstStatWhile* w)
|
||||
{
|
||||
// TODO(controlflow): entry point has a back edge from exit point
|
||||
DfgScope* whileScope = childScope(scope);
|
||||
DfgScope* whileScope = childScope(scope, /*isLoopScope=*/true);
|
||||
visitExpr(whileScope, w->condition);
|
||||
visit(whileScope, w->body);
|
||||
|
||||
scope->inherit(whileScope);
|
||||
|
||||
return ControlFlow::None;
|
||||
}
|
||||
|
||||
void DataFlowGraphBuilder::visit(DfgScope* scope, AstStatRepeat* r)
|
||||
ControlFlow DataFlowGraphBuilder::visit(DfgScope* scope, AstStatRepeat* r)
|
||||
{
|
||||
// TODO(controlflow): entry point has a back edge from exit point
|
||||
DfgScope* repeatScope = childScope(scope); // TODO: loop scope.
|
||||
DfgScope* repeatScope = childScope(scope, /*isLoopScope=*/true);
|
||||
visitBlockWithoutChildScope(repeatScope, r->body);
|
||||
visitExpr(repeatScope, r->condition);
|
||||
|
||||
scope->inherit(repeatScope);
|
||||
|
||||
return ControlFlow::None;
|
||||
}
|
||||
|
||||
void DataFlowGraphBuilder::visit(DfgScope* scope, AstStatBreak* b)
|
||||
ControlFlow DataFlowGraphBuilder::visit(DfgScope* scope, AstStatBreak* b)
|
||||
{
|
||||
// TODO: Control flow analysis
|
||||
return; // ok
|
||||
return ControlFlow::Breaks;
|
||||
}
|
||||
|
||||
void DataFlowGraphBuilder::visit(DfgScope* scope, AstStatContinue* c)
|
||||
ControlFlow DataFlowGraphBuilder::visit(DfgScope* scope, AstStatContinue* c)
|
||||
{
|
||||
// TODO: Control flow analysis
|
||||
return; // ok
|
||||
return ControlFlow::Continues;
|
||||
}
|
||||
|
||||
void DataFlowGraphBuilder::visit(DfgScope* scope, AstStatReturn* r)
|
||||
ControlFlow DataFlowGraphBuilder::visit(DfgScope* scope, AstStatReturn* r)
|
||||
{
|
||||
// TODO: Control flow analysis
|
||||
for (AstExpr* e : r->list)
|
||||
visitExpr(scope, e);
|
||||
|
||||
return ControlFlow::Returns;
|
||||
}
|
||||
|
||||
void DataFlowGraphBuilder::visit(DfgScope* scope, AstStatExpr* e)
|
||||
ControlFlow DataFlowGraphBuilder::visit(DfgScope* scope, AstStatExpr* e)
|
||||
{
|
||||
visitExpr(scope, e->expr);
|
||||
if (auto call = e->expr->as<AstExprCall>(); call && doesCallError(call))
|
||||
return ControlFlow::Throws;
|
||||
else
|
||||
return ControlFlow::None;
|
||||
}
|
||||
|
||||
void DataFlowGraphBuilder::visit(DfgScope* scope, AstStatLocal* l)
|
||||
ControlFlow DataFlowGraphBuilder::visit(DfgScope* scope, AstStatLocal* l)
|
||||
{
|
||||
// We're gonna need a `visitExprList` and `visitVariadicExpr` (function calls and `...`)
|
||||
std::vector<DefId> defs;
|
||||
@ -243,11 +346,13 @@ void DataFlowGraphBuilder::visit(DfgScope* scope, AstStatLocal* l)
|
||||
graph.localDefs[local] = def;
|
||||
scope->bindings[local] = def;
|
||||
}
|
||||
|
||||
return ControlFlow::None;
|
||||
}
|
||||
|
||||
void DataFlowGraphBuilder::visit(DfgScope* scope, AstStatFor* f)
|
||||
ControlFlow DataFlowGraphBuilder::visit(DfgScope* scope, AstStatFor* f)
|
||||
{
|
||||
DfgScope* forScope = childScope(scope); // TODO: loop scope.
|
||||
DfgScope* forScope = childScope(scope, /*isLoopScope=*/true);
|
||||
|
||||
visitExpr(scope, f->from);
|
||||
visitExpr(scope, f->to);
|
||||
@ -263,11 +368,15 @@ void DataFlowGraphBuilder::visit(DfgScope* scope, AstStatFor* f)
|
||||
|
||||
// TODO(controlflow): entry point has a back edge from exit point
|
||||
visit(forScope, f->body);
|
||||
|
||||
scope->inherit(forScope);
|
||||
|
||||
return ControlFlow::None;
|
||||
}
|
||||
|
||||
void DataFlowGraphBuilder::visit(DfgScope* scope, AstStatForIn* f)
|
||||
ControlFlow DataFlowGraphBuilder::visit(DfgScope* scope, AstStatForIn* f)
|
||||
{
|
||||
DfgScope* forScope = childScope(scope); // TODO: loop scope.
|
||||
DfgScope* forScope = childScope(scope, /*isLoopScope=*/true);
|
||||
|
||||
for (AstLocal* local : f->vars)
|
||||
{
|
||||
@ -285,9 +394,13 @@ void DataFlowGraphBuilder::visit(DfgScope* scope, AstStatForIn* f)
|
||||
visitExpr(forScope, e);
|
||||
|
||||
visit(forScope, f->body);
|
||||
|
||||
scope->inherit(forScope);
|
||||
|
||||
return ControlFlow::None;
|
||||
}
|
||||
|
||||
void DataFlowGraphBuilder::visit(DfgScope* scope, AstStatAssign* a)
|
||||
ControlFlow DataFlowGraphBuilder::visit(DfgScope* scope, AstStatAssign* a)
|
||||
{
|
||||
std::vector<DefId> defs;
|
||||
defs.reserve(a->values.size);
|
||||
@ -299,9 +412,11 @@ void DataFlowGraphBuilder::visit(DfgScope* scope, AstStatAssign* a)
|
||||
AstExpr* v = a->vars.data[i];
|
||||
visitLValue(scope, v, i < defs.size() ? defs[i] : defArena->freshCell());
|
||||
}
|
||||
|
||||
return ControlFlow::None;
|
||||
}
|
||||
|
||||
void DataFlowGraphBuilder::visit(DfgScope* scope, AstStatCompoundAssign* c)
|
||||
ControlFlow DataFlowGraphBuilder::visit(DfgScope* scope, AstStatCompoundAssign* c)
|
||||
{
|
||||
// TODO: This needs revisiting because this is incorrect. The `c->var` part is both being read and written to,
|
||||
// but the `c->var` only has one pointer address, so we need to come up with a way to store both.
|
||||
@ -312,9 +427,11 @@ void DataFlowGraphBuilder::visit(DfgScope* scope, AstStatCompoundAssign* c)
|
||||
// We can't just visit `c->var` as a rvalue and then separately traverse `c->var` as an lvalue, since that's O(n^2).
|
||||
DefId def = visitExpr(scope, c->value).def;
|
||||
visitLValue(scope, c->var, def, /* isCompoundAssignment */ true);
|
||||
|
||||
return ControlFlow::None;
|
||||
}
|
||||
|
||||
void DataFlowGraphBuilder::visit(DfgScope* scope, AstStatFunction* f)
|
||||
ControlFlow DataFlowGraphBuilder::visit(DfgScope* scope, AstStatFunction* f)
|
||||
{
|
||||
// In the old solver, we assumed that the name of the function is always a function in the body
|
||||
// but this isn't true, e.g. the following example will print `5`, not a function address.
|
||||
@ -329,34 +446,42 @@ void DataFlowGraphBuilder::visit(DfgScope* scope, AstStatFunction* f)
|
||||
DefId prototype = defArena->freshCell();
|
||||
visitLValue(scope, f->name, prototype);
|
||||
visitExpr(scope, f->func);
|
||||
|
||||
return ControlFlow::None;
|
||||
}
|
||||
|
||||
void DataFlowGraphBuilder::visit(DfgScope* scope, AstStatLocalFunction* l)
|
||||
ControlFlow DataFlowGraphBuilder::visit(DfgScope* scope, AstStatLocalFunction* l)
|
||||
{
|
||||
DefId def = defArena->freshCell();
|
||||
graph.localDefs[l->name] = def;
|
||||
scope->bindings[l->name] = def;
|
||||
visitExpr(scope, l->func);
|
||||
|
||||
return ControlFlow::None;
|
||||
}
|
||||
|
||||
void DataFlowGraphBuilder::visit(DfgScope* scope, AstStatTypeAlias* t)
|
||||
ControlFlow DataFlowGraphBuilder::visit(DfgScope* scope, AstStatTypeAlias* t)
|
||||
{
|
||||
DfgScope* unreachable = childScope(scope);
|
||||
visitGenerics(unreachable, t->generics);
|
||||
visitGenericPacks(unreachable, t->genericPacks);
|
||||
visitType(unreachable, t->type);
|
||||
|
||||
return ControlFlow::None;
|
||||
}
|
||||
|
||||
void DataFlowGraphBuilder::visit(DfgScope* scope, AstStatDeclareGlobal* d)
|
||||
ControlFlow DataFlowGraphBuilder::visit(DfgScope* scope, AstStatDeclareGlobal* d)
|
||||
{
|
||||
DefId def = defArena->freshCell();
|
||||
graph.declaredDefs[d] = def;
|
||||
scope->bindings[d->name] = def;
|
||||
|
||||
visitType(scope, d->type);
|
||||
|
||||
return ControlFlow::None;
|
||||
}
|
||||
|
||||
void DataFlowGraphBuilder::visit(DfgScope* scope, AstStatDeclareFunction* d)
|
||||
ControlFlow DataFlowGraphBuilder::visit(DfgScope* scope, AstStatDeclareFunction* d)
|
||||
{
|
||||
DefId def = defArena->freshCell();
|
||||
graph.declaredDefs[d] = def;
|
||||
@ -367,9 +492,11 @@ void DataFlowGraphBuilder::visit(DfgScope* scope, AstStatDeclareFunction* d)
|
||||
visitGenericPacks(unreachable, d->genericPacks);
|
||||
visitTypeList(unreachable, d->params);
|
||||
visitTypeList(unreachable, d->retTypes);
|
||||
|
||||
return ControlFlow::None;
|
||||
}
|
||||
|
||||
void DataFlowGraphBuilder::visit(DfgScope* scope, AstStatDeclareClass* d)
|
||||
ControlFlow DataFlowGraphBuilder::visit(DfgScope* scope, AstStatDeclareClass* d)
|
||||
{
|
||||
// This declaration does not "introduce" any bindings in value namespace,
|
||||
// so there's no symbolic value to begin with. We'll traverse the properties
|
||||
@ -377,19 +504,30 @@ void DataFlowGraphBuilder::visit(DfgScope* scope, AstStatDeclareClass* d)
|
||||
DfgScope* unreachable = childScope(scope);
|
||||
for (AstDeclaredClassProp prop : d->props)
|
||||
visitType(unreachable, prop.ty);
|
||||
|
||||
return ControlFlow::None;
|
||||
}
|
||||
|
||||
void DataFlowGraphBuilder::visit(DfgScope* scope, AstStatError* error)
|
||||
ControlFlow DataFlowGraphBuilder::visit(DfgScope* scope, AstStatError* error)
|
||||
{
|
||||
DfgScope* unreachable = childScope(scope);
|
||||
for (AstStat* s : error->statements)
|
||||
visit(unreachable, s);
|
||||
for (AstExpr* e : error->expressions)
|
||||
visitExpr(unreachable, e);
|
||||
|
||||
return ControlFlow::None;
|
||||
}
|
||||
|
||||
DataFlowResult DataFlowGraphBuilder::visitExpr(DfgScope* scope, AstExpr* e)
|
||||
{
|
||||
// Some subexpressions could be visited two times. If we've already seen it, just extract it.
|
||||
if (auto def = graph.astDefs.find(e))
|
||||
{
|
||||
auto key = graph.astRefinementKeys.find(e);
|
||||
return {NotNull{*def}, key ? *key : nullptr};
|
||||
}
|
||||
|
||||
auto go = [&]() -> DataFlowResult {
|
||||
if (auto g = e->as<AstExprGroup>())
|
||||
return visitExpr(scope, g);
|
||||
@ -481,11 +619,14 @@ DataFlowResult DataFlowGraphBuilder::visitExpr(DfgScope* scope, AstExprIndexName
auto [parentDef, parentKey] = visitExpr(scope, i->expr);

std::string index = i->index.value;
auto& propDef = moduleScope->props[parentDef][index];
if (!propDef)
propDef = defArena->freshCell();

return {NotNull{propDef}, keyArena->node(parentKey, NotNull{propDef}, index)};
if (auto propDef = scope->lookup(parentDef, index))
return {*propDef, keyArena->node(parentKey, *propDef, index)};
else
{
DefId def = defArena->freshCell();
scope->props[parentDef][index] = def;
return {def, keyArena->node(parentKey, def, index)};
}
}

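The rewrite above switches property definitions from "always allocate in `moduleScope`" to a lookup through the scope chain, with a fresh cell created in the current scope only on a miss. Below is a self-contained sketch of that lookup-or-create shape; `MiniScope`, `Def`, and `lookupOrCreate` are simplified stand-ins invented for illustration, not the real `DfgScope`/`DefId` types.

```cpp
#include <optional>
#include <string>
#include <unordered_map>

// Simplified stand-ins: a "def" is just an integer id, and a scope keeps
// per-parent-def property maps plus a pointer to its enclosing scope.
using Def = int;

struct MiniScope
{
    MiniScope* parent = nullptr;
    std::unordered_map<Def, std::unordered_map<std::string, Def>> props;

    // Walk outward through enclosing scopes looking for an existing def.
    std::optional<Def> lookup(Def parentDef, const std::string& index) const
    {
        for (const MiniScope* s = this; s; s = s->parent)
        {
            auto byParent = s->props.find(parentDef);
            if (byParent != s->props.end())
            {
                auto it = byParent->second.find(index);
                if (it != byParent->second.end())
                    return it->second;
            }
        }
        return std::nullopt;
    }
};

// Lookup-or-create: reuse an inherited def if any enclosing scope already has
// one, otherwise mint a fresh def and record it in the *current* scope only.
Def lookupOrCreate(MiniScope& scope, Def parentDef, const std::string& index, Def& freshCounter)
{
    if (std::optional<Def> existing = scope.lookup(parentDef, index))
        return *existing;

    Def fresh = ++freshCounter;
    scope.props[parentDef][index] = fresh;
    return fresh;
}
```

Recording the fresh def in the innermost scope, rather than the module scope, keeps a redefinition local to the scope that produced it.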
DataFlowResult DataFlowGraphBuilder::visitExpr(DfgScope* scope, AstExprIndexExpr* i)
|
||||
@ -496,11 +637,14 @@ DataFlowResult DataFlowGraphBuilder::visitExpr(DfgScope* scope, AstExprIndexExpr
|
||||
if (auto string = i->index->as<AstExprConstantString>())
|
||||
{
|
||||
std::string index{string->value.data, string->value.size};
|
||||
auto& propDef = moduleScope->props[parentDef][index];
|
||||
if (!propDef)
|
||||
propDef = defArena->freshCell();
|
||||
|
||||
return {NotNull{propDef}, keyArena->node(parentKey, NotNull{propDef}, index)};
|
||||
if (auto propDef = scope->lookup(parentDef, index))
|
||||
return {*propDef, keyArena->node(parentKey, *propDef, index)};
|
||||
else
|
||||
{
|
||||
DefId def = defArena->freshCell();
|
||||
scope->props[parentDef][index] = def;
|
||||
return {def, keyArena->node(parentKey, def, index)};
|
||||
}
|
||||
}
|
||||
|
||||
return {defArena->freshCell(/* subscripted= */true), nullptr};
|
||||
@ -636,9 +780,14 @@ void DataFlowGraphBuilder::visitLValue(DfgScope* scope, AstExprLocal* l, DefId i
}

// In order to avoid alias tracking, we need to clip the reference to the parent def.
DefId updated = defArena->freshCell(containsSubscriptedDefinition(incomingDef));
graph.astDefs[l] = updated;
scope->bindings[l->local] = updated;
if (scope->canUpdateDefinition(l->local))
{
DefId updated = defArena->freshCell(containsSubscriptedDefinition(incomingDef));
graph.astDefs[l] = updated;
scope->bindings[l->local] = updated;
}
else
visitExpr(scope, static_cast<AstExpr*>(l));
}

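The new `canUpdateDefinition` guard means an assignment only mints a fresh definition when the binding may be updated from the current scope; otherwise the l-value is treated as an ordinary read. A minimal sketch of that gate, with `MiniDfg` and `canUpdate` as hypothetical stand-ins for the real `DfgScope` machinery:

```cpp
#include <functional>
#include <string>
#include <unordered_map>

using Def = int;

struct MiniDfg
{
    std::unordered_map<std::string, Def> bindings;
    Def freshCounter = 0;

    // Hypothetical predicate standing in for DfgScope::canUpdateDefinition.
    std::function<bool(const std::string&)> canUpdate;

    // Assignment handler: only mint a new def when the binding is updatable
    // from here; otherwise fall back to the binding's existing def, as a
    // plain read would.
    Def visitAssign(const std::string& name)
    {
        if (canUpdate && canUpdate(name))
        {
            Def updated = ++freshCounter; // analogous to defArena->freshCell(...)
            bindings[name] = updated;     // rebind the name to the new def
            return updated;
        }
        return bindings[name];
    }
};
```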
void DataFlowGraphBuilder::visitLValue(DfgScope* scope, AstExprGlobal* g, DefId incomingDef, bool isCompoundAssignment)
|
||||
@ -651,18 +800,28 @@ void DataFlowGraphBuilder::visitLValue(DfgScope* scope, AstExprGlobal* g, DefId
|
||||
}
|
||||
|
||||
// In order to avoid alias tracking, we need to clip the reference to the parent def.
|
||||
DefId updated = defArena->freshCell(containsSubscriptedDefinition(incomingDef));
|
||||
graph.astDefs[g] = updated;
|
||||
scope->bindings[g->name] = updated;
|
||||
if (scope->canUpdateDefinition(g->name))
|
||||
{
|
||||
DefId updated = defArena->freshCell(containsSubscriptedDefinition(incomingDef));
|
||||
graph.astDefs[g] = updated;
|
||||
scope->bindings[g->name] = updated;
|
||||
}
|
||||
else
|
||||
visitExpr(scope, static_cast<AstExpr*>(g));
|
||||
}
|
||||
|
||||
void DataFlowGraphBuilder::visitLValue(DfgScope* scope, AstExprIndexName* i, DefId incomingDef)
|
||||
{
|
||||
DefId parentDef = visitExpr(scope, i->expr).def;
|
||||
|
||||
DefId updated = defArena->freshCell(containsSubscriptedDefinition(incomingDef));
|
||||
graph.astDefs[i] = updated;
|
||||
scope->props[parentDef][i->index.value] = updated;
|
||||
if (scope->canUpdateDefinition(parentDef, i->index.value))
|
||||
{
|
||||
DefId updated = defArena->freshCell(containsSubscriptedDefinition(incomingDef));
|
||||
graph.astDefs[i] = updated;
|
||||
scope->props[parentDef][i->index.value] = updated;
|
||||
}
|
||||
else
|
||||
visitExpr(scope, static_cast<AstExpr*>(i));
|
||||
}
|
||||
|
||||
void DataFlowGraphBuilder::visitLValue(DfgScope* scope, AstExprIndexExpr* i, DefId incomingDef)
|
||||
@ -672,9 +831,14 @@ void DataFlowGraphBuilder::visitLValue(DfgScope* scope, AstExprIndexExpr* i, Def
|
||||
|
||||
if (auto string = i->index->as<AstExprConstantString>())
|
||||
{
|
||||
DefId updated = defArena->freshCell(containsSubscriptedDefinition(incomingDef));
|
||||
graph.astDefs[i] = updated;
|
||||
scope->props[parentDef][string->value.data] = updated;
|
||||
if (scope->canUpdateDefinition(parentDef, string->value.data))
|
||||
{
|
||||
DefId updated = defArena->freshCell(containsSubscriptedDefinition(incomingDef));
|
||||
graph.astDefs[i] = updated;
|
||||
scope->props[parentDef][string->value.data] = updated;
|
||||
}
|
||||
else
|
||||
visitExpr(scope, static_cast<AstExpr*>(i));
|
||||
}
|
||||
|
||||
graph.astDefs[i] = defArena->freshCell();
|
||||
|
@ -1,6 +1,9 @@
|
||||
// This file is part of the Luau programming language and is licensed under MIT License; see LICENSE.txt for details
|
||||
#include "Luau/Def.h"
|
||||
#include "Luau/Common.h"
|
||||
#include "Luau/DenseHash.h"
|
||||
|
||||
#include <deque>
|
||||
|
||||
namespace Luau
|
||||
{
|
||||
@ -9,8 +12,27 @@ bool containsSubscriptedDefinition(DefId def)
{
if (auto cell = get<Cell>(def))
return cell->subscripted;
else if (auto phi = get<Phi>(def))
{
std::deque<DefId> queue(begin(phi->operands), end(phi->operands));
DenseHashSet<const Def*> seen{nullptr};

LUAU_ASSERT(!"Phi nodes not implemented yet");
while (!queue.empty())
{
DefId next = queue.front();
queue.pop_front();

LUAU_ASSERT(!seen.find(next));
if (seen.find(next))
continue;
seen.insert(next);

if (auto cell_ = get<Cell>(next); cell_ && cell_->subscripted)
return true;
else if (auto phi_ = get<Phi>(next))
queue.insert(queue.end(), phi_->operands.begin(), phi_->operands.end());
}
}
return false;
}

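The loop added above is a worklist traversal over phi operands: a queue plus a seen-set guards against revisiting (operand graphs may contain cycles), and the walk exits early once any reachable cell is subscripted. The same shape in isolation, using a hypothetical `Node` type instead of the real `Def`/`Cell`/`Phi`:

```cpp
#include <deque>
#include <unordered_set>
#include <vector>

// Hypothetical node: either a leaf carrying a flag, or a merge point (the
// analogue of a phi) with operands. Not the real Luau types.
struct Node
{
    bool isLeaf = true;
    bool flag = false; // "subscripted" in the real code
    std::vector<const Node*> operands;
};

bool containsFlaggedLeaf(const Node* root)
{
    std::deque<const Node*> queue{root};
    std::unordered_set<const Node*> seen;

    while (!queue.empty())
    {
        const Node* next = queue.front();
        queue.pop_front();

        if (!seen.insert(next).second)
            continue; // already visited; merge graphs may be cyclic

        if (next->isLeaf && next->flag)
            return true; // early exit on the first flagged leaf
        if (!next->isLeaf)
            queue.insert(queue.end(), next->operands.begin(), next->operands.end());
    }
    return false;
}
```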
@ -19,4 +41,12 @@ DefId DefArena::freshCell(bool subscripted)
|
||||
return NotNull{allocator.allocate(Def{Cell{subscripted}})};
|
||||
}
|
||||
|
||||
DefId DefArena::phi(DefId a, DefId b)
|
||||
{
|
||||
if (a == b)
|
||||
return a;
|
||||
else
|
||||
return NotNull{allocator.allocate(Def{Phi{{a, b}}})};
|
||||
}
|
||||
|
||||
} // namespace Luau
|
||||
|
@ -2,11 +2,12 @@
|
||||
#include "Luau/BuiltinDefinitions.h"
|
||||
|
||||
LUAU_FASTFLAGVARIABLE(LuauBufferDefinitions, false)
|
||||
LUAU_FASTFLAGVARIABLE(LuauBufferTypeck, false)
|
||||
|
||||
namespace Luau
|
||||
{
|
||||
|
||||
static const std::string kBuiltinDefinitionBufferSrc = R"BUILTIN_SRC(
|
||||
static const std::string kBuiltinDefinitionBufferSrc_DEPRECATED = R"BUILTIN_SRC(
|
||||
|
||||
-- TODO: this will be replaced with a built-in primitive type
|
||||
declare class buffer end
|
||||
@ -40,6 +41,36 @@ declare buffer: {
|
||||
|
||||
)BUILTIN_SRC";
|
||||
|
||||
static const std::string kBuiltinDefinitionBufferSrc = R"BUILTIN_SRC(
|
||||
|
||||
declare buffer: {
|
||||
create: (size: number) -> buffer,
|
||||
fromstring: (str: string) -> buffer,
|
||||
tostring: (b: buffer) -> string,
|
||||
len: (b: buffer) -> number,
|
||||
copy: (target: buffer, targetOffset: number, source: buffer, sourceOffset: number?, count: number?) -> (),
|
||||
fill: (b: buffer, offset: number, value: number, count: number?) -> (),
|
||||
readi8: (b: buffer, offset: number) -> number,
|
||||
readu8: (b: buffer, offset: number) -> number,
|
||||
readi16: (b: buffer, offset: number) -> number,
|
||||
readu16: (b: buffer, offset: number) -> number,
|
||||
readi32: (b: buffer, offset: number) -> number,
|
||||
readu32: (b: buffer, offset: number) -> number,
|
||||
readf32: (b: buffer, offset: number) -> number,
|
||||
readf64: (b: buffer, offset: number) -> number,
|
||||
writei8: (b: buffer, offset: number, value: number) -> (),
|
||||
writeu8: (b: buffer, offset: number, value: number) -> (),
|
||||
writei16: (b: buffer, offset: number, value: number) -> (),
|
||||
writeu16: (b: buffer, offset: number, value: number) -> (),
|
||||
writei32: (b: buffer, offset: number, value: number) -> (),
|
||||
writeu32: (b: buffer, offset: number, value: number) -> (),
|
||||
writef32: (b: buffer, offset: number, value: number) -> (),
|
||||
writef64: (b: buffer, offset: number, value: number) -> (),
|
||||
readstring: (b: buffer, offset: number, count: number) -> string,
|
||||
writestring: (b: buffer, offset: number, value: string, count: number?) -> (),
|
||||
}
|
||||
|
||||
)BUILTIN_SRC";
|
||||
static const std::string kBuiltinDefinitionLuaSrc = R"BUILTIN_SRC(
|
||||
|
||||
declare bit32: {
|
||||
@ -236,8 +267,10 @@ std::string getBuiltinDefinitionSource()
|
||||
{
|
||||
std::string result = kBuiltinDefinitionLuaSrc;
|
||||
|
||||
if (FFlag::LuauBufferDefinitions)
|
||||
if (FFlag::LuauBufferTypeck)
|
||||
result = kBuiltinDefinitionBufferSrc + result;
|
||||
else if (FFlag::LuauBufferDefinitions)
|
||||
result = kBuiltinDefinitionBufferSrc_DEPRECATED + result;
|
||||
|
||||
return result;
|
||||
}
|
||||
|
@ -490,7 +490,12 @@ struct ErrorConverter
|
||||
|
||||
std::string operator()(const TypePackMismatch& e) const
|
||||
{
|
||||
return "Type pack '" + toString(e.givenTp) + "' could not be converted into '" + toString(e.wantedTp) + "'";
|
||||
std::string ss = "Type pack '" + toString(e.givenTp) + "' could not be converted into '" + toString(e.wantedTp) + "'";
|
||||
|
||||
if (!e.reason.empty())
|
||||
ss += "; " + e.reason;
|
||||
|
||||
return ss;
|
||||
}
|
||||
|
||||
std::string operator()(const DynamicPropertyLookupOnClassesUnsafe& e) const
|
||||
|
@ -251,7 +251,7 @@ namespace
|
||||
static ErrorVec accumulateErrors(
const std::unordered_map<ModuleName, std::shared_ptr<SourceNode>>& sourceNodes, ModuleResolver& moduleResolver, const ModuleName& name)
{
std::unordered_set<ModuleName> seen;
DenseHashSet<ModuleName> seen{{}};
std::vector<ModuleName> queue{name};

ErrorVec result;
@ -261,7 +261,7 @@ static ErrorVec accumulateErrors(
ModuleName next = std::move(queue.back());
queue.pop_back();

if (seen.count(next))
if (seen.contains(next))
continue;
seen.insert(next);

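The container swap here only relies on two operations, `contains` and `insert`, so the traversal itself is untouched. A generic sketch of that queue-plus-seen-set walk, written against any set type that provides those two calls; the names below are invented for illustration:

```cpp
#include <string>
#include <unordered_set>
#include <vector>

// The dedup-while-draining pattern from accumulateErrors, made generic over
// the set type so the concrete container only needs contains() and insert().
template<typename Set>
std::vector<std::string> dedupedOrder(std::vector<std::string> queue, Set& seen)
{
    std::vector<std::string> visited;
    while (!queue.empty())
    {
        std::string next = std::move(queue.back());
        queue.pop_back();

        if (seen.contains(next))
            continue;
        seen.insert(next);
        visited.push_back(next);
    }
    return visited;
}

// Usage sketch (std::unordered_set::contains requires C++20):
//   std::unordered_set<std::string> seen;
//   auto order = dedupedOrder({"a", "b", "a"}, seen);
```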
@ -442,7 +442,7 @@ CheckResult Frontend::check(const ModuleName& name, std::optional<FrontendOption
|
||||
std::vector<ModuleName> buildQueue;
|
||||
bool cycleDetected = parseGraph(buildQueue, name, frontendOptions.forAutocomplete);
|
||||
|
||||
std::unordered_set<Luau::ModuleName> seen;
|
||||
DenseHashSet<Luau::ModuleName> seen{{}};
|
||||
std::vector<BuildQueueItem> buildQueueItems;
|
||||
addBuildQueueItems(buildQueueItems, buildQueue, cycleDetected, seen, frontendOptions);
|
||||
LUAU_ASSERT(!buildQueueItems.empty());
|
||||
@ -495,12 +495,12 @@ std::vector<ModuleName> Frontend::checkQueuedModules(std::optional<FrontendOptio
|
||||
std::vector<ModuleName> currModuleQueue;
|
||||
std::swap(currModuleQueue, moduleQueue);
|
||||
|
||||
std::unordered_set<Luau::ModuleName> seen;
|
||||
DenseHashSet<Luau::ModuleName> seen{{}};
|
||||
std::vector<BuildQueueItem> buildQueueItems;
|
||||
|
||||
for (const ModuleName& name : currModuleQueue)
|
||||
{
|
||||
if (seen.count(name))
|
||||
if (seen.contains(name))
|
||||
continue;
|
||||
|
||||
if (!isDirty(name, frontendOptions.forAutocomplete))
|
||||
@ -511,7 +511,7 @@ std::vector<ModuleName> Frontend::checkQueuedModules(std::optional<FrontendOptio
|
||||
|
||||
std::vector<ModuleName> queue;
|
||||
bool cycleDetected = parseGraph(queue, name, frontendOptions.forAutocomplete, [&seen](const ModuleName& name) {
|
||||
return seen.count(name);
|
||||
return seen.contains(name);
|
||||
});
|
||||
|
||||
addBuildQueueItems(buildQueueItems, queue, cycleDetected, seen, frontendOptions);
|
||||
@ -836,11 +836,11 @@ bool Frontend::parseGraph(
|
||||
}
|
||||
|
||||
void Frontend::addBuildQueueItems(std::vector<BuildQueueItem>& items, std::vector<ModuleName>& buildQueue, bool cycleDetected,
|
||||
std::unordered_set<Luau::ModuleName>& seen, const FrontendOptions& frontendOptions)
|
||||
DenseHashSet<Luau::ModuleName>& seen, const FrontendOptions& frontendOptions)
|
||||
{
|
||||
for (const ModuleName& moduleName : buildQueue)
|
||||
{
|
||||
if (seen.count(moduleName))
|
||||
if (seen.contains(moduleName))
|
||||
continue;
|
||||
seen.insert(moduleName);
|
||||
|
||||
@ -1048,6 +1048,7 @@ void Frontend::checkBuildQueueItem(BuildQueueItem& item)
|
||||
module->astResolvedTypes.clear();
|
||||
module->astResolvedTypePacks.clear();
|
||||
module->astScopes.clear();
|
||||
module->upperBoundContributors.clear();
|
||||
|
||||
if (!FFlag::DebugLuauDeferredConstraintResolution)
|
||||
module->scopes.clear();
|
||||
@ -1285,6 +1286,7 @@ ModulePtr check(const SourceModule& sourceModule, Mode mode, const std::vector<R
|
||||
|
||||
result->scopes = std::move(cg.scopes);
|
||||
result->type = sourceModule.type;
|
||||
result->upperBoundContributors = std::move(cs.upperBoundContributors);
|
||||
|
||||
result->clonePublicInterface(builtinTypes, *iceHandler);
|
||||
|
||||
|
@ -3,6 +3,7 @@
|
||||
#include "Luau/GlobalTypes.h"
|
||||
|
||||
LUAU_FASTFLAG(LuauInitializeStringMetatableInGlobalTypes)
|
||||
LUAU_FASTFLAG(LuauBufferTypeck)
|
||||
|
||||
namespace Luau
|
||||
{
|
||||
@ -18,6 +19,8 @@ GlobalTypes::GlobalTypes(NotNull<BuiltinTypes> builtinTypes)
|
||||
globalScope->addBuiltinTypeBinding("string", TypeFun{{}, builtinTypes->stringType});
|
||||
globalScope->addBuiltinTypeBinding("boolean", TypeFun{{}, builtinTypes->booleanType});
|
||||
globalScope->addBuiltinTypeBinding("thread", TypeFun{{}, builtinTypes->threadType});
|
||||
if (FFlag::LuauBufferTypeck)
|
||||
globalScope->addBuiltinTypeBinding("buffer", TypeFun{{}, builtinTypes->bufferType});
|
||||
globalScope->addBuiltinTypeBinding("unknown", TypeFun{{}, builtinTypes->unknownType});
|
||||
globalScope->addBuiltinTypeBinding("never", TypeFun{{}, builtinTypes->neverType});
|
||||
|
||||
|
@ -14,6 +14,8 @@
|
||||
|
||||
LUAU_FASTINTVARIABLE(LuauSuggestionDistance, 4)
|
||||
|
||||
LUAU_FASTFLAG(LuauBufferTypeck)
|
||||
|
||||
namespace Luau
|
||||
{
|
||||
|
||||
@ -1105,7 +1107,7 @@ private:
|
||||
TypeKind getTypeKind(const std::string& name)
|
||||
{
|
||||
if (name == "nil" || name == "boolean" || name == "userdata" || name == "number" || name == "string" || name == "table" ||
|
||||
name == "function" || name == "thread")
|
||||
name == "function" || name == "thread" || (FFlag::LuauBufferTypeck && name == "buffer"))
|
||||
return Kind_Primitive;
|
||||
|
||||
if (name == "vector")
|
||||
@ -2215,7 +2217,8 @@ private:
|
||||
return;
|
||||
|
||||
if (!tty->indexer && !tty->props.empty() && tty->state != TableState::Generic)
|
||||
emitWarning(*context, LintWarning::Code_TableOperations, node->location, "Using '%s' on a table without an array part is likely a bug", op);
|
||||
emitWarning(
|
||||
*context, LintWarning::Code_TableOperations, node->location, "Using '%s' on a table without an array part is likely a bug", op);
|
||||
else if (tty->indexer && isString(tty->indexer->indexType)) // note: to avoid complexity of subtype tests we just check if the key is a string
|
||||
emitWarning(*context, LintWarning::Code_TableOperations, node->location, "Using '%s' on a table with string keys is likely a bug", op);
|
||||
}
|
||||
|
@ -3,7 +3,9 @@
|
||||
|
||||
#include "Luau/Ast.h"
|
||||
#include "Luau/Common.h"
|
||||
#include "Luau/Simplify.h"
|
||||
#include "Luau/Type.h"
|
||||
#include "Luau/Simplify.h"
|
||||
#include "Luau/Subtyping.h"
|
||||
#include "Luau/Normalize.h"
|
||||
#include "Luau/Error.h"
|
||||
@ -64,14 +66,43 @@ struct NonStrictContext
NonStrictContext(NonStrictContext&&) = default;
NonStrictContext& operator=(NonStrictContext&&) = default;

void unionContexts(const NonStrictContext& other)
static NonStrictContext disjunction(
NotNull<BuiltinTypes> builtinTypes, NotNull<TypeArena> arena, const NonStrictContext& left, const NonStrictContext& right)
{
// TODO: unimplemented
// disjunction implements union over the domain of keys
// if the default value for a defId not in the map is `never`
// then never | T is T
NonStrictContext disj{};

for (auto [def, leftTy] : left.context)
{
if (std::optional<TypeId> rightTy = right.find(def))
disj.context[def] = simplifyUnion(builtinTypes, arena, leftTy, *rightTy).result;
else
disj.context[def] = leftTy;
}

for (auto [def, rightTy] : right.context)
{
if (!left.find(def).has_value())
disj.context[def] = rightTy;
}

return disj;
}

void intersectContexts(const NonStrictContext& other)
static NonStrictContext conjunction(
NotNull<BuiltinTypes> builtins, NotNull<TypeArena> arena, const NonStrictContext& left, const NonStrictContext& right)
{
// TODO: unimplemented
NonStrictContext conj{};

for (auto [def, leftTy] : left.context)
{
if (std::optional<TypeId> rightTy = right.find(def))
conj.context[def] = simplifyIntersection(builtins, arena, leftTy, *rightTy).result;
}

return conj;
}
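Concretely, with the missing-key default taken to be `never`: if the left context maps `d1` to `number` and the right maps `d1` to `string` and `d2` to `boolean`, then `disjunction` yields `d1 -> number | string, d2 -> boolean`, while `conjunction` keeps only the shared key, `d1 -> number & string`. A standalone sketch of that key-wise union/intersection over plain maps; string concatenation stands in for `simplifyUnion`/`simplifyIntersection`:

```cpp
#include <string>
#include <unordered_map>

using Ctx = std::unordered_map<int, std::string>; // def id -> type, as text for illustration

// Union over the key domain: a key missing on one side defaults to `never`,
// and never | T == T, so the present entry is kept unchanged.
Ctx disjunction(const Ctx& left, const Ctx& right)
{
    Ctx out = left;
    for (const auto& [def, rightTy] : right)
    {
        auto it = out.find(def);
        if (it == out.end())
            out[def] = rightTy;                        // only right has it: keep as-is
        else
            it->second = it->second + " | " + rightTy; // both sides: union the types
    }
    return out;
}

// Intersection: a key missing on one side defaults to `never`, and
// never & T == never, so only keys present on both sides survive.
Ctx conjunction(const Ctx& left, const Ctx& right)
{
    Ctx out;
    for (const auto& [def, leftTy] : left)
    {
        auto it = right.find(def);
        if (it != right.end())
            out[def] = leftTy + " & " + it->second;
    }
    return out;
}
```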
|
||||
void removeFromContext(const std::vector<DefId>& defs)
|
||||
@ -82,6 +113,12 @@ struct NonStrictContext
|
||||
std::optional<TypeId> find(const DefId& def) const
|
||||
{
|
||||
const Def* d = def.get();
|
||||
return find(d);
|
||||
}
|
||||
|
||||
private:
|
||||
std::optional<TypeId> find(const Def* d) const
|
||||
{
|
||||
auto it = context.find(d);
|
||||
if (it != context.end())
|
||||
return {it->second};
|
||||
@ -180,153 +217,260 @@ struct NonStrictTypeChecker
|
||||
return builtinTypes->anyType;
|
||||
}
|
||||
|
||||
|
||||
void visit(AstStat* stat)
|
||||
{
|
||||
NonStrictContext fresh{};
|
||||
visit(stat, fresh);
|
||||
}
|
||||
|
||||
void visit(AstStat* stat, NonStrictContext& context)
|
||||
NonStrictContext visit(AstStat* stat)
|
||||
{
|
||||
auto pusher = pushStack(stat);
|
||||
if (auto s = stat->as<AstStatBlock>())
|
||||
return visit(s, context);
|
||||
return visit(s);
|
||||
else if (auto s = stat->as<AstStatIf>())
|
||||
return visit(s, context);
|
||||
return visit(s);
|
||||
else if (auto s = stat->as<AstStatWhile>())
|
||||
return visit(s, context);
|
||||
return visit(s);
|
||||
else if (auto s = stat->as<AstStatRepeat>())
|
||||
return visit(s, context);
|
||||
return visit(s);
|
||||
else if (auto s = stat->as<AstStatBreak>())
|
||||
return visit(s, context);
|
||||
return visit(s);
|
||||
else if (auto s = stat->as<AstStatContinue>())
|
||||
return visit(s, context);
|
||||
return visit(s);
|
||||
else if (auto s = stat->as<AstStatReturn>())
|
||||
return visit(s, context);
|
||||
return visit(s);
|
||||
else if (auto s = stat->as<AstStatExpr>())
|
||||
return visit(s, context);
|
||||
return visit(s);
|
||||
else if (auto s = stat->as<AstStatLocal>())
|
||||
return visit(s, context);
|
||||
return visit(s);
|
||||
else if (auto s = stat->as<AstStatFor>())
|
||||
return visit(s, context);
|
||||
return visit(s);
|
||||
else if (auto s = stat->as<AstStatForIn>())
|
||||
return visit(s, context);
|
||||
return visit(s);
|
||||
else if (auto s = stat->as<AstStatAssign>())
|
||||
return visit(s, context);
|
||||
return visit(s);
|
||||
else if (auto s = stat->as<AstStatCompoundAssign>())
|
||||
return visit(s, context);
|
||||
return visit(s);
|
||||
else if (auto s = stat->as<AstStatFunction>())
|
||||
return visit(s, context);
|
||||
return visit(s);
|
||||
else if (auto s = stat->as<AstStatLocalFunction>())
|
||||
return visit(s, context);
|
||||
return visit(s);
|
||||
else if (auto s = stat->as<AstStatTypeAlias>())
|
||||
return visit(s, context);
|
||||
return visit(s);
|
||||
else if (auto s = stat->as<AstStatDeclareFunction>())
|
||||
return visit(s, context);
|
||||
return visit(s);
|
||||
else if (auto s = stat->as<AstStatDeclareGlobal>())
|
||||
return visit(s, context);
|
||||
return visit(s);
|
||||
else if (auto s = stat->as<AstStatDeclareClass>())
|
||||
return visit(s, context);
|
||||
return visit(s);
|
||||
else if (auto s = stat->as<AstStatError>())
|
||||
return visit(s, context);
|
||||
return visit(s);
|
||||
else
|
||||
LUAU_ASSERT(!"NonStrictTypeChecker encountered an unknown node type");
|
||||
{
|
||||
LUAU_ASSERT(!"NonStrictTypeChecker encountered an unknown statement type");
|
||||
ice->ice("NonStrictTypeChecker encountered an unknown statement type");
|
||||
}
|
||||
}
|
||||
|
||||
void visit(AstStatBlock* block, NonStrictContext& context)
|
||||
NonStrictContext visit(AstStatBlock* block)
|
||||
{
|
||||
auto StackPusher = pushStack(block);
|
||||
for (AstStat* statement : block->body)
|
||||
visit(statement, context);
|
||||
visit(statement);
|
||||
return {};
|
||||
}
|
||||
|
||||
void visit(AstStatIf* ifStatement, NonStrictContext& context) {}
|
||||
void visit(AstStatWhile* whileStatement, NonStrictContext& context) {}
|
||||
void visit(AstStatRepeat* repeatStatement, NonStrictContext& context) {}
|
||||
void visit(AstStatBreak* breakStatement, NonStrictContext& context) {}
|
||||
void visit(AstStatContinue* continueStatement, NonStrictContext& context) {}
|
||||
void visit(AstStatReturn* returnStatement, NonStrictContext& context) {}
|
||||
void visit(AstStatExpr* expr, NonStrictContext& context)
|
||||
NonStrictContext visit(AstStatIf* ifStatement)
|
||||
{
|
||||
visit(expr->expr, context);
|
||||
NonStrictContext condB = visit(ifStatement->condition);
|
||||
NonStrictContext thenB = visit(ifStatement->thenbody);
|
||||
NonStrictContext elseB = visit(ifStatement->elsebody);
|
||||
return NonStrictContext::disjunction(
|
||||
builtinTypes, NotNull{&arena}, condB, NonStrictContext::conjunction(builtinTypes, NotNull{&arena}, thenB, elseB));
|
||||
}
|
||||
void visit(AstStatLocal* local, NonStrictContext& context) {}
|
||||
void visit(AstStatFor* forStatement, NonStrictContext& context) {}
|
||||
void visit(AstStatForIn* forInStatement, NonStrictContext& context) {}
|
||||
void visit(AstStatAssign* assign, NonStrictContext& context) {}
|
||||
void visit(AstStatCompoundAssign* compoundAssign, NonStrictContext& context) {}
|
||||
void visit(AstStatFunction* statFn, NonStrictContext& context) {}
|
||||
void visit(AstStatLocalFunction* localFn, NonStrictContext& context) {}
|
||||
void visit(AstStatTypeAlias* typeAlias, NonStrictContext& context) {}
|
||||
void visit(AstStatDeclareFunction* declFn, NonStrictContext& context) {}
|
||||
void visit(AstStatDeclareGlobal* declGlobal, NonStrictContext& context) {}
|
||||
void visit(AstStatDeclareClass* declClass, NonStrictContext& context) {}
|
||||
void visit(AstStatError* error, NonStrictContext& context) {}
|
||||
|
||||
void visit(AstExpr* expr, NonStrictContext& context)
|
||||
NonStrictContext visit(AstStatWhile* whileStatement)
|
||||
{
|
||||
return {};
|
||||
}
|
||||
|
||||
NonStrictContext visit(AstStatRepeat* repeatStatement)
|
||||
{
|
||||
return {};
|
||||
}
|
||||
|
||||
NonStrictContext visit(AstStatBreak* breakStatement)
|
||||
{
|
||||
return {};
|
||||
}
|
||||
|
||||
NonStrictContext visit(AstStatContinue* continueStatement)
|
||||
{
|
||||
return {};
|
||||
}
|
||||
|
||||
NonStrictContext visit(AstStatReturn* returnStatement)
|
||||
{
|
||||
return {};
|
||||
}
|
||||
|
||||
NonStrictContext visit(AstStatExpr* expr)
|
||||
{
|
||||
return visit(expr->expr);
|
||||
}
|
||||
|
||||
NonStrictContext visit(AstStatLocal* local)
|
||||
{
|
||||
return {};
|
||||
}
|
||||
|
||||
NonStrictContext visit(AstStatFor* forStatement)
|
||||
{
|
||||
return {};
|
||||
}
|
||||
|
||||
NonStrictContext visit(AstStatForIn* forInStatement)
|
||||
{
|
||||
return {};
|
||||
}
|
||||
|
||||
NonStrictContext visit(AstStatAssign* assign)
|
||||
{
|
||||
return {};
|
||||
}
|
||||
|
||||
NonStrictContext visit(AstStatCompoundAssign* compoundAssign)
|
||||
{
|
||||
return {};
|
||||
}
|
||||
|
||||
NonStrictContext visit(AstStatFunction* statFn)
|
||||
{
|
||||
return {};
|
||||
}
|
||||
|
||||
NonStrictContext visit(AstStatLocalFunction* localFn)
|
||||
{
|
||||
return {};
|
||||
}
|
||||
|
||||
NonStrictContext visit(AstStatTypeAlias* typeAlias)
|
||||
{
|
||||
return {};
|
||||
}
|
||||
|
||||
NonStrictContext visit(AstStatDeclareFunction* declFn)
|
||||
{
|
||||
return {};
|
||||
}
|
||||
|
||||
NonStrictContext visit(AstStatDeclareGlobal* declGlobal)
|
||||
{
|
||||
return {};
|
||||
}
|
||||
|
||||
NonStrictContext visit(AstStatDeclareClass* declClass)
|
||||
{
|
||||
return {};
|
||||
}
|
||||
|
||||
NonStrictContext visit(AstStatError* error)
|
||||
{
|
||||
return {};
|
||||
}
|
||||
|
||||
NonStrictContext visit(AstExpr* expr)
|
||||
{
|
||||
auto pusher = pushStack(expr);
|
||||
if (auto e = expr->as<AstExprGroup>())
|
||||
return visit(e, context);
|
||||
return visit(e);
|
||||
else if (auto e = expr->as<AstExprConstantNil>())
|
||||
return visit(e, context);
|
||||
return visit(e);
|
||||
else if (auto e = expr->as<AstExprConstantBool>())
|
||||
return visit(e, context);
|
||||
return visit(e);
|
||||
else if (auto e = expr->as<AstExprConstantNumber>())
|
||||
return visit(e, context);
|
||||
return visit(e);
|
||||
else if (auto e = expr->as<AstExprConstantString>())
|
||||
return visit(e, context);
|
||||
return visit(e);
|
||||
else if (auto e = expr->as<AstExprLocal>())
|
||||
return visit(e, context);
|
||||
return visit(e);
|
||||
else if (auto e = expr->as<AstExprGlobal>())
|
||||
return visit(e, context);
|
||||
return visit(e);
|
||||
else if (auto e = expr->as<AstExprVarargs>())
|
||||
return visit(e, context);
|
||||
return visit(e);
|
||||
else if (auto e = expr->as<AstExprCall>())
|
||||
return visit(e, context);
|
||||
return visit(e);
|
||||
else if (auto e = expr->as<AstExprIndexName>())
|
||||
return visit(e, context);
|
||||
return visit(e);
|
||||
else if (auto e = expr->as<AstExprIndexExpr>())
|
||||
return visit(e, context);
|
||||
return visit(e);
|
||||
else if (auto e = expr->as<AstExprFunction>())
|
||||
return visit(e, context);
|
||||
return visit(e);
|
||||
else if (auto e = expr->as<AstExprTable>())
|
||||
return visit(e, context);
|
||||
return visit(e);
|
||||
else if (auto e = expr->as<AstExprUnary>())
|
||||
return visit(e, context);
|
||||
return visit(e);
|
||||
else if (auto e = expr->as<AstExprBinary>())
|
||||
return visit(e, context);
|
||||
return visit(e);
|
||||
else if (auto e = expr->as<AstExprTypeAssertion>())
|
||||
return visit(e, context);
|
||||
return visit(e);
|
||||
else if (auto e = expr->as<AstExprIfElse>())
|
||||
return visit(e, context);
|
||||
return visit(e);
|
||||
else if (auto e = expr->as<AstExprInterpString>())
|
||||
return visit(e, context);
|
||||
return visit(e);
|
||||
else if (auto e = expr->as<AstExprError>())
|
||||
return visit(e, context);
|
||||
return visit(e);
|
||||
else
|
||||
{
|
||||
LUAU_ASSERT(!"NonStrictTypeChecker encountered an unknown expression type");
|
||||
ice->ice("NonStrictTypeChecker encountered an unknown expression type");
|
||||
}
|
||||
}
|
||||
|
||||
void visit(AstExprGroup* group, NonStrictContext& context) {}
|
||||
void visit(AstExprConstantNil* expr, NonStrictContext& context) {}
|
||||
void visit(AstExprConstantBool* expr, NonStrictContext& context) {}
|
||||
void visit(AstExprConstantNumber* expr, NonStrictContext& context) {}
|
||||
void visit(AstExprConstantString* expr, NonStrictContext& context) {}
|
||||
void visit(AstExprLocal* local, NonStrictContext& context) {}
|
||||
void visit(AstExprGlobal* global, NonStrictContext& context) {}
|
||||
void visit(AstExprVarargs* global, NonStrictContext& context) {}
|
||||
|
||||
void visit(AstExprCall* call, NonStrictContext& context)
|
||||
NonStrictContext visit(AstExprGroup* group)
|
||||
{
|
||||
return {};
|
||||
}
|
||||
|
||||
NonStrictContext visit(AstExprConstantNil* expr)
|
||||
{
|
||||
return {};
|
||||
}
|
||||
|
||||
NonStrictContext visit(AstExprConstantBool* expr)
|
||||
{
|
||||
return {};
|
||||
}
|
||||
|
||||
NonStrictContext visit(AstExprConstantNumber* expr)
|
||||
{
|
||||
return {};
|
||||
}
|
||||
|
||||
NonStrictContext visit(AstExprConstantString* expr)
|
||||
{
|
||||
return {};
|
||||
}
|
||||
|
||||
NonStrictContext visit(AstExprLocal* local)
|
||||
{
|
||||
return {};
|
||||
}
|
||||
|
||||
NonStrictContext visit(AstExprGlobal* global)
|
||||
{
|
||||
return {};
|
||||
}
|
||||
|
||||
NonStrictContext visit(AstExprVarargs* global)
|
||||
{
|
||||
return {};
|
||||
}
|
||||
|
||||
|
||||
NonStrictContext visit(AstExprCall* call)
|
||||
{
|
||||
NonStrictContext fresh{};
|
||||
TypeId* originalCallTy = module->astOriginalCallTypes.find(call);
|
||||
if (!originalCallTy)
|
||||
return;
|
||||
return fresh;
|
||||
|
||||
TypeId fnTy = *originalCallTy;
|
||||
// TODO: how should we link this to the passed in context here
|
||||
NonStrictContext fresh{};
|
||||
if (auto fn = get<FunctionType>(follow(fnTy)))
|
||||
{
|
||||
if (fn->isCheckedFunction)
|
||||
@ -369,21 +513,64 @@ struct NonStrictTypeChecker
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return fresh;
|
||||
}
|
||||
|
||||
void visit(AstExprIndexName* indexName, NonStrictContext& context) {}
|
||||
void visit(AstExprIndexExpr* indexExpr, NonStrictContext& context) {}
|
||||
void visit(AstExprFunction* exprFn, NonStrictContext& context)
|
||||
NonStrictContext visit(AstExprIndexName* indexName)
|
||||
{
|
||||
return {};
|
||||
}
|
||||
|
||||
NonStrictContext visit(AstExprIndexExpr* indexExpr)
|
||||
{
|
||||
return {};
|
||||
}
|
||||
|
||||
NonStrictContext visit(AstExprFunction* exprFn)
|
||||
{
|
||||
auto pusher = pushStack(exprFn);
|
||||
return {};
|
||||
}
|
||||
|
||||
NonStrictContext visit(AstExprTable* table)
|
||||
{
|
||||
return {};
|
||||
}
|
||||
|
||||
NonStrictContext visit(AstExprUnary* unary)
|
||||
{
|
||||
return {};
|
||||
}
|
||||
|
||||
NonStrictContext visit(AstExprBinary* binary)
|
||||
{
|
||||
return {};
|
||||
}
|
||||
|
||||
NonStrictContext visit(AstExprTypeAssertion* typeAssertion)
|
||||
{
|
||||
return {};
|
||||
}
|
||||
|
||||
NonStrictContext visit(AstExprIfElse* ifElse)
|
||||
{
|
||||
NonStrictContext condB = visit(ifElse->condition);
|
||||
NonStrictContext thenB = visit(ifElse->trueExpr);
|
||||
NonStrictContext elseB = visit(ifElse->falseExpr);
|
||||
return NonStrictContext::disjunction(
|
||||
builtinTypes, NotNull{&arena}, condB, NonStrictContext::conjunction(builtinTypes, NotNull{&arena}, thenB, elseB));
|
||||
}
|
||||
|
||||
NonStrictContext visit(AstExprInterpString* interpString)
|
||||
{
|
||||
return {};
|
||||
}
|
||||
|
||||
NonStrictContext visit(AstExprError* error)
|
||||
{
|
||||
return {};
|
||||
}
|
||||
void visit(AstExprTable* table, NonStrictContext& context) {}
|
||||
void visit(AstExprUnary* unary, NonStrictContext& context) {}
|
||||
void visit(AstExprBinary* binary, NonStrictContext& context) {}
|
||||
void visit(AstExprTypeAssertion* typeAssertion, NonStrictContext& context) {}
|
||||
void visit(AstExprIfElse* ifElse, NonStrictContext& context) {}
|
||||
void visit(AstExprInterpString* interpString, NonStrictContext& context) {}
|
||||
void visit(AstExprError* error, NonStrictContext& context) {}
|
||||
|
||||
void reportError(TypeErrorData data, const Location& location)
|
||||
{
|
||||
|
@ -8,6 +8,7 @@
|
||||
#include "Luau/Clone.h"
|
||||
#include "Luau/Common.h"
|
||||
#include "Luau/RecursionCounter.h"
|
||||
#include "Luau/Set.h"
|
||||
#include "Luau/Subtyping.h"
|
||||
#include "Luau/Type.h"
|
||||
#include "Luau/TypeFwd.h"
|
||||
@ -18,10 +19,10 @@ LUAU_FASTFLAGVARIABLE(DebugLuauCheckNormalizeInvariant, false)
|
||||
// This could theoretically be 2000 on amd64, but x86 requires this.
|
||||
LUAU_FASTINTVARIABLE(LuauNormalizeIterationLimit, 1200);
|
||||
LUAU_FASTINTVARIABLE(LuauNormalizeCacheLimit, 100000);
|
||||
LUAU_FASTFLAGVARIABLE(LuauNormalizeCyclicUnions, false);
|
||||
LUAU_FASTFLAG(LuauTransitiveSubtyping)
|
||||
LUAU_FASTFLAG(DebugLuauReadWriteProperties)
|
||||
LUAU_FASTFLAG(DebugLuauDeferredConstraintResolution)
|
||||
LUAU_FASTFLAG(LuauBufferTypeck)
|
||||
|
||||
namespace Luau
|
||||
{
|
||||
@ -268,6 +269,7 @@ NormalizedType::NormalizedType(NotNull<BuiltinTypes> builtinTypes)
|
||||
, numbers(builtinTypes->neverType)
|
||||
, strings{NormalizedStringType::never}
|
||||
, threads(builtinTypes->neverType)
|
||||
, buffers(builtinTypes->neverType)
|
||||
{
|
||||
}
|
||||
|
||||
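For orientation, the normalized form is a per-kind decomposition in which every component defaults to `never`, so adding a primitive kind (`buffer` here) means adding one more slot that must participate in every union, intersection, negation, and emptiness check that follows. A toy version of that decomposition with only three slots; booleans stand in for "never vs. present", and the types below are invented purely for illustration:

```cpp
#include <iostream>

// Toy normal form: each primitive kind gets its own slot, defaulting to
// "absent" (the analogue of never). Adding a kind means adding a field and
// threading it through every slot-wise operation.
struct ToyNormalForm
{
    bool numbers = false;
    bool threads = false;
    bool buffers = false; // the newly added slot

    ToyNormalForm unionWith(const ToyNormalForm& other) const
    {
        return {numbers || other.numbers, threads || other.threads, buffers || other.buffers};
    }

    ToyNormalForm intersectWith(const ToyNormalForm& other) const
    {
        return {numbers && other.numbers, threads && other.threads, buffers && other.buffers};
    }

    // "Exactly number" in this toy model: the numbers slot is present and
    // every other slot is still absent, mirroring the isExactlyNumber check
    // in the next hunk.
    bool isExactlyNumber() const
    {
        return numbers && !threads && !buffers;
    }
};

int main()
{
    ToyNormalForm n{true, false, false};
    ToyNormalForm b{false, false, true};
    std::cout << n.unionWith(b).isExactlyNumber() << "\n";     // 0: the union now contains buffer
    std::cout << n.intersectWith(b).isExactlyNumber() << "\n"; // 0: the intersection is empty
}
```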
@ -310,13 +312,13 @@ bool NormalizedType::isUnknown() const
|
||||
bool NormalizedType::isExactlyNumber() const
|
||||
{
|
||||
return hasNumbers() && !hasTops() && !hasBooleans() && !hasClasses() && !hasErrors() && !hasNils() && !hasStrings() && !hasThreads() &&
|
||||
!hasTables() && !hasFunctions() && !hasTyvars();
|
||||
(!FFlag::LuauBufferTypeck || !hasBuffers()) && !hasTables() && !hasFunctions() && !hasTyvars();
|
||||
}
|
||||
|
||||
bool NormalizedType::isSubtypeOfString() const
|
||||
{
|
||||
return hasStrings() && !hasTops() && !hasBooleans() && !hasClasses() && !hasErrors() && !hasNils() && !hasNumbers() && !hasThreads() &&
|
||||
!hasTables() && !hasFunctions() && !hasTyvars();
|
||||
(!FFlag::LuauBufferTypeck || !hasBuffers()) && !hasTables() && !hasFunctions() && !hasTyvars();
|
||||
}
|
||||
|
||||
bool NormalizedType::shouldSuppressErrors() const
|
||||
@ -373,6 +375,12 @@ bool NormalizedType::hasThreads() const
|
||||
return !get<NeverType>(threads);
|
||||
}
|
||||
|
||||
bool NormalizedType::hasBuffers() const
|
||||
{
|
||||
LUAU_ASSERT(FFlag::LuauBufferTypeck);
|
||||
return !get<NeverType>(buffers);
|
||||
}
|
||||
|
||||
bool NormalizedType::hasTables() const
|
||||
{
|
||||
return !tables.isNever();
|
||||
@ -393,18 +401,18 @@ static bool isShallowInhabited(const NormalizedType& norm)
|
||||
// This test is just a shallow check, for example it returns `true` for `{ p : never }`
|
||||
return !get<NeverType>(norm.tops) || !get<NeverType>(norm.booleans) || !norm.classes.isNever() || !get<NeverType>(norm.errors) ||
|
||||
!get<NeverType>(norm.nils) || !get<NeverType>(norm.numbers) || !norm.strings.isNever() || !get<NeverType>(norm.threads) ||
|
||||
!norm.functions.isNever() || !norm.tables.empty() || !norm.tyvars.empty();
|
||||
(FFlag::LuauBufferTypeck && !get<NeverType>(norm.buffers)) || !norm.functions.isNever() || !norm.tables.empty() || !norm.tyvars.empty();
|
||||
}
|
||||
|
||||
bool Normalizer::isInhabited(const NormalizedType* norm, std::unordered_set<TypeId> seen)
|
||||
bool Normalizer::isInhabited(const NormalizedType* norm, Set<TypeId> seen)
|
||||
{
|
||||
// If normalization failed, the type is complex, and so is more likely than not to be inhabited.
|
||||
if (!norm)
|
||||
return true;
|
||||
|
||||
if (!get<NeverType>(norm->tops) || !get<NeverType>(norm->booleans) || !get<NeverType>(norm->errors) || !get<NeverType>(norm->nils) ||
|
||||
!get<NeverType>(norm->numbers) || !get<NeverType>(norm->threads) || !norm->classes.isNever() || !norm->strings.isNever() ||
|
||||
!norm->functions.isNever())
|
||||
!get<NeverType>(norm->numbers) || !get<NeverType>(norm->threads) || (FFlag::LuauBufferTypeck && !get<NeverType>(norm->buffers)) ||
|
||||
!norm->classes.isNever() || !norm->strings.isNever() || !norm->functions.isNever())
|
||||
return true;
|
||||
|
||||
for (const auto& [_, intersect] : norm->tyvars)
|
||||
@ -430,7 +438,7 @@ bool Normalizer::isInhabited(TypeId ty)
|
||||
return *result;
|
||||
}
|
||||
|
||||
bool result = isInhabited(ty, {});
|
||||
bool result = isInhabited(ty, {nullptr});
|
||||
|
||||
if (cacheInhabitance)
|
||||
cachedIsInhabited[ty] = result;
|
||||
@ -438,7 +446,7 @@ bool Normalizer::isInhabited(TypeId ty)
|
||||
return result;
|
||||
}
|
||||
|
||||
bool Normalizer::isInhabited(TypeId ty, std::unordered_set<TypeId> seen)
|
||||
bool Normalizer::isInhabited(TypeId ty, Set<TypeId> seen)
|
||||
{
|
||||
// TODO: use log.follow(ty), CLI-64291
|
||||
ty = follow(ty);
|
||||
@ -492,7 +500,7 @@ bool Normalizer::isIntersectionInhabited(TypeId left, TypeId right)
|
||||
return *result;
|
||||
}
|
||||
|
||||
std::unordered_set<TypeId> seen = {};
|
||||
Set<TypeId> seen{nullptr};
|
||||
seen.insert(left);
|
||||
seen.insert(right);
|
||||
|
||||
@ -628,6 +636,18 @@ static bool isNormalizedThread(TypeId ty)
|
||||
return false;
|
||||
}
|
||||
|
||||
static bool isNormalizedBuffer(TypeId ty)
|
||||
{
|
||||
LUAU_ASSERT(FFlag::LuauBufferTypeck);
|
||||
|
||||
if (get<NeverType>(ty))
|
||||
return true;
|
||||
else if (const PrimitiveType* ptv = get<PrimitiveType>(ty))
|
||||
return ptv->type == PrimitiveType::Buffer;
|
||||
else
|
||||
return false;
|
||||
}
|
||||
|
||||
static bool areNormalizedFunctions(const NormalizedFunctionType& tys)
|
||||
{
|
||||
for (TypeId ty : tys.parts)
|
||||
@ -748,6 +768,8 @@ static void assertInvariant(const NormalizedType& norm)
|
||||
LUAU_ASSERT(isNormalizedNumber(norm.numbers));
|
||||
LUAU_ASSERT(isNormalizedString(norm.strings));
|
||||
LUAU_ASSERT(isNormalizedThread(norm.threads));
|
||||
if (FFlag::LuauBufferTypeck)
|
||||
LUAU_ASSERT(isNormalizedBuffer(norm.buffers));
|
||||
LUAU_ASSERT(areNormalizedFunctions(norm.functions));
|
||||
LUAU_ASSERT(areNormalizedTables(norm.tables));
|
||||
LUAU_ASSERT(isNormalizedTyvar(norm.tyvars));
|
||||
@ -774,7 +796,7 @@ const NormalizedType* Normalizer::normalize(TypeId ty)
|
||||
return found->second.get();
|
||||
|
||||
NormalizedType norm{builtinTypes};
|
||||
std::unordered_set<TypeId> seenSetTypes;
|
||||
Set<TypeId> seenSetTypes{nullptr};
|
||||
if (!unionNormalWithTy(norm, ty, seenSetTypes))
|
||||
return nullptr;
|
||||
if (norm.isUnknown())
|
||||
@ -795,7 +817,7 @@ bool Normalizer::normalizeIntersections(const std::vector<TypeId>& intersections
|
||||
NormalizedType norm{builtinTypes};
|
||||
norm.tops = builtinTypes->anyType;
|
||||
// Now we need to intersect the two types
|
||||
std::unordered_set<TypeId> seenSetTypes;
|
||||
Set<TypeId> seenSetTypes{nullptr};
|
||||
for (auto ty : intersections)
|
||||
{
|
||||
if (!intersectNormalWithTy(norm, ty, seenSetTypes))
|
||||
@ -818,6 +840,8 @@ void Normalizer::clearNormal(NormalizedType& norm)
|
||||
norm.numbers = builtinTypes->neverType;
|
||||
norm.strings.resetToNever();
|
||||
norm.threads = builtinTypes->neverType;
|
||||
if (FFlag::LuauBufferTypeck)
|
||||
norm.buffers = builtinTypes->neverType;
|
||||
norm.tables.clear();
|
||||
norm.functions.resetToNever();
|
||||
norm.tyvars.clear();
|
||||
@ -1503,6 +1527,8 @@ bool Normalizer::unionNormals(NormalizedType& here, const NormalizedType& there,
|
||||
here.numbers = (get<NeverType>(there.numbers) ? here.numbers : there.numbers);
|
||||
unionStrings(here.strings, there.strings);
|
||||
here.threads = (get<NeverType>(there.threads) ? here.threads : there.threads);
|
||||
if (FFlag::LuauBufferTypeck)
|
||||
here.buffers = (get<NeverType>(there.buffers) ? here.buffers : there.buffers);
|
||||
unionFunctions(here.functions, there.functions);
|
||||
unionTables(here.tables, there.tables);
|
||||
return true;
|
||||
@ -1531,7 +1557,7 @@ bool Normalizer::withinResourceLimits()
|
||||
}
|
||||
|
||||
// See above for an explanation of `ignoreSmallerTyvars`.
|
||||
bool Normalizer::unionNormalWithTy(NormalizedType& here, TypeId there, std::unordered_set<TypeId>& seenSetTypes, int ignoreSmallerTyvars)
|
||||
bool Normalizer::unionNormalWithTy(NormalizedType& here, TypeId there, Set<TypeId>& seenSetTypes, int ignoreSmallerTyvars)
|
||||
{
|
||||
RecursionCounter _rc(&sharedState->counters.recursionCount);
|
||||
if (!withinResourceLimits())
|
||||
@ -1559,12 +1585,9 @@ bool Normalizer::unionNormalWithTy(NormalizedType& here, TypeId there, std::unor
|
||||
}
|
||||
else if (const UnionType* utv = get<UnionType>(there))
|
||||
{
|
||||
if (FFlag::LuauNormalizeCyclicUnions)
|
||||
{
|
||||
if (seenSetTypes.count(there))
|
||||
return true;
|
||||
seenSetTypes.insert(there);
|
||||
}
|
||||
if (seenSetTypes.count(there))
|
||||
return true;
|
||||
seenSetTypes.insert(there);
|
||||
|
||||
for (UnionTypeIterator it = begin(utv); it != end(utv); ++it)
|
||||
{
|
||||
@ -1620,6 +1643,8 @@ bool Normalizer::unionNormalWithTy(NormalizedType& here, TypeId there, std::unor
|
||||
here.strings.resetToString();
|
||||
else if (ptv->type == PrimitiveType::Thread)
|
||||
here.threads = there;
|
||||
else if (FFlag::LuauBufferTypeck && ptv->type == PrimitiveType::Buffer)
|
||||
here.buffers = there;
|
||||
else if (ptv->type == PrimitiveType::Function)
|
||||
{
|
||||
here.functions.resetToTop();
|
||||
@ -1739,6 +1764,8 @@ std::optional<NormalizedType> Normalizer::negateNormal(const NormalizedType& her
|
||||
result.strings.isCofinite = !result.strings.isCofinite;
|
||||
|
||||
result.threads = get<NeverType>(here.threads) ? builtinTypes->threadType : builtinTypes->neverType;
|
||||
if (FFlag::LuauBufferTypeck)
|
||||
result.buffers = get<NeverType>(here.buffers) ? builtinTypes->bufferType : builtinTypes->neverType;
|
||||
|
||||
/*
|
||||
* Things get weird and so, so complicated if we allow negations of
|
||||
@ -1828,6 +1855,10 @@ void Normalizer::subtractPrimitive(NormalizedType& here, TypeId ty)
|
||||
case PrimitiveType::Thread:
|
||||
here.threads = builtinTypes->neverType;
|
||||
break;
|
||||
case PrimitiveType::Buffer:
|
||||
if (FFlag::LuauBufferTypeck)
|
||||
here.buffers = builtinTypes->neverType;
|
||||
break;
|
||||
case PrimitiveType::Function:
|
||||
here.functions.resetToNever();
|
||||
break;
|
||||
@ -2621,7 +2652,7 @@ void Normalizer::intersectFunctions(NormalizedFunctionType& heres, const Normali
|
||||
}
|
||||
}
|
||||
|
||||
bool Normalizer::intersectTyvarsWithTy(NormalizedTyvars& here, TypeId there, std::unordered_set<TypeId>& seenSetTypes)
|
||||
bool Normalizer::intersectTyvarsWithTy(NormalizedTyvars& here, TypeId there, Set<TypeId>& seenSetTypes)
|
||||
{
|
||||
for (auto it = here.begin(); it != here.end();)
|
||||
{
|
||||
@ -2658,6 +2689,8 @@ bool Normalizer::intersectNormals(NormalizedType& here, const NormalizedType& th
|
||||
here.numbers = (get<NeverType>(there.numbers) ? there.numbers : here.numbers);
|
||||
intersectStrings(here.strings, there.strings);
|
||||
here.threads = (get<NeverType>(there.threads) ? there.threads : here.threads);
|
||||
if (FFlag::LuauBufferTypeck)
|
||||
here.buffers = (get<NeverType>(there.buffers) ? there.buffers : here.buffers);
|
||||
intersectFunctions(here.functions, there.functions);
|
||||
intersectTables(here.tables, there.tables);
|
||||
|
||||
@ -2699,7 +2732,7 @@ bool Normalizer::intersectNormals(NormalizedType& here, const NormalizedType& th
|
||||
return true;
|
||||
}
|
||||
|
||||
bool Normalizer::intersectNormalWithTy(NormalizedType& here, TypeId there, std::unordered_set<TypeId>& seenSetTypes)
|
||||
bool Normalizer::intersectNormalWithTy(NormalizedType& here, TypeId there, Set<TypeId>& seenSetTypes)
|
||||
{
|
||||
RecursionCounter _rc(&sharedState->counters.recursionCount);
|
||||
if (!withinResourceLimits())
|
||||
@ -2779,6 +2812,7 @@ bool Normalizer::intersectNormalWithTy(NormalizedType& here, TypeId there, std::
|
||||
NormalizedStringType strings = std::move(here.strings);
|
||||
NormalizedFunctionType functions = std::move(here.functions);
|
||||
TypeId threads = here.threads;
|
||||
TypeId buffers = here.buffers;
|
||||
TypeIds tables = std::move(here.tables);
|
||||
|
||||
clearNormal(here);
|
||||
@ -2793,6 +2827,8 @@ bool Normalizer::intersectNormalWithTy(NormalizedType& here, TypeId there, std::
|
||||
here.strings = std::move(strings);
|
||||
else if (ptv->type == PrimitiveType::Thread)
|
||||
here.threads = threads;
|
||||
else if (FFlag::LuauBufferTypeck && ptv->type == PrimitiveType::Buffer)
|
||||
here.buffers = buffers;
|
||||
else if (ptv->type == PrimitiveType::Function)
|
||||
here.functions = std::move(functions);
|
||||
else if (ptv->type == PrimitiveType::Table)
|
||||
@ -2963,6 +2999,8 @@ TypeId Normalizer::typeFromNormal(const NormalizedType& norm)
|
||||
}
|
||||
if (!get<NeverType>(norm.threads))
|
||||
result.push_back(builtinTypes->threadType);
|
||||
if (FFlag::LuauBufferTypeck && !get<NeverType>(norm.buffers))
|
||||
result.push_back(builtinTypes->bufferType);
|
||||
|
||||
result.insert(result.end(), norm.tables.begin(), norm.tables.end());
|
||||
for (auto& [tyvar, intersect] : norm.tyvars)
|
||||
|
@ -72,18 +72,6 @@ std::optional<std::pair<Binding*, Scope*>> Scope::lookupEx(Symbol sym)
|
||||
}
|
||||
}
|
||||
|
||||
std::optional<TypeId> Scope::lookupLValue(DefId def) const
|
||||
{
|
||||
for (const Scope* current = this; current; current = current->parent.get())
|
||||
{
|
||||
if (auto ty = current->lvalueTypes.find(def))
|
||||
return *ty;
|
||||
}
|
||||
|
||||
return std::nullopt;
|
||||
}
|
||||
|
||||
// TODO: We might kill Scope::lookup(Symbol) once data flow is fully fleshed out with type states and control flow analysis.
|
||||
std::optional<TypeId> Scope::lookup(DefId def) const
|
||||
{
|
||||
for (const Scope* current = this; current; current = current->parent.get())
|
||||
@ -181,6 +169,16 @@ std::optional<Binding> Scope::linearSearchForBinding(const std::string& name, bo
|
||||
return std::nullopt;
|
||||
}
|
||||
|
||||
// Updates the `this` scope with the assignments from the `childScope`, including ones that don't exist in `this`.
|
||||
void Scope::inheritAssignments(const ScopePtr& childScope)
|
||||
{
|
||||
if (!FFlag::DebugLuauDeferredConstraintResolution)
|
||||
return;
|
||||
|
||||
for (const auto& [k, a] : childScope->lvalueTypes)
|
||||
lvalueTypes[k] = a;
|
||||
}
|
||||
|
||||
// Updates the `this` scope with the refinements from the `childScope`, excluding ones that don't exist in `this`.
|
||||
void Scope::inheritRefinements(const ScopePtr& childScope)
|
||||
{
|
||||
|
@ -2,6 +2,7 @@
|
||||
|
||||
#include "Luau/Simplify.h"
|
||||
|
||||
#include "Luau/DenseHash.h"
|
||||
#include "Luau/Normalize.h" // TypeIds
|
||||
#include "Luau/RecursionCounter.h"
|
||||
#include "Luau/ToString.h"
|
||||
@ -21,7 +22,7 @@ struct TypeSimplifier
|
||||
NotNull<BuiltinTypes> builtinTypes;
|
||||
NotNull<TypeArena> arena;
|
||||
|
||||
std::set<TypeId> blockedTypes;
|
||||
DenseHashSet<TypeId> blockedTypes{nullptr};
|
||||
|
||||
int recursionDepth = 0;
|
||||
|
||||
|
@ -50,13 +50,21 @@ bool SubtypingReasoning::operator==(const SubtypingReasoning& other) const
|
||||
return subPath == other.subPath && superPath == other.superPath;
|
||||
}
|
||||
|
||||
size_t SubtypingReasoningHash::operator()(const SubtypingReasoning& r) const
|
||||
{
|
||||
return TypePath::PathHash()(r.subPath) ^ (TypePath::PathHash()(r.superPath) << 1);
|
||||
}
|
||||
|
||||
SubtypingResult& SubtypingResult::andAlso(const SubtypingResult& other)
|
||||
{
|
||||
// If this result is a subtype, we take the other result's reasoning. If
|
||||
// this result is not a subtype, we keep the current reasoning, even if the
|
||||
// other isn't a subtype.
|
||||
if (isSubtype)
|
||||
reasoning = other.reasoning;
|
||||
// If the other result is not a subtype, we want to join all of its
|
||||
// reasonings to this one. If this result already has reasonings of its own,
|
||||
// those need to be attributed here.
|
||||
if (!other.isSubtype)
|
||||
{
|
||||
for (const SubtypingReasoning& r : other.reasoning)
|
||||
reasoning.insert(r);
|
||||
}
|
||||
|
||||
isSubtype &= other.isSubtype;
|
||||
// `|=` is intentional here, we want to preserve error related flags.
|
||||
@ -69,10 +77,20 @@ SubtypingResult& SubtypingResult::andAlso(const SubtypingResult& other)
|
||||
|
||||
SubtypingResult& SubtypingResult::orElse(const SubtypingResult& other)
|
||||
{
|
||||
// If the other result is not a subtype, we take the other result's
|
||||
// reasoning.
|
||||
if (!other.isSubtype)
|
||||
reasoning = other.reasoning;
|
||||
// If this result is a subtype, we do not join the reasoning lists. If this
|
||||
// result is not a subtype, but the other is a subtype, we want to _clear_
|
||||
// our reasoning list. If both results are not subtypes, we join the
|
||||
// reasoning lists.
|
||||
if (!isSubtype)
|
||||
{
|
||||
if (other.isSubtype)
|
||||
reasoning.clear();
|
||||
else
|
||||
{
|
||||
for (const SubtypingReasoning& r : other.reasoning)
|
||||
reasoning.insert(r);
|
||||
}
|
||||
}
|
||||
|
||||
isSubtype |= other.isSubtype;
|
||||
isErrorSuppressing |= other.isErrorSuppressing;
|
||||
@ -89,20 +107,26 @@ SubtypingResult& SubtypingResult::withBothComponent(TypePath::Component componen
|
||||
|
||||
SubtypingResult& SubtypingResult::withSubComponent(TypePath::Component component)
|
||||
{
|
||||
if (!reasoning)
|
||||
reasoning = SubtypingReasoning{Path(), Path()};
|
||||
|
||||
reasoning->subPath = reasoning->subPath.push_front(component);
|
||||
if (reasoning.empty())
|
||||
reasoning.insert(SubtypingReasoning{Path(component), TypePath::kEmpty});
|
||||
else
|
||||
{
|
||||
for (auto& r : reasoning)
|
||||
r.subPath = r.subPath.push_front(component);
|
||||
}
|
||||
|
||||
return *this;
|
||||
}
|
||||
|
||||
SubtypingResult& SubtypingResult::withSuperComponent(TypePath::Component component)
|
||||
{
|
||||
if (!reasoning)
|
||||
reasoning = SubtypingReasoning{Path(), Path()};
|
||||
|
||||
reasoning->superPath = reasoning->superPath.push_front(component);
|
||||
if (reasoning.empty())
|
||||
reasoning.insert(SubtypingReasoning{TypePath::kEmpty, Path(component)});
|
||||
else
|
||||
{
|
||||
for (auto& r : reasoning)
|
||||
r.superPath = r.superPath.push_front(component);
|
||||
}
|
||||
|
||||
return *this;
|
||||
}
|
||||
@ -114,20 +138,26 @@ SubtypingResult& SubtypingResult::withBothPath(TypePath::Path path)
|
||||
|
||||
SubtypingResult& SubtypingResult::withSubPath(TypePath::Path path)
|
||||
{
|
||||
if (!reasoning)
|
||||
reasoning = SubtypingReasoning{Path(), Path()};
|
||||
|
||||
reasoning->subPath = path.append(reasoning->subPath);
|
||||
if (reasoning.empty())
|
||||
reasoning.insert(SubtypingReasoning{path, TypePath::kEmpty});
|
||||
else
|
||||
{
|
||||
for (auto& r : reasoning)
|
||||
r.subPath = path.append(r.subPath);
|
||||
}
|
||||
|
||||
return *this;
|
||||
}
|
||||
|
||||
SubtypingResult& SubtypingResult::withSuperPath(TypePath::Path path)
|
||||
{
|
||||
if (!reasoning)
|
||||
reasoning = SubtypingReasoning{Path(), Path()};
|
||||
|
||||
reasoning->superPath = path.append(reasoning->superPath);
|
||||
if (reasoning.empty())
|
||||
reasoning.insert(SubtypingReasoning{TypePath::kEmpty, path});
|
||||
else
|
||||
{
|
||||
for (auto& r : reasoning)
|
||||
r.superPath = path.append(r.superPath);
|
||||
}
|
||||
|
||||
return *this;
|
||||
}
|
||||
@ -281,7 +311,7 @@ SubtypingResult Subtyping::isCovariantWith(SubtypingEnvironment& env, TypeId sub
|
||||
return {true};
|
||||
|
||||
std::pair<TypeId, TypeId> typePair{subTy, superTy};
|
||||
if (!seenTypes.insert(typePair).second)
|
||||
if (!seenTypes.insert(typePair))
|
||||
{
|
||||
/* TODO: Caching results for recursive types is really tricky to think
|
||||
* about.
|
||||
@ -632,8 +662,8 @@ SubtypingResult Subtyping::isContravariantWith(SubtypingEnvironment& env, SubTy&
|
||||
// whenever we involve contravariance. We'll end up appending path
|
||||
// components that should belong to the supertype to the subtype, and vice
|
||||
// versa.
|
||||
if (result.reasoning)
|
||||
std::swap(result.reasoning->subPath, result.reasoning->superPath);
|
||||
for (auto& reasoning : result.reasoning)
|
||||
std::swap(reasoning.subPath, reasoning.superPath);
|
||||
|
||||
return result;
|
||||
}
|
||||
|
@ -562,6 +562,9 @@ struct TypeStringifier
|
||||
case PrimitiveType::Thread:
|
||||
state.emit("thread");
|
||||
return;
|
||||
case PrimitiveType::Buffer:
|
||||
state.emit("buffer");
|
||||
return;
|
||||
case PrimitiveType::Function:
|
||||
state.emit("function");
|
||||
return;
|
||||
|
@ -10,7 +10,6 @@
|
||||
#include <limits>
|
||||
#include <math.h>
|
||||
|
||||
LUAU_FASTFLAG(LuauFloorDivision)
|
||||
|
||||
namespace
|
||||
{
|
||||
@ -474,8 +473,6 @@ struct Printer
|
||||
case AstExprBinary::Pow:
|
||||
case AstExprBinary::CompareLt:
|
||||
case AstExprBinary::CompareGt:
|
||||
LUAU_ASSERT(FFlag::LuauFloorDivision || a->op != AstExprBinary::FloorDiv);
|
||||
|
||||
writer.maybeSpace(a->right->location.begin, 2);
|
||||
writer.symbol(toString(a->op));
|
||||
break;
|
||||
@ -761,8 +758,6 @@ struct Printer
|
||||
writer.symbol("/=");
|
||||
break;
|
||||
case AstExprBinary::FloorDiv:
|
||||
LUAU_ASSERT(FFlag::LuauFloorDivision);
|
||||
|
||||
writer.maybeSpace(a->value->location.begin, 2);
|
||||
writer.symbol("//=");
|
||||
break;
|
||||
|
@ -27,6 +27,7 @@ LUAU_FASTINT(LuauTypeInferRecursionLimit)
|
||||
LUAU_FASTFLAG(LuauInstantiateInSubtyping)
|
||||
LUAU_FASTFLAG(DebugLuauReadWriteProperties)
|
||||
LUAU_FASTFLAGVARIABLE(LuauInitializeStringMetatableInGlobalTypes, false)
|
||||
LUAU_FASTFLAG(LuauBufferTypeck)
|
||||
|
||||
namespace Luau
|
||||
{
|
||||
@ -214,6 +215,13 @@ bool isThread(TypeId ty)
|
||||
return isPrim(ty, PrimitiveType::Thread);
|
||||
}
|
||||
|
||||
bool isBuffer(TypeId ty)
|
||||
{
|
||||
LUAU_ASSERT(FFlag::LuauBufferTypeck);
|
||||
|
||||
return isPrim(ty, PrimitiveType::Buffer);
|
||||
}
|
||||
|
||||
bool isOptional(TypeId ty)
|
||||
{
|
||||
if (isNil(ty))
|
||||
@ -926,6 +934,7 @@ BuiltinTypes::BuiltinTypes()
|
||||
, stringType(arena->addType(Type{PrimitiveType{PrimitiveType::String}, /*persistent*/ true}))
|
||||
, booleanType(arena->addType(Type{PrimitiveType{PrimitiveType::Boolean}, /*persistent*/ true}))
|
||||
, threadType(arena->addType(Type{PrimitiveType{PrimitiveType::Thread}, /*persistent*/ true}))
|
||||
, bufferType(arena->addType(Type{PrimitiveType{PrimitiveType::Buffer}, /*persistent*/ true}))
|
||||
, functionType(arena->addType(Type{PrimitiveType{PrimitiveType::Function}, /*persistent*/ true}))
|
||||
, classType(arena->addType(Type{ClassType{"class", {}, std::nullopt, std::nullopt, {}, {}, {}}, /*persistent*/ true}))
|
||||
, tableType(arena->addType(Type{PrimitiveType{PrimitiveType::Table}, /*persistent*/ true}))
|
||||
|
@ -13,8 +13,6 @@
|
||||
|
||||
#include <string>
|
||||
|
||||
LUAU_FASTFLAG(LuauParseDeclareClassIndexer);
|
||||
|
||||
static char* allocateString(Luau::Allocator& allocator, std::string_view contents)
|
||||
{
|
||||
char* result = (char*)allocator.allocate(contents.size() + 1);
|
||||
@ -106,6 +104,8 @@ public:
|
||||
return allocator->alloc<AstTypeReference>(Location(), std::nullopt, AstName("string"), std::nullopt, Location());
|
||||
case PrimitiveType::Thread:
|
||||
return allocator->alloc<AstTypeReference>(Location(), std::nullopt, AstName("thread"), std::nullopt, Location());
|
||||
case PrimitiveType::Buffer:
|
||||
return allocator->alloc<AstTypeReference>(Location(), std::nullopt, AstName("buffer"), std::nullopt, Location());
|
||||
default:
|
||||
return nullptr;
|
||||
}
|
||||
@ -230,7 +230,7 @@ public:
|
||||
}
|
||||
|
||||
AstTableIndexer* indexer = nullptr;
|
||||
if (FFlag::LuauParseDeclareClassIndexer && ctv.indexer)
|
||||
if (ctv.indexer)
|
||||
{
|
||||
RecursionCounter counter(&count);
|
||||
|
||||
|
@ -3,9 +3,9 @@
|
||||
|
||||
#include "Luau/Ast.h"
|
||||
#include "Luau/AstQuery.h"
|
||||
#include "Luau/Clone.h"
|
||||
#include "Luau/Common.h"
|
||||
#include "Luau/DcrLogger.h"
|
||||
#include "Luau/DenseHash.h"
|
||||
#include "Luau/Error.h"
|
||||
#include "Luau/InsertionOrderedMap.h"
|
||||
#include "Luau/Instantiation.h"
|
||||
@ -20,12 +20,12 @@
|
||||
#include "Luau/TypePack.h"
|
||||
#include "Luau/TypePath.h"
|
||||
#include "Luau/TypeUtils.h"
|
||||
#include "Luau/TypeOrPack.h"
|
||||
#include "Luau/VisitType.h"
|
||||
|
||||
#include <algorithm>
|
||||
|
||||
LUAU_FASTFLAG(DebugLuauMagicTypes)
|
||||
LUAU_FASTFLAG(LuauFloorDivision);
|
||||
|
||||
namespace Luau
|
||||
{
|
||||
@ -1660,14 +1660,48 @@ struct TypeChecker2
if (argIt == end(inferredFtv->argTypes))
break;

TypeId inferredArgTy = *argIt;

if (arg->annotation)
{
TypeId inferredArgTy = *argIt;
TypeId annotatedArgTy = lookupAnnotation(arg->annotation);

testIsSubtype(inferredArgTy, annotatedArgTy, arg->location);
}

// Some Luau constructs can result in an argument type being
// reduced to never by inference. In this case, we want to
// report an error at the function, instead of reporting an
// error at every callsite.
if (is<NeverType>(follow(inferredArgTy)))
{
// If the annotation simplified to never, we don't want to
// even look at contributors.
bool explicitlyNever = false;
if (arg->annotation)
{
TypeId annotatedArgTy = lookupAnnotation(arg->annotation);
explicitlyNever = is<NeverType>(annotatedArgTy);
}

// Not following here is deliberate: the contribution map is
// keyed by type pointer, but that type pointer has, at some
// point, been transmuted to a bound type pointing to never.
if (const auto contributors = module->upperBoundContributors.find(inferredArgTy); contributors && !explicitlyNever)
{
// It's unfortunate that we can't link error messages
// together. For now, this will work.
reportError(
GenericError{format(
"Parameter '%s' has been reduced to never. This function is not callable with any possible value.", arg->name.value)},
arg->location);
for (const auto& [site, component] : *contributors)
reportError(ExtraInformation{format("Parameter '%s' is required to be a subtype of '%s' here.", arg->name.value,
toString(component).c_str())},
site);
}
}

++argIt;
}
}
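The hunk above records, for each free type, the sites that narrowed its upper bound and replays them when a parameter's type collapses to never. A minimal stand-alone sketch of that reporting pattern, using only the C++ standard library and invented site/type strings (none of the Luau internals shown above):

#include <cstdio>
#include <map>
#include <string>
#include <vector>

struct Contribution
{
    std::string site;      // where the upper bound was narrowed
    std::string component; // what the bound was narrowed with
};

int main()
{
    // Hypothetical parameter 'x' that was required to be both string and number.
    std::map<std::string, std::vector<Contribution>> contributors;
    contributors["x"] = {{"foo.luau:3", "string"}, {"foo.luau:4", "number"}};

    bool inhabited = false; // string & number simplifies to never

    if (!inhabited)
    {
        std::printf("Parameter 'x' has been reduced to never. This function is not callable with any possible value.\n");
        for (const Contribution& c : contributors["x"])
            std::printf("Parameter 'x' is required to be a subtype of '%s' here: %s\n", c.component.c_str(), c.site.c_str());
    }
    return 0;
}

Keeping the breadcrumbs separate from the primary diagnostic keeps the message at the function short while still pointing at every contributing constraint site.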
|
||||
@ -1819,8 +1853,6 @@ struct TypeChecker2
|
||||
bool typesHaveIntersection = normalizer.isIntersectionInhabited(leftType, rightType);
|
||||
if (auto it = kBinaryOpMetamethods.find(expr->op); it != kBinaryOpMetamethods.end())
|
||||
{
|
||||
LUAU_ASSERT(FFlag::LuauFloorDivision || expr->op != AstExprBinary::Op::FloorDiv);
|
||||
|
||||
std::optional<TypeId> leftMt = getMetatable(leftType, builtinTypes);
|
||||
std::optional<TypeId> rightMt = getMetatable(rightType, builtinTypes);
|
||||
bool matches = leftMt == rightMt;
|
||||
@ -2009,8 +2041,6 @@ struct TypeChecker2
|
||||
case AstExprBinary::Op::FloorDiv:
|
||||
case AstExprBinary::Op::Pow:
|
||||
case AstExprBinary::Op::Mod:
|
||||
LUAU_ASSERT(FFlag::LuauFloorDivision || expr->op != AstExprBinary::Op::FloorDiv);
|
||||
|
||||
testIsSubtype(leftType, builtinTypes->numberType, expr->left->location);
|
||||
testIsSubtype(rightType, builtinTypes->numberType, expr->right->location);
|
||||
|
||||
@ -2413,29 +2443,65 @@ struct TypeChecker2
}
}

void explainError(TypeId subTy, TypeId superTy, Location location, const SubtypingResult& r)
template<typename TID>
std::optional<std::string> explainReasonings(TID subTy, TID superTy, Location location, const SubtypingResult& r)
{
if (!r.reasoning)
return reportError(TypeMismatch{superTy, subTy}, location);
if (r.reasoning.empty())
return std::nullopt;

std::optional<TypeOrPack> subLeaf = traverse(subTy, r.reasoning->subPath, builtinTypes);
std::optional<TypeOrPack> superLeaf = traverse(superTy, r.reasoning->superPath, builtinTypes);
std::vector<std::string> reasons;
for (const SubtypingReasoning& reasoning : r.reasoning)
{
if (reasoning.subPath.empty() && reasoning.superPath.empty())
continue;

if (!subLeaf || !superLeaf)
ice->ice("Subtyping test returned a reasoning with an invalid path", location);
std::optional<TypeOrPack> subLeaf = traverse(subTy, reasoning.subPath, builtinTypes);
std::optional<TypeOrPack> superLeaf = traverse(superTy, reasoning.superPath, builtinTypes);

if (!get2<TypeId, TypeId>(*subLeaf, *superLeaf) && !get2<TypePackId, TypePackId>(*subLeaf, *superLeaf))
ice->ice("Subtyping test returned a reasoning where one path ends at a type and the other ends at a pack.", location);
if (!subLeaf || !superLeaf)
ice->ice("Subtyping test returned a reasoning with an invalid path", location);

std::string reason;
if (!get2<TypeId, TypeId>(*subLeaf, *superLeaf) && !get2<TypePackId, TypePackId>(*subLeaf, *superLeaf))
ice->ice("Subtyping test returned a reasoning where one path ends at a type and the other ends at a pack.", location);

if (r.reasoning->subPath == r.reasoning->superPath)
reason = "at " + toString(r.reasoning->subPath) + ", " + toString(*subLeaf) + " is not a subtype of " + toString(*superLeaf);
else
reason = "type " + toString(subTy) + toString(r.reasoning->subPath) + " (" + toString(*subLeaf) + ") is not a subtype of " +
toString(superTy) + toString(r.reasoning->superPath) + " (" + toString(*superLeaf) + ")";
std::string reason;
if (reasoning.subPath == reasoning.superPath)
reason = "at " + toString(reasoning.subPath) + ", " + toString(*subLeaf) + " is not a subtype of " + toString(*superLeaf);
else
reason = "type " + toString(subTy) + toString(reasoning.subPath, /* prefixDot */ true) + " (" + toString(*subLeaf) +
") is not a subtype of " + toString(superTy) + toString(reasoning.superPath, /* prefixDot */ true) + " (" +
toString(*superLeaf) + ")";

reportError(TypeMismatch{superTy, subTy, reason}, location);
reasons.push_back(reason);
}

// DenseHashSet ordering is entirely undefined, so we want to
// sort the reasons here to achieve a stable error
// stringification.
std::sort(reasons.begin(), reasons.end());
std::string allReasons;
bool first = true;
for (const std::string& reason : reasons)
{
if (first)
first = false;
else
allReasons += "\n\t";

allReasons += reason;
}

return allReasons;
}

void explainError(TypeId subTy, TypeId superTy, Location location, const SubtypingResult& result)
{
reportError(TypeMismatch{superTy, subTy, explainReasonings(subTy, superTy, location, result).value_or("")}, location);
}

void explainError(TypePackId subTy, TypePackId superTy, Location location, const SubtypingResult& result)
{
reportError(TypePackMismatch{superTy, subTy, explainReasonings(subTy, superTy, location, result).value_or("")}, location);
}

bool testIsSubtype(TypeId subTy, TypeId superTy, Location location)
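Because the reasoning collection above is a hash set, its iteration order is unspecified; the loop therefore renders one reason per failed path and sorts them before joining, so the final error text is stable across runs. A small stand-alone sketch of that aggregation step, with invented reason strings:

#include <algorithm>
#include <cstdio>
#include <string>
#include <vector>

int main()
{
    // Invented reasons, in the arbitrary order a hash set might yield them.
    std::vector<std::string> reasons = {
        "type A[1] (number) is not a subtype of B[1] (string)",
        "type A[0] (boolean) is not a subtype of B[0] (string)",
    };

    std::sort(reasons.begin(), reasons.end()); // deterministic order regardless of set iteration

    std::string allReasons;
    bool first = true;
    for (const std::string& reason : reasons)
    {
        if (!first)
            allReasons += "\n\t";
        first = false;
        allReasons += reason;
    }

    std::printf("%s\n", allReasons.c_str());
    return 0;
}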
|
||||
@ -2459,7 +2525,7 @@ struct TypeChecker2
|
||||
reportError(NormalizationTooComplex{}, location);
|
||||
|
||||
if (!r.isSubtype && !r.isErrorSuppressing)
|
||||
reportError(TypePackMismatch{superTy, subTy}, location);
|
||||
explainError(subTy, superTy, location, r);
|
||||
|
||||
return r.isSubtype;
|
||||
}
|
||||
@ -2507,7 +2573,7 @@ struct TypeChecker2
|
||||
if (!normalizer.isInhabited(ty))
|
||||
return;
|
||||
|
||||
std::unordered_set<TypeId> seen;
|
||||
DenseHashSet<TypeId> seen{nullptr};
|
||||
bool found = hasIndexTypeFromType(ty, prop, location, seen, astIndexExprType);
|
||||
foundOneProp |= found;
|
||||
if (!found)
|
||||
@ -2568,14 +2634,14 @@ struct TypeChecker2
|
||||
}
|
||||
}
|
||||
|
||||
bool hasIndexTypeFromType(TypeId ty, const std::string& prop, const Location& location, std::unordered_set<TypeId>& seen, TypeId astIndexExprType)
|
||||
bool hasIndexTypeFromType(TypeId ty, const std::string& prop, const Location& location, DenseHashSet<TypeId>& seen, TypeId astIndexExprType)
|
||||
{
|
||||
// If we have already encountered this type, we must assume that some
|
||||
// other codepath will do the right thing and signal false if the
|
||||
// property is not present.
|
||||
const bool isUnseen = seen.insert(ty).second;
|
||||
if (!isUnseen)
|
||||
if (seen.contains(ty))
|
||||
return true;
|
||||
seen.insert(ty);
|
||||
|
||||
if (get<ErrorType>(ty) || get<AnyType>(ty) || get<NeverType>(ty))
|
||||
return true;
|
||||
|
@ -751,8 +751,11 @@ TypeFamilyReductionResult<TypeId> andFamilyFn(const std::vector<TypeId>& typePar
|
||||
// And evalutes to a boolean if the LHS is falsey, and the RHS type if LHS is truthy.
|
||||
SimplifyResult filteredLhs = simplifyIntersection(ctx->builtins, ctx->arena, lhsTy, ctx->builtins->falsyType);
|
||||
SimplifyResult overallResult = simplifyUnion(ctx->builtins, ctx->arena, rhsTy, filteredLhs.result);
|
||||
std::vector<TypeId> blockedTypes(filteredLhs.blockedTypes.begin(), filteredLhs.blockedTypes.end());
|
||||
blockedTypes.insert(blockedTypes.end(), overallResult.blockedTypes.begin(), overallResult.blockedTypes.end());
|
||||
std::vector<TypeId> blockedTypes{};
|
||||
for (auto ty : filteredLhs.blockedTypes)
|
||||
blockedTypes.push_back(ty);
|
||||
for (auto ty : overallResult.blockedTypes)
|
||||
blockedTypes.push_back(ty);
|
||||
return {overallResult.result, false, std::move(blockedTypes), {}};
|
||||
}
|
||||
|
||||
@ -776,8 +779,11 @@ TypeFamilyReductionResult<TypeId> orFamilyFn(const std::vector<TypeId>& typePara
|
||||
// Or evalutes to the LHS type if the LHS is truthy, and the RHS type if LHS is falsy.
|
||||
SimplifyResult filteredLhs = simplifyIntersection(ctx->builtins, ctx->arena, lhsTy, ctx->builtins->truthyType);
|
||||
SimplifyResult overallResult = simplifyUnion(ctx->builtins, ctx->arena, rhsTy, filteredLhs.result);
|
||||
std::vector<TypeId> blockedTypes(filteredLhs.blockedTypes.begin(), filteredLhs.blockedTypes.end());
|
||||
blockedTypes.insert(blockedTypes.end(), overallResult.blockedTypes.begin(), overallResult.blockedTypes.end());
|
||||
std::vector<TypeId> blockedTypes{};
|
||||
for (auto ty : filteredLhs.blockedTypes)
|
||||
blockedTypes.push_back(ty);
|
||||
for (auto ty : overallResult.blockedTypes)
|
||||
blockedTypes.push_back(ty);
|
||||
return {overallResult.result, false, std::move(blockedTypes), {}};
|
||||
}
|
||||
|
||||
|
@ -35,12 +35,11 @@ LUAU_FASTFLAG(LuauKnowsTheDataModel3)
|
||||
LUAU_FASTFLAGVARIABLE(DebugLuauFreezeDuringUnification, false)
|
||||
LUAU_FASTFLAGVARIABLE(DebugLuauSharedSelf, false)
|
||||
LUAU_FASTFLAG(LuauInstantiateInSubtyping)
|
||||
LUAU_FASTFLAG(LuauOccursIsntAlwaysFailure)
|
||||
LUAU_FASTFLAGVARIABLE(LuauTinyControlFlowAnalysis, false)
|
||||
LUAU_FASTFLAGVARIABLE(LuauLoopControlFlowAnalysis, false)
|
||||
LUAU_FASTFLAGVARIABLE(LuauAlwaysCommitInferencesOfFunctionCalls, false)
|
||||
LUAU_FASTFLAG(LuauParseDeclareClassIndexer)
|
||||
LUAU_FASTFLAG(LuauFloorDivision);
|
||||
LUAU_FASTFLAG(LuauBufferTypeck)
|
||||
LUAU_FASTFLAGVARIABLE(LuauRemoveBadRelationalOperatorWarning, false)
|
||||
|
||||
namespace Luau
|
||||
{
|
||||
@ -202,7 +201,7 @@ static bool isMetamethod(const Name& name)
|
||||
return name == "__index" || name == "__newindex" || name == "__call" || name == "__concat" || name == "__unm" || name == "__add" ||
|
||||
name == "__sub" || name == "__mul" || name == "__div" || name == "__mod" || name == "__pow" || name == "__tostring" ||
|
||||
name == "__metatable" || name == "__eq" || name == "__lt" || name == "__le" || name == "__mode" || name == "__iter" || name == "__len" ||
|
||||
(FFlag::LuauFloorDivision && name == "__idiv");
|
||||
name == "__idiv";
|
||||
}
|
||||
|
||||
size_t HashBoolNamePair::operator()(const std::pair<bool, Name>& pair) const
|
||||
@ -222,6 +221,7 @@ TypeChecker::TypeChecker(const ScopePtr& globalScope, ModuleResolver* resolver,
|
||||
, stringType(builtinTypes->stringType)
|
||||
, booleanType(builtinTypes->booleanType)
|
||||
, threadType(builtinTypes->threadType)
|
||||
, bufferType(builtinTypes->bufferType)
|
||||
, anyType(builtinTypes->anyType)
|
||||
, unknownType(builtinTypes->unknownType)
|
||||
, neverType(builtinTypes->neverType)
|
||||
@ -1626,13 +1626,6 @@ ControlFlow TypeChecker::check(const ScopePtr& scope, const AstStatTypeAlias& ty
|
||||
|
||||
TypeId& bindingType = bindingsMap[name].type;
|
||||
|
||||
if (!FFlag::LuauOccursIsntAlwaysFailure)
|
||||
{
|
||||
if (unify(ty, bindingType, aliasScope, typealias.location))
|
||||
bindingType = ty;
|
||||
return ControlFlow::None;
|
||||
}
|
||||
|
||||
unify(ty, bindingType, aliasScope, typealias.location);
|
||||
|
||||
// It is possible for this unification to succeed but for
|
||||
@ -1762,7 +1755,7 @@ ControlFlow TypeChecker::check(const ScopePtr& scope, const AstStatDeclareClass&
|
||||
if (!ctv->metatable)
|
||||
ice("No metatable for declared class");
|
||||
|
||||
if (const auto& indexer = declaredClass.indexer; FFlag::LuauParseDeclareClassIndexer && indexer)
|
||||
if (const auto& indexer = declaredClass.indexer)
|
||||
ctv->indexer = TableIndexer(resolveType(scope, *indexer->indexType), resolveType(scope, *indexer->resultType));
|
||||
|
||||
TableType* metatable = getMutable<TableType>(*ctv->metatable);
|
||||
@ -2560,7 +2553,6 @@ std::string opToMetaTableEntry(const AstExprBinary::Op& op)
|
||||
case AstExprBinary::Div:
|
||||
return "__div";
|
||||
case AstExprBinary::FloorDiv:
|
||||
LUAU_ASSERT(FFlag::LuauFloorDivision);
|
||||
return "__idiv";
|
||||
case AstExprBinary::Mod:
|
||||
return "__mod";
|
||||
@ -2763,10 +2755,26 @@ TypeId TypeChecker::checkRelationalOperation(
{
reportErrors(state.errors);

if (!isEquality && state.errors.empty() && (get<UnionType>(leftType) || isBoolean(leftType)))
if (FFlag::LuauRemoveBadRelationalOperatorWarning)
{
reportError(expr.location, GenericError{format("Type '%s' cannot be compared with relational operator %s", toString(leftType).c_str(),
toString(expr.op).c_str())});
// The original version of this check also produced this error when we had a union type.
// However, the old solver does not readily have the ability to discern if the union is comparable.
// This is the case when the lhs is e.g. a union of singletons and the rhs is the combined type.
// The new solver has much more powerful logic for resolving relational operators, but for now,
// we need to be conservative in the old solver to deliver a reasonable developer experience.
if (!isEquality && state.errors.empty() && isBoolean(leftType))
{
reportError(expr.location, GenericError{format("Type '%s' cannot be compared with relational operator %s",
toString(leftType).c_str(), toString(expr.op).c_str())});
}
}
else
{
if (!isEquality && state.errors.empty() && (get<UnionType>(leftType) || isBoolean(leftType)))
{
reportError(expr.location, GenericError{format("Type '%s' cannot be compared with relational operator %s",
toString(leftType).c_str(), toString(expr.op).c_str())});
}
}

return booleanType;
|
||||
@ -3058,8 +3066,6 @@ TypeId TypeChecker::checkBinaryOperation(
|
||||
case AstExprBinary::FloorDiv:
|
||||
case AstExprBinary::Mod:
|
||||
case AstExprBinary::Pow:
|
||||
LUAU_ASSERT(FFlag::LuauFloorDivision || expr.op != AstExprBinary::FloorDiv);
|
||||
|
||||
reportErrors(tryUnify(lhsType, numberType, scope, expr.left->location));
|
||||
reportErrors(tryUnify(rhsType, numberType, scope, expr.right->location));
|
||||
return numberType;
|
||||
@ -6016,6 +6022,8 @@ void TypeChecker::resolve(const TypeGuardPredicate& typeguardP, RefinementMap& r
|
||||
return refine(isBoolean, booleanType);
|
||||
else if (typeguardP.kind == "thread")
|
||||
return refine(isThread, threadType);
|
||||
else if (FFlag::LuauBufferTypeck && typeguardP.kind == "buffer")
|
||||
return refine(isBuffer, bufferType);
|
||||
else if (typeguardP.kind == "table")
|
||||
{
|
||||
return refine([](TypeId ty) -> bool {
|
||||
|
@ -6,8 +6,9 @@
|
||||
#include "Luau/Type.h"
|
||||
#include "Luau/TypeFwd.h"
|
||||
#include "Luau/TypePack.h"
|
||||
#include "Luau/TypeUtils.h"
|
||||
#include "Luau/TypeOrPack.h"
|
||||
|
||||
#include <functional>
|
||||
#include <optional>
|
||||
#include <sstream>
|
||||
#include <type_traits>
|
||||
@ -104,6 +105,41 @@ bool Path::operator==(const Path& other) const
|
||||
return components == other.components;
|
||||
}
|
||||
|
||||
size_t PathHash::operator()(const Property& prop) const
|
||||
{
|
||||
return std::hash<std::string>()(prop.name) ^ static_cast<size_t>(prop.isRead);
|
||||
}
|
||||
|
||||
size_t PathHash::operator()(const Index& idx) const
|
||||
{
|
||||
return idx.index;
|
||||
}
|
||||
|
||||
size_t PathHash::operator()(const TypeField& field) const
|
||||
{
|
||||
return static_cast<size_t>(field);
|
||||
}
|
||||
|
||||
size_t PathHash::operator()(const PackField& field) const
|
||||
{
|
||||
return static_cast<size_t>(field);
|
||||
}
|
||||
|
||||
size_t PathHash::operator()(const Component& component) const
|
||||
{
|
||||
return visit(*this, component);
|
||||
}
|
||||
|
||||
size_t PathHash::operator()(const Path& path) const
|
||||
{
|
||||
size_t hash = 0;
|
||||
|
||||
for (const Component& component : path.components)
|
||||
hash ^= (*this)(component);
|
||||
|
||||
return hash;
|
||||
}
|
||||
|
||||
Path PathBuilder::build()
|
||||
{
|
||||
return Path(std::move(components));
|
||||
@ -465,7 +501,7 @@ struct TraversalState
|
||||
|
||||
} // namespace
|
||||
|
||||
std::string toString(const TypePath::Path& path)
|
||||
std::string toString(const TypePath::Path& path, bool prefixDot)
|
||||
{
|
||||
std::stringstream result;
|
||||
bool first = true;
|
||||
@ -491,7 +527,7 @@ std::string toString(const TypePath::Path& path)
|
||||
}
|
||||
else if constexpr (std::is_same_v<T, TypePath::TypeField>)
|
||||
{
|
||||
if (!first)
|
||||
if (!first || prefixDot)
|
||||
result << '.';
|
||||
|
||||
switch (c)
|
||||
@ -523,7 +559,7 @@ std::string toString(const TypePath::Path& path)
|
||||
}
|
||||
else if constexpr (std::is_same_v<T, TypePath::PackField>)
|
||||
{
|
||||
if (!first)
|
||||
if (!first || prefixDot)
|
||||
result << '.';
|
||||
|
||||
switch (c)
|
||||
@ -580,7 +616,14 @@ std::optional<TypeOrPack> traverse(TypeId root, const Path& path, NotNull<Builti
|
||||
return std::nullopt;
|
||||
}
|
||||
|
||||
std::optional<TypeOrPack> traverse(TypePackId root, const Path& path, NotNull<BuiltinTypes> builtinTypes);
|
||||
std::optional<TypeOrPack> traverse(TypePackId root, const Path& path, NotNull<BuiltinTypes> builtinTypes)
|
||||
{
|
||||
TraversalState state(follow(root), builtinTypes);
|
||||
if (traverse(state, path))
|
||||
return state.current;
|
||||
else
|
||||
return std::nullopt;
|
||||
}
|
||||
|
||||
std::optional<TypeId> traverseForType(TypeId root, const Path& path, NotNull<BuiltinTypes> builtinTypes)
|
||||
{
|
||||
|
@ -19,10 +19,10 @@ LUAU_FASTINT(LuauTypeInferTypePackLoopLimit)
|
||||
LUAU_FASTFLAG(LuauErrorRecoveryType)
|
||||
LUAU_FASTFLAGVARIABLE(LuauInstantiateInSubtyping, false)
|
||||
LUAU_FASTFLAGVARIABLE(LuauTransitiveSubtyping, false)
|
||||
LUAU_FASTFLAGVARIABLE(LuauOccursIsntAlwaysFailure, false)
|
||||
LUAU_FASTFLAG(LuauAlwaysCommitInferencesOfFunctionCalls)
|
||||
LUAU_FASTFLAG(DebugLuauDeferredConstraintResolution)
|
||||
LUAU_FASTFLAGVARIABLE(LuauFixIndexerSubtypingOrdering, false)
|
||||
LUAU_FASTFLAGVARIABLE(LuauUnifierShouldNotCopyError, false)
|
||||
|
||||
namespace Luau
|
||||
{
|
||||
@ -2873,7 +2873,7 @@ bool Unifier::occursCheck(TypeId needle, TypeId haystack, bool reversed)
|
||||
|
||||
bool occurs = occursCheck(sharedState.tempSeenTy, needle, haystack);
|
||||
|
||||
if (occurs && FFlag::LuauOccursIsntAlwaysFailure)
|
||||
if (occurs)
|
||||
{
|
||||
Unifier innerState = makeChildUnifier();
|
||||
if (const UnionType* ut = get<UnionType>(haystack))
|
||||
@ -2931,15 +2931,7 @@ bool Unifier::occursCheck(DenseHashSet<TypeId>& seen, TypeId needle, TypeId hays
|
||||
ice("Expected needle to be free");
|
||||
|
||||
if (needle == haystack)
|
||||
{
|
||||
if (!FFlag::LuauOccursIsntAlwaysFailure)
|
||||
{
|
||||
reportError(location, OccursCheckFailed{});
|
||||
log.replace(needle, *builtinTypes->errorRecoveryType());
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
if (log.getMutable<FreeType>(haystack) || (hideousFixMeGenericsAreActuallyFree && log.is<GenericType>(haystack)))
|
||||
return false;
|
||||
@ -2963,10 +2955,13 @@ bool Unifier::occursCheck(TypePackId needle, TypePackId haystack, bool reversed)
|
||||
|
||||
bool occurs = occursCheck(sharedState.tempSeenTp, needle, haystack);
|
||||
|
||||
if (occurs && FFlag::LuauOccursIsntAlwaysFailure)
|
||||
if (occurs)
|
||||
{
|
||||
reportError(location, OccursCheckFailed{});
|
||||
log.replace(needle, *builtinTypes->errorRecoveryTypePack());
|
||||
if (FFlag::LuauUnifierShouldNotCopyError)
|
||||
log.replace(needle, BoundTypePack{builtinTypes->errorRecoveryTypePack()});
|
||||
else
|
||||
log.replace(needle, *builtinTypes->errorRecoveryTypePack());
|
||||
}
|
||||
|
||||
return occurs;
|
||||
@ -2993,15 +2988,7 @@ bool Unifier::occursCheck(DenseHashSet<TypePackId>& seen, TypePackId needle, Typ
|
||||
while (!log.getMutable<ErrorType>(haystack))
|
||||
{
|
||||
if (needle == haystack)
|
||||
{
|
||||
if (!FFlag::LuauOccursIsntAlwaysFailure)
|
||||
{
|
||||
reportError(location, OccursCheckFailed{});
|
||||
log.replace(needle, *builtinTypes->errorRecoveryTypePack());
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
if (auto a = get<TypePack>(haystack); a && a->tail)
|
||||
{
|
||||
|
@ -5,9 +5,6 @@
|
||||
#include "Luau/Instantiation.h"
|
||||
#include "Luau/Scope.h"
|
||||
#include "Luau/Simplify.h"
|
||||
#include "Luau/Substitution.h"
|
||||
#include "Luau/ToString.h"
|
||||
#include "Luau/TxnLog.h"
|
||||
#include "Luau/Type.h"
|
||||
#include "Luau/TypeArena.h"
|
||||
#include "Luau/TypeCheckLimits.h"
|
||||
@ -16,7 +13,6 @@
|
||||
|
||||
#include <algorithm>
|
||||
#include <optional>
|
||||
#include <unordered_set>
|
||||
|
||||
LUAU_FASTINT(LuauTypeInferRecursionLimit)
|
||||
|
||||
@ -49,7 +45,10 @@ bool Unifier2::unify(TypeId subTy, TypeId superTy)
|
||||
FreeType* superFree = getMutable<FreeType>(superTy);
|
||||
|
||||
if (subFree)
|
||||
{
|
||||
subFree->upperBound = mkIntersection(subFree->upperBound, superTy);
|
||||
expandedFreeTypes[subTy].push_back(superTy);
|
||||
}
|
||||
|
||||
if (superFree)
|
||||
superFree->lowerBound = mkUnion(superFree->lowerBound, subTy);
|
||||
|
@ -3,7 +3,6 @@
|
||||
|
||||
#include "Luau/Common.h"
|
||||
|
||||
LUAU_FASTFLAG(LuauFloorDivision);
|
||||
|
||||
namespace Luau
|
||||
{
|
||||
@ -282,7 +281,6 @@ std::string toString(AstExprBinary::Op op)
|
||||
case AstExprBinary::Div:
|
||||
return "/";
|
||||
case AstExprBinary::FloorDiv:
|
||||
LUAU_ASSERT(FFlag::LuauFloorDivision);
|
||||
return "//";
|
||||
case AstExprBinary::Mod:
|
||||
return "%";
|
||||
|
@ -7,7 +7,6 @@
|
||||
|
||||
#include <limits.h>
|
||||
|
||||
LUAU_FASTFLAGVARIABLE(LuauFloorDivision, false)
|
||||
LUAU_FASTFLAGVARIABLE(LuauLexerLookaheadRemembersBraceType, false)
|
||||
LUAU_FASTFLAGVARIABLE(LuauCheckedFunctionSyntax, false)
|
||||
|
||||
@ -142,7 +141,7 @@ std::string Lexeme::toString() const
|
||||
return "'::'";
|
||||
|
||||
case FloorDiv:
|
||||
return FFlag::LuauFloorDivision ? "'//'" : "<unknown>";
|
||||
return "'//'";
|
||||
|
||||
case AddAssign:
|
||||
return "'+='";
|
||||
@ -157,7 +156,7 @@ std::string Lexeme::toString() const
|
||||
return "'/='";
|
||||
|
||||
case FloorDivAssign:
|
||||
return FFlag::LuauFloorDivision ? "'//='" : "<unknown>";
|
||||
return "'//='";
|
||||
|
||||
case ModAssign:
|
||||
return "'%='";
|
||||
@ -909,44 +908,29 @@ Lexeme Lexer::readNext()

case '/':
{
if (FFlag::LuauFloorDivision)
consume();

char ch = peekch();

if (ch == '=')
{
consume();

char ch = peekch();

if (ch == '=')
{
consume();
return Lexeme(Location(start, 2), Lexeme::DivAssign);
}
else if (ch == '/')
{
consume();

if (peekch() == '=')
{
consume();
return Lexeme(Location(start, 3), Lexeme::FloorDivAssign);
}
else
return Lexeme(Location(start, 2), Lexeme::FloorDiv);
}
else
return Lexeme(Location(start, 1), '/');
return Lexeme(Location(start, 2), Lexeme::DivAssign);
}
else
else if (ch == '/')
{
consume();

if (peekch() == '=')
{
consume();
return Lexeme(Location(start, 2), Lexeme::DivAssign);
return Lexeme(Location(start, 3), Lexeme::FloorDivAssign);
}
else
return Lexeme(Location(start, 1), '/');
return Lexeme(Location(start, 2), Lexeme::FloorDiv);
}
else
return Lexeme(Location(start, 1), '/');
}

case '*':
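With the floor-division flag removed above, '/' is always scanned with short lookahead to pick the longest match among '/', '/=', '//' and '//='. A stand-alone sketch of that longest-match scan (token names are illustrative, not the real Lexeme kinds):

#include <cstdio>
#include <string>

const char* scanSlash(const std::string& src, size_t& pos)
{
    pos++; // consume the first '/'
    if (pos < src.size() && src[pos] == '=')
    {
        pos++;
        return "DivAssign"; // '/='
    }
    if (pos < src.size() && src[pos] == '/')
    {
        pos++;
        if (pos < src.size() && src[pos] == '=')
        {
            pos++;
            return "FloorDivAssign"; // '//='
        }
        return "FloorDiv"; // '//'
    }
    return "Div"; // '/'
}

int main()
{
    for (const std::string& s : {std::string("/"), std::string("/="), std::string("//"), std::string("//=")})
    {
        size_t pos = 0;
        std::printf("%-3s -> %s\n", s.c_str(), scanSlash(s, pos));
    }
    return 0;
}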
|
||||
|
@ -16,14 +16,9 @@ LUAU_FASTINTVARIABLE(LuauParseErrorLimit, 100)
|
||||
// Warning: If you are introducing new syntax, ensure that it is behind a separate
|
||||
// flag so that we don't break production games by reverting syntax changes.
|
||||
// See docs/SyntaxChanges.md for an explanation.
|
||||
LUAU_FASTFLAGVARIABLE(LuauParseDeclareClassIndexer, false)
|
||||
LUAU_FASTFLAGVARIABLE(LuauClipExtraHasEndProps, false)
|
||||
LUAU_FASTFLAG(LuauFloorDivision)
|
||||
LUAU_FASTFLAG(LuauCheckedFunctionSyntax)
|
||||
|
||||
LUAU_FASTFLAGVARIABLE(LuauBetterTypeUnionLimits, false)
|
||||
LUAU_FASTFLAGVARIABLE(LuauBetterTypeRecLimits, false)
|
||||
|
||||
LUAU_FASTFLAGVARIABLE(LuauParseImpreciseNumber, false)
|
||||
|
||||
namespace Luau
|
||||
@ -926,7 +921,7 @@ AstStat* Parser::parseDeclaration(const Location& start)
|
||||
{
|
||||
props.push_back(parseDeclaredClassMethod());
|
||||
}
|
||||
else if (lexer.current().type == '[' && (!FFlag::LuauParseDeclareClassIndexer || lexer.lookahead().type == Lexeme::RawString ||
|
||||
else if (lexer.current().type == '[' && (lexer.lookahead().type == Lexeme::RawString ||
|
||||
lexer.lookahead().type == Lexeme::QuotedString))
|
||||
{
|
||||
const Lexeme begin = lexer.current();
|
||||
@ -946,7 +941,7 @@ AstStat* Parser::parseDeclaration(const Location& start)
|
||||
else
|
||||
report(begin.location, "String literal contains malformed escape sequence or \\0");
|
||||
}
|
||||
else if (lexer.current().type == '[' && FFlag::LuauParseDeclareClassIndexer)
|
||||
else if (lexer.current().type == '[')
|
||||
{
|
||||
if (indexer)
|
||||
{
|
||||
@ -1546,8 +1541,7 @@ AstType* Parser::parseTypeSuffix(AstType* type, const Location& begin)
|
||||
|
||||
unsigned int oldRecursionCount = recursionCounter;
|
||||
parts.push_back(parseSimpleType(/* allowPack= */ false).type);
|
||||
if (FFlag::LuauBetterTypeUnionLimits)
|
||||
recursionCounter = oldRecursionCount;
|
||||
recursionCounter = oldRecursionCount;
|
||||
|
||||
isUnion = true;
|
||||
}
|
||||
@ -1556,7 +1550,7 @@ AstType* Parser::parseTypeSuffix(AstType* type, const Location& begin)
|
||||
Location loc = lexer.current().location;
|
||||
nextLexeme();
|
||||
|
||||
if (!FFlag::LuauBetterTypeUnionLimits || !hasOptional)
|
||||
if (!hasOptional)
|
||||
parts.push_back(allocator.alloc<AstTypeReference>(loc, std::nullopt, nameNil, std::nullopt, loc));
|
||||
|
||||
isUnion = true;
|
||||
@ -1568,8 +1562,7 @@ AstType* Parser::parseTypeSuffix(AstType* type, const Location& begin)
|
||||
|
||||
unsigned int oldRecursionCount = recursionCounter;
|
||||
parts.push_back(parseSimpleType(/* allowPack= */ false).type);
|
||||
if (FFlag::LuauBetterTypeUnionLimits)
|
||||
recursionCounter = oldRecursionCount;
|
||||
recursionCounter = oldRecursionCount;
|
||||
|
||||
isIntersection = true;
|
||||
}
|
||||
@ -1581,7 +1574,7 @@ AstType* Parser::parseTypeSuffix(AstType* type, const Location& begin)
|
||||
else
|
||||
break;
|
||||
|
||||
if (FFlag::LuauBetterTypeUnionLimits && parts.size() > unsigned(FInt::LuauTypeLengthLimit) + hasOptional)
|
||||
if (parts.size() > unsigned(FInt::LuauTypeLengthLimit) + hasOptional)
|
||||
ParseError::raise(parts.back()->location, "Exceeded allowed type length; simplify your type annotation to make the code compile");
|
||||
}
|
||||
|
||||
@ -1609,10 +1602,7 @@ AstType* Parser::parseTypeSuffix(AstType* type, const Location& begin)
|
||||
AstTypeOrPack Parser::parseTypeOrPack()
|
||||
{
|
||||
unsigned int oldRecursionCount = recursionCounter;
|
||||
|
||||
// recursion counter is incremented in parseSimpleType
|
||||
if (!FFlag::LuauBetterTypeRecLimits)
|
||||
incrementRecursionCounter("type annotation");
|
||||
|
||||
Location begin = lexer.current().location;
|
||||
|
||||
@ -1632,10 +1622,7 @@ AstTypeOrPack Parser::parseTypeOrPack()
|
||||
AstType* Parser::parseType(bool inDeclarationContext)
|
||||
{
|
||||
unsigned int oldRecursionCount = recursionCounter;
|
||||
|
||||
// recursion counter is incremented in parseSimpleType
|
||||
if (!FFlag::LuauBetterTypeRecLimits)
|
||||
incrementRecursionCounter("type annotation");
|
||||
|
||||
Location begin = lexer.current().location;
|
||||
|
||||
@ -1841,11 +1828,7 @@ std::optional<AstExprBinary::Op> Parser::parseBinaryOp(const Lexeme& l)
|
||||
else if (l.type == '/')
|
||||
return AstExprBinary::Div;
|
||||
else if (l.type == Lexeme::FloorDiv)
|
||||
{
|
||||
LUAU_ASSERT(FFlag::LuauFloorDivision);
|
||||
|
||||
return AstExprBinary::FloorDiv;
|
||||
}
|
||||
else if (l.type == '%')
|
||||
return AstExprBinary::Mod;
|
||||
else if (l.type == '^')
|
||||
@ -1883,11 +1866,7 @@ std::optional<AstExprBinary::Op> Parser::parseCompoundOp(const Lexeme& l)
|
||||
else if (l.type == Lexeme::DivAssign)
|
||||
return AstExprBinary::Div;
|
||||
else if (l.type == Lexeme::FloorDivAssign)
|
||||
{
|
||||
LUAU_ASSERT(FFlag::LuauFloorDivision);
|
||||
|
||||
return AstExprBinary::FloorDiv;
|
||||
}
|
||||
else if (l.type == Lexeme::ModAssign)
|
||||
return AstExprBinary::Mod;
|
||||
else if (l.type == Lexeme::PowAssign)
|
||||
|
@ -133,6 +133,7 @@ public:
|
||||
void vcvttsd2si(OperandX64 dst, OperandX64 src);
|
||||
void vcvtsi2sd(OperandX64 dst, OperandX64 src1, OperandX64 src2);
|
||||
void vcvtsd2ss(OperandX64 dst, OperandX64 src1, OperandX64 src2);
|
||||
void vcvtss2sd(OperandX64 dst, OperandX64 src1, OperandX64 src2);
|
||||
|
||||
void vroundsd(OperandX64 dst, OperandX64 src1, OperandX64 src2, RoundingModeX64 roundingMode); // inexact
|
||||
|
||||
@ -158,7 +159,6 @@ public:
|
||||
|
||||
void vblendvpd(RegisterX64 dst, RegisterX64 src1, OperandX64 mask, RegisterX64 src3);
|
||||
|
||||
|
||||
// Run final checks
|
||||
bool finalize();
|
||||
|
||||
@ -228,6 +228,7 @@ private:
|
||||
void placeVex(OperandX64 dst, OperandX64 src1, OperandX64 src2, bool setW, uint8_t mode, uint8_t prefix);
|
||||
void placeImm8Or32(int32_t imm);
|
||||
void placeImm8(int32_t imm);
|
||||
void placeImm16(int16_t imm);
|
||||
void placeImm32(int32_t imm);
|
||||
void placeImm64(int64_t imm);
|
||||
void placeLabel(Label& label);
|
||||
|
@ -251,7 +251,7 @@ enum class IrCmd : uint8_t
|
||||
// A: pointer (Table)
|
||||
DUP_TABLE,
|
||||
|
||||
// Insert an integer key into a table
|
||||
// Insert an integer key into a table and return the pointer to inserted value (TValue)
|
||||
// A: pointer (Table)
|
||||
// B: int (key)
|
||||
TABLE_SETNUM,
|
||||
@ -281,7 +281,7 @@ enum class IrCmd : uint8_t
|
||||
NUM_TO_UINT,
|
||||
|
||||
// Adjust stack top (L->top) to point at 'B' TValues *after* the specified register
|
||||
// This is used to return muliple values
|
||||
// This is used to return multiple values
|
||||
// A: Rn
|
||||
// B: int (offset)
|
||||
ADJUST_STACK_TO_REG,
|
||||
@ -420,6 +420,14 @@ enum class IrCmd : uint8_t
|
||||
// When undef is specified instead of a block, execution is aborted on check failure
|
||||
CHECK_NODE_VALUE,
|
||||
|
||||
// Guard against access at specified offset/size overflowing the buffer length
|
||||
// A: pointer (buffer)
|
||||
// B: int (offset)
|
||||
// C: int (size)
|
||||
// D: block/vmexit/undef
|
||||
// When undef is specified instead of a block, execution is aborted on check failure
|
||||
CHECK_BUFFER_LEN,
|
||||
|
||||
// Special operations
|
||||
|
||||
// Check interrupt handler
|
||||
@ -621,6 +629,71 @@ enum class IrCmd : uint8_t
|
||||
// Find or create an upval at the given level
|
||||
// A: Rn (level)
|
||||
FINDUPVAL,
|
||||
|
||||
// Read i8 (sign-extended to int) from buffer storage at specified offset
// A: pointer (buffer)
// B: int (offset)
BUFFER_READI8,

// Read u8 (zero-extended to int) from buffer storage at specified offset
// A: pointer (buffer)
// B: int (offset)
BUFFER_READU8,

// Write i8/u8 value (int argument is truncated) to buffer storage at specified offset
// A: pointer (buffer)
// B: int (offset)
// C: int (value)
BUFFER_WRITEI8,

// Read i16 (sign-extended to int) from buffer storage at specified offset
// A: pointer (buffer)
// B: int (offset)
BUFFER_READI16,

// Read u16 (zero-extended to int) from buffer storage at specified offset
// A: pointer (buffer)
// B: int (offset)
BUFFER_READU16,

// Write i16/u16 value (int argument is truncated) to buffer storage at specified offset
// A: pointer (buffer)
// B: int (offset)
// C: int (value)
BUFFER_WRITEI16,

// Read i32 value from buffer storage at specified offset
// A: pointer (buffer)
// B: int (offset)
BUFFER_READI32,

// Write i32/u32 value to buffer storage at specified offset
// A: pointer (buffer)
// B: int (offset)
// C: int (value)
BUFFER_WRITEI32,

// Read float value (converted to double) from buffer storage at specified offset
// A: pointer (buffer)
// B: int (offset)
BUFFER_READF32,

// Write float value (converted from double) to buffer storage at specified offset
// A: pointer (buffer)
// B: int (offset)
// C: double (value)
BUFFER_WRITEF32,

// Read double value from buffer storage at specified offset
// A: pointer (buffer)
// B: int (offset)
BUFFER_READF64,

// Write double value to buffer storage at specified offset
// A: pointer (buffer)
// B: int (offset)
// C: double (value)
BUFFER_WRITEF64,
};
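The comments above pin down the width and extension rules for the new buffer commands: i8/i16 reads sign-extend, u8/u16 reads zero-extend, f32 reads widen to double, and the narrow writes truncate their integer argument. A stand-alone C++ illustration of those conventions on a plain byte array (the VM's buffer object is not involved here):

#include <cstdint>
#include <cstdio>
#include <cstring>

int main()
{
    uint8_t data[8] = {0xff, 0xff, 0, 0, 0, 0, 0, 0}; // two 0xff bytes at offset 0

    int8_t i8;
    uint8_t u8;
    int16_t i16;
    uint16_t u16;
    std::memcpy(&i8, data, 1);
    std::memcpy(&u8, data, 1);
    std::memcpy(&i16, data, 2);
    std::memcpy(&u16, data, 2);

    // Sign-extended reads produce -1; zero-extended reads produce 255/65535.
    std::printf("readi8=%d readu8=%d readi16=%d readu16=%d\n", int(i8), int(u8), int(i16), int(u16));

    float f = 1.5f;
    std::memcpy(data, &f, sizeof(f));   // a 32-bit float write
    float readBack;
    std::memcpy(&readBack, data, sizeof(readBack));
    double widened = double(readBack);  // an f32 read is handed back as a double
    std::printf("readf32=%f\n", widened);
    return 0;
}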
|
||||
|
||||
enum class IrConstKind : uint8_t
|
||||
|
@ -128,6 +128,7 @@ inline bool isNonTerminatingJump(IrCmd cmd)
|
||||
case IrCmd::CHECK_SLOT_MATCH:
|
||||
case IrCmd::CHECK_NODE_NO_NEXT:
|
||||
case IrCmd::CHECK_NODE_VALUE:
|
||||
case IrCmd::CHECK_BUFFER_LEN:
|
||||
return true;
|
||||
default:
|
||||
break;
|
||||
@ -197,6 +198,13 @@ inline bool hasResult(IrCmd cmd)
|
||||
case IrCmd::GET_TYPEOF:
|
||||
case IrCmd::NEWCLOSURE:
|
||||
case IrCmd::FINDUPVAL:
|
||||
case IrCmd::BUFFER_READI8:
|
||||
case IrCmd::BUFFER_READU8:
|
||||
case IrCmd::BUFFER_READI16:
|
||||
case IrCmd::BUFFER_READU16:
|
||||
case IrCmd::BUFFER_READI32:
|
||||
case IrCmd::BUFFER_READF32:
|
||||
case IrCmd::BUFFER_READF64:
|
||||
return true;
|
||||
default:
|
||||
break;
|
||||
|
@ -175,6 +175,12 @@ void AssemblyBuilderX64::mov(OperandX64 lhs, OperandX64 rhs)
|
||||
place(OP_PLUS_REG(0xb0, lhs.base.index));
|
||||
placeImm8(rhs.imm);
|
||||
}
|
||||
else if (size == SizeX64::word)
|
||||
{
|
||||
place(0x66);
|
||||
place(OP_PLUS_REG(0xb8, lhs.base.index));
|
||||
placeImm16(rhs.imm);
|
||||
}
|
||||
else if (size == SizeX64::dword)
|
||||
{
|
||||
place(OP_PLUS_REG(0xb8, lhs.base.index));
|
||||
@ -200,6 +206,13 @@ void AssemblyBuilderX64::mov(OperandX64 lhs, OperandX64 rhs)
|
||||
placeModRegMem(lhs, 0, /*extraCodeBytes=*/1);
|
||||
placeImm8(rhs.imm);
|
||||
}
|
||||
else if (size == SizeX64::word)
|
||||
{
|
||||
place(0x66);
|
||||
place(0xc7);
|
||||
placeModRegMem(lhs, 0, /*extraCodeBytes=*/2);
|
||||
placeImm16(rhs.imm);
|
||||
}
|
||||
else
|
||||
{
|
||||
LUAU_ASSERT(size == SizeX64::dword || size == SizeX64::qword);
|
||||
@ -780,6 +793,16 @@ void AssemblyBuilderX64::vcvtsd2ss(OperandX64 dst, OperandX64 src1, OperandX64 s
|
||||
placeAvx("vcvtsd2ss", dst, src1, src2, 0x5a, (src2.cat == CategoryX64::reg ? src2.base.size : src2.memSize) == SizeX64::qword, AVX_0F, AVX_F2);
|
||||
}
|
||||
|
||||
void AssemblyBuilderX64::vcvtss2sd(OperandX64 dst, OperandX64 src1, OperandX64 src2)
|
||||
{
|
||||
if (src2.cat == CategoryX64::reg)
|
||||
LUAU_ASSERT(src2.base.size == SizeX64::xmmword);
|
||||
else
|
||||
LUAU_ASSERT(src2.memSize == SizeX64::dword);
|
||||
|
||||
placeAvx("vcvtsd2ss", dst, src1, src2, 0x5a, false, AVX_0F, AVX_F3);
|
||||
}
|
||||
|
||||
void AssemblyBuilderX64::vroundsd(OperandX64 dst, OperandX64 src1, OperandX64 src2, RoundingModeX64 roundingMode)
|
||||
{
|
||||
placeAvx("vroundsd", dst, src1, src2, uint8_t(roundingMode) | kRoundingPrecisionInexact, 0x0b, false, AVX_0F3A, AVX_66);
|
||||
@ -1086,7 +1109,10 @@ void AssemblyBuilderX64::placeBinaryRegAndRegMem(OperandX64 lhs, OperandX64 rhs,
|
||||
LUAU_ASSERT(lhs.base.size == (rhs.cat == CategoryX64::reg ? rhs.base.size : rhs.memSize));
|
||||
|
||||
SizeX64 size = lhs.base.size;
|
||||
LUAU_ASSERT(size == SizeX64::byte || size == SizeX64::dword || size == SizeX64::qword);
|
||||
LUAU_ASSERT(size == SizeX64::byte || size == SizeX64::word || size == SizeX64::dword || size == SizeX64::qword);
|
||||
|
||||
if (size == SizeX64::word)
|
||||
place(0x66);
|
||||
|
||||
placeRex(lhs.base, rhs);
|
||||
place(size == SizeX64::byte ? code8 : code);
|
||||
@ -1417,6 +1443,13 @@ void AssemblyBuilderX64::placeImm8(int32_t imm)
|
||||
LUAU_ASSERT(!"Invalid immediate value");
|
||||
}
|
||||
|
||||
void AssemblyBuilderX64::placeImm16(int16_t imm)
|
||||
{
|
||||
uint8_t* pos = codePos;
|
||||
LUAU_ASSERT(pos + sizeof(imm) < codeEnd);
|
||||
codePos = writeu16(pos, imm);
|
||||
}
|
||||
|
||||
void AssemblyBuilderX64::placeImm32(int32_t imm)
|
||||
{
|
||||
uint8_t* pos = codePos;
|
||||
|
@ -15,6 +15,16 @@ inline uint8_t* writeu8(uint8_t* target, uint8_t value)
|
||||
return target + sizeof(value);
|
||||
}
|
||||
|
||||
inline uint8_t* writeu16(uint8_t* target, uint16_t value)
|
||||
{
|
||||
#if defined(LUAU_BIG_ENDIAN)
|
||||
value = htole16(value);
|
||||
#endif
|
||||
|
||||
memcpy(target, &value, sizeof(value));
|
||||
return target + sizeof(value);
|
||||
}
|
||||
|
||||
inline uint8_t* writeu32(uint8_t* target, uint32_t value)
|
||||
{
|
||||
#if defined(LUAU_BIG_ENDIAN)
|
||||
|
@ -235,6 +235,8 @@ const char* getCmdName(IrCmd cmd)
|
||||
return "CHECK_NODE_NO_NEXT";
|
||||
case IrCmd::CHECK_NODE_VALUE:
|
||||
return "CHECK_NODE_VALUE";
|
||||
case IrCmd::CHECK_BUFFER_LEN:
|
||||
return "CHECK_BUFFER_LEN";
|
||||
case IrCmd::INTERRUPT:
|
||||
return "INTERRUPT";
|
||||
case IrCmd::CHECK_GC:
|
||||
@ -319,6 +321,30 @@ const char* getCmdName(IrCmd cmd)
|
||||
return "GET_TYPEOF";
|
||||
case IrCmd::FINDUPVAL:
|
||||
return "FINDUPVAL";
|
||||
case IrCmd::BUFFER_READI8:
|
||||
return "BUFFER_READI8";
|
||||
case IrCmd::BUFFER_READU8:
|
||||
return "BUFFER_READU8";
|
||||
case IrCmd::BUFFER_WRITEI8:
|
||||
return "BUFFER_WRITEI8";
|
||||
case IrCmd::BUFFER_READI16:
|
||||
return "BUFFER_READI16";
|
||||
case IrCmd::BUFFER_READU16:
|
||||
return "BUFFER_READU16";
|
||||
case IrCmd::BUFFER_WRITEI16:
|
||||
return "BUFFER_WRITEI16";
|
||||
case IrCmd::BUFFER_READI32:
|
||||
return "BUFFER_READI32";
|
||||
case IrCmd::BUFFER_WRITEI32:
|
||||
return "BUFFER_WRITEI32";
|
||||
case IrCmd::BUFFER_READF32:
|
||||
return "BUFFER_READF32";
|
||||
case IrCmd::BUFFER_WRITEF32:
|
||||
return "BUFFER_WRITEF32";
|
||||
case IrCmd::BUFFER_READF64:
|
||||
return "BUFFER_READF64";
|
||||
case IrCmd::BUFFER_WRITEF64:
|
||||
return "BUFFER_WRITEF64";
|
||||
}
|
||||
|
||||
LUAU_UNREACHABLE();
|
||||
|
@ -135,13 +135,9 @@ static void checkObjectBarrierConditions(AssemblyBuilderA64& build, RegisterA64
|
||||
if (ratag == -1 || !isGCO(ratag))
|
||||
{
|
||||
if (ra.kind == IrOpKind::VmReg)
|
||||
{
|
||||
addr = mem(rBase, vmRegOp(ra) * sizeof(TValue) + offsetof(TValue, tt));
|
||||
}
|
||||
else if (ra.kind == IrOpKind::VmConst)
|
||||
{
|
||||
emitAddOffset(build, temp, rConstants, vmConstOp(ra) * sizeof(TValue) + offsetof(TValue, tt));
|
||||
}
|
||||
|
||||
build.ldr(tempw, addr);
|
||||
build.cmp(tempw, LUA_TSTRING);
|
||||
@ -154,13 +150,10 @@ static void checkObjectBarrierConditions(AssemblyBuilderA64& build, RegisterA64
|
||||
|
||||
// iswhite(gcvalue(ra))
|
||||
if (ra.kind == IrOpKind::VmReg)
|
||||
{
|
||||
addr = mem(rBase, vmRegOp(ra) * sizeof(TValue) + offsetof(TValue, value));
|
||||
}
|
||||
else if (ra.kind == IrOpKind::VmConst)
|
||||
{
|
||||
emitAddOffset(build, temp, rConstants, vmConstOp(ra) * sizeof(TValue) + offsetof(TValue, value));
|
||||
}
|
||||
|
||||
build.ldr(temp, addr);
|
||||
build.ldrb(tempw, mem(temp, offsetof(GCheader, marked)));
|
||||
build.tst(tempw, bit2mask(WHITE0BIT, WHITE1BIT));
|
||||
@ -240,6 +233,14 @@ static bool emitBuiltin(
|
||||
}
|
||||
}
|
||||
|
||||
static uint64_t getDoubleBits(double value)
|
||||
{
|
||||
uint64_t result;
|
||||
static_assert(sizeof(result) == sizeof(value), "Expecting double to be 64-bit");
|
||||
memcpy(&result, &value, sizeof(value));
|
||||
return result;
|
||||
}
|
||||
|
||||
IrLoweringA64::IrLoweringA64(AssemblyBuilderA64& build, ModuleHelpers& helpers, IrFunction& function, LoweringStats* stats)
|
||||
: build(build)
|
||||
, helpers(helpers)
|
||||
@ -309,7 +310,7 @@ void IrLoweringA64::lowerInst(IrInst& inst, uint32_t index, const IrBlock& next)
|
||||
|
||||
if (inst.b.kind == IrOpKind::Inst)
|
||||
{
|
||||
build.add(inst.regA64, inst.regA64, regOp(inst.b), kTValueSizeLog2);
|
||||
build.add(inst.regA64, inst.regA64, regOp(inst.b), kTValueSizeLog2); // implicit uxtw
|
||||
}
|
||||
else if (inst.b.kind == IrOpKind::Constant)
|
||||
{
|
||||
@ -409,9 +410,16 @@ void IrLoweringA64::lowerInst(IrInst& inst, uint32_t index, const IrBlock& next)
|
||||
}
|
||||
case IrCmd::STORE_DOUBLE:
|
||||
{
|
||||
RegisterA64 temp = tempDouble(inst.b);
|
||||
AddressA64 addr = tempAddr(inst.a, offsetof(TValue, value));
|
||||
build.str(temp, addr);
|
||||
if (inst.b.kind == IrOpKind::Constant && getDoubleBits(doubleOp(inst.b)) == 0)
|
||||
{
|
||||
build.str(xzr, addr);
|
||||
}
|
||||
else
|
||||
{
|
||||
RegisterA64 temp = tempDouble(inst.b);
|
||||
build.str(temp, addr);
|
||||
}
|
||||
break;
|
||||
}
|
||||
case IrCmd::STORE_INT:
|
||||
@ -816,11 +824,12 @@ void IrLoweringA64::lowerInst(IrInst& inst, uint32_t index, const IrBlock& next)
|
||||
{
|
||||
RegisterA64 index = tempDouble(inst.a);
|
||||
RegisterA64 limit = tempDouble(inst.b);
|
||||
RegisterA64 step = tempDouble(inst.c);
|
||||
|
||||
Label direct;
|
||||
|
||||
// step > 0
|
||||
build.fcmpz(tempDouble(inst.c));
|
||||
build.fcmpz(step);
|
||||
build.b(getConditionFP(IrCondition::Greater), direct);
|
||||
|
||||
// !(limit <= index)
|
||||
@ -974,6 +983,7 @@ void IrLoweringA64::lowerInst(IrInst& inst, uint32_t index, const IrBlock& next)
|
||||
{
|
||||
inst.regA64 = regs.allocReg(KindA64::w, index);
|
||||
RegisterA64 temp = tempDouble(inst.a);
|
||||
// note: we don't use fcvtzu for consistency with C++ code
|
||||
build.fcvtzs(castReg(KindA64::x, inst.regA64), temp);
|
||||
break;
|
||||
}
|
||||
@ -989,7 +999,7 @@ void IrLoweringA64::lowerInst(IrInst& inst, uint32_t index, const IrBlock& next)
|
||||
else if (inst.b.kind == IrOpKind::Inst)
|
||||
{
|
||||
build.add(temp, rBase, uint16_t(vmRegOp(inst.a) * sizeof(TValue)));
|
||||
build.add(temp, temp, regOp(inst.b), kTValueSizeLog2);
|
||||
build.add(temp, temp, regOp(inst.b), kTValueSizeLog2); // implicit uxtw
|
||||
build.str(temp, mem(rState, offsetof(lua_State, top)));
|
||||
}
|
||||
else
|
||||
@ -1372,6 +1382,63 @@ void IrLoweringA64::lowerInst(IrInst& inst, uint32_t index, const IrBlock& next)
|
||||
finalizeTargetLabel(inst.b, fresh);
|
||||
break;
|
||||
}
|
||||
case IrCmd::CHECK_BUFFER_LEN:
{
int accessSize = intOp(inst.c);
LUAU_ASSERT(accessSize > 0 && accessSize <= int(AssemblyBuilderA64::kMaxImmediate));

Label fresh; // used when guard aborts execution or jumps to a VM exit
Label& target = getTargetLabel(inst.d, fresh);

RegisterA64 temp = regs.allocTemp(KindA64::w);
build.ldr(temp, mem(regOp(inst.a), offsetof(Buffer, len)));

if (inst.b.kind == IrOpKind::Inst)
{
if (accessSize == 1)
{
// fails if offset >= len
build.cmp(temp, regOp(inst.b));
build.b(ConditionA64::UnsignedLessEqual, target);
}
else
{
// fails if offset + size >= len; we compute it as len - offset <= size
RegisterA64 tempx = castReg(KindA64::x, temp);
build.sub(tempx, tempx, regOp(inst.b)); // implicit uxtw
build.cmp(tempx, uint16_t(accessSize));
build.b(ConditionA64::LessEqual, target); // note: this is a signed 64-bit comparison so that out of bounds offset fails
}
}
else if (inst.b.kind == IrOpKind::Constant)
{
int offset = intOp(inst.b);

// Constant folding can take care of it, but for safety we avoid overflow/underflow cases here
if (offset < 0 || unsigned(offset) + unsigned(accessSize) >= unsigned(INT_MAX))
{
build.b(target);
}
else if (offset + accessSize <= int(AssemblyBuilderA64::kMaxImmediate))
{
build.cmp(temp, uint16_t(offset + accessSize));
build.b(ConditionA64::UnsignedLessEqual, target);
}
else
{
RegisterA64 temp2 = regs.allocTemp(KindA64::w);
build.mov(temp2, offset + accessSize);
build.cmp(temp, temp2);
build.b(ConditionA64::UnsignedLessEqual, target);
}
}
else
{
LUAU_ASSERT(!"Unsupported instruction form");
}
finalizeTargetLabel(inst.d, fresh);
break;
}
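The non-constant path above deliberately does the bounds arithmetic in 64 bits: the 32-bit length and offset are widened before subtracting, so an offset that would wrap a 32-bit computation still fails the guard. A small stand-alone demonstration of why the widening matters (the exact comparison is the one emitted above; this only illustrates the overflow hazard it avoids):

#include <cstdint>
#include <cstdio>

int main()
{
    uint32_t len = 16;
    uint32_t offset = 0xfffffffc; // looks like "-4" if it ever wrapped around
    int accessSize = 8;

    uint32_t naive = offset + uint32_t(accessSize);      // wraps to 4: a naive 32-bit check would pass
    int64_t remaining = int64_t(len) - int64_t(offset);  // large negative value: clearly out of bounds

    std::printf("naive offset+size = %u (looks in range)\n", naive);
    std::printf("len - offset = %lld (fails the guard)\n", (long long)remaining);
    return 0;
}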
|
||||
case IrCmd::INTERRUPT:
|
||||
{
|
||||
regs.spill(build, index);
|
||||
@ -1967,7 +2034,7 @@ void IrLoweringA64::lowerInst(IrInst& inst, uint32_t index, const IrBlock& next)
|
||||
LUAU_ASSERT(sizeof(TString*) == 8);
|
||||
|
||||
if (inst.a.kind == IrOpKind::Inst)
|
||||
build.add(inst.regA64, rGlobalState, regOp(inst.a), 3);
|
||||
build.add(inst.regA64, rGlobalState, regOp(inst.a), 3); // implicit uxtw
|
||||
else if (inst.a.kind == IrOpKind::Constant)
|
||||
build.add(inst.regA64, rGlobalState, uint16_t(tagOp(inst.a)) * 8);
|
||||
else
|
||||
@ -2000,6 +2067,118 @@ void IrLoweringA64::lowerInst(IrInst& inst, uint32_t index, const IrBlock& next)
|
||||
break;
|
||||
}
|
||||
|
||||
case IrCmd::BUFFER_READI8:
|
||||
{
|
||||
inst.regA64 = regs.allocReuse(KindA64::w, index, {inst.b});
|
||||
AddressA64 addr = tempAddrBuffer(inst.a, inst.b);
|
||||
|
||||
build.ldrsb(inst.regA64, addr);
|
||||
break;
|
||||
}
|
||||
|
||||
case IrCmd::BUFFER_READU8:
|
||||
{
|
||||
inst.regA64 = regs.allocReuse(KindA64::w, index, {inst.b});
|
||||
AddressA64 addr = tempAddrBuffer(inst.a, inst.b);
|
||||
|
||||
build.ldrb(inst.regA64, addr);
|
||||
break;
|
||||
}
|
||||
|
||||
case IrCmd::BUFFER_WRITEI8:
|
||||
{
|
||||
RegisterA64 temp = tempInt(inst.c);
|
||||
AddressA64 addr = tempAddrBuffer(inst.a, inst.b);
|
||||
|
||||
build.strb(temp, addr);
|
||||
break;
|
||||
}
|
||||
|
||||
case IrCmd::BUFFER_READI16:
|
||||
{
|
||||
inst.regA64 = regs.allocReuse(KindA64::w, index, {inst.b});
|
||||
AddressA64 addr = tempAddrBuffer(inst.a, inst.b);
|
||||
|
||||
build.ldrsh(inst.regA64, addr);
|
||||
break;
|
||||
}
|
||||
|
||||
case IrCmd::BUFFER_READU16:
|
||||
{
|
||||
inst.regA64 = regs.allocReuse(KindA64::w, index, {inst.b});
|
||||
AddressA64 addr = tempAddrBuffer(inst.a, inst.b);
|
||||
|
||||
build.ldrh(inst.regA64, addr);
|
||||
break;
|
||||
}
|
||||
|
||||
case IrCmd::BUFFER_WRITEI16:
|
||||
{
|
||||
RegisterA64 temp = tempInt(inst.c);
|
||||
AddressA64 addr = tempAddrBuffer(inst.a, inst.b);
|
||||
|
||||
build.strh(temp, addr);
|
||||
break;
|
||||
}
|
||||
|
||||
case IrCmd::BUFFER_READI32:
|
||||
{
|
||||
inst.regA64 = regs.allocReuse(KindA64::w, index, {inst.b});
|
||||
AddressA64 addr = tempAddrBuffer(inst.a, inst.b);
|
||||
|
||||
build.ldr(inst.regA64, addr);
|
||||
break;
|
||||
}
|
||||
|
||||
case IrCmd::BUFFER_WRITEI32:
|
||||
{
|
||||
RegisterA64 temp = tempInt(inst.c);
|
||||
AddressA64 addr = tempAddrBuffer(inst.a, inst.b);
|
||||
|
||||
build.str(temp, addr);
|
||||
break;
|
||||
}
|
||||
|
||||
case IrCmd::BUFFER_READF32:
|
||||
{
|
||||
inst.regA64 = regs.allocReg(KindA64::d, index);
|
||||
RegisterA64 temp = castReg(KindA64::s, inst.regA64); // safe to alias a fresh register
|
||||
AddressA64 addr = tempAddrBuffer(inst.a, inst.b);
|
||||
|
||||
build.ldr(temp, addr);
|
||||
build.fcvt(inst.regA64, temp);
|
||||
break;
|
||||
}
|
||||
|
||||
case IrCmd::BUFFER_WRITEF32:
|
||||
{
|
||||
RegisterA64 temp1 = tempDouble(inst.c);
|
||||
RegisterA64 temp2 = regs.allocTemp(KindA64::s);
|
||||
AddressA64 addr = tempAddrBuffer(inst.a, inst.b);
|
||||
|
||||
build.fcvt(temp2, temp1);
|
||||
build.str(temp2, addr);
|
||||
break;
|
||||
}
|
||||
|
||||
case IrCmd::BUFFER_READF64:
|
||||
{
|
||||
inst.regA64 = regs.allocReg(KindA64::d, index);
|
||||
AddressA64 addr = tempAddrBuffer(inst.a, inst.b);
|
||||
|
||||
build.ldr(inst.regA64, addr);
|
||||
break;
|
||||
}
|
||||
|
||||
case IrCmd::BUFFER_WRITEF64:
|
||||
{
|
||||
RegisterA64 temp = tempDouble(inst.c);
|
||||
AddressA64 addr = tempAddrBuffer(inst.a, inst.b);
|
||||
|
||||
build.str(temp, addr);
|
||||
break;
|
||||
}
|
||||
|
||||
// To handle unsupported instructions, add "case IrCmd::OP" and make sure to set error = true!
|
||||
}
|
||||
|
||||
@ -2126,9 +2305,7 @@ RegisterA64 IrLoweringA64::tempDouble(IrOp op)
|
||||
RegisterA64 temp1 = regs.allocTemp(KindA64::x);
|
||||
RegisterA64 temp2 = regs.allocTemp(KindA64::d);
|
||||
|
||||
uint64_t vali;
|
||||
static_assert(sizeof(vali) == sizeof(val), "Expecting double to be 64-bit");
|
||||
memcpy(&vali, &val, sizeof(val));
|
||||
uint64_t vali = getDoubleBits(val);
|
||||
|
||||
if ((vali << 16) == 0)
|
||||
{
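The constant path of tempDouble above special-cases doubles whose encoding has all low 48 bits clear, i.e. values fully described by the top 16 bits of their bit pattern, which are presumably cheap to materialize without a memory load. A quick stand-alone check of which constants satisfy the (vali << 16) == 0 test:

#include <cstdint>
#include <cstdio>
#include <cstring>

static uint64_t bitsOf(double value)
{
    uint64_t result;
    std::memcpy(&result, &value, sizeof(result)); // same trick as getDoubleBits above
    return result;
}

int main()
{
    for (double d : {0.0, 1.0, 2.5, 0.1})
    {
        uint64_t bits = bitsOf(d);
        std::printf("%g -> %016llx, low 48 bits clear: %s\n", d, (unsigned long long)bits,
            (bits << 16) == 0 ? "yes" : "no");
    }
    return 0;
}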
|
||||
@ -2224,6 +2401,35 @@ AddressA64 IrLoweringA64::tempAddr(IrOp op, int offset)
|
||||
}
|
||||
}
|
||||
|
||||
AddressA64 IrLoweringA64::tempAddrBuffer(IrOp bufferOp, IrOp indexOp)
|
||||
{
|
||||
if (indexOp.kind == IrOpKind::Inst)
|
||||
{
|
||||
RegisterA64 temp = regs.allocTemp(KindA64::x);
|
||||
build.add(temp, regOp(bufferOp), regOp(indexOp)); // implicit uxtw
|
||||
return mem(temp, offsetof(Buffer, data));
|
||||
}
|
||||
else if (indexOp.kind == IrOpKind::Constant)
|
||||
{
|
||||
// Since the resulting address may be used to load any size, including 1 byte, from an unaligned offset, we are limited by unscaled encoding
|
||||
if (unsigned(intOp(indexOp)) + offsetof(Buffer, data) <= 255)
|
||||
return mem(regOp(bufferOp), int(intOp(indexOp) + offsetof(Buffer, data)));
|
||||
|
||||
// indexOp can only be negative in dead code (since offsets are checked); this avoids assertion in emitAddOffset
|
||||
if (intOp(indexOp) < 0)
|
||||
return mem(regOp(bufferOp), offsetof(Buffer, data));
|
||||
|
||||
RegisterA64 temp = regs.allocTemp(KindA64::x);
|
||||
emitAddOffset(build, temp, regOp(bufferOp), size_t(intOp(indexOp)));
|
||||
return mem(temp, offsetof(Buffer, data));
|
||||
}
|
||||
else
|
||||
{
|
||||
LUAU_ASSERT(!"Unsupported instruction form");
|
||||
return noreg;
|
||||
}
|
||||
}
|
||||
|
||||
RegisterA64 IrLoweringA64::regOp(IrOp op)
|
||||
{
|
||||
IrInst& inst = function.instOp(op);
|
||||
|
@ -44,6 +44,7 @@ struct IrLoweringA64
|
||||
RegisterA64 tempInt(IrOp op);
|
||||
RegisterA64 tempUint(IrOp op);
|
||||
AddressA64 tempAddr(IrOp op, int offset);
|
||||
AddressA64 tempAddrBuffer(IrOp bufferOp, IrOp indexOp);
|
||||
|
||||
// May emit restore instructions
|
||||
RegisterA64 regOp(IrOp op);
|
||||
|
@ -219,22 +219,26 @@ void IrLoweringX64::lowerInst(IrInst& inst, uint32_t index, const IrBlock& next)
|
||||
build.mov(luauRegValue(vmRegOp(inst.a)), regOp(inst.b));
|
||||
break;
|
||||
case IrCmd::STORE_DOUBLE:
|
||||
{
|
||||
OperandX64 valueLhs = inst.a.kind == IrOpKind::Inst ? qword[regOp(inst.a) + offsetof(TValue, value)] : luauRegValue(vmRegOp(inst.a));
|
||||
|
||||
if (inst.b.kind == IrOpKind::Constant)
|
||||
{
|
||||
ScopedRegX64 tmp{regs, SizeX64::xmmword};
|
||||
|
||||
build.vmovsd(tmp.reg, build.f64(doubleOp(inst.b)));
|
||||
build.vmovsd(luauRegValue(vmRegOp(inst.a)), tmp.reg);
|
||||
build.vmovsd(valueLhs, tmp.reg);
|
||||
}
|
||||
else if (inst.b.kind == IrOpKind::Inst)
|
||||
{
|
||||
build.vmovsd(luauRegValue(vmRegOp(inst.a)), regOp(inst.b));
|
||||
build.vmovsd(valueLhs, regOp(inst.b));
|
||||
}
|
||||
else
|
||||
{
|
||||
LUAU_ASSERT(!"Unsupported instruction form");
|
||||
}
|
||||
break;
|
||||
}
|
||||
case IrCmd::STORE_INT:
|
||||
if (inst.b.kind == IrOpKind::Constant)
|
||||
build.mov(luauRegValueInt(vmRegOp(inst.a)), intOp(inst.b));
|
||||
@ -1169,6 +1173,64 @@ void IrLoweringX64::lowerInst(IrInst& inst, uint32_t index, const IrBlock& next)
|
||||
jumpOrAbortOnUndef(ConditionX64::Equal, inst.b, next);
|
||||
break;
|
||||
}
|
||||
case IrCmd::CHECK_BUFFER_LEN:
|
||||
{
|
||||
int accessSize = intOp(inst.c);
|
||||
LUAU_ASSERT(accessSize > 0);
|
||||
|
||||
if (inst.b.kind == IrOpKind::Inst)
|
||||
{
|
||||
if (accessSize == 1)
|
||||
{
|
||||
// Simpler check for a single byte access
|
||||
build.cmp(dword[regOp(inst.a) + offsetof(Buffer, len)], regOp(inst.b));
|
||||
jumpOrAbortOnUndef(ConditionX64::BelowEqual, inst.d, next);
|
||||
}
|
||||
else
|
||||
{
|
||||
ScopedRegX64 tmp1{regs, SizeX64::qword};
|
||||
ScopedRegX64 tmp2{regs, SizeX64::dword};
|
||||
|
||||
// To perform the bounds check using a single branch, we take index that is limited to 32 bit int
|
||||
// Access size is then added using a 64 bit addition
|
||||
// This will make sure that addition will not wrap around for values like 0xffffffff
|
||||
|
||||
if (IrCmd source = function.instOp(inst.b).cmd; source == IrCmd::NUM_TO_INT)
|
||||
{
|
||||
// When previous operation is a conversion to an integer (common case), it is guaranteed to have high register bits cleared
|
||||
build.lea(tmp1.reg, addr[qwordReg(regOp(inst.b)) + accessSize]);
|
||||
}
|
||||
else
|
||||
{
|
||||
// When the source of the index is unknown, it could contain garbage in the high bits, so we zero-extend it explicitly
|
||||
build.mov(dwordReg(tmp1.reg), regOp(inst.b));
|
||||
build.add(tmp1.reg, accessSize);
|
||||
}
|
||||
|
||||
build.mov(tmp2.reg, dword[regOp(inst.a) + offsetof(Buffer, len)]);
|
||||
build.cmp(qwordReg(tmp2.reg), tmp1.reg);
|
||||
|
||||
jumpOrAbortOnUndef(ConditionX64::Below, inst.d, next);
|
||||
}
|
||||
}
|
||||
else if (inst.b.kind == IrOpKind::Constant)
|
||||
{
|
||||
int offset = intOp(inst.b);
|
||||
|
||||
// Constant folding can take care of it, but for safety we avoid overflow/underflow cases here
|
||||
if (offset < 0 || unsigned(offset) + unsigned(accessSize) >= unsigned(INT_MAX))
|
||||
jumpOrAbortOnUndef(inst.d, next);
|
||||
else
|
||||
build.cmp(dword[regOp(inst.a) + offsetof(Buffer, len)], offset + accessSize);
|
||||
|
||||
jumpOrAbortOnUndef(ConditionX64::Below, inst.d, next);
|
||||
}
|
||||
else
|
||||
{
|
||||
LUAU_ASSERT(!"Unsupported instruction form");
|
||||
}
|
||||
break;
|
||||
}
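The X64 lowering of CHECK_BUFFER_LEN above zero-extends the index explicitly whenever its producer is not known to have cleared the high register bits (the NUM_TO_INT case is trusted; anything else is moved through a 32-bit register first). A stand-alone illustration of the hazard that explicit zero-extension avoids:

#include <cstdint>
#include <cstdio>

int main()
{
    uint64_t reg = 0xdeadbeef00000004ull; // stale garbage in the high 32 bits, index 4 in the low 32 bits

    uint64_t usedDirectly = reg + 8;        // bogus huge value if the register is used as-is
    uint64_t zeroExtended = uint32_t(reg);  // what a 32-bit mov leaves in the full 64-bit register
    uint64_t usedSafely = zeroExtended + 8; // 12, as intended

    std::printf("direct: %llx, zero-extended: %llu\n",
        (unsigned long long)usedDirectly, (unsigned long long)usedSafely);
    return 0;
}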
|
||||
case IrCmd::INTERRUPT:
|
||||
{
|
||||
unsigned pcpos = uintOp(inst.a);
|
||||
@ -1711,6 +1773,93 @@ void IrLoweringX64::lowerInst(IrInst& inst, uint32_t index, const IrBlock& next)
|
||||
break;
|
||||
}
|
||||
|
||||
case IrCmd::BUFFER_READI8:
|
||||
inst.regX64 = regs.allocRegOrReuse(SizeX64::dword, index, {inst.a, inst.b});
|
||||
|
||||
build.movsx(inst.regX64, byte[bufferAddrOp(inst.a, inst.b)]);
|
||||
break;
|
||||
|
||||
case IrCmd::BUFFER_READU8:
|
||||
inst.regX64 = regs.allocRegOrReuse(SizeX64::dword, index, {inst.a, inst.b});
|
||||
|
||||
build.movzx(inst.regX64, byte[bufferAddrOp(inst.a, inst.b)]);
|
||||
break;
|
||||
|
||||
case IrCmd::BUFFER_WRITEI8:
|
||||
{
|
||||
OperandX64 value = inst.c.kind == IrOpKind::Inst ? byteReg(regOp(inst.c)) : OperandX64(intOp(inst.c));
|
||||
|
||||
build.mov(byte[bufferAddrOp(inst.a, inst.b)], value);
|
||||
break;
|
||||
}
|
||||
|
||||
case IrCmd::BUFFER_READI16:
|
||||
inst.regX64 = regs.allocRegOrReuse(SizeX64::dword, index, {inst.a, inst.b});
|
||||
|
||||
build.movsx(inst.regX64, word[bufferAddrOp(inst.a, inst.b)]);
|
||||
break;
|
||||
|
||||
case IrCmd::BUFFER_READU16:
|
||||
inst.regX64 = regs.allocRegOrReuse(SizeX64::dword, index, {inst.a, inst.b});
|
||||
|
||||
build.movzx(inst.regX64, word[bufferAddrOp(inst.a, inst.b)]);
|
||||
break;
|
||||
|
||||
case IrCmd::BUFFER_WRITEI16:
|
||||
{
|
||||
OperandX64 value = inst.c.kind == IrOpKind::Inst ? wordReg(regOp(inst.c)) : OperandX64(intOp(inst.c));
|
||||
|
||||
build.mov(word[bufferAddrOp(inst.a, inst.b)], value);
|
||||
break;
|
||||
}
|
||||
|
||||
case IrCmd::BUFFER_READI32:
|
||||
inst.regX64 = regs.allocRegOrReuse(SizeX64::dword, index, {inst.a, inst.b});
|
||||
|
||||
build.mov(inst.regX64, dword[bufferAddrOp(inst.a, inst.b)]);
|
||||
break;
|
||||
|
||||
case IrCmd::BUFFER_WRITEI32:
|
||||
{
|
||||
OperandX64 value = inst.c.kind == IrOpKind::Inst ? regOp(inst.c) : OperandX64(intOp(inst.c));
|
||||
|
||||
build.mov(dword[bufferAddrOp(inst.a, inst.b)], value);
|
||||
break;
|
||||
}
|
||||
|
||||
case IrCmd::BUFFER_READF32:
|
||||
inst.regX64 = regs.allocReg(SizeX64::xmmword, index);
|
||||
|
||||
build.vcvtss2sd(inst.regX64, inst.regX64, dword[bufferAddrOp(inst.a, inst.b)]);
|
||||
break;
|
||||
|
||||
case IrCmd::BUFFER_WRITEF32:
|
||||
storeDoubleAsFloat(dword[bufferAddrOp(inst.a, inst.b)], inst.c);
|
||||
break;
|
||||
|
||||
case IrCmd::BUFFER_READF64:
|
||||
inst.regX64 = regs.allocReg(SizeX64::xmmword, index);
|
||||
|
||||
build.vmovsd(inst.regX64, qword[bufferAddrOp(inst.a, inst.b)]);
|
||||
break;
|
||||
|
||||
case IrCmd::BUFFER_WRITEF64:
|
||||
if (inst.c.kind == IrOpKind::Constant)
|
||||
{
|
||||
ScopedRegX64 tmp{regs, SizeX64::xmmword};
|
||||
build.vmovsd(tmp.reg, build.f64(doubleOp(inst.c)));
|
||||
build.vmovsd(qword[bufferAddrOp(inst.a, inst.b)], tmp.reg);
|
||||
}
|
||||
else if (inst.c.kind == IrOpKind::Inst)
|
||||
{
|
||||
build.vmovsd(qword[bufferAddrOp(inst.a, inst.b)], regOp(inst.c));
|
||||
}
|
||||
else
|
||||
{
|
||||
LUAU_ASSERT(!"Unsupported instruction form");
|
||||
}
|
||||
break;
|
||||
|
||||
// Pseudo instructions
|
||||
case IrCmd::NOP:
|
||||
case IrCmd::SUBSTITUTE:
|
||||
@ -1922,6 +2071,17 @@ RegisterX64 IrLoweringX64::regOp(IrOp op)
|
||||
    return inst.regX64;
}

OperandX64 IrLoweringX64::bufferAddrOp(IrOp bufferOp, IrOp indexOp)
{
    if (indexOp.kind == IrOpKind::Inst)
        return regOp(bufferOp) + qwordReg(regOp(indexOp)) + offsetof(Buffer, data);
    else if (indexOp.kind == IrOpKind::Constant)
        return regOp(bufferOp) + intOp(indexOp) + offsetof(Buffer, data);

    LUAU_ASSERT(!"Unsupported instruction form");
    return noreg;
}

IrConst IrLoweringX64::constOp(IrOp op) const
|
||||
{
|
||||
return function.constOp(op);
|
||||
|
@ -50,6 +50,7 @@ struct IrLoweringX64
|
||||
OperandX64 memRegUintOp(IrOp op);
|
||||
OperandX64 memRegTagOp(IrOp op);
|
||||
RegisterX64 regOp(IrOp op);
|
||||
OperandX64 bufferAddrOp(IrOp bufferOp, IrOp indexOp);
|
||||
|
||||
IrConst constOp(IrOp op) const;
|
||||
uint8_t tagOp(IrOp op) const;
|
||||
|
@ -8,6 +8,9 @@
|
||||
|
||||
#include <math.h>
|
||||
|
||||
LUAU_FASTFLAGVARIABLE(LuauBufferTranslateIr, false)
|
||||
LUAU_FASTFLAGVARIABLE(LuauImproveInsertIr, false)
|
||||
|
||||
// TODO: when nresults is less than our actual result count, we can skip computing/writing unused results
|
||||
|
||||
static const int kMinMaxUnrolledParams = 5;
|
||||
@ -150,13 +153,12 @@ static BuiltinImplResult translateBuiltinMathDegRad(IrBuilder& build, IrCmd cmd,
|
||||
return {BuiltinImplType::Full, 1};
|
||||
}
|
||||
|
||||
static BuiltinImplResult translateBuiltinMathLog(
|
||||
IrBuilder& build, LuauBuiltinFunction bfid, int nparams, int ra, int arg, IrOp args, int nresults, int pcpos)
|
||||
static BuiltinImplResult translateBuiltinMathLog(IrBuilder& build, int nparams, int ra, int arg, IrOp args, int nresults, int pcpos)
|
||||
{
|
||||
if (nparams < 1 || nresults > 1)
|
||||
return {BuiltinImplType::None, -1};
|
||||
|
||||
int libmId = bfid;
|
||||
int libmId = LBF_MATH_LOG;
|
||||
std::optional<double> denom;
|
||||
|
||||
if (nparams != 1)
|
||||
@ -298,7 +300,7 @@ static BuiltinImplResult translateBuiltinTypeof(IrBuilder& build, int nparams, i
|
||||
}
|
||||
|
||||
static BuiltinImplResult translateBuiltinBit32BinaryOp(
|
||||
IrBuilder& build, LuauBuiltinFunction bfid, int nparams, int ra, int arg, IrOp args, int nresults, int pcpos)
|
||||
IrBuilder& build, IrCmd cmd, bool btest, int nparams, int ra, int arg, IrOp args, int nresults, int pcpos)
|
||||
{
|
||||
if (nparams < 2 || nparams > kBit32BinaryOpUnrolledParams || nresults > 1)
|
||||
return {BuiltinImplType::None, -1};
|
||||
@ -315,17 +317,6 @@ static BuiltinImplResult translateBuiltinBit32BinaryOp(
|
||||
IrOp vaui = build.inst(IrCmd::NUM_TO_UINT, va);
|
||||
IrOp vbui = build.inst(IrCmd::NUM_TO_UINT, vb);
|
||||
|
||||
|
||||
IrCmd cmd = IrCmd::NOP;
|
||||
if (bfid == LBF_BIT32_BAND || bfid == LBF_BIT32_BTEST)
|
||||
cmd = IrCmd::BITAND_UINT;
|
||||
else if (bfid == LBF_BIT32_BXOR)
|
||||
cmd = IrCmd::BITXOR_UINT;
|
||||
else if (bfid == LBF_BIT32_BOR)
|
||||
cmd = IrCmd::BITOR_UINT;
|
||||
|
||||
LUAU_ASSERT(cmd != IrCmd::NOP);
|
||||
|
||||
IrOp res = build.inst(cmd, vaui, vbui);
|
||||
|
||||
for (int i = 3; i <= nparams; ++i)
|
||||
@ -336,7 +327,7 @@ static BuiltinImplResult translateBuiltinBit32BinaryOp(
|
||||
res = build.inst(cmd, res, arg);
|
||||
}
|
||||
|
||||
if (bfid == LBF_BIT32_BTEST)
|
||||
if (btest)
|
||||
{
|
||||
IrOp falsey = build.block(IrBlockKind::Internal);
|
||||
IrOp truthy = build.block(IrBlockKind::Internal);
|
||||
@ -351,7 +342,6 @@ static BuiltinImplResult translateBuiltinBit32BinaryOp(
|
||||
build.inst(IrCmd::STORE_INT, build.vmReg(ra), build.constInt(1));
|
||||
build.inst(IrCmd::JUMP, exit);
|
||||
|
||||
|
||||
build.beginBlock(exit);
|
||||
build.inst(IrCmd::STORE_TAG, build.vmReg(ra), build.constTag(LUA_TBOOLEAN));
|
||||
}
|
||||
@ -367,8 +357,7 @@ static BuiltinImplResult translateBuiltinBit32BinaryOp(
|
||||
return {BuiltinImplType::Full, 1};
|
||||
}
|
||||
|
||||
static BuiltinImplResult translateBuiltinBit32Bnot(
|
||||
IrBuilder& build, LuauBuiltinFunction bfid, int nparams, int ra, int arg, IrOp args, int nresults, int pcpos)
|
||||
static BuiltinImplResult translateBuiltinBit32Bnot(IrBuilder& build, int nparams, int ra, int arg, IrOp args, int nresults, int pcpos)
|
||||
{
|
||||
if (nparams < 1 || nresults > 1)
|
||||
return {BuiltinImplType::None, -1};
|
||||
@ -389,7 +378,7 @@ static BuiltinImplResult translateBuiltinBit32Bnot(
|
||||
}
|
||||
|
||||
static BuiltinImplResult translateBuiltinBit32Shift(
|
||||
IrBuilder& build, LuauBuiltinFunction bfid, int nparams, int ra, int arg, IrOp args, int nresults, IrOp fallback, int pcpos)
|
||||
IrBuilder& build, IrCmd cmd, int nparams, int ra, int arg, IrOp args, int nresults, IrOp fallback, int pcpos)
|
||||
{
|
||||
if (nparams < 2 || nresults > 1)
|
||||
return {BuiltinImplType::None, -1};
|
||||
@ -418,16 +407,6 @@ static BuiltinImplResult translateBuiltinBit32Shift(
|
||||
build.beginBlock(block);
|
||||
}
|
||||
|
||||
IrCmd cmd = IrCmd::NOP;
|
||||
if (bfid == LBF_BIT32_LSHIFT)
|
||||
cmd = IrCmd::BITLSHIFT_UINT;
|
||||
else if (bfid == LBF_BIT32_RSHIFT)
|
||||
cmd = IrCmd::BITRSHIFT_UINT;
|
||||
else if (bfid == LBF_BIT32_ARSHIFT)
|
||||
cmd = IrCmd::BITARSHIFT_UINT;
|
||||
|
||||
LUAU_ASSERT(cmd != IrCmd::NOP);
|
||||
|
||||
IrOp shift = build.inst(cmd, vaui, vbi);
|
||||
|
||||
IrOp value = build.inst(IrCmd::UINT_TO_NUM, shift);
|
||||
@ -439,8 +418,7 @@ static BuiltinImplResult translateBuiltinBit32Shift(
|
||||
return {BuiltinImplType::UsesFallback, 1};
|
||||
}
|
||||
|
||||
static BuiltinImplResult translateBuiltinBit32Rotate(
|
||||
IrBuilder& build, LuauBuiltinFunction bfid, int nparams, int ra, int arg, IrOp args, int nresults, int pcpos)
|
||||
static BuiltinImplResult translateBuiltinBit32Rotate(IrBuilder& build, IrCmd cmd, int nparams, int ra, int arg, IrOp args, int nresults, int pcpos)
|
||||
{
|
||||
if (nparams < 2 || nresults > 1)
|
||||
return {BuiltinImplType::None, -1};
|
||||
@ -454,7 +432,6 @@ static BuiltinImplResult translateBuiltinBit32Rotate(
|
||||
IrOp vaui = build.inst(IrCmd::NUM_TO_UINT, va);
|
||||
IrOp vbi = build.inst(IrCmd::NUM_TO_INT, vb);
|
||||
|
||||
IrCmd cmd = (bfid == LBF_BIT32_LROTATE) ? IrCmd::BITLROTATE_UINT : IrCmd::BITRROTATE_UINT;
|
||||
IrOp shift = build.inst(cmd, vaui, vbi);
|
||||
|
||||
IrOp value = build.inst(IrCmd::UINT_TO_NUM, shift);
|
||||
@ -467,7 +444,7 @@ static BuiltinImplResult translateBuiltinBit32Rotate(
|
||||
}
|
||||
|
||||
static BuiltinImplResult translateBuiltinBit32Extract(
|
||||
IrBuilder& build, LuauBuiltinFunction bfid, int nparams, int ra, int arg, IrOp args, int nresults, IrOp fallback, int pcpos)
|
||||
IrBuilder& build, int nparams, int ra, int arg, IrOp args, int nresults, IrOp fallback, int pcpos)
|
||||
{
|
||||
if (nparams < 2 || nresults > 1)
|
||||
return {BuiltinImplType::None, -1};
|
||||
@ -547,8 +524,7 @@ static BuiltinImplResult translateBuiltinBit32Extract(
|
||||
return {BuiltinImplType::UsesFallback, 1};
|
||||
}
|
||||
|
||||
static BuiltinImplResult translateBuiltinBit32ExtractK(
|
||||
IrBuilder& build, LuauBuiltinFunction bfid, int nparams, int ra, int arg, IrOp args, int nresults, int pcpos)
|
||||
static BuiltinImplResult translateBuiltinBit32ExtractK(IrBuilder& build, int nparams, int ra, int arg, IrOp args, int nresults, int pcpos)
|
||||
{
|
||||
if (nparams < 2 || nresults > 1)
|
||||
return {BuiltinImplType::None, -1};
|
||||
@ -583,8 +559,7 @@ static BuiltinImplResult translateBuiltinBit32ExtractK(
|
||||
return {BuiltinImplType::Full, 1};
|
||||
}
|
||||
|
||||
static BuiltinImplResult translateBuiltinBit32Unary(
|
||||
IrBuilder& build, IrCmd cmd, int nparams, int ra, int arg, IrOp args, int nresults, int pcpos)
|
||||
static BuiltinImplResult translateBuiltinBit32Unary(IrBuilder& build, IrCmd cmd, int nparams, int ra, int arg, IrOp args, int nresults, int pcpos)
|
||||
{
|
||||
if (nparams < 1 || nresults > 1)
|
||||
return {BuiltinImplType::None, -1};
|
||||
@ -607,7 +582,7 @@ static BuiltinImplResult translateBuiltinBit32Unary(
|
||||
}
|
||||
|
||||
static BuiltinImplResult translateBuiltinBit32Replace(
|
||||
IrBuilder& build, LuauBuiltinFunction bfid, int nparams, int ra, int arg, IrOp args, int nresults, IrOp fallback, int pcpos)
|
||||
IrBuilder& build, int nparams, int ra, int arg, IrOp args, int nresults, IrOp fallback, int pcpos)
|
||||
{
|
||||
if (nparams < 3 || nresults > 1)
|
||||
return {BuiltinImplType::None, -1};
|
||||
@ -631,7 +606,6 @@ static BuiltinImplResult translateBuiltinBit32Replace(
|
||||
build.inst(IrCmd::JUMP_CMP_INT, f, build.constInt(32), build.cond(IrCondition::UnsignedGreaterEqual), fallback, block);
|
||||
build.beginBlock(block);
|
||||
|
||||
// TODO: this can be optimized using a bit-select instruction (btr on x86)
|
||||
IrOp m = build.constInt(1);
|
||||
IrOp shift = build.inst(IrCmd::BITLSHIFT_UINT, m, f);
|
||||
IrOp not_ = build.inst(IrCmd::BITNOT_UINT, shift);
|
||||
@ -717,10 +691,35 @@ static BuiltinImplResult translateBuiltinTableInsert(IrBuilder& build, int npara
|
||||
|
||||
    IrOp setnum = build.inst(IrCmd::TABLE_SETNUM, table, pos);

    IrOp va = build.inst(IrCmd::LOAD_TVALUE, args);
    build.inst(IrCmd::STORE_TVALUE, setnum, va);
    if (FFlag::LuauImproveInsertIr)
    {
        if (args.kind == IrOpKind::Constant)
        {
            LUAU_ASSERT(build.function.constOp(args).kind == IrConstKind::Double);

    build.inst(IrCmd::BARRIER_TABLE_FORWARD, table, args, build.undef());
            // No barrier necessary since numbers aren't collectable
            build.inst(IrCmd::STORE_DOUBLE, setnum, args);
            build.inst(IrCmd::STORE_TAG, setnum, build.constTag(LUA_TNUMBER));
        }
        else
        {
            IrOp va = build.inst(IrCmd::LOAD_TVALUE, args);
            build.inst(IrCmd::STORE_TVALUE, setnum, va);

            // Compiler only generates FASTCALL*K for source-level constants, so dynamic imports are not affected
            LUAU_ASSERT(build.function.proto);
            IrOp argstag = args.kind == IrOpKind::VmConst ? build.constTag(build.function.proto->k[vmConstOp(args)].tt) : build.undef();

            build.inst(IrCmd::BARRIER_TABLE_FORWARD, table, args, argstag);
        }
    }
    else
    {
        IrOp va = build.inst(IrCmd::LOAD_TVALUE, args);
        build.inst(IrCmd::STORE_TVALUE, setnum, va);

        build.inst(IrCmd::BARRIER_TABLE_FORWARD, table, args, build.undef());
    }

    return {BuiltinImplType::Full, 0};
}
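As the comments above note, when the inserted value is a source-level numeric constant the new path stores the number directly and skips the table write barrier, since numbers are not collectable. A small Luau illustration of the two shapes (a hypothetical snippet, not taken from the change itself):

local t = {}
local x = math.random()
table.insert(t, 42)   -- constant argument: compiled as FASTCALL*K, number stored directly, no GC barrier emitted
table.insert(t, x)    -- non-constant argument: stores the TValue and keeps the BARRIER_TABLE_FORWARD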
@ -742,6 +741,59 @@ static BuiltinImplResult translateBuiltinStringLen(IrBuilder& build, int nparams
|
||||
    return {BuiltinImplType::Full, 1};
}

static void translateBufferArgsAndCheckBounds(IrBuilder& build, int nparams, int arg, IrOp args, int size, int pcpos, IrOp& buf, IrOp& intIndex)
{
    build.loadAndCheckTag(build.vmReg(arg), LUA_TBUFFER, build.vmExit(pcpos));
    builtinCheckDouble(build, args, pcpos);

    if (nparams == 3)
        builtinCheckDouble(build, build.vmReg(vmRegOp(args) + 1), pcpos);

    buf = build.inst(IrCmd::LOAD_POINTER, build.vmReg(arg));

    IrOp numIndex = builtinLoadDouble(build, args);
    intIndex = build.inst(IrCmd::NUM_TO_INT, numIndex);

    build.inst(IrCmd::CHECK_BUFFER_LEN, buf, intIndex, build.constInt(size), build.vmExit(pcpos));
}

static BuiltinImplResult translateBuiltinBufferRead(
    IrBuilder& build, int nparams, int ra, int arg, IrOp args, int nresults, int pcpos, IrCmd readCmd, int size, IrCmd convCmd)
{
    if (!FFlag::LuauBufferTranslateIr)
        return {BuiltinImplType::None, -1};

    if (nparams < 2 || nresults > 1)
        return {BuiltinImplType::None, -1};

    IrOp buf, intIndex;
    translateBufferArgsAndCheckBounds(build, nparams, arg, args, size, pcpos, buf, intIndex);

    IrOp result = build.inst(readCmd, buf, intIndex);
    build.inst(IrCmd::STORE_DOUBLE, build.vmReg(ra), convCmd == IrCmd::NOP ? result : build.inst(convCmd, result));
    build.inst(IrCmd::STORE_TAG, build.vmReg(ra), build.constTag(LUA_TNUMBER));

    return {BuiltinImplType::Full, 1};
}

static BuiltinImplResult translateBuiltinBufferWrite(
    IrBuilder& build, int nparams, int ra, int arg, IrOp args, int nresults, int pcpos, IrCmd writeCmd, int size, IrCmd convCmd)
{
    if (!FFlag::LuauBufferTranslateIr)
        return {BuiltinImplType::None, -1};

    if (nparams < 3 || nresults > 0)
        return {BuiltinImplType::None, -1};

    IrOp buf, intIndex;
    translateBufferArgsAndCheckBounds(build, nparams, arg, args, size, pcpos, buf, intIndex);

    IrOp numValue = builtinLoadDouble(build, build.vmReg(vmRegOp(args) + 1));
    build.inst(writeCmd, buf, intIndex, convCmd == IrCmd::NOP ? numValue : build.inst(convCmd, numValue));

    return {BuiltinImplType::Full, 0};
}

BuiltinImplResult translateBuiltin(IrBuilder& build, int bfid, int ra, int arg, IrOp args, int nparams, int nresults, IrOp fallback, int pcpos)
|
||||
{
|
||||
// Builtins are not allowed to handle variadic arguments
|
||||
@ -757,7 +809,7 @@ BuiltinImplResult translateBuiltin(IrBuilder& build, int bfid, int ra, int arg,
|
||||
case LBF_MATH_RAD:
|
||||
return translateBuiltinMathDegRad(build, IrCmd::MUL_NUM, nparams, ra, arg, args, nresults, pcpos);
|
||||
case LBF_MATH_LOG:
|
||||
return translateBuiltinMathLog(build, LuauBuiltinFunction(bfid), nparams, ra, arg, args, nresults, pcpos);
|
||||
return translateBuiltinMathLog(build, nparams, ra, arg, args, nresults, pcpos);
|
||||
case LBF_MATH_MIN:
|
||||
return translateBuiltinMathMinMax(build, IrCmd::MIN_NUM, nparams, ra, arg, args, nresults, pcpos);
|
||||
case LBF_MATH_MAX:
|
||||
@ -797,29 +849,35 @@ BuiltinImplResult translateBuiltin(IrBuilder& build, int bfid, int ra, int arg,
|
||||
case LBF_MATH_MODF:
|
||||
return translateBuiltinNumberTo2Number(build, LuauBuiltinFunction(bfid), nparams, ra, arg, args, nresults, pcpos);
|
||||
case LBF_BIT32_BAND:
|
||||
return translateBuiltinBit32BinaryOp(build, IrCmd::BITAND_UINT, /* btest= */ false, nparams, ra, arg, args, nresults, pcpos);
|
||||
case LBF_BIT32_BOR:
|
||||
return translateBuiltinBit32BinaryOp(build, IrCmd::BITOR_UINT, /* btest= */ false, nparams, ra, arg, args, nresults, pcpos);
|
||||
case LBF_BIT32_BXOR:
|
||||
return translateBuiltinBit32BinaryOp(build, IrCmd::BITXOR_UINT, /* btest= */ false, nparams, ra, arg, args, nresults, pcpos);
|
||||
case LBF_BIT32_BTEST:
|
||||
return translateBuiltinBit32BinaryOp(build, LuauBuiltinFunction(bfid), nparams, ra, arg, args, nresults, pcpos);
|
||||
return translateBuiltinBit32BinaryOp(build, IrCmd::BITAND_UINT, /* btest= */ true, nparams, ra, arg, args, nresults, pcpos);
|
||||
case LBF_BIT32_BNOT:
|
||||
return translateBuiltinBit32Bnot(build, LuauBuiltinFunction(bfid), nparams, ra, arg, args, nresults, pcpos);
|
||||
return translateBuiltinBit32Bnot(build, nparams, ra, arg, args, nresults, pcpos);
|
||||
case LBF_BIT32_LSHIFT:
|
||||
return translateBuiltinBit32Shift(build, IrCmd::BITLSHIFT_UINT, nparams, ra, arg, args, nresults, fallback, pcpos);
|
||||
case LBF_BIT32_RSHIFT:
|
||||
return translateBuiltinBit32Shift(build, IrCmd::BITRSHIFT_UINT, nparams, ra, arg, args, nresults, fallback, pcpos);
|
||||
case LBF_BIT32_ARSHIFT:
|
||||
return translateBuiltinBit32Shift(build, LuauBuiltinFunction(bfid), nparams, ra, arg, args, nresults, fallback, pcpos);
|
||||
return translateBuiltinBit32Shift(build, IrCmd::BITARSHIFT_UINT, nparams, ra, arg, args, nresults, fallback, pcpos);
|
||||
case LBF_BIT32_LROTATE:
|
||||
return translateBuiltinBit32Rotate(build, IrCmd::BITLROTATE_UINT, nparams, ra, arg, args, nresults, pcpos);
|
||||
case LBF_BIT32_RROTATE:
|
||||
return translateBuiltinBit32Rotate(build, LuauBuiltinFunction(bfid), nparams, ra, arg, args, nresults, pcpos);
|
||||
return translateBuiltinBit32Rotate(build, IrCmd::BITRROTATE_UINT, nparams, ra, arg, args, nresults, pcpos);
|
||||
case LBF_BIT32_EXTRACT:
|
||||
return translateBuiltinBit32Extract(build, LuauBuiltinFunction(bfid), nparams, ra, arg, args, nresults, fallback, pcpos);
|
||||
return translateBuiltinBit32Extract(build, nparams, ra, arg, args, nresults, fallback, pcpos);
|
||||
case LBF_BIT32_EXTRACTK:
|
||||
return translateBuiltinBit32ExtractK(build, LuauBuiltinFunction(bfid), nparams, ra, arg, args, nresults, pcpos);
|
||||
return translateBuiltinBit32ExtractK(build, nparams, ra, arg, args, nresults, pcpos);
|
||||
case LBF_BIT32_COUNTLZ:
|
||||
return translateBuiltinBit32Unary(build, IrCmd::BITCOUNTLZ_UINT, nparams, ra, arg, args, nresults, pcpos);
|
||||
case LBF_BIT32_COUNTRZ:
|
||||
return translateBuiltinBit32Unary(build, IrCmd::BITCOUNTRZ_UINT, nparams, ra, arg, args, nresults, pcpos);
|
||||
case LBF_BIT32_REPLACE:
|
||||
return translateBuiltinBit32Replace(build, LuauBuiltinFunction(bfid), nparams, ra, arg, args, nresults, fallback, pcpos);
|
||||
return translateBuiltinBit32Replace(build, nparams, ra, arg, args, nresults, fallback, pcpos);
|
||||
case LBF_TYPE:
|
||||
return translateBuiltinType(build, nparams, ra, arg, args, nresults);
|
||||
case LBF_TYPEOF:
|
||||
@ -832,6 +890,32 @@ BuiltinImplResult translateBuiltin(IrBuilder& build, int bfid, int ra, int arg,
|
||||
return translateBuiltinStringLen(build, nparams, ra, arg, args, nresults, pcpos);
|
||||
case LBF_BIT32_BYTESWAP:
|
||||
return translateBuiltinBit32Unary(build, IrCmd::BYTESWAP_UINT, nparams, ra, arg, args, nresults, pcpos);
|
||||
    case LBF_BUFFER_READI8:
        return translateBuiltinBufferRead(build, nparams, ra, arg, args, nresults, pcpos, IrCmd::BUFFER_READI8, 1, IrCmd::INT_TO_NUM);
    case LBF_BUFFER_READU8:
        return translateBuiltinBufferRead(build, nparams, ra, arg, args, nresults, pcpos, IrCmd::BUFFER_READU8, 1, IrCmd::INT_TO_NUM);
    case LBF_BUFFER_WRITEU8:
        return translateBuiltinBufferWrite(build, nparams, ra, arg, args, nresults, pcpos, IrCmd::BUFFER_WRITEI8, 1, IrCmd::NUM_TO_UINT);
    case LBF_BUFFER_READI16:
        return translateBuiltinBufferRead(build, nparams, ra, arg, args, nresults, pcpos, IrCmd::BUFFER_READI16, 2, IrCmd::INT_TO_NUM);
    case LBF_BUFFER_READU16:
        return translateBuiltinBufferRead(build, nparams, ra, arg, args, nresults, pcpos, IrCmd::BUFFER_READU16, 2, IrCmd::INT_TO_NUM);
    case LBF_BUFFER_WRITEU16:
        return translateBuiltinBufferWrite(build, nparams, ra, arg, args, nresults, pcpos, IrCmd::BUFFER_WRITEI16, 2, IrCmd::NUM_TO_UINT);
    case LBF_BUFFER_READI32:
        return translateBuiltinBufferRead(build, nparams, ra, arg, args, nresults, pcpos, IrCmd::BUFFER_READI32, 4, IrCmd::INT_TO_NUM);
    case LBF_BUFFER_READU32:
        return translateBuiltinBufferRead(build, nparams, ra, arg, args, nresults, pcpos, IrCmd::BUFFER_READI32, 4, IrCmd::UINT_TO_NUM);
    case LBF_BUFFER_WRITEU32:
        return translateBuiltinBufferWrite(build, nparams, ra, arg, args, nresults, pcpos, IrCmd::BUFFER_WRITEI32, 4, IrCmd::NUM_TO_UINT);
    case LBF_BUFFER_READF32:
        return translateBuiltinBufferRead(build, nparams, ra, arg, args, nresults, pcpos, IrCmd::BUFFER_READF32, 4, IrCmd::NOP);
    case LBF_BUFFER_WRITEF32:
        return translateBuiltinBufferWrite(build, nparams, ra, arg, args, nresults, pcpos, IrCmd::BUFFER_WRITEF32, 4, IrCmd::NOP);
    case LBF_BUFFER_READF64:
        return translateBuiltinBufferRead(build, nparams, ra, arg, args, nresults, pcpos, IrCmd::BUFFER_READF64, 8, IrCmd::NOP);
    case LBF_BUFFER_WRITEF64:
        return translateBuiltinBufferWrite(build, nparams, ra, arg, args, nresults, pcpos, IrCmd::BUFFER_WRITEF64, 8, IrCmd::NOP);
    default:
        return {BuiltinImplType::None, -1};
    }
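With the dispatch above, every fixed-width buffer read and write builtin lowers to a tag check, a CHECK_BUFFER_LEN bounds check and a single load or store instead of a call into the C builtin. A minimal Luau sketch of code that exercises these fast paths (function names are the buffer builtins handled above; the values are arbitrary):

local b = buffer.create(16)      -- 16-byte buffer
buffer.writei16(b, 0, -1234)     -- 16-bit store, value converted from the number representation
buffer.writef32(b, 4, 3.5)       -- 32-bit float store, double narrowed on write
local i = buffer.readi16(b, 0)   -- sign-extending 16-bit load
local u = buffer.readu16(b, 0)   -- zero-extending 16-bit load of the same bytes
local f = buffer.readf32(b, 4)   -- float load, widened back to a double
print(i, u, f)                   -- an out-of-range offset exits to the VM error path instead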
@ -12,9 +12,8 @@
|
||||
#include "lstate.h"
|
||||
#include "ltm.h"
|
||||
|
||||
LUAU_FASTFLAG(LuauReduceStackSpills)
|
||||
LUAU_FASTFLAGVARIABLE(LuauInlineArrConstOffset, false)
|
||||
LUAU_FASTFLAGVARIABLE(LuauLowerAltLoopForn, false)
|
||||
LUAU_FASTFLAG(LuauImproveInsertIr)
|
||||
|
||||
namespace Luau
|
||||
{
|
||||
@ -562,9 +561,10 @@ IrOp translateFastCallN(IrBuilder& build, const Instruction* pc, int pcpos, bool
|
||||
|
||||
IrOp builtinArgs = args;
|
||||
|
||||
if (customArgs.kind == IrOpKind::VmConst && bfid != LBF_TABLE_INSERT)
|
||||
if (customArgs.kind == IrOpKind::VmConst && (FFlag::LuauImproveInsertIr || bfid != LBF_TABLE_INSERT))
|
||||
{
|
||||
TValue protok = build.function.proto->k[customArgs.index];
|
||||
LUAU_ASSERT(build.function.proto);
|
||||
TValue protok = build.function.proto->k[vmConstOp(customArgs)];
|
||||
|
||||
if (protok.tt == LUA_TNUMBER)
|
||||
builtinArgs = build.constDouble(protok.value.n);
|
||||
@ -921,20 +921,10 @@ void translateInstGetTableN(IrBuilder& build, const Instruction* pc, int pcpos)
|
||||
build.inst(IrCmd::CHECK_ARRAY_SIZE, vb, build.constInt(c), fallback);
|
||||
build.inst(IrCmd::CHECK_NO_METATABLE, vb, fallback);
|
||||
|
||||
if (FFlag::LuauInlineArrConstOffset)
|
||||
{
|
||||
IrOp arrEl = build.inst(IrCmd::GET_ARR_ADDR, vb, build.constInt(0));
|
||||
IrOp arrEl = build.inst(IrCmd::GET_ARR_ADDR, vb, build.constInt(0));
|
||||
|
||||
IrOp arrElTval = build.inst(IrCmd::LOAD_TVALUE, arrEl, build.constInt(c * sizeof(TValue)));
|
||||
build.inst(IrCmd::STORE_TVALUE, build.vmReg(ra), arrElTval);
|
||||
}
|
||||
else
|
||||
{
|
||||
IrOp arrEl = build.inst(IrCmd::GET_ARR_ADDR, vb, build.constInt(c));
|
||||
|
||||
IrOp arrElTval = build.inst(IrCmd::LOAD_TVALUE, arrEl);
|
||||
build.inst(IrCmd::STORE_TVALUE, build.vmReg(ra), arrElTval);
|
||||
}
|
||||
IrOp arrElTval = build.inst(IrCmd::LOAD_TVALUE, arrEl, build.constInt(c * sizeof(TValue)));
|
||||
build.inst(IrCmd::STORE_TVALUE, build.vmReg(ra), arrElTval);
|
||||
|
||||
IrOp next = build.blockAtInst(pcpos + 1);
|
||||
FallbackStreamScope scope(build, fallback, next);
|
||||
@ -961,20 +951,10 @@ void translateInstSetTableN(IrBuilder& build, const Instruction* pc, int pcpos)
|
||||
build.inst(IrCmd::CHECK_NO_METATABLE, vb, fallback);
|
||||
build.inst(IrCmd::CHECK_READONLY, vb, fallback);
|
||||
|
||||
if (FFlag::LuauInlineArrConstOffset)
|
||||
{
|
||||
IrOp arrEl = build.inst(IrCmd::GET_ARR_ADDR, vb, build.constInt(0));
|
||||
IrOp arrEl = build.inst(IrCmd::GET_ARR_ADDR, vb, build.constInt(0));
|
||||
|
||||
IrOp tva = build.inst(IrCmd::LOAD_TVALUE, build.vmReg(ra));
|
||||
build.inst(IrCmd::STORE_TVALUE, arrEl, tva, build.constInt(c * sizeof(TValue)));
|
||||
}
|
||||
else
|
||||
{
|
||||
IrOp arrEl = build.inst(IrCmd::GET_ARR_ADDR, vb, build.constInt(c));
|
||||
|
||||
IrOp tva = build.inst(IrCmd::LOAD_TVALUE, build.vmReg(ra));
|
||||
build.inst(IrCmd::STORE_TVALUE, arrEl, tva);
|
||||
}
|
||||
IrOp tva = build.inst(IrCmd::LOAD_TVALUE, build.vmReg(ra));
|
||||
build.inst(IrCmd::STORE_TVALUE, arrEl, tva, build.constInt(c * sizeof(TValue)));
|
||||
|
||||
build.inst(IrCmd::BARRIER_TABLE_FORWARD, vb, build.vmReg(ra), build.undef());
|
||||
|
||||
@ -1376,74 +1356,37 @@ void translateInstNewClosure(IrBuilder& build, const Instruction* pc, int pcpos)
|
||||
Instruction uinsn = pc[ui + 1];
|
||||
LUAU_ASSERT(LUAU_INSN_OP(uinsn) == LOP_CAPTURE);
|
||||
|
||||
if (FFlag::LuauReduceStackSpills)
|
||||
switch (LUAU_INSN_A(uinsn))
|
||||
{
|
||||
switch (LUAU_INSN_A(uinsn))
|
||||
{
|
||||
case LCT_VAL:
|
||||
{
|
||||
IrOp src = build.inst(IrCmd::LOAD_TVALUE, build.vmReg(LUAU_INSN_B(uinsn)));
|
||||
IrOp dst = build.inst(IrCmd::GET_CLOSURE_UPVAL_ADDR, ncl, build.vmUpvalue(ui));
|
||||
build.inst(IrCmd::STORE_TVALUE, dst, src);
|
||||
break;
|
||||
}
|
||||
|
||||
case LCT_REF:
|
||||
{
|
||||
IrOp src = build.inst(IrCmd::FINDUPVAL, build.vmReg(LUAU_INSN_B(uinsn)));
|
||||
IrOp dst = build.inst(IrCmd::GET_CLOSURE_UPVAL_ADDR, ncl, build.vmUpvalue(ui));
|
||||
build.inst(IrCmd::STORE_POINTER, dst, src);
|
||||
build.inst(IrCmd::STORE_TAG, dst, build.constTag(LUA_TUPVAL));
|
||||
break;
|
||||
}
|
||||
|
||||
case LCT_UPVAL:
|
||||
{
|
||||
IrOp src = build.inst(IrCmd::GET_CLOSURE_UPVAL_ADDR, build.undef(), build.vmUpvalue(LUAU_INSN_B(uinsn)));
|
||||
IrOp dst = build.inst(IrCmd::GET_CLOSURE_UPVAL_ADDR, ncl, build.vmUpvalue(ui));
|
||||
IrOp load = build.inst(IrCmd::LOAD_TVALUE, src);
|
||||
build.inst(IrCmd::STORE_TVALUE, dst, load);
|
||||
break;
|
||||
}
|
||||
|
||||
default:
|
||||
LUAU_ASSERT(!"Unknown upvalue capture type");
|
||||
LUAU_UNREACHABLE(); // improves switch() codegen by eliding opcode bounds checks
|
||||
}
|
||||
}
|
||||
else
|
||||
case LCT_VAL:
|
||||
{
|
||||
IrOp src = build.inst(IrCmd::LOAD_TVALUE, build.vmReg(LUAU_INSN_B(uinsn)));
|
||||
IrOp dst = build.inst(IrCmd::GET_CLOSURE_UPVAL_ADDR, ncl, build.vmUpvalue(ui));
|
||||
build.inst(IrCmd::STORE_TVALUE, dst, src);
|
||||
break;
|
||||
}
|
||||
|
||||
switch (LUAU_INSN_A(uinsn))
|
||||
{
|
||||
case LCT_VAL:
|
||||
{
|
||||
IrOp src = build.inst(IrCmd::LOAD_TVALUE, build.vmReg(LUAU_INSN_B(uinsn)));
|
||||
build.inst(IrCmd::STORE_TVALUE, dst, src);
|
||||
break;
|
||||
}
|
||||
case LCT_REF:
|
||||
{
|
||||
IrOp src = build.inst(IrCmd::FINDUPVAL, build.vmReg(LUAU_INSN_B(uinsn)));
|
||||
IrOp dst = build.inst(IrCmd::GET_CLOSURE_UPVAL_ADDR, ncl, build.vmUpvalue(ui));
|
||||
build.inst(IrCmd::STORE_POINTER, dst, src);
|
||||
build.inst(IrCmd::STORE_TAG, dst, build.constTag(LUA_TUPVAL));
|
||||
break;
|
||||
}
|
||||
|
||||
case LCT_REF:
|
||||
{
|
||||
IrOp src = build.inst(IrCmd::FINDUPVAL, build.vmReg(LUAU_INSN_B(uinsn)));
|
||||
build.inst(IrCmd::STORE_POINTER, dst, src);
|
||||
build.inst(IrCmd::STORE_TAG, dst, build.constTag(LUA_TUPVAL));
|
||||
break;
|
||||
}
|
||||
case LCT_UPVAL:
|
||||
{
|
||||
IrOp src = build.inst(IrCmd::GET_CLOSURE_UPVAL_ADDR, build.undef(), build.vmUpvalue(LUAU_INSN_B(uinsn)));
|
||||
IrOp dst = build.inst(IrCmd::GET_CLOSURE_UPVAL_ADDR, ncl, build.vmUpvalue(ui));
|
||||
IrOp load = build.inst(IrCmd::LOAD_TVALUE, src);
|
||||
build.inst(IrCmd::STORE_TVALUE, dst, load);
|
||||
break;
|
||||
}
|
||||
|
||||
case LCT_UPVAL:
|
||||
{
|
||||
IrOp src = build.inst(IrCmd::GET_CLOSURE_UPVAL_ADDR, build.undef(), build.vmUpvalue(LUAU_INSN_B(uinsn)));
|
||||
IrOp load = build.inst(IrCmd::LOAD_TVALUE, src);
|
||||
build.inst(IrCmd::STORE_TVALUE, dst, load);
|
||||
break;
|
||||
}
|
||||
|
||||
default:
|
||||
LUAU_ASSERT(!"Unknown upvalue capture type");
|
||||
LUAU_UNREACHABLE(); // improves switch() codegen by eliding opcode bounds checks
|
||||
}
|
||||
default:
|
||||
LUAU_ASSERT(!"Unknown upvalue capture type");
|
||||
LUAU_UNREACHABLE(); // improves switch() codegen by eliding opcode bounds checks
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -122,6 +122,7 @@ IrValueKind getCmdValueKind(IrCmd cmd)
|
||||
case IrCmd::CHECK_SLOT_MATCH:
|
||||
case IrCmd::CHECK_NODE_NO_NEXT:
|
||||
case IrCmd::CHECK_NODE_VALUE:
|
||||
case IrCmd::CHECK_BUFFER_LEN:
|
||||
case IrCmd::INTERRUPT:
|
||||
case IrCmd::CHECK_GC:
|
||||
case IrCmd::BARRIER_OBJ:
|
||||
@ -172,6 +173,21 @@ IrValueKind getCmdValueKind(IrCmd cmd)
|
||||
return IrValueKind::Pointer;
|
||||
case IrCmd::FINDUPVAL:
|
||||
return IrValueKind::Pointer;
|
||||
case IrCmd::BUFFER_READI8:
|
||||
case IrCmd::BUFFER_READU8:
|
||||
case IrCmd::BUFFER_READI16:
|
||||
case IrCmd::BUFFER_READU16:
|
||||
case IrCmd::BUFFER_READI32:
|
||||
return IrValueKind::Int;
|
||||
case IrCmd::BUFFER_WRITEI8:
|
||||
case IrCmd::BUFFER_WRITEI16:
|
||||
case IrCmd::BUFFER_WRITEI32:
|
||||
case IrCmd::BUFFER_WRITEF32:
|
||||
case IrCmd::BUFFER_WRITEF64:
|
||||
return IrValueKind::None;
|
||||
case IrCmd::BUFFER_READF32:
|
||||
case IrCmd::BUFFER_READF64:
|
||||
return IrValueKind::Double;
|
||||
}
|
||||
|
||||
LUAU_UNREACHABLE();
|
||||
|
@ -3,8 +3,6 @@
|
||||
|
||||
#include "Luau/IrUtils.h"
|
||||
|
||||
LUAU_FASTFLAGVARIABLE(LuauReduceStackSpills, false)
|
||||
|
||||
namespace Luau
|
||||
{
|
||||
namespace CodeGen
|
||||
@ -198,7 +196,7 @@ void IrValueLocationTracking::invalidateRestoreOp(IrOp location, bool skipValueI
|
||||
IrInst& inst = function.instructions[instIdx];
|
||||
|
||||
// If we are only modifying the tag, we can avoid invalidating tracked location of values
|
||||
if (FFlag::LuauReduceStackSpills && skipValueInvalidation)
|
||||
if (skipValueInvalidation)
|
||||
{
|
||||
switch (getCmdValueKind(inst.cmd))
|
||||
{
|
||||
|
@ -15,8 +15,6 @@
|
||||
LUAU_FASTINTVARIABLE(LuauCodeGenMinLinearBlockPath, 3)
|
||||
LUAU_FASTINTVARIABLE(LuauCodeGenReuseSlotLimit, 64)
|
||||
LUAU_FASTFLAGVARIABLE(DebugLuauAbortingChecks, false)
|
||||
LUAU_FASTFLAGVARIABLE(LuauReuseHashSlots2, false)
|
||||
LUAU_FASTFLAGVARIABLE(LuauMergeTagLoads, false)
|
||||
LUAU_FASTFLAGVARIABLE(LuauReuseArrSlots2, false)
|
||||
LUAU_FASTFLAG(LuauLowerAltLoopForn)
|
||||
|
||||
@ -546,10 +544,7 @@ static void constPropInInst(ConstPropState& state, IrBuilder& build, IrFunction&
|
||||
}
|
||||
else if (inst.a.kind == IrOpKind::VmReg)
|
||||
{
|
||||
if (FFlag::LuauMergeTagLoads)
|
||||
state.substituteOrRecordVmRegLoad(inst);
|
||||
else
|
||||
state.createRegLink(index, inst.a);
|
||||
state.substituteOrRecordVmRegLoad(inst);
|
||||
}
|
||||
break;
|
||||
case IrCmd::LOAD_POINTER:
|
||||
@ -762,7 +757,7 @@ static void constPropInInst(ConstPropState& state, IrBuilder& build, IrFunction&
|
||||
else
|
||||
replace(function, block, index, {IrCmd::JUMP, inst.d});
|
||||
}
|
||||
else if (FFlag::LuauMergeTagLoads && inst.a == inst.b)
|
||||
else if (inst.a == inst.b)
|
||||
{
|
||||
replace(function, block, index, {IrCmd::JUMP, inst.c});
|
||||
}
|
||||
@ -920,6 +915,22 @@ static void constPropInInst(ConstPropState& state, IrBuilder& build, IrFunction&
|
||||
state.inSafeEnv = true;
|
||||
}
|
||||
break;
|
||||
case IrCmd::CHECK_BUFFER_LEN:
|
||||
// TODO: remove duplicate checks and extend earlier check bound when possible
|
||||
break;
|
||||
case IrCmd::BUFFER_READI8:
|
||||
case IrCmd::BUFFER_READU8:
|
||||
case IrCmd::BUFFER_WRITEI8:
|
||||
case IrCmd::BUFFER_READI16:
|
||||
case IrCmd::BUFFER_READU16:
|
||||
case IrCmd::BUFFER_WRITEI16:
|
||||
case IrCmd::BUFFER_READI32:
|
||||
case IrCmd::BUFFER_WRITEI32:
|
||||
case IrCmd::BUFFER_READF32:
|
||||
case IrCmd::BUFFER_WRITEF32:
|
||||
case IrCmd::BUFFER_READF64:
|
||||
case IrCmd::BUFFER_WRITEF64:
|
||||
break;
|
||||
case IrCmd::CHECK_GC:
|
||||
// It is enough to perform a GC check once in a block
|
||||
if (state.checkedGc)
|
||||
@ -971,9 +982,6 @@ static void constPropInInst(ConstPropState& state, IrBuilder& build, IrFunction&
|
||||
state.getArrAddrCache.push_back(index);
|
||||
break;
|
||||
case IrCmd::GET_SLOT_NODE_ADDR:
|
||||
if (!FFlag::LuauReuseHashSlots2)
|
||||
break;
|
||||
|
||||
for (uint32_t prevIdx : state.getSlotNodeCache)
|
||||
{
|
||||
const IrInst& prev = function.instructions[prevIdx];
|
||||
@ -1126,9 +1134,6 @@ static void constPropInInst(ConstPropState& state, IrBuilder& build, IrFunction&
|
||||
break;
|
||||
}
|
||||
case IrCmd::CHECK_SLOT_MATCH:
|
||||
if (!FFlag::LuauReuseHashSlots2)
|
||||
break;
|
||||
|
||||
for (uint32_t prevIdx : state.checkSlotMatchCache)
|
||||
{
|
||||
const IrInst& prev = function.instructions[prevIdx];
|
||||
|
@ -539,6 +539,25 @@ public:
    {
        return impl.end();
    }

    bool operator==(const DenseHashSet<Key, Hash, Eq>& other)
    {
        if (size() != other.size())
            return false;

        for (const Key& k : *this)
        {
            if (!other.contains(k))
                return false;
        }

        return true;
    }

    bool operator!=(const DenseHashSet<Key, Hash, Eq>& other)
    {
        return !(*this == other);
    }
};

// This is a faster alternative of unordered_map, but it does not implement the same interface (i.e. it does not support erasing and has
|
||||
|
@ -7,7 +7,6 @@
|
||||
#include <algorithm>
|
||||
#include <string.h>
|
||||
|
||||
LUAU_FASTFLAG(LuauFloorDivision)
|
||||
|
||||
namespace Luau
|
||||
{
|
||||
@ -1283,8 +1282,6 @@ void BytecodeBuilder::validateInstructions() const
|
||||
case LOP_IDIV:
|
||||
case LOP_MOD:
|
||||
case LOP_POW:
|
||||
LUAU_ASSERT(FFlag::LuauFloorDivision || op != LOP_IDIV);
|
||||
|
||||
VREG(LUAU_INSN_A(insn));
|
||||
VREG(LUAU_INSN_B(insn));
|
||||
VREG(LUAU_INSN_C(insn));
|
||||
@ -1297,8 +1294,6 @@ void BytecodeBuilder::validateInstructions() const
|
||||
case LOP_IDIVK:
|
||||
case LOP_MODK:
|
||||
case LOP_POWK:
|
||||
LUAU_ASSERT(FFlag::LuauFloorDivision || op != LOP_IDIVK);
|
||||
|
||||
VREG(LUAU_INSN_A(insn));
|
||||
VREG(LUAU_INSN_B(insn));
|
||||
VCONST(LUAU_INSN_C(insn), Number);
|
||||
@ -1866,8 +1861,6 @@ void BytecodeBuilder::dumpInstruction(const uint32_t* code, std::string& result,
|
||||
break;
|
||||
|
||||
case LOP_IDIV:
|
||||
LUAU_ASSERT(FFlag::LuauFloorDivision);
|
||||
|
||||
formatAppend(result, "IDIV R%d R%d R%d\n", LUAU_INSN_A(insn), LUAU_INSN_B(insn), LUAU_INSN_C(insn));
|
||||
break;
|
||||
|
||||
@ -1904,8 +1897,6 @@ void BytecodeBuilder::dumpInstruction(const uint32_t* code, std::string& result,
|
||||
break;
|
||||
|
||||
case LOP_IDIVK:
|
||||
LUAU_ASSERT(FFlag::LuauFloorDivision);
|
||||
|
||||
formatAppend(result, "IDIVK R%d R%d K%d [", LUAU_INSN_A(insn), LUAU_INSN_B(insn), LUAU_INSN_C(insn));
|
||||
dumpConstant(result, LUAU_INSN_C(insn));
|
||||
result.append("]\n");
|
||||
|
@ -26,8 +26,8 @@ LUAU_FASTINTVARIABLE(LuauCompileInlineThreshold, 25)
|
||||
LUAU_FASTINTVARIABLE(LuauCompileInlineThresholdMaxBoost, 300)
|
||||
LUAU_FASTINTVARIABLE(LuauCompileInlineDepth, 5)
|
||||
|
||||
LUAU_FASTFLAG(LuauFloorDivision)
|
||||
LUAU_FASTFLAGVARIABLE(LuauCompileIfElseAndOr, false)
|
||||
LUAU_FASTFLAGVARIABLE(LuauCompileSideEffects, false)
|
||||
LUAU_FASTFLAGVARIABLE(LuauCompileDeadIf, false)
|
||||
|
||||
namespace Luau
|
||||
{
|
||||
@ -260,7 +260,7 @@ struct Compiler
|
||||
if (bytecode.getInstructionCount() > kMaxInstructionCount)
|
||||
CompileError::raise(func->location, "Exceeded function instruction limit; split the function into parts to compile");
|
||||
|
||||
// since top-level code only executes once, it can be marked as cold if it has no loops (top-level code with loops might be profitable to compile natively)
|
||||
// top-level code only executes once so it can be marked as cold if it has no loops; code with loops might be profitable to compile natively
|
||||
if (func->functionDepth == 0 && !hasLoops)
|
||||
protoflags |= LPF_NATIVE_COLD;
|
||||
|
||||
@ -644,10 +644,7 @@ struct Compiler
|
||||
|
||||
// evaluate extra expressions for side effects
|
||||
for (size_t i = func->args.size; i < expr->args.size; ++i)
|
||||
{
|
||||
RegScope rsi(this);
|
||||
compileExprAuto(expr->args.data[i], rsi);
|
||||
}
|
||||
compileExprSide(expr->args.data[i]);
|
||||
|
||||
// apply all evaluated arguments to the compiler state
|
||||
// note: locals use current startpc for debug info, although some of them have been computed earlier; this is similar to compileStatLocal
|
||||
@ -1038,8 +1035,6 @@ struct Compiler
|
||||
return k ? LOP_DIVK : LOP_DIV;
|
||||
|
||||
case AstExprBinary::FloorDiv:
|
||||
LUAU_ASSERT(FFlag::LuauFloorDivision);
|
||||
|
||||
return k ? LOP_IDIVK : LOP_IDIV;
|
||||
|
||||
case AstExprBinary::Mod:
|
||||
@ -1496,8 +1491,6 @@ struct Compiler
|
||||
case AstExprBinary::Mod:
|
||||
case AstExprBinary::Pow:
|
||||
{
|
||||
LUAU_ASSERT(FFlag::LuauFloorDivision || expr->op != AstExprBinary::FloorDiv);
|
||||
|
||||
int32_t rc = getConstantNumber(expr->right);
|
||||
|
||||
if (rc >= 0 && rc <= 255)
|
||||
@ -1596,18 +1589,15 @@ struct Compiler
|
||||
}
|
||||
else
|
||||
{
|
||||
if (FFlag::LuauCompileIfElseAndOr)
|
||||
// Optimization: convert some if..then..else expressions into and/or when the other side has no side effects and is very cheap to compute
|
||||
// if v then v else e => v or e
|
||||
// if v then e else v => v and e
|
||||
if (int creg = getExprLocalReg(expr->condition); creg >= 0)
|
||||
{
|
||||
// Optimization: convert some if..then..else expressions into and/or when the other side has no side effects and is very cheap to compute
|
||||
// if v then v else e => v or e
|
||||
// if v then e else v => v and e
|
||||
if (int creg = getExprLocalReg(expr->condition); creg >= 0)
|
||||
{
|
||||
if (creg == getExprLocalReg(expr->trueExpr) && (getExprLocalReg(expr->falseExpr) >= 0 || isConstant(expr->falseExpr)))
|
||||
return compileExprIfElseAndOr(/* and_= */ false, uint8_t(creg), expr->falseExpr, target);
|
||||
else if (creg == getExprLocalReg(expr->falseExpr) && (getExprLocalReg(expr->trueExpr) >= 0 || isConstant(expr->trueExpr)))
|
||||
return compileExprIfElseAndOr(/* and_= */ true, uint8_t(creg), expr->trueExpr, target);
|
||||
}
|
||||
if (creg == getExprLocalReg(expr->trueExpr) && (getExprLocalReg(expr->falseExpr) >= 0 || isConstant(expr->falseExpr)))
|
||||
return compileExprIfElseAndOr(/* and_= */ false, uint8_t(creg), expr->falseExpr, target);
|
||||
else if (creg == getExprLocalReg(expr->falseExpr) && (getExprLocalReg(expr->trueExpr) >= 0 || isConstant(expr->trueExpr)))
|
||||
return compileExprIfElseAndOr(/* and_= */ true, uint8_t(creg), expr->trueExpr, target);
|
||||
}
|
||||
|
||||
std::vector<size_t> elseJump;
|
||||
@ -2215,6 +2205,23 @@ struct Compiler
|
||||
        return reg;
    }

    void compileExprSide(AstExpr* node)
    {
        if (FFlag::LuauCompileSideEffects)
        {
            // Optimization: some expressions never carry side effects so we don't need to emit any code
            if (node->is<AstExprLocal>() || node->is<AstExprGlobal>() || node->is<AstExprVarargs>() || node->is<AstExprFunction>() || isConstant(node))
                return;

            // note: the remark is omitted for calls as it's fairly noisy due to inlining
            if (!node->is<AstExprCall>())
                bytecode.addDebugRemark("expression only compiled for side effects");
        }

        RegScope rsi(this);
        compileExprAuto(node, rsi);
    }

// initializes target..target+targetCount-1 range using expression
|
||||
// if expression is a call/vararg, we assume it returns all values, otherwise we fill the rest with nil
|
||||
// assumes target register range can be clobbered and is at the top of the register space if targetTop = true
|
||||
@ -2263,10 +2270,7 @@ struct Compiler
|
||||
|
||||
// evaluate extra expressions for side effects
|
||||
for (size_t i = targetCount; i < list.size; ++i)
|
||||
{
|
||||
RegScope rsi(this);
|
||||
compileExprAuto(list.data[i], rsi);
|
||||
}
|
||||
compileExprSide(list.data[i]);
|
||||
}
|
||||
else if (list.size > 0)
|
||||
{
|
||||
@ -2501,6 +2505,18 @@ struct Compiler
|
||||
return;
|
||||
}
|
||||
|
||||
// Optimization: condition is always false but isn't a constant => we only need the else body and condition's side effects
|
||||
if (FFlag::LuauCompileDeadIf)
|
||||
{
|
||||
if (AstExprBinary* cand = stat->condition->as<AstExprBinary>(); cand && cand->op == AstExprBinary::And && isConstantFalse(cand->right))
|
||||
{
|
||||
compileExprSide(cand->left);
|
||||
if (stat->elsebody)
|
||||
compileStat(stat->elsebody);
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
// Optimization: body is a "break" statement with no "else" => we can directly break out of the loop in "then" case
|
||||
if (!stat->elsebody && isStatBreak(stat->thenbody) && !areLocalsCaptured(loops.back().localOffset))
|
||||
{
|
||||
@ -2640,7 +2656,7 @@ struct Compiler
|
||||
// expression that continue will jump to.
|
||||
loops.back().localOffsetContinue = localStack.size();
|
||||
|
||||
// if continue was called from this statement, then any local defined after this in the loop body should not be accessed by until condition
|
||||
// if continue was called from this statement, any local defined after this in the loop body should not be accessed by until condition
|
||||
// it is sufficient to check this condition once, as if this holds for the first continue, it must hold for all subsequent continues.
|
||||
if (loops.back().continueUsed && !continueValidated)
|
||||
{
|
||||
@ -3230,10 +3246,7 @@ struct Compiler
|
||||
|
||||
// compute expressions with side effects
|
||||
for (size_t i = stat->vars.size; i < stat->values.size; ++i)
|
||||
{
|
||||
RegScope rsi(this);
|
||||
compileExprAuto(stat->values.data[i], rsi);
|
||||
}
|
||||
compileExprSide(stat->values.data[i]);
|
||||
|
||||
// almost done... let's assign everything left to right, noting that locals were either written-to directly, or will be written-to in a
|
||||
// separate pass to avoid conflicts
|
||||
@ -3276,8 +3289,6 @@ struct Compiler
|
||||
case AstExprBinary::Mod:
|
||||
case AstExprBinary::Pow:
|
||||
{
|
||||
LUAU_ASSERT(FFlag::LuauFloorDivision || stat->op != AstExprBinary::FloorDiv);
|
||||
|
||||
if (var.kind != LValue::Kind_Local)
|
||||
compileLValueUse(var, target, /* set= */ false);
|
||||
|
||||
@ -3425,8 +3436,7 @@ struct Compiler
|
||||
}
|
||||
else
|
||||
{
|
||||
RegScope rs(this);
|
||||
compileExprAuto(stat->expr, rs);
|
||||
compileExprSide(stat->expr);
|
||||
}
|
||||
}
|
||||
else if (AstStatLocal* stat = node->as<AstStatLocal>())
|
||||
|
@ -4,7 +4,6 @@
|
||||
#include "Luau/Lexer.h"
|
||||
#include "Luau/StringUtils.h"
|
||||
|
||||
LUAU_FASTFLAG(LuauFloorDivision)
|
||||
namespace Luau
|
||||
{
|
||||
|
||||
@ -113,24 +112,8 @@ static void next(Lexer& lexer)
|
||||
lexer.next();
|
||||
|
||||
// skip C-style comments as Lexer only understands Lua-style comments atm
|
||||
|
||||
if (FFlag::LuauFloorDivision)
|
||||
{
|
||||
while (lexer.current().type == Luau::Lexeme::FloorDiv)
|
||||
lexer.nextline();
|
||||
}
|
||||
else
|
||||
{
|
||||
while (lexer.current().type == '/')
|
||||
{
|
||||
Lexeme peek = lexer.lookahead();
|
||||
|
||||
if (peek.type != '/' || peek.location.begin != lexer.current().location.end)
|
||||
break;
|
||||
|
||||
lexer.nextline();
|
||||
}
|
||||
}
|
||||
while (lexer.current().type == Luau::Lexeme::FloorDiv)
|
||||
lexer.nextline();
|
||||
}
|
||||
|
||||
static Error fail(Lexer& lexer, const char* message)
|
||||
|
Makefile
@ -3,6 +3,8 @@
|
||||
MAKEFLAGS+=-r -j8
|
||||
COMMA=,
|
||||
|
||||
CMAKE_PATH=cmake
|
||||
|
||||
config=debug
|
||||
protobuf=system
|
||||
|
||||
@ -101,7 +103,6 @@ ifeq ($(config),analyze)
|
||||
endif
|
||||
|
||||
ifeq ($(config),fuzz)
|
||||
CXX=clang++ # our fuzzing infra relies on llvm fuzzer
|
||||
CXXFLAGS+=-fsanitize=address,fuzzer -Ibuild/libprotobuf-mutator -O2
|
||||
LDFLAGS+=-fsanitize=address,fuzzer
|
||||
LPROTOBUF=-lprotobuf
|
||||
@ -252,12 +253,13 @@ fuzz/luau.pb.cpp: fuzz/luau.proto build/libprotobuf-mutator
|
||||
|
||||
$(BUILD)/fuzz/proto.cpp.o: fuzz/luau.pb.cpp
|
||||
$(BUILD)/fuzz/protoprint.cpp.o: fuzz/luau.pb.cpp
|
||||
$(BUILD)/fuzz/prototest.cpp.o: fuzz/luau.pb.cpp
|
||||
|
||||
build/libprotobuf-mutator:
|
||||
git clone https://github.com/google/libprotobuf-mutator build/libprotobuf-mutator
|
||||
git -C build/libprotobuf-mutator checkout 212a7be1eb08e7f9c79732d2aab9b2097085d936
|
||||
CXX= cmake -S build/libprotobuf-mutator -B build/libprotobuf-mutator $(DPROTOBUF)
|
||||
make -C build/libprotobuf-mutator -j8
|
||||
$(CMAKE_PATH) -DCMAKE_CXX_COMPILER=$(CMAKE_CXX) -DCMAKE_C_COMPILER=$(CMAKE_CC) -DCMAKE_CXX_COMPILER_LAUNCHER=$(CMAKE_PROXY) -S build/libprotobuf-mutator -B build/libprotobuf-mutator $(DPROTOBUF)
|
||||
$(MAKE) -C build/libprotobuf-mutator
|
||||
|
||||
# picks up include dependencies for all object files
|
||||
-include $(OBJECTS:.o=.d)
|
||||
|
@ -185,6 +185,7 @@ target_sources(Luau.Analysis PRIVATE
|
||||
Analysis/include/Luau/Refinement.h
|
||||
Analysis/include/Luau/RequireTracer.h
|
||||
Analysis/include/Luau/Scope.h
|
||||
Analysis/include/Luau/Set.h
|
||||
Analysis/include/Luau/Simplify.h
|
||||
Analysis/include/Luau/Substitution.h
|
||||
Analysis/include/Luau/Subtyping.h
|
||||
@ -419,6 +420,7 @@ if(TARGET Luau.UnitTest)
|
||||
tests/RuntimeLimits.test.cpp
|
||||
tests/ScopedFlags.h
|
||||
tests/Simplify.test.cpp
|
||||
tests/Set.test.cpp
|
||||
tests/StringUtils.test.cpp
|
||||
tests/Subtyping.test.cpp
|
||||
tests/Symbol.test.cpp
|
||||
|
@ -10,6 +10,8 @@
|
||||
|
||||
#include <string.h>
|
||||
|
||||
LUAU_FASTFLAGVARIABLE(LuauBufferBetterMsg, false)
|
||||
|
||||
// while C API returns 'size_t' for binary compatibility in case of future extensions,
|
||||
// in the current implementation, length and offset are limited to 31 bits
|
||||
// because offset is limited to an integer, a single 64bit comparison can be used and will not overflow
|
||||
@ -36,8 +38,15 @@ static int buffer_create(lua_State* L)
{
    int size = luaL_checkinteger(L, 1);

    if (size < 0)
        luaL_error(L, "size cannot be negative");
    if (FFlag::LuauBufferBetterMsg)
    {
        luaL_argcheck(L, size >= 0, 1, "size");
    }
    else
    {
        if (size < 0)
            luaL_error(L, "invalid size");
    }

    lua_newbuffer(L, size);
    return 1;
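With LuauBufferBetterMsg the negative-size check goes through luaL_argcheck, so the failure surfaces as a standard bad-argument error for argument 1 instead of a bespoke message. A hedged Luau sketch of the observable behavior (the exact error text follows luaL_argcheck formatting and is not quoted from the change):

local ok, err = pcall(buffer.create, -1)
assert(not ok)   -- creation still fails for a negative size
print(err)       -- with the flag enabled: a bad-argument error naming argument #1 ('size')

buffer.readstring and buffer.writestring below get the same treatment for their 'size' and 'count' arguments.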
@ -165,8 +174,15 @@ static int buffer_readstring(lua_State* L)
|
||||
int offset = luaL_checkinteger(L, 2);
|
||||
int size = luaL_checkinteger(L, 3);
|
||||
|
||||
if (size < 0)
|
||||
luaL_error(L, "size cannot be negative");
|
||||
if (FFlag::LuauBufferBetterMsg)
|
||||
{
|
||||
luaL_argcheck(L, size >= 0, 3, "size");
|
||||
}
|
||||
else
|
||||
{
|
||||
if (size < 0)
|
||||
luaL_error(L, "invalid size");
|
||||
}
|
||||
|
||||
if (isoutofbounds(offset, len, unsigned(size)))
|
||||
luaL_error(L, "buffer access out of bounds");
|
||||
@ -184,8 +200,15 @@ static int buffer_writestring(lua_State* L)
|
||||
const char* val = luaL_checklstring(L, 3, &size);
|
||||
int count = luaL_optinteger(L, 4, int(size));
|
||||
|
||||
if (count < 0)
|
||||
luaL_error(L, "count cannot be negative");
|
||||
if (FFlag::LuauBufferBetterMsg)
|
||||
{
|
||||
luaL_argcheck(L, count >= 0, 4, "count");
|
||||
}
|
||||
else
|
||||
{
|
||||
if (count < 0)
|
||||
luaL_error(L, "invalid count");
|
||||
}
|
||||
|
||||
if (size_t(count) > size)
|
||||
luaL_error(L, "string length overflow");
|
||||
|
@ -9,8 +9,6 @@

#define LUA_STRFTIMEOPTIONS "aAbBcdHIjmMpSUwWxXyYzZ%"

LUAU_FASTFLAGVARIABLE(LuauOsTimegm, false)

#if defined(_WIN32)
static tm* gmtime_r(const time_t* timep, tm* result)
{
@ -21,19 +19,10 @@ static tm* localtime_r(const time_t* timep, tm* result)
{
    return localtime_s(result, timep) == 0 ? result : NULL;
}

static time_t timegm(struct tm* timep)
{
    LUAU_ASSERT(!FFlag::LuauOsTimegm);

    return _mkgmtime(timep);
}
#endif

static time_t os_timegm(struct tm* timep)
{
    LUAU_ASSERT(FFlag::LuauOsTimegm);

    // Julian day number calculation
    int day = timep->tm_mday;
    int month = timep->tm_mon + 1;
@ -206,10 +195,7 @@ static int os_time(lua_State* L)
        ts.tm_isdst = getboolfield(L, "isdst");

        // Note: upstream Lua uses mktime() here which assumes input is local time, but we prefer UTC for consistency
        if (FFlag::LuauOsTimegm)
            t = os_timegm(&ts);
        else
            t = timegm(&ts);
        t = os_timegm(&ts);
    }
    if (t == (time_t)(-1))
        lua_pushnil(L);
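With the LuauOsTimegm flag removed, os.time with a table argument always goes through os_timegm above, so the fields are interpreted as UTC rather than local time, matching the comment retained in os_time. A short Luau sketch (field values are arbitrary):

local t = os.time({year = 2023, month = 11, day = 15, hour = 6, min = 15, sec = 44})
print(t)   -- seconds since the epoch for 2023-11-15 06:15:44 UTC, independent of the host timezone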
@ -8,6 +8,8 @@

#define iscont(p) ((*(p)&0xC0) == 0x80)

LUAU_DYNAMIC_FASTFLAGVARIABLE(LuauStricterUtf8, false)

// from strlib
// translate a relative string position: negative means back from end
static int u_posrelat(int pos, size_t len)
@ -45,6 +47,8 @@ static const char* utf8_decode(const char* o, int* val)
        res |= ((c & 0x7F) << (count * 5)); // add first byte
        if (count > 3 || res > MAXUNICODE || res <= limits[count])
            return NULL; // invalid byte sequence
        if (DFFlag::LuauStricterUtf8 && unsigned(res - 0xD800) < 0x800)
            return NULL; // surrogate
        s += count; // skip continuation bytes read
    }
    if (val)
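The added check rejects decoded values in the surrogate range (0xD800-0xDFFF), which strict UTF-8 forbids as standalone sequences. A small Luau sketch of the visible effect (the byte strings are hand-encoded examples, not taken from the change):

print(utf8.len("\237\160\128"))        -- 0xED 0xA0 0x80 encodes the lone surrogate U+D800: rejected with the flag on
print(utf8.len("\240\144\128\128"))    -- 0xF0 0x90 0x80 0x80 is a valid encoding of U+10000 and still decodes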
@ -135,8 +135,6 @@
|
||||
// Does VM support native execution via ExecutionCallbacks? We mostly assume it does but keep the define to make it easy to quantify the cost.
|
||||
#define VM_HAS_NATIVE 1
|
||||
|
||||
void (*lua_iter_call_telemetry)(lua_State* L, int gtt, int stt, int itt) = NULL;
|
||||
|
||||
LUAU_NOINLINE void luau_callhook(lua_State* L, lua_Hook hook, void* userdata)
|
||||
{
|
||||
ptrdiff_t base = savestack(L, L->base);
|
||||
@ -2293,10 +2291,6 @@ reentry:
|
||||
{
|
||||
// table or userdata with __call, will be called during FORGLOOP
|
||||
// TODO: we might be able to stop supporting this depending on whether it's used in practice
|
||||
void (*telemetrycb)(lua_State * L, int gtt, int stt, int itt) = lua_iter_call_telemetry;
|
||||
|
||||
if (telemetrycb)
|
||||
telemetrycb(L, ttype(ra), ttype(ra + 1), ttype(ra + 2));
|
||||
}
|
||||
else if (ttistable(ra))
|
||||
{
|
||||
|
bench/tests/pcmmix.lua (new file)
@ -0,0 +1,33 @@
local bench = script and require(script.Parent.bench_support) or require("bench_support")

local samples = 100_000

-- create two 16-bit stereo pcm audio buffers
local ch1 = buffer.create(samples * 2 * 2)
local ch2 = buffer.create(samples * 2 * 2)

-- just init with random data
for i = 0, samples * 2 - 1 do
    buffer.writei16(ch1, i * 2, math.random(-32768, 32767))
    buffer.writei16(ch2, i * 2, math.random(-32768, 32767))
end

function test()
    local mix = buffer.create(samples * 2 * 2)

    for i = 0, samples - 1 do
        local s1l = buffer.readi16(ch1, i * 4)
        local s1r = buffer.readi16(ch1, i * 4 + 2)

        local s2l = buffer.readi16(ch2, i * 4)
        local s2r = buffer.readi16(ch2, i * 4 + 2)

        local combinedl = s1l + s2l - s1l * s2l / 32768
        local combinedr = s1r + s2r - s1r * s2r / 32768

        buffer.writei16(mix, i * 4, combinedl)
        buffer.writei16(mix, i * 4 + 2, combinedr)
    end
end

bench.runCode(test, "pcmmix")
@ -20,25 +20,32 @@
|
||||
#include "lualib.h"
|
||||
|
||||
#include <chrono>
|
||||
#include <cstring>
|
||||
|
||||
static bool getEnvParam(const char* name, bool def)
|
||||
{
|
||||
char* val = getenv(name);
|
||||
if (val == nullptr)
|
||||
return def;
|
||||
else
|
||||
return strcmp(val, "0") != 0;
|
||||
}
|
||||
|
||||
// Select components to fuzz
|
||||
const bool kFuzzCompiler = true;
|
||||
const bool kFuzzLinter = true;
|
||||
const bool kFuzzTypeck = true;
|
||||
const bool kFuzzVM = true;
|
||||
const bool kFuzzTranspile = true;
|
||||
const bool kFuzzCodegenVM = true;
|
||||
const bool kFuzzCodegenAssembly = true;
|
||||
const bool kFuzzCompiler = getEnvParam("LUAU_FUZZ_COMPILER", true);
|
||||
const bool kFuzzLinter = getEnvParam("LUAU_FUZZ_LINTER", true);
|
||||
const bool kFuzzTypeck = getEnvParam("LUAU_FUZZ_TYPE_CHECK", true);
|
||||
const bool kFuzzVM = getEnvParam("LUAU_FUZZ_VM", true);
|
||||
const bool kFuzzTranspile = getEnvParam("LUAU_FUZZ_TRANSPILE", true);
|
||||
const bool kFuzzCodegenVM = getEnvParam("LUAU_FUZZ_CODEGEN_VM", true);
|
||||
const bool kFuzzCodegenAssembly = getEnvParam("LUAU_FUZZ_CODEGEN_ASM", true);
|
||||
const bool kFuzzUseNewSolver = getEnvParam("LUAU_FUZZ_NEW_SOLVER", false);
|
||||
|
||||
// Should we generate type annotations?
|
||||
const bool kFuzzTypes = true;
|
||||
const bool kFuzzTypes = getEnvParam("LUAU_FUZZ_GEN_TYPES", true);
|
||||
|
||||
const Luau::CodeGen::AssemblyOptions::Target kFuzzCodegenTarget = Luau::CodeGen::AssemblyOptions::A64;
|
||||
|
||||
static_assert(!(kFuzzVM && !kFuzzCompiler), "VM requires the compiler!");
|
||||
static_assert(!(kFuzzCodegenVM && !kFuzzCompiler), "Codegen requires the compiler!");
|
||||
static_assert(!(kFuzzCodegenAssembly && !kFuzzCompiler), "Codegen requires the compiler!");
|
||||
|
||||
std::vector<std::string> protoprint(const luau::ModuleSet& stat, bool types);
|
||||
|
||||
LUAU_FASTINT(LuauTypeInferRecursionLimit)
|
||||
@ -49,6 +56,7 @@ LUAU_FASTINT(LuauTypeInferIterationLimit)
|
||||
LUAU_FASTINT(LuauTarjanChildLimit)
|
||||
LUAU_FASTFLAG(DebugLuauFreezeArena)
|
||||
LUAU_FASTFLAG(DebugLuauAbortingChecks)
|
||||
LUAU_FASTFLAG(DebugLuauDeferredConstraintResolution)
|
||||
|
||||
std::chrono::milliseconds kInterruptTimeout(10);
|
||||
std::chrono::time_point<std::chrono::system_clock> interruptDeadline;
|
||||
@ -218,6 +226,13 @@ static std::vector<std::string> debugsources;
|
||||
|
||||
DEFINE_PROTO_FUZZER(const luau::ModuleSet& message)
|
||||
{
|
||||
if (!kFuzzCompiler && (kFuzzCodegenAssembly || kFuzzCodegenVM || kFuzzVM))
|
||||
{
|
||||
printf("Compiler is required in order to fuzz codegen or the VM\n");
|
||||
LUAU_ASSERT(false);
|
||||
return;
|
||||
}
|
||||
|
||||
FInt::LuauTypeInferRecursionLimit.value = 100;
|
||||
FInt::LuauTypeInferTypePackLoopLimit.value = 100;
|
||||
FInt::LuauCheckRecursionLimit.value = 100;
|
||||
@ -231,6 +246,7 @@ DEFINE_PROTO_FUZZER(const luau::ModuleSet& message)
|
||||
|
||||
FFlag::DebugLuauFreezeArena.value = true;
|
||||
FFlag::DebugLuauAbortingChecks.value = true;
|
||||
FFlag::DebugLuauDeferredConstraintResolution.value = kFuzzUseNewSolver;
|
||||
|
||||
std::vector<std::string> sources = protoprint(message, kFuzzTypes);
|
||||
|
||||
|
@ -208,6 +208,11 @@ TEST_CASE_FIXTURE(AssemblyBuilderX64Fixture, "FormsOfMov")
|
||||
SINGLE_COMPARE(mov(byte[rsi], al), 0x88, 0x06);
|
||||
SINGLE_COMPARE(mov(byte[rsi], dil), 0x48, 0x88, 0x3e);
|
||||
SINGLE_COMPARE(mov(byte[rsi], r10b), 0x4c, 0x88, 0x16);
|
||||
SINGLE_COMPARE(mov(wordReg(ebx), 0x3a3d), 0x66, 0xbb, 0x3d, 0x3a);
|
||||
SINGLE_COMPARE(mov(word[rsi], 0x3a3d), 0x66, 0xc7, 0x06, 0x3d, 0x3a);
|
||||
SINGLE_COMPARE(mov(word[rsi], wordReg(eax)), 0x66, 0x89, 0x06);
|
||||
SINGLE_COMPARE(mov(word[rsi], wordReg(edi)), 0x66, 0x89, 0x3e);
|
||||
SINGLE_COMPARE(mov(word[rsi], wordReg(r10)), 0x66, 0x44, 0x89, 0x16);
|
||||
}
|
||||
|
||||
TEST_CASE_FIXTURE(AssemblyBuilderX64Fixture, "FormsOfMovExtended")
|
||||
@ -531,6 +536,8 @@ TEST_CASE_FIXTURE(AssemblyBuilderX64Fixture, "AVXConversionInstructionForms")
|
||||
SINGLE_COMPARE(vcvtsi2sd(xmm6, xmm11, qword[rcx + rdx]), 0xc4, 0xe1, 0xa3, 0x2a, 0x34, 0x11);
|
||||
SINGLE_COMPARE(vcvtsd2ss(xmm5, xmm10, xmm11), 0xc4, 0xc1, 0x2b, 0x5a, 0xeb);
|
||||
SINGLE_COMPARE(vcvtsd2ss(xmm6, xmm11, qword[rcx + rdx]), 0xc4, 0xe1, 0xa3, 0x5a, 0x34, 0x11);
|
||||
SINGLE_COMPARE(vcvtss2sd(xmm3, xmm8, xmm12), 0xc4, 0xc1, 0x3a, 0x5a, 0xdc);
|
||||
SINGLE_COMPARE(vcvtss2sd(xmm4, xmm9, dword[rcx + rsi]), 0xc4, 0xe1, 0x32, 0x5a, 0x24, 0x31);
|
||||
}
|
||||
|
||||
TEST_CASE_FIXTURE(AssemblyBuilderX64Fixture, "AVXTernaryInstructionForms")
|
||||
|
@@ -1142,33 +1142,27 @@ L0: RETURN R1 1
TEST_CASE("AndOrFoldLeft")
{
// constant folding and/or expression is possible even if just the left hand is constant
CHECK_EQ("\n" + compileFunction0("local a = false if a and b then b() end"), R"(
RETURN R0 0
CHECK_EQ("\n" + compileFunction0("local a = false return a and b"), R"(
LOADB R0 0
RETURN R0 1
)");

CHECK_EQ("\n" + compileFunction0("local a = true if a or b then b() end"), R"(
GETIMPORT R0 1 [b]
CALL R0 0 0
RETURN R0 0
CHECK_EQ("\n" + compileFunction0("local a = true return a or b"), R"(
LOADB R0 1
RETURN R0 1
)");

// however, if right hand side is constant we can't constant fold the entire expression
// (note that we don't need to evaluate the right hand side, but we do need a branch)
CHECK_EQ("\n" + compileFunction0("local a = false if b and a then b() end"), R"(
GETIMPORT R0 1 [b]
JUMPIFNOT R0 L0
RETURN R0 0
GETIMPORT R0 1 [b]
CALL R0 0 0
L0: RETURN R0 0
// if right hand side is constant we can't constant fold the entire expression
CHECK_EQ("\n" + compileFunction0("local a = false return b and a"), R"(
GETIMPORT R1 2 [b]
ANDK R0 R1 K0 [false]
RETURN R0 1
)");

CHECK_EQ("\n" + compileFunction0("local a = true if b or a then b() end"), R"(
GETIMPORT R0 1 [b]
JUMPIF R0 L0
L0: GETIMPORT R0 1 [b]
CALL R0 0 0
RETURN R0 0
CHECK_EQ("\n" + compileFunction0("local a = true return b or a"), R"(
GETIMPORT R1 2 [b]
ORK R0 R1 K0 [true]
RETURN R0 1
)");
}

@@ -2001,7 +1995,8 @@ for i = 1, 2 do
local x = i == 1 or a
until f(x)
end
)", 0, 2);
)",
0, 2);

CHECK(!"Expected CompileError");
}
@@ -7594,8 +7589,6 @@ L0: RETURN R0 2

TEST_CASE("IfThenElseAndOr")
{
ScopedFastFlag sff("LuauCompileIfElseAndOr", true);

// if v then v else k can be optimized to ORK
CHECK_EQ("\n" + compileFunction0(R"(
local x = ...
@@ -7691,4 +7684,127 @@ RETURN R1 1
)");
}

TEST_CASE("SideEffects")
{
ScopedFastFlag sff("LuauCompileSideEffects", true);

// we do not evaluate expressions in some cases when we know they can't carry side effects
CHECK_EQ("\n" + compileFunction0(R"(
local x = 5, print
local y = 5, 42
local z = 5, table.find -- considered side effecting because of metamethods
)"),
R"(
LOADN R0 5
LOADN R1 5
LOADN R2 5
GETIMPORT R3 2 [table.find]
RETURN R0 0
)");

// this also applies to returns in cases where a function gets inlined
CHECK_EQ("\n" + compileFunction(R"(
local function test1()
return 42
end

local function test2()
return print
end

local function test3()
return function() print(test3) end
end

local function test4()
return table.find -- considered side effecting because of metamethods
end

test1()
test2()
test3()
test4()
)",
5, 2),
R"(
DUPCLOSURE R0 K0 ['test1']
DUPCLOSURE R1 K1 ['test2']
DUPCLOSURE R2 K2 ['test3']
CAPTURE VAL R2
DUPCLOSURE R3 K3 ['test4']
GETIMPORT R4 6 [table.find]
RETURN R0 0
)");
}

TEST_CASE("IfElimination")
{
ScopedFastFlag sff1("LuauCompileDeadIf", true);
ScopedFastFlag sff2("LuauCompileSideEffects", true);

// if the left hand side of a condition is constant, it constant folds and we don't emit the branch
CHECK_EQ("\n" + compileFunction0("local a = false if a and b then b() end"), R"(
RETURN R0 0
)");

CHECK_EQ("\n" + compileFunction0("local a = true if a or b then b() end"), R"(
GETIMPORT R0 1 [b]
CALL R0 0 0
RETURN R0 0
)");

// of course this keeps the other branch if present
CHECK_EQ("\n" + compileFunction0("local a = false if a and b then b() else return 42 end"), R"(
LOADN R0 42
RETURN R0 1
RETURN R0 0
)");

CHECK_EQ("\n" + compileFunction0("local a = true if a or b then b() else return 42 end"), R"(
GETIMPORT R0 1 [b]
CALL R0 0 0
RETURN R0 0
)");

// if the right hand side is constant, the condition doesn't constant fold but we still could eliminate one of the branches for 'a and K'
CHECK_EQ("\n" + compileFunction0("local a = false if b and a then return 1 end"), R"(
RETURN R0 0
)");

CHECK_EQ("\n" + compileFunction0("local a = false if b and a then return 1 else return 2 end"), R"(
LOADN R0 2
RETURN R0 1
)");

// of course if the right hand side of 'and' is 'true', we still need to actually evaluate the left hand side
CHECK_EQ("\n" + compileFunction0("local a = true if b and a then return 1 end"), R"(
GETIMPORT R0 1 [b]
JUMPIFNOT R0 L0
LOADN R0 1
RETURN R0 1
L0: RETURN R0 0
)");

CHECK_EQ("\n" + compileFunction0("local a = true if b and a then return 1 else return 2 end"), R"(
GETIMPORT R0 1 [b]
JUMPIFNOT R0 L0
LOADN R0 1
RETURN R0 1
L0: LOADN R0 2
RETURN R0 1
)");

// also even if we eliminate the branch, we still need to compute side effects
CHECK_EQ("\n" + compileFunction0("local a = false if b.test and a then return 1 end"), R"(
GETIMPORT R0 2 [b.test]
RETURN R0 0
)");

CHECK_EQ("\n" + compileFunction0("local a = false if b.test and a then return 1 else return 2 end"), R"(
GETIMPORT R0 2 [b.test]
LOADN R0 2
RETURN R0 1
)");
}

TEST_SUITE_END();
@@ -24,8 +24,6 @@ extern bool verbose;
extern bool codegen;
extern int optimizationLevel;

LUAU_FASTFLAG(LuauFloorDivision);

static lua_CompileOptions defaultOptions()
{
lua_CompileOptions copts = {};
@@ -288,13 +286,13 @@ TEST_CASE("Assert")

TEST_CASE("Basic")
{
ScopedFastFlag sffs{"LuauFloorDivision", true};

runConformance("basic.lua");
}

TEST_CASE("Buffers")
{
ScopedFastFlag luauBufferBetterMsg{"LuauBufferBetterMsg", true};

runConformance("buffers.lua");
}

@@ -379,7 +377,6 @@ TEST_CASE("Errors")

TEST_CASE("Events")
{
ScopedFastFlag sffs{"LuauFloorDivision", true};
runConformance("events.lua");
}

@@ -416,6 +413,7 @@ TEST_CASE("Bitwise")

TEST_CASE("UTF8")
{
ScopedFastFlag sff("LuauStricterUtf8", true);
runConformance("utf8.lua");
}

@@ -462,8 +460,6 @@ TEST_CASE("Pack")

TEST_CASE("Vector")
{
ScopedFastFlag sffs{"LuauFloorDivision", true};

lua_CompileOptions copts = defaultOptions();
copts.vectorCtor = "vector";

@@ -521,6 +517,10 @@ static void populateRTTI(lua_State* L, Luau::TypeId type)
lua_pushstring(L, "thread");
break;

case Luau::PrimitiveType::Buffer:
lua_pushstring(L, "buffer");
break;

default:
LUAU_ASSERT(!"Unknown primitive type");
}
@@ -1696,9 +1696,6 @@ static void pushInt64(lua_State* L, int64_t value)

TEST_CASE("Userdata")
{

ScopedFastFlag sffs{"LuauFloorDivision", true};

runConformance("userdata.lua", [](lua_State* L) {
// create metatable with all the metamethods
lua_newtable(L);
@@ -92,4 +92,229 @@ TEST_CASE_FIXTURE(DataFlowGraphFixture, "independent_locals")
REQUIRE(x != y);
}

TEST_CASE_FIXTURE(DataFlowGraphFixture, "phi")
{
dfg(R"(
local x

if a then
x = true
end

local y = x
)");

DefId y = getDef<AstExprLocal, 2>();

const Phi* phi = get<Phi>(y);
CHECK(phi);
}

TEST_CASE_FIXTURE(DataFlowGraphFixture, "mutate_local_not_owned_by_while")
{
dfg(R"(
local x

while cond() do
x = true
end

local y = x
)");

DefId x0 = graph->getDef(query<AstStatLocal>(module)->vars.data[0]);
DefId x1 = getDef<AstExprLocal, 1>(); // x = true
DefId x2 = getDef<AstExprLocal, 2>(); // local y = x

CHECK(x0 == x1);
CHECK(x1 == x2);
}

TEST_CASE_FIXTURE(DataFlowGraphFixture, "mutate_local_owned_by_while")
{
dfg(R"(
while cond() do
local x
x = true
x = 5
end
)");

DefId x0 = graph->getDef(query<AstStatLocal>(module)->vars.data[0]);
DefId x1 = getDef<AstExprLocal, 1>(); // x = true
DefId x2 = getDef<AstExprLocal, 2>(); // x = 5

CHECK(x0 != x1);
CHECK(x1 != x2);
}

TEST_CASE_FIXTURE(DataFlowGraphFixture, "mutate_local_not_owned_by_repeat")
{
dfg(R"(
local x

repeat
x = true
until cond()

local y = x
)");

DefId x0 = graph->getDef(query<AstStatLocal>(module)->vars.data[0]);
DefId x1 = getDef<AstExprLocal, 1>(); // x = true
DefId x2 = getDef<AstExprLocal, 2>(); // local y = x

CHECK(x0 == x1);
CHECK(x1 == x2);
}

TEST_CASE_FIXTURE(DataFlowGraphFixture, "mutate_local_owned_by_repeat")
{
dfg(R"(
repeat
local x
x = true
x = 5
until cond()
)");

DefId x0 = graph->getDef(query<AstStatLocal>(module)->vars.data[0]);
DefId x1 = getDef<AstExprLocal, 1>(); // x = true
DefId x2 = getDef<AstExprLocal, 2>(); // x = 5

CHECK(x0 != x1);
CHECK(x1 != x2);
}

TEST_CASE_FIXTURE(DataFlowGraphFixture, "mutate_local_not_owned_by_for")
{
dfg(R"(
local x

for i = 0, 5 do
x = true
end

local y = x
)");

DefId x0 = graph->getDef(query<AstStatLocal>(module)->vars.data[0]);
DefId x1 = getDef<AstExprLocal, 1>(); // x = true
DefId x2 = getDef<AstExprLocal, 2>(); // local y = x

CHECK(x0 == x1);
CHECK(x1 == x2);
}

TEST_CASE_FIXTURE(DataFlowGraphFixture, "mutate_local_owned_by_for")
{
dfg(R"(
for i = 0, 5 do
local x
x = true
x = 5
end
)");

DefId x0 = graph->getDef(query<AstStatLocal>(module)->vars.data[0]);
DefId x1 = getDef<AstExprLocal, 1>(); // x = true
DefId x2 = getDef<AstExprLocal, 2>(); // x = 5

CHECK(x0 != x1);
CHECK(x1 != x2);
}

TEST_CASE_FIXTURE(DataFlowGraphFixture, "mutate_local_not_owned_by_for_in")
{
dfg(R"(
local x

for i, v in t do
x = true
end

local y = x
)");

DefId x0 = graph->getDef(query<AstStatLocal>(module)->vars.data[0]);
DefId x1 = getDef<AstExprLocal, 1>(); // x = true
DefId x2 = getDef<AstExprLocal, 2>(); // local y = x

CHECK(x0 == x1);
CHECK(x1 == x2);
}

TEST_CASE_FIXTURE(DataFlowGraphFixture, "mutate_local_owned_by_for_in")
{
dfg(R"(
for i, v in t do
local x
x = true
x = 5
end
)");

DefId x0 = graph->getDef(query<AstStatLocal>(module)->vars.data[0]);
DefId x1 = getDef<AstExprLocal, 1>(); // x = true
DefId x2 = getDef<AstExprLocal, 2>(); // x = 5

CHECK(x0 != x1);
CHECK(x1 != x2);
}

TEST_CASE_FIXTURE(DataFlowGraphFixture, "mutate_preexisting_property_not_owned_by_while")
{
dfg(R"(
local t = {}
t.x = 5

while cond() do
t.x = true
end

local y = t.x
)");

DefId x1 = getDef<AstExprIndexName, 1>(); // t.x = 5
DefId x2 = getDef<AstExprIndexName, 2>(); // t.x = true
DefId x3 = getDef<AstExprIndexName, 3>(); // local y = t.x

CHECK(x1 == x2);
CHECK(x2 == x3);
}

TEST_CASE_FIXTURE(DataFlowGraphFixture, "mutate_non_preexisting_property_not_owned_by_while")
{
dfg(R"(
local t = {}

while cond() do
t.x = true
end

local y = t.x
)");

DefId x1 = getDef<AstExprIndexName, 1>(); // t.x = true
DefId x2 = getDef<AstExprIndexName, 2>(); // local y = t.x

CHECK(x1 == x2);
}

TEST_CASE_FIXTURE(DataFlowGraphFixture, "mutate_property_of_table_owned_by_while")
{
dfg(R"(
while cond() do
local t = {}
t.x = true
t.x = 5
end
)");

DefId x1 = getDef<AstExprIndexName, 1>(); // t.x = true
DefId x2 = getDef<AstExprIndexName, 2>(); // t.x = 5

CHECK(x1 != x2);
}

TEST_SUITE_END();
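The ownership rule these tests pin down: an assignment inside a loop only produces a fresh definition when the assigned local (or table) is declared inside that loop; otherwise the outer definition is reused, so the loop body and the code after it see the same def. Below is a toy, standalone model of that rule. It is purely illustrative and is not Luau's DataFlowGraphBuilder; all names in it are made up for the sketch.

    // Toy model of loop-ownership-aware def versioning (illustrative only).
    #include <cstdio>
    #include <string>
    #include <unordered_map>

    struct ToyDfg
    {
        std::unordered_map<std::string, int> version;    // current def version per local
        std::unordered_map<std::string, int> ownerDepth; // loop depth that declared the local

        void declare(const std::string& name, int depth)
        {
            ownerDepth[name] = depth;
            version[name] = 0;
        }

        int assign(const std::string& name, int depth)
        {
            // Locals owned by the assigning loop get a fresh def; locals owned by an
            // enclosing scope keep their existing def, mirroring the tests above.
            if (ownerDepth[name] >= depth)
                ++version[name];
            return version[name];
        }
    };

    int main()
    {
        ToyDfg dfg;
        dfg.declare("outer", /*depth*/ 0);
        dfg.declare("inner", /*depth*/ 1);

        printf("outer assigned in loop -> def v%d\n", dfg.assign("outer", 1)); // v0: reused
        printf("inner assigned in loop -> def v%d\n", dfg.assign("inner", 1)); // v1: fresh
        printf("inner assigned again   -> def v%d\n", dfg.assign("inner", 1)); // v2: fresh
        return 0;
    }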
@@ -154,7 +154,7 @@ TEST_CASE_FIXTURE(DifferFixture, "left_cyclic_table_right_table_property_wrong")
local function id<a>(x: a): a
return x
end

-- Remove name from cyclic table
local foo = id({})
foo.foo = foo
@@ -172,7 +172,7 @@ TEST_CASE_FIXTURE(DifferFixture, "right_cyclic_table_left_table_missing_property
local function id<a>(x: a): a
return x
end

-- Remove name from cyclic table
local foo = id({})
foo.foo = foo
@@ -190,7 +190,7 @@ TEST_CASE_FIXTURE(DifferFixture, "right_cyclic_table_left_table_property_wrong")
local function id<a>(x: a): a
return x
end

-- Remove name from cyclic table
local foo = id({})
foo.foo = foo
@@ -208,7 +208,7 @@ TEST_CASE_FIXTURE(DifferFixture, "equal_table_two_cyclic_tables_are_not_differen
local function id<a>(x: a): a
return x
end

-- Remove name from cyclic table
local foo = id({})
foo.foo = foo
@@ -226,7 +226,7 @@ TEST_CASE_FIXTURE(DifferFixture, "equal_table_two_shifted_circles_are_not_differ
local function id<a>(x: a): a
return x
end

-- Remove name from cyclic table
local foo = id({})
foo.foo = id({})
@@ -254,7 +254,7 @@ TEST_CASE_FIXTURE(DifferFixture, "table_left_circle_right_measuring_tape")
local function id<a>(x: a): a
return x
end

-- Remove name from cyclic table
local foo = id({})
foo.foo = id({})
@@ -281,7 +281,7 @@ TEST_CASE_FIXTURE(DifferFixture, "equal_table_measuring_tapes")
local function id<a>(x: a): a
return x
end

-- Remove name from cyclic table
local foo = id({})
foo.foo = id({})
@@ -305,7 +305,7 @@ TEST_CASE_FIXTURE(DifferFixture, "equal_table_A_B_C")
local function id<a>(x: a): a
return x
end

-- Remove name from cyclic table
local foo = id({})
foo.foo = id({})
@@ -774,7 +774,7 @@ TEST_CASE_FIXTURE(DifferFixtureWithBuiltins, "negation")
if typeof(almostBar.x.y) ~= "number" then
almostFoo = almostBar
end

)");
LUAU_REQUIRE_NO_ERRORS(result);
@@ -99,6 +99,8 @@ struct Fixture

ScopedFastFlag sff_DebugLuauFreezeArena;

ScopedFastFlag luauBufferTypeck{"LuauBufferTypeck", true};

TestFileResolver fileResolver;
TestConfigResolver configResolver;
NullModuleResolver moduleResolver;
@@ -185,17 +187,9 @@ struct DifferFixtureGeneric : BaseFixture
void compareNe(TypeId left, std::optional<std::string> symbolLeft, TypeId right, std::optional<std::string> symbolRight,
const std::string& expectedMessage, bool multiLine)
{
std::string diffMessage;
try
{
DifferResult diffRes = diffWithSymbols(left, right, symbolLeft, symbolRight);
REQUIRE_MESSAGE(diffRes.diffError.has_value(), "Differ did not report type error, even though types are unequal");
diffMessage = diffRes.diffError->toString(multiLine);
}
catch (const InternalCompilerError& e)
{
REQUIRE_MESSAGE(false, ("InternalCompilerError: " + e.message));
}
DifferResult diffRes = diffWithSymbols(left, right, symbolLeft, symbolRight);
REQUIRE_MESSAGE(diffRes.diffError.has_value(), "Differ did not report type error, even though types are unequal");
std::string diffMessage = diffRes.diffError->toString(multiLine);
CHECK_EQ(expectedMessage, diffMessage);
}

@@ -216,15 +210,10 @@ struct DifferFixtureGeneric : BaseFixture

void compareEq(TypeId left, TypeId right)
{
try
{
DifferResult diffRes = diff(left, right);
CHECK_MESSAGE(!diffRes.diffError.has_value(), diffRes.diffError->toString());
}
catch (const InternalCompilerError& e)
{
REQUIRE_MESSAGE(false, ("InternalCompilerError: " + e.message));
}
DifferResult diffRes = diff(left, right);
CHECK(!diffRes.diffError);
if (diffRes.diffError)
INFO(diffRes.diffError->toString());
}

void compareTypesEq(const std::string& leftSymbol, const std::string& rightSymbol)
@@ -1238,7 +1238,7 @@ TEST_CASE_FIXTURE(FrontendFixture, "parse_only")
REQUIRE(frontend.sourceNodes.count("game/Gui/Modules/B"));

auto node = frontend.sourceNodes["game/Gui/Modules/B"];
CHECK_EQ(node->requireSet.count("game/Gui/Modules/A"), 1);
CHECK(node->requireSet.contains("game/Gui/Modules/A"));
REQUIRE_EQ(node->requireLocations.size(), 1);
CHECK_EQ(node->requireLocations[0].second, Luau::Location(Position(2, 18), Position(2, 36)));
@@ -1,6 +1,7 @@
// This file is part of the Luau programming language and is licensed under MIT License; see LICENSE.txt for details
#pragma once

#include "Luau/DenseHash.h"
#include <ostream>
#include <optional>

@@ -21,4 +22,40 @@ auto operator<<(std::ostream& lhs, const std::optional<T>& t) -> decltype(lhs <<
return lhs << "none";
}

template<typename T>
auto operator<<(std::ostream& lhs, const std::vector<T>& t) -> decltype(lhs << t[0])
{
lhs << "{ ";
bool first = true;
for (const T& element : t)
{
if (first)
first = false;
else
lhs << ", ";

lhs << element;
}

return lhs << " }";
}

template<typename K, typename H, typename E>
auto operator<<(std::ostream& lhs, const Luau::DenseHashSet<K, H, E>& set) -> decltype(lhs << *set.begin())
{
lhs << "{ ";
bool first = true;
for (const K& element : set)
{
if (first)
first = false;
else
lhs << ", ";

lhs << element;
}

return lhs << " }";
}

} // namespace std
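The two templates above are what let doctest failure messages print std::vector and Luau::DenseHashSet values (for example the reasoning vectors compared later in the Subtyping tests) instead of refusing to stringify them. A minimal standalone sketch of the same printing pattern, using only standard containers; this is illustrative and not part of the diff:

    // Prints a std::vector in the "{ a, b, c }" style used by the test helpers.
    #include <iostream>
    #include <vector>

    template<typename T>
    std::ostream& operator<<(std::ostream& lhs, const std::vector<T>& t)
    {
        lhs << "{ ";
        bool first = true;
        for (const T& element : t)
        {
            if (first)
                first = false;
            else
                lhs << ", ";
            lhs << element;
        }
        return lhs << " }";
    }

    int main()
    {
        std::vector<int> v{1, 2, 3};
        std::cout << v << "\n"; // prints: { 1, 2, 3 }
        return 0;
    }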
@@ -1933,8 +1933,6 @@ bb_0:

TEST_CASE_FIXTURE(IrBuilderFixture, "DuplicateHashSlotChecks")
{
ScopedFastFlag luauReuseHashSlots{"LuauReuseHashSlots2", true};

IrOp block = build.block(IrBlockKind::Internal);
IrOp fallback = build.block(IrBlockKind::Fallback);

@@ -1991,8 +1989,6 @@ bb_fallback_1:

TEST_CASE_FIXTURE(IrBuilderFixture, "DuplicateHashSlotChecksAvoidNil")
{
ScopedFastFlag luauReuseHashSlots{"LuauReuseHashSlots2", true};

IrOp block = build.block(IrBlockKind::Internal);
IrOp fallback = build.block(IrBlockKind::Fallback);

@@ -3074,8 +3070,6 @@ bb_0:

TEST_CASE_FIXTURE(IrBuilderFixture, "TagSelfEqualityCheckRemoval")
{
ScopedFastFlag luauMergeTagLoads{"LuauMergeTagLoads", true};

IrOp entry = build.block(IrBlockKind::Internal);
IrOp trueBlock = build.block(IrBlockKind::Internal);
IrOp falseBlock = build.block(IrBlockKind::Internal);
@@ -3133,7 +3127,7 @@ TEST_CASE_FIXTURE(IrBuilderFixture, "ToDot")
updateUseCounts(build.function);
computeCfgInfo(build.function);

// note: we don't validate the output of these to avoid test churn when dot formatting changes, but we run these to make sure they don't assert/crash
// note: we don't validate the output of these to avoid test churn when formatting changes; we run these to make sure they don't assert/crash
toDot(build.function, /* includeInst= */ true);
toDotCfg(build.function);
toDotDjGraph(build.function);
@@ -1686,8 +1686,8 @@ TEST_CASE_FIXTURE(Fixture, "DuplicateConditionsExpr")
LintResult result = lint(R"(
local correct, opaque = ...

if correct({a = 1, b = 2 * (-2), c = opaque.path['with']("calls")}) then
elseif correct({a = 1, b = 2 * (-2), c = opaque.path['with']("calls")}) then
if correct({a = 1, b = 2 * (-2), c = opaque.path['with']("calls", `string {opaque}`)}) then
elseif correct({a = 1, b = 2 * (-2), c = opaque.path['with']("calls", `string {opaque}`)}) then
elseif correct({a = 1, b = 2 * (-2), c = opaque.path['with']("calls", false)}) then
end
)");
@@ -1880,7 +1880,7 @@ local _ = 0x20000000000000
local _ = 0x20000000000002

-- large powers of two should work as well (this is 2^63)
local _ = -9223372036854775808
local _ = 0x80000000000000
)");

REQUIRE(2 == result.warnings.size());
@@ -390,8 +390,6 @@ TEST_CASE_FIXTURE(Fixture, "clone_iteration_limit")
// they are.
TEST_CASE_FIXTURE(Fixture, "clone_cyclic_union")
{
ScopedFastFlag sff{"LuauCloneCyclicUnions", true};

TypeArena src;

TypeId u = src.addType(UnionType{{builtinTypes->numberType, builtinTypes->stringType}});
@@ -417,10 +415,6 @@ TEST_CASE_FIXTURE(Fixture, "clone_cyclic_union")

TEST_CASE_FIXTURE(Fixture, "any_persistance_does_not_leak")
{
ScopedFastFlag flags[] = {
{"LuauOccursIsntAlwaysFailure", true},
};

fileResolver.source["Module/A"] = R"(
export type A = B
type B = A
@@ -710,7 +710,7 @@ TEST_CASE_FIXTURE(NormalizeFixture, "union_function_and_top_function")

TEST_CASE_FIXTURE(NormalizeFixture, "negated_function_is_anything_except_a_function")
{
CHECK("(boolean | class | number | string | table | thread)?" == toString(normal(R"(
CHECK("(boolean | buffer | class | number | string | table | thread)?" == toString(normal(R"(
Not<fun>
)")));
}
@@ -735,8 +735,7 @@ TEST_CASE_FIXTURE(NormalizeFixture, "trivial_intersection_inhabited")

TEST_CASE_FIXTURE(NormalizeFixture, "bare_negated_boolean")
{
// TODO: We don't yet have a way to say number | string | thread | nil | Class | Table | Function
CHECK("(class | function | number | string | table | thread)?" == toString(normal(R"(
CHECK("(buffer | class | function | number | string | table | thread)?" == toString(normal(R"(
Not<boolean>
)")));
}
@@ -849,8 +848,6 @@ TEST_CASE_FIXTURE(NormalizeFixture, "recurring_intersection")

TEST_CASE_FIXTURE(NormalizeFixture, "cyclic_union")
{
ScopedFastFlag sff{"LuauNormalizeCyclicUnions", true};

// T where T = any & (number | T)
TypeId t = arena.addType(BlockedType{});
TypeId u = arena.addType(UnionType{{builtinTypes->numberType, t}});
@@ -871,11 +868,11 @@ TEST_CASE_FIXTURE(NormalizeFixture, "negations_of_classes")
{
createSomeClasses(&frontend);
CHECK("(Parent & ~Child) | Unrelated" == toString(normal("(Parent & Not<Child>) | Unrelated")));
CHECK("((class & ~Child) | boolean | function | number | string | table | thread)?" == toString(normal("Not<Child>")));
CHECK("((class & ~Child) | boolean | buffer | function | number | string | table | thread)?" == toString(normal("Not<Child>")));
CHECK("Child" == toString(normal("Not<Parent> & Child")));
CHECK("((class & ~Parent) | Child | boolean | function | number | string | table | thread)?" == toString(normal("Not<Parent> | Child")));
CHECK("(boolean | function | number | string | table | thread)?" == toString(normal("Not<cls>")));
CHECK("(Parent | Unrelated | boolean | function | number | string | table | thread)?" ==
CHECK("((class & ~Parent) | Child | boolean | buffer | function | number | string | table | thread)?" == toString(normal("Not<Parent> | Child")));
CHECK("(boolean | buffer | function | number | string | table | thread)?" == toString(normal("Not<cls>")));
CHECK("(Parent | Unrelated | boolean | buffer | function | number | string | table | thread)?" ==
toString(normal("Not<cls & Not<Parent> & Not<Child> & Not<Unrelated>>")));
}

@@ -904,7 +901,7 @@ TEST_CASE_FIXTURE(NormalizeFixture, "top_table_type")
TEST_CASE_FIXTURE(NormalizeFixture, "negations_of_tables")
{
CHECK(nullptr == toNormalizedType("Not<{}>"));
CHECK("(boolean | class | function | number | string | thread)?" == toString(normal("Not<tbl>")));
CHECK("(boolean | buffer | class | function | number | string | thread)?" == toString(normal("Not<tbl>")));
CHECK("table" == toString(normal("Not<Not<tbl>>")));
}
@@ -1337,7 +1337,6 @@ TEST_CASE_FIXTURE(Fixture, "parse_error_with_too_many_nested_type_group")
TEST_CASE_FIXTURE(Fixture, "can_parse_complex_unions_successfully")
{
ScopedFastInt sfis[] = {{"LuauRecursionLimit", 10}, {"LuauTypeLengthLimit", 10}};
ScopedFastFlag sff{"LuauBetterTypeUnionLimits", true};

parse(R"(
local f:
@@ -1959,8 +1958,6 @@ TEST_CASE_FIXTURE(Fixture, "class_method_properties")

TEST_CASE_FIXTURE(Fixture, "class_indexer")
{
ScopedFastFlag LuauParseDeclareClassIndexer("LuauParseDeclareClassIndexer", true);

AstStatBlock* stat = parseEx(R"(
declare class Foo
prop: boolean
tests/Set.test.cpp (new file, 63 lines)
@@ -0,0 +1,63 @@
// This file is part of the Luau programming language and is licensed under MIT License; see LICENSE.txt for details
#include "Luau/Set.h"

#include "doctest.h"

TEST_SUITE_BEGIN("SetTests");

TEST_CASE("empty_set_size_0")
{
Luau::Set<int> s1{0};
CHECK(s1.size() == 0);
CHECK(s1.empty());
}

TEST_CASE("insertion_works_and_increases_size")
{
Luau::Set<int> s1{0};
CHECK(s1.size() == 0);
CHECK(s1.empty());

s1.insert(1);
CHECK(s1.contains(1));
CHECK(s1.size() == 1);

s1.insert(2);
CHECK(s1.contains(2));
CHECK(s1.size() == 2);
}

TEST_CASE("clear_resets_size")
{
Luau::Set<int> s1{0};
s1.insert(1);
s1.insert(2);
REQUIRE(s1.size() == 2);

s1.clear();
CHECK(s1.size() == 0);
CHECK(s1.empty());
}

TEST_CASE("erase_works_and_decreases_size")
{
Luau::Set<int> s1{0};
s1.insert(1);
s1.insert(2);
CHECK(s1.size() == 2);
CHECK(s1.contains(1));
CHECK(s1.contains(2));

s1.erase(1);
CHECK(s1.size() == 1);
CHECK(!s1.contains(1));
CHECK(s1.contains(2));

s1.erase(2);
CHECK(s1.size() == 0);
CHECK(s1.empty());
CHECK(!s1.contains(1));
CHECK(!s1.contains(2));
}

TEST_SUITE_END();
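These tests pin down the Luau::Set surface the new solver relies on: insert, contains, erase, size, clear, empty. A small usage sketch under the assumption that the constructor argument is the empty-key sentinel, as with DenseHashSet; this driver is hypothetical and not part of the diff:

    // Deduplicate values using only the operations exercised by the tests above.
    #include "Luau/Set.h"
    #include <cstdio>

    int main()
    {
        // Assumption: 0 is the empty-key sentinel, mirroring `Luau::Set<int> s1{0}` above.
        Luau::Set<int> seen{0};

        const int values[] = {3, 7, 3, 9, 7};
        for (int v : values)
        {
            if (!seen.contains(v))
            {
                seen.insert(v);
                printf("first time seeing %d\n", v);
            }
        }

        printf("distinct values: %zu\n", size_t(seen.size()));
        return 0;
    }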
@@ -32,7 +32,6 @@ struct SimplifyFixture : Fixture
const TypeId stringTy = builtinTypes->stringType;
const TypeId booleanTy = builtinTypes->booleanType;
const TypeId nilTy = builtinTypes->nilType;
const TypeId threadTy = builtinTypes->threadType;

const TypeId classTy = builtinTypes->classType;
@@ -1,15 +1,17 @@
// This file is part of the Luau programming language and is licensed under MIT License; see LICENSE.txt for details

#include "Luau/TypePath.h"
#include "doctest.h"
#include "Fixture.h"
#include "RegisterCallbacks.h"

#include "Luau/Normalize.h"
#include "Luau/Subtyping.h"
#include "Luau/Type.h"
#include "Luau/TypePack.h"

#include "doctest.h"
#include "Fixture.h"
#include "RegisterCallbacks.h"
#include <initializer_list>

using namespace Luau;

namespace Luau
@@ -1103,6 +1105,20 @@ TEST_SUITE_END();

TEST_SUITE_BEGIN("Subtyping.Subpaths");

bool operator==(const DenseHashSet<SubtypingReasoning, SubtypingReasoningHash>& set, const std::vector<SubtypingReasoning>& items)
{
if (items.size() != set.size())
return false;

for (const SubtypingReasoning& r : items)
{
if (!set.contains(r))
return false;
}

return true;
}
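The helper above compares the reasoning set against an expected vector by size plus membership, so the tests below don't depend on iteration order. A standalone sketch of the same pattern with standard containers, illustrative only and not part of the diff:

    // Order-insensitive set-vs-expected comparison, mirroring the helper above.
    #include <cassert>
    #include <string>
    #include <unordered_set>
    #include <vector>

    static bool setMatches(const std::unordered_set<std::string>& set, const std::vector<std::string>& items)
    {
        // Like the original helper, duplicates in `items` are not distinguished;
        // the size check plus membership is enough for the expected values used here.
        if (items.size() != set.size())
            return false;

        for (const std::string& item : items)
        {
            if (set.count(item) == 0)
                return false;
        }

        return true;
    }

    int main()
    {
        std::unordered_set<std::string> reasons{"prop X", "prop Y"};
        assert(setMatches(reasons, {"prop Y", "prop X"})); // order does not matter
        assert(!setMatches(reasons, {"prop X"}));          // sizes must agree
        return 0;
    }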
TEST_CASE_FIXTURE(SubtypeFixture, "table_property")
{
TypeId subTy = tbl({{"X", builtinTypes->numberType}});
@@ -1110,10 +1126,10 @@ TEST_CASE_FIXTURE(SubtypeFixture, "table_property")

SubtypingResult result = isSubtype(subTy, superTy);
CHECK(!result.isSubtype);
CHECK(result.reasoning == SubtypingReasoning{
CHECK(result.reasoning == std::vector{SubtypingReasoning{
/* subPath */ Path(TypePath::Property("X")),
/* superPath */ Path(TypePath::Property("X")),
});
}});
}

TEST_CASE_FIXTURE(SubtypeFixture, "table_indexers")
@@ -1123,18 +1139,14 @@ TEST_CASE_FIXTURE(SubtypeFixture, "table_indexers")

SubtypingResult result = isSubtype(subTy, superTy);
CHECK(!result.isSubtype);
CHECK(result.reasoning == SubtypingReasoning{
/* subPath */ Path(TypePath::TypeField::IndexLookup),
/* superPath */ Path(TypePath::TypeField::IndexLookup),
});

subTy = idx(builtinTypes->stringType, builtinTypes->stringType);
result = isSubtype(subTy, superTy);
CHECK(!result.isSubtype);
CHECK(result.reasoning == SubtypingReasoning{
/* subPath */ Path(TypePath::TypeField::IndexResult),
/* superPath */ Path(TypePath::TypeField::IndexResult),
});
CHECK(result.reasoning == std::vector{SubtypingReasoning{
/* subPath */ Path(TypePath::TypeField::IndexLookup),
/* superPath */ Path(TypePath::TypeField::IndexLookup),
},
SubtypingReasoning{
/* subPath */ Path(TypePath::TypeField::IndexResult),
/* superPath */ Path(TypePath::TypeField::IndexResult),
}});
}

TEST_CASE_FIXTURE(SubtypeFixture, "fn_arguments")
@@ -1144,10 +1156,10 @@ TEST_CASE_FIXTURE(SubtypeFixture, "fn_arguments")

SubtypingResult result = isSubtype(subTy, superTy);
CHECK(!result.isSubtype);
CHECK(result.reasoning == SubtypingReasoning{
CHECK(result.reasoning == std::vector{SubtypingReasoning{
/* subPath */ TypePath::PathBuilder().args().index(0).build(),
/* superPath */ TypePath::PathBuilder().args().index(0).build(),
});
}});
}

TEST_CASE_FIXTURE(SubtypeFixture, "fn_arguments_tail")
@@ -1157,10 +1169,10 @@ TEST_CASE_FIXTURE(SubtypeFixture, "fn_arguments_tail")

SubtypingResult result = isSubtype(subTy, superTy);
CHECK(!result.isSubtype);
CHECK(result.reasoning == SubtypingReasoning{
CHECK(result.reasoning == std::vector{SubtypingReasoning{
/* subPath */ TypePath::PathBuilder().args().tail().variadic().build(),
/* superPath */ TypePath::PathBuilder().args().tail().variadic().build(),
});
}});
}

TEST_CASE_FIXTURE(SubtypeFixture, "fn_rets")
@@ -1170,10 +1182,10 @@ TEST_CASE_FIXTURE(SubtypeFixture, "fn_rets")

SubtypingResult result = isSubtype(subTy, superTy);
CHECK(!result.isSubtype);
CHECK(result.reasoning == SubtypingReasoning{
CHECK(result.reasoning == std::vector{SubtypingReasoning{
/* subPath */ TypePath::PathBuilder().rets().index(0).build(),
/* superPath */ TypePath::PathBuilder().rets().index(0).build(),
});
}});
}

TEST_CASE_FIXTURE(SubtypeFixture, "fn_rets_tail")
@@ -1183,10 +1195,10 @@ TEST_CASE_FIXTURE(SubtypeFixture, "fn_rets_tail")

SubtypingResult result = isSubtype(subTy, superTy);
CHECK(!result.isSubtype);
CHECK(result.reasoning == SubtypingReasoning{
CHECK(result.reasoning == std::vector{SubtypingReasoning{
/* subPath */ TypePath::PathBuilder().rets().tail().variadic().build(),
/* superPath */ TypePath::PathBuilder().rets().tail().variadic().build(),
});
}});
}

TEST_CASE_FIXTURE(SubtypeFixture, "nested_table_properties")
@@ -1196,10 +1208,10 @@ TEST_CASE_FIXTURE(SubtypeFixture, "nested_table_properties")

SubtypingResult result = isSubtype(subTy, superTy);
CHECK(!result.isSubtype);
CHECK(result.reasoning == SubtypingReasoning{
CHECK(result.reasoning == std::vector{SubtypingReasoning{
/* subPath */ TypePath::PathBuilder().prop("X").prop("Y").prop("Z").build(),
/* superPath */ TypePath::PathBuilder().prop("X").prop("Y").prop("Z").build(),
});
}});
}

TEST_CASE_FIXTURE(SubtypeFixture, "string_table_mt")
@@ -1213,10 +1225,10 @@ TEST_CASE_FIXTURE(SubtypeFixture, "string_table_mt")
// the string metatable. That means subtyping will see that the entire
// metatable is empty, and abort there, without looking at the metatable
// properties (because there aren't any).
CHECK(result.reasoning == SubtypingReasoning{
CHECK(result.reasoning == std::vector{SubtypingReasoning{
/* subPath */ TypePath::PathBuilder().mt().prop("__index").build(),
/* superPath */ TypePath::kEmpty,
});
}});
}

TEST_CASE_FIXTURE(SubtypeFixture, "negation")
@@ -1226,9 +1238,22 @@ TEST_CASE_FIXTURE(SubtypeFixture, "negation")

SubtypingResult result = isSubtype(subTy, superTy);
CHECK(!result.isSubtype);
CHECK(result.reasoning == SubtypingReasoning{
CHECK(result.reasoning == std::vector{SubtypingReasoning{
/* subPath */ TypePath::kEmpty,
/* superPath */ Path(TypePath::TypeField::Negated),
}});
}

TEST_CASE_FIXTURE(SubtypeFixture, "multiple_reasonings")
{
TypeId subTy = tbl({{"X", builtinTypes->stringType}, {"Y", builtinTypes->numberType}});
TypeId superTy = tbl({{"X", builtinTypes->numberType}, {"Y", builtinTypes->stringType}});

SubtypingResult result = isSubtype(subTy, superTy);
CHECK(!result.isSubtype);
CHECK(result.reasoning == std::vector{
SubtypingReasoning{/* subPath */ Path(TypePath::Property("X")), /* superPath */ Path(TypePath::Property("X"))},
SubtypingReasoning{/* subPath */ Path(TypePath::Property("Y")), /* superPath */ Path(TypePath::Property("Y"))},
});
}
@@ -937,22 +937,10 @@ TEST_CASE_FIXTURE(Fixture, "tostring_error_mismatch")
)");
//clang-format off
std::string expected =
(FFlag::DebugLuauDeferredConstraintResolution) ?
R"(Type
'{| a: number, b: string, c: {| d: string |} |}'
could not be converted into
'{ a: number, b: string, c: { d: number } }'
caused by:
Property 'c' is not compatible.
Type
'{| d: string |}'
could not be converted into
'{ d: number }'
caused by:
Property 'd' is not compatible.
Type 'string' could not be converted into 'number' in an invariant context)"
:
R"(Type
(FFlag::DebugLuauDeferredConstraintResolution)
? R"(Type pack '{| a: number, b: string, c: {| d: string |} |}' could not be converted into '{ a: number, b: string, c: { d: number } }'; at [0]["c"]["d"], string is not a subtype of number)"
:
R"(Type
'{ a: number, b: string, c: { d: string } }'
could not be converted into
'{| a: number, b: string, c: {| d: number |} |}'
@@ -531,8 +531,6 @@ until c

TEST_CASE_FIXTURE(Fixture, "transpile_compound_assignment")
{
ScopedFastFlag sffs{"LuauFloorDivision", true};

std::string code = R"(
local a = 1
a += 2
@@ -1048,7 +1048,7 @@ TEST_CASE_FIXTURE(Fixture, "table_types_record_the_property_locations")
LUAU_REQUIRE_NO_ERRORS(result);
auto ty = requireTypeAlias("Table");

auto ttv = Luau::get<Luau::TableType>(ty);
auto ttv = Luau::get<Luau::TableType>(follow(ty));
REQUIRE(ttv);

auto propIt = ttv->props.find("create");
@@ -541,10 +541,6 @@ TEST_CASE_FIXTURE(Fixture, "typeof_expr")

TEST_CASE_FIXTURE(Fixture, "corecursive_types_error_on_tight_loop")
{
ScopedFastFlag flags[] = {
{"LuauOccursIsntAlwaysFailure", true},
};

CheckResult result = check(R"(
type A = B
type B = A
@@ -484,6 +484,16 @@ TEST_CASE_FIXTURE(BuiltinsFixture, "thread_is_a_type")
CHECK("thread" == toString(requireType("co")));
}

TEST_CASE_FIXTURE(BuiltinsFixture, "buffer_is_a_type")
{
CheckResult result = check(R"(
local b = buffer.create(10)
)");

LUAU_REQUIRE_NO_ERRORS(result);
CHECK("buffer" == toString(requireType("b")));
}

TEST_CASE_FIXTURE(BuiltinsFixture, "coroutine_resume_anything_goes")
{
CheckResult result = check(R"(
@@ -397,8 +397,6 @@ TEST_CASE_FIXTURE(Fixture, "class_definition_string_props")

TEST_CASE_FIXTURE(Fixture, "class_definition_indexer")
{
ScopedFastFlag LuauParseDeclareClassIndexer("LuauParseDeclareClassIndexer", true);

loadDefinition(R"(
declare class Foo
[number]: string
@@ -2137,7 +2137,11 @@ TEST_CASE_FIXTURE(BuiltinsFixture, "num_is_solved_before_num_or_str")
)");

LUAU_REQUIRE_ERROR_COUNT(1, result);
CHECK_EQ("Type 'string' could not be converted into 'number'", toString(result.errors[0]));
if (FFlag::DebugLuauDeferredConstraintResolution)
CHECK(toString(result.errors.at(0)) == "Type pack 'string' could not be converted into 'number'; at [0], string is not a subtype of number");
else
CHECK_EQ("Type 'string' could not be converted into 'number'", toString(result.errors[0]));

CHECK_EQ("() -> number", toString(requireType("num_or_str")));
}

@@ -2158,7 +2162,10 @@ TEST_CASE_FIXTURE(BuiltinsFixture, "num_is_solved_after_num_or_str")
)");

LUAU_REQUIRE_ERROR_COUNT(1, result);
CHECK_EQ("Type 'string' could not be converted into 'number'", toString(result.errors[0]));
if (FFlag::DebugLuauDeferredConstraintResolution)
CHECK(toString(result.errors.at(0)) == "Type pack 'string' could not be converted into 'number'; at [0], string is not a subtype of number");
else
CHECK_EQ("Type 'string' could not be converted into 'number'", toString(result.errors[0]));
CHECK_EQ("() -> number", toString(requireType("num_or_str")));
}
@@ -17,6 +17,7 @@
using namespace Luau;

LUAU_FASTFLAG(DebugLuauDeferredConstraintResolution)
LUAU_FASTFLAG(LuauRemoveBadRelationalOperatorWarning)

TEST_SUITE_BEGIN("TypeInferOperators");

@@ -147,8 +148,6 @@ TEST_CASE_FIXTURE(Fixture, "some_primitive_binary_ops")

TEST_CASE_FIXTURE(Fixture, "floor_division_binary_op")
{
ScopedFastFlag sffs{"LuauFloorDivision", true};

CheckResult result = check(R"(
local a = 4 // 8
local b = -4 // 9
@@ -768,6 +767,13 @@ TEST_CASE_FIXTURE(Fixture, "error_on_invalid_operand_types_to_relational_operato
local foo = a < b
)");

// If DCR is off and the flag to remove this check in the old solver is on, the expected behavior is no errors.
if (!FFlag::DebugLuauDeferredConstraintResolution && FFlag::LuauRemoveBadRelationalOperatorWarning)
{
LUAU_REQUIRE_NO_ERRORS(result);
return;
}

LUAU_REQUIRE_ERROR_COUNT(1, result);

if (FFlag::DebugLuauDeferredConstraintResolution)
@@ -786,8 +792,6 @@ TEST_CASE_FIXTURE(Fixture, "error_on_invalid_operand_types_to_relational_operato

TEST_CASE_FIXTURE(Fixture, "cli_38355_recursive_union")
{
ScopedFastFlag sff{"LuauOccursIsntAlwaysFailure", true};

CheckResult result = check(R"(
--!strict
local _
@@ -1028,8 +1032,6 @@ TEST_CASE_FIXTURE(Fixture, "infer_type_for_generic_division")

TEST_CASE_FIXTURE(Fixture, "infer_type_for_generic_floor_division")
{
ScopedFastFlag floorDiv{"LuauFloorDivision", true};

CheckResult result = check(Mode::Strict, R"(
local function f(x, y)
return x // y
@@ -1452,4 +1454,24 @@ end
LUAU_REQUIRE_NO_ERRORS(result);
}

TEST_CASE_FIXTURE(BuiltinsFixture, "compare_singleton_string_to_string")
{
CheckResult result = check(R"(
local function test(a: string, b: string)
if a == "Pet" and b == "Pet" then
return true
elseif a ~= b then
return a < b
else
return false
end
end
)");

if (FFlag::LuauRemoveBadRelationalOperatorWarning)
LUAU_REQUIRE_NO_ERRORS(result);
else
LUAU_REQUIRE_ERROR_COUNT(1, result);
}

TEST_SUITE_END();
@@ -1540,6 +1540,23 @@ TEST_CASE_FIXTURE(BuiltinsFixture, "refine_thread")
CHECK_EQ("number", toString(requireTypeAtPosition({5, 28})));
}

TEST_CASE_FIXTURE(BuiltinsFixture, "refine_buffer")
{
CheckResult result = check(R"(
local function f(x: number | buffer)
if typeof(x) == "buffer" then
local foo = x
else
local foo = x
end
end
)");

LUAU_REQUIRE_NO_ERRORS(result);
CHECK_EQ("buffer", toString(requireTypeAtPosition({3, 28})));
CHECK_EQ("number", toString(requireTypeAtPosition({5, 28})));
}

TEST_CASE_FIXTURE(BuiltinsFixture, "falsiness_of_TruthyPredicate_narrows_into_nil")
{
CheckResult result = check(R"(
@@ -367,7 +367,8 @@ TEST_CASE_FIXTURE(Fixture, "parametric_tagged_union_alias")
LUAU_REQUIRE_ERROR_COUNT(1, result);

const std::string expectedError =
"Type 'a' could not be converted into 'Err<number> | Ok<string>'; type a (a) is not a subtype of Err<number> | Ok<string>[1] (Err<number>)";
"Type 'a' could not be converted into 'Err<number> | Ok<string>'; type a (a) is not a subtype of Err<number> | Ok<string>[1] (Err<number>)"
"\n\ttype a[\"success\"] (false) is not a subtype of Err<number> | Ok<string>[0][\"success\"] (true)";

CHECK(toString(result.errors[0]) == expectedError);
}
@@ -8,6 +8,7 @@

#include "Fixture.h"

#include "ScopedFlags.h"
#include "doctest.h"

#include <algorithm>

@@ -3484,13 +3485,22 @@ TEST_CASE_FIXTURE(Fixture, "a_free_shape_cannot_turn_into_a_scalar_if_it_is_not_
end
)");

const std::string expected =
R"(Type 't1 where t1 = {+ absolutely_no_scalar_has_this_method: (t1) -> (a, b...) +}' could not be converted into 'string'
LUAU_REQUIRE_ERROR_COUNT(1, result);

if (FFlag::DebugLuauDeferredConstraintResolution)
CHECK(toString(result.errors.at(0)) ==
"Type pack 't1 where t1 = { absolutely_no_scalar_has_this_method: (t1) -> (unknown, a...) }' could not be converted into 'string'; at "
"[0], t1 where t1 = { absolutely_no_scalar_has_this_method: (t1) -> (unknown, a...) } is not a subtype of string");
else
{
const std::string expected =
R"(Type 't1 where t1 = {+ absolutely_no_scalar_has_this_method: (t1) -> (a, b...) +}' could not be converted into 'string'
caused by:
The former's metatable does not satisfy the requirements.
Table type 'typeof(string)' not compatible with type 't1 where t1 = {+ absolutely_no_scalar_has_this_method: (t1) -> (a, b...) +}' because the former is missing field 'absolutely_no_scalar_has_this_method')";
LUAU_REQUIRE_ERROR_COUNT(1, result);
CHECK_EQ(expected, toString(result.errors[0]));
CHECK_EQ(expected, toString(result.errors[0]));
}

CHECK_EQ("<a, b...>(t1) -> string where t1 = {+ absolutely_no_scalar_has_this_method: (t1) -> (a, b...) +}", toString(requireType("f")));
}

@@ -3915,4 +3925,30 @@ TEST_CASE_FIXTURE(Fixture, "simple_method_definition")
CHECK_EQ("{| m: <a>(a) -> number |}", toString(getMainModule()->returnType, ToStringOptions{true}));
}

TEST_CASE_FIXTURE(Fixture, "identify_all_problematic_table_fields")
{
ScopedFastFlag sff_DebugLuauDeferredConstraintResolution{"DebugLuauDeferredConstraintResolution", true};

CheckResult result = check(R"(
type T = {
a: number,
b: string,
c: boolean,
}

local a: T = {
a = "foo",
b = false,
c = 123,
}
)");

LUAU_REQUIRE_ERROR_COUNT(1, result);

std::string expected = "Type 'a' could not be converted into 'T'; at [\"a\"], string is not a subtype of number"
"\n\tat [\"b\"], boolean is not a subtype of string"
"\n\tat [\"c\"], number is not a subtype of boolean";
CHECK(toString(result.errors[0]) == expected);
}

TEST_SUITE_END();
@@ -2,6 +2,7 @@

#include "Luau/AstQuery.h"
#include "Luau/BuiltinDefinitions.h"
#include "Luau/Frontend.h"
#include "Luau/Scope.h"
#include "Luau/TypeInfer.h"
#include "Luau/Type.h"
@@ -1261,8 +1262,6 @@ local b = typeof(foo) ~= 'nil'

TEST_CASE_FIXTURE(Fixture, "occurs_isnt_always_failure")
{
ScopedFastFlag sff{"LuauOccursIsntAlwaysFailure", true};

CheckResult result = check(R"(
function f(x, c) -- x : X
local y = if c then x else nil -- y : X?
@@ -1441,6 +1440,32 @@ TEST_CASE_FIXTURE(Fixture, "promote_tail_type_packs")
LUAU_REQUIRE_NO_ERRORS(result);
}

TEST_CASE_FIXTURE(BuiltinsFixture, "lti_must_record_contributing_locations")
{
ScopedFastFlag sff_DebugLuauDeferredConstraintResolution{"DebugLuauDeferredConstraintResolution", true};

CheckResult result = check(R"(
local function f(a)
if math.random() > 0.5 then
math.abs(a)
else
string.len(a)
end
end
)");

// We inspect the actual errors in other tests; this test verifies that we
// actually recorded breadcrumbs for a.
LUAU_REQUIRE_ERROR_COUNT(3, result);
TypeId fnTy = requireType("f");
const FunctionType* fn = get<FunctionType>(fnTy);
REQUIRE(fn);

TypeId argTy = *first(fn->argTypes);
std::vector<std::pair<Location, TypeId>> locations = getMainModule()->upperBoundContributors[argTy];
CHECK(locations.size() == 2);
}

/*
* CLI-49876
*
Some files were not shown because too many files have changed in this diff.