luau/tests/CostModel.test.cpp
Sync to upstream/release/588 (#992) (vegorov-rbx, 76f67e0733, 2023-07-28)
Type checker/autocomplete:
* `Luau::autocomplete` no longer performs typechecking internally; make
sure to run `Frontend::check` before issuing autocomplete requests (see
the sketch after this list)
* Autocomplete string suggestions without surrounding `""` are now only
offered when the cursor is already inside `""`
* Autocomplete suggestions now include a `function (anonymous
autofilled)` key, with the full suggestion for the function expression
(arguments included) stored in `AutocompleteEntry::insertText`
* `AutocompleteEntry::indexedWithSelf` is provided for function call
suggestions made with `:`
* Cyclic modules now see each other's type exports as `any` to prevent
a memory use-after-free (similar to the module return type)
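
The new flow looks roughly like the sketch below. This is only a sketch under assumed `Frontend`, `FileResolver`, and `Luau::autocomplete` signatures from this era; the resolver type, module name, source text, and cursor position are invented for illustration.

```cpp
// Sketch: typecheck explicitly, then ask for completions.
#include "Luau/Autocomplete.h"
#include "Luau/Config.h"
#include "Luau/FileResolver.h"
#include "Luau/Frontend.h"

#include <cstdio>
#include <optional>

// Hypothetical resolver that serves a single in-memory module.
struct MyFileResolver : Luau::FileResolver
{
    std::optional<Luau::SourceCode> readSource(const Luau::ModuleName& name) override
    {
        if (name == "game/Module")
            return Luau::SourceCode{"local t = {foo = 1}\nt.", Luau::SourceCode::Module};
        return std::nullopt;
    }
};

void completeAfterDot()
{
    MyFileResolver fileResolver;
    Luau::NullConfigResolver configResolver;
    Luau::Frontend frontend(&fileResolver, &configResolver);

    // Autocomplete no longer typechecks on its own; check the module first
    // and keep the full type graph around for the completion request.
    Luau::FrontendOptions options;
    options.retainFullTypeGraphs = true;
    frontend.check("game/Module", options);

    Luau::AutocompleteResult result =
        Luau::autocomplete(frontend, "game/Module", Luau::Position{1, 2}, nullptr);

    for (const auto& [name, entry] : result.entryMap)
    {
        // entry.insertText carries the full anonymous-function suggestion when present;
        // entry.indexedWithSelf marks suggestions made through ':' calls.
        printf("%s%s\n", name.c_str(), entry.indexedWithSelf ? " (via :)" : "");
    }
}
```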

Runtime:
* Updated inline/loop unroll cost model to better handle assignments
(Fixes https://github.com/Roblox/luau/issues/978)
* `math.noise` speed was improved by ~30%
* `table.concat` speed was improved by ~5-7%
* `tonumber` and `tostring` now have fastcall paths that execute ~1.5x
and ~2.5x faster respectively (fixes #777)
* Fixed crash in `luaL_typename` when the index refers to a
non-existent value
* Fixed potential out of memory scenario when using `string.sub` or
`string.char` in a loop
* Fixed behavior of some fastcall builtins called without arguments
under -O2 to match the original functions
* Support for native code execution in VM is now enabled by default
(note: native code still has to be generated explicitly)
* `CodeGen::compile` now accepts a `CodeGen_OnlyNativeModules` flag.
When set, only modules with a `--!native` hot-comment at the top are
compiled to native code (see the sketch after this list)
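
A minimal sketch of how an embedder might use the flag, assuming the usual CodeGen entry points (`isSupported`, `create`, `compile`) and the standard `luau_compile`/`luau_load` C API; the scripts and chunk names are made up.

```cpp
#include "lua.h"
#include "lualib.h"
#include "luacode.h"
#include "Luau/CodeGen.h"

#include <cstdlib>
#include <cstring>

static void loadAndCompileNative(lua_State* L, const char* name, const char* source)
{
    // Compile the script to bytecode and load it as a closure on the stack.
    size_t bytecodeSize = 0;
    char* bytecode = luau_compile(source, strlen(source), nullptr, &bytecodeSize);
    int status = luau_load(L, name, bytecode, bytecodeSize, 0);
    free(bytecode);

    if (status == 0)
    {
        // With CodeGen_OnlyNativeModules, modules without a --!native hot-comment are skipped.
        Luau::CodeGen::compile(L, -1, Luau::CodeGen::CodeGen_OnlyNativeModules);
    }
    else
    {
        lua_pop(L, 1); // drop the error message
    }
}

void example()
{
    lua_State* L = luaL_newstate();
    if (Luau::CodeGen::isSupported())
        Luau::CodeGen::create(L); // enable native execution support for this VM

    loadAndCompileNative(L, "=native_module", "--!native\nreturn 1 + 1"); // gets native code
    loadAndCompileNative(L, "=plain_module", "return 2 + 2");             // stays interpreted

    lua_close(L);
}
```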

In our new typechecker:
* Generic type packs are no longer considered to be variadic during
unification
* Timeouts and cancellation now work in the new solver
* Fixed false positive errors around 'table' and 'function' type
refinements
* Table literals now use covariant unification rules. This is sound
because the literal has no declared type and no aliases
* Fixed issues with blocked types escaping the constraint solver
* Fixed more places where error messages that should've been suppressed
were still reported
* Fixed errors when iterating over a top table type

In our native code generation (jit):
* 'DebugLuauAbortingChecks' flag is now supported on A64
* LOP_NEWCLOSURE has been translated to IR

// This file is part of the Luau programming language and is licensed under MIT License; see LICENSE.txt for details
#include "Luau/Parser.h"
#include "ScopedFlags.h"
#include "doctest.h"
using namespace Luau;
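// Forward declarations of the compiler's internal cost model entry points exercised by these tests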
namespace Luau
{
namespace Compile
{
uint64_t modelCost(AstNode* root, AstLocal* const* vars, size_t varCount, const DenseHashMap<AstExprCall*, int>& builtins);
int computeCost(uint64_t model, const bool* varsConst, size_t varCount);
} // namespace Compile
} // namespace Luau
TEST_SUITE_BEGIN("CostModel");
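// Parses the source, takes the first top-level statement (expected to be a function definition)
// and builds a cost model of its body, with the function arguments as the tracked variables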
static uint64_t modelFunction(const char* source)
{
Allocator allocator;
AstNameTable names(allocator);
ParseResult result = Parser::parse(source, strlen(source), names, allocator);
REQUIRE(result.root != nullptr);
AstStatFunction* func = result.root->body.data[0]->as<AstStatFunction>();
REQUIRE(func);
return Luau::Compile::modelCost(func->func->body, func->func->args.data, func->func->args.size, DenseHashMap<AstExprCall*, int>{nullptr});
}
TEST_CASE("Expression")
{
uint64_t model = modelFunction(R"(
function test(a, b, c)
return a + (b + 1) * (b + 1) - c
end
)");
const bool args1[] = {false, false, false};
const bool args2[] = {false, true, false};
CHECK_EQ(5, Luau::Compile::computeCost(model, args1, 3));
CHECK_EQ(2, Luau::Compile::computeCost(model, args2, 3));
}
TEST_CASE("PropagateVariable")
{
uint64_t model = modelFunction(R"(
function test(a)
local b = a * a * a
return b * b
end
)");
const bool args1[] = {false};
const bool args2[] = {true};
CHECK_EQ(3, Luau::Compile::computeCost(model, args1, 1));
CHECK_EQ(0, Luau::Compile::computeCost(model, args2, 1));
}
TEST_CASE("LoopAssign")
{
uint64_t model = modelFunction(R"(
function test(a)
for i=1,3 do
a[i] = i
end
end
)");
const bool args1[] = {false};
const bool args2[] = {true};
// loop baseline cost is 5
CHECK_EQ(6, Luau::Compile::computeCost(model, args1, 1));
CHECK_EQ(6, Luau::Compile::computeCost(model, args2, 1));
}
TEST_CASE("MutableVariable")
{
uint64_t model = modelFunction(R"(
function test(a, b)
local x = a * a
x += b
return x * x
end
)");
const bool args1[] = {false};
const bool args2[] = {true};
CHECK_EQ(3, Luau::Compile::computeCost(model, args1, 1));
CHECK_EQ(2, Luau::Compile::computeCost(model, args2, 1));
}
TEST_CASE("ImportCall")
{
uint64_t model = modelFunction(R"(
function test(a)
return Instance.new(a)
end
)");
const bool args1[] = {false};
const bool args2[] = {true};
CHECK_EQ(6, Luau::Compile::computeCost(model, args1, 1));
CHECK_EQ(6, Luau::Compile::computeCost(model, args2, 1));
}
TEST_CASE("FastCall")
{
uint64_t model = modelFunction(R"(
function test(a)
return math.abs(a + 1)
end
)");
const bool args1[] = {false};
const bool args2[] = {true};
// note: we currently don't treat fast calls differently from cost model perspective
CHECK_EQ(6, Luau::Compile::computeCost(model, args1, 1));
CHECK_EQ(5, Luau::Compile::computeCost(model, args2, 1));
}
TEST_CASE("ControlFlow")
{
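// opt into the updated cost model where assignments have a cost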
ScopedFastFlag sff("LuauAssignmentHasCost", true);
uint64_t model = modelFunction(R"(
function test(a)
while a < 0 do
a += 1
end
for i=10,1,-1 do
a += 1
end
for i in pairs({}) do
a += 1
if a % 2 == 0 then continue end
end
repeat
a += 1
if a % 2 == 0 then break end
until a > 10
return a
end
)");
const bool args1[] = {false};
const bool args2[] = {true};
CHECK_EQ(76, Luau::Compile::computeCost(model, args1, 1));
CHECK_EQ(73, Luau::Compile::computeCost(model, args2, 1));
}
TEST_CASE("Conditional")
{
uint64_t model = modelFunction(R"(
function test(a)
return if a < 0 then -a else a
end
)");
const bool args1[] = {false};
const bool args2[] = {true};
CHECK_EQ(4, Luau::Compile::computeCost(model, args1, 1));
CHECK_EQ(2, Luau::Compile::computeCost(model, args2, 1));
}
TEST_CASE("VarArgs")
{
uint64_t model = modelFunction(R"(
function test(...)
return select('#', ...) :: number
end
)");
CHECK_EQ(8, Luau::Compile::computeCost(model, nullptr, 0));
}
TEST_CASE("TablesFunctions")
{
uint64_t model = modelFunction(R"(
function test()
return { 42, op = function() end }
end
)");
CHECK_EQ(22, Luau::Compile::computeCost(model, nullptr, 0));
}
TEST_CASE("CostOverflow")
{
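// costs saturate rather than overflow, so the deeply nested constructor clamps the result at 127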
uint64_t model = modelFunction(R"(
function test()
return {{{{{{{{{{{{{{{}}}}}}}}}}}}}}}
end
)");
CHECK_EQ(127, Luau::Compile::computeCost(model, nullptr, 0));
}
TEST_CASE("TableAssign")
{
uint64_t model = modelFunction(R"(
function test(a)
for i=1,#a do
a[i] = i
end
end
)");
const bool args1[] = {false};
const bool args2[] = {true};
CHECK_EQ(7, Luau::Compile::computeCost(model, args1, 1));
CHECK_EQ(6, Luau::Compile::computeCost(model, args2, 1));
}
TEST_CASE("InterpString")
{
uint64_t model = modelFunction(R"(
function test(a)
return `hello, {a}!`
end
)");
const bool args1[] = {false};
const bool args2[] = {true};
CHECK_EQ(3, Luau::Compile::computeCost(model, args1, 1));
CHECK_EQ(3, Luau::Compile::computeCost(model, args2, 1));
}
TEST_CASE("MultipleAssignments")
{
ScopedFastFlag sff("LuauAssignmentHasCost", true);
uint64_t model = modelFunction(R"(
function test(a)
local x = 0
x = a
x = a + 1
x, x, x = a
x = a, a, a
end
)");
const bool args1[] = {false};
const bool args2[] = {true};
CHECK_EQ(8, Luau::Compile::computeCost(model, args1, 1));
CHECK_EQ(7, Luau::Compile::computeCost(model, args2, 1));
}
TEST_SUITE_END();