// tsl-devkit/lsp-server/test/test_scheduler/test_async_executor.cppm

module;

export module lsp.test.scheduler.async_executor;

import std;
import lsp.scheduler.async_executor;

using namespace std::chrono_literals;

namespace
{
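// Assertion helper: throws so that RunAll() records the test as failed.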
void Expect(bool condition, const std::string& message)
{
    if (!condition)
        throw std::runtime_error(message);
}
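
// Minimal sequential test harness: stores named test functions, runs each one,
// and prints [PASS]/[FAIL] per test plus a failure summary.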
class SchedulerTestSuite
{
public:
    struct Entry
    {
        std::string name;
        std::function<void()> fn;
    };

    void Add(const std::string& name, std::function<void()> fn)
    {
        entries_.push_back({ name, std::move(fn) });
    }

    int RunAll()
    {
        int failures = 0;
        for (const auto& entry : entries_)
        {
            try
            {
                entry.fn();
                std::cout << "[PASS] " << entry.name << std::endl;
            }
            catch (const std::exception& e)
            {
                failures++;
                std::cout << "[FAIL] " << entry.name << " -> " << e.what() << std::endl;
            }
        }
        std::cout << "\nTotal: " << entries_.size() << ", Failures: " << failures << std::endl;
        return failures;
    }

private:
    std::vector<Entry> entries_;
};
}

int main()
{
    SchedulerTestSuite suite;
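
    // Test 1: a single task on a two-worker executor should complete, deliver its
    // result to the completion callback, and be counted as completed in the statistics.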
suite.Add("Completes basic task", [] {
lsp::scheduler::AsyncExecutor executor(2);
std::mutex callback_mutex;
std::optional<std::string> callback_result;
bool callback_cancelled = false;
auto handle = executor.Submit("task.simple", []() -> std::optional<std::string> {
std::this_thread::sleep_for(5ms);
return std::string("done"); }, [&](const std::optional<std::string>& result, bool cancelled) {
std::lock_guard<std::mutex> lk(callback_mutex);
callback_result = result;
callback_cancelled = cancelled; });
Expect(handle.Valid(), "Task handle should be valid");
Expect(handle.Wait(), "Handle wait should succeed");
{
std::lock_guard<std::mutex> lk(callback_mutex);
Expect(callback_result.has_value(), "Task result should be reported by callback");
Expect(!callback_cancelled, "Callback should not be marked cancelled");
Expect(callback_result.value() == "done", "Task result should match");
}
executor.WaitAll();
auto stats = executor.GetStatistics();
Expect(stats.completed == 1, "Completed count should be 1");
Expect(stats.cancelled == 0, "Cancelled count should be 0");
Expect(stats.failed == 0, "Failed count should be 0");
});
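
    // Test 2: cancelling through the handle should be reported to the callback and
    // counted in the cancelled statistic. The task body sleeps 50ms and Cancel() is
    // issued after 10ms, so the test relies on the task still being in flight then.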
suite.Add("Cancels running task via handle", [] {
lsp::scheduler::AsyncExecutor executor(1);
std::atomic<bool> callback_cancelled{ false };
auto handle = executor.Submit("task.cancel", []() -> std::optional<std::string> {
std::this_thread::sleep_for(50ms);
return std::string("late"); }, [&](const std::optional<std::string>&, bool cancelled) { callback_cancelled.store(cancelled, std::memory_order_relaxed); });
std::this_thread::sleep_for(10ms);
Expect(handle.Cancel(), "Handle cancel should report success");
Expect(handle.Wait(), "Wait after cancellation should still succeed");
executor.WaitAll();
Expect(callback_cancelled.load(std::memory_order_relaxed), "Callback should observe cancellation");
auto stats = executor.GetStatistics();
Expect(stats.cancelled == 1, "Cancelled count should be 1");
});
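
    // Test 3: submitting a second task under an id that is already in flight should
    // cancel the first submission, while the second task body still runs to completion.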
suite.Add("Replaces existing task with same id", [] {
lsp::scheduler::AsyncExecutor executor(2);
std::atomic<bool> first_cancelled{ false };
auto first = executor.Submit("task.duplicate", []() -> std::optional<std::string> {
std::this_thread::sleep_for(30ms);
return std::string("first"); }, [&](const std::optional<std::string>&, bool cancelled) {
if (cancelled)
first_cancelled.store(true, std::memory_order_relaxed); });
std::this_thread::sleep_for(5ms);
std::atomic<bool> second_completed{ false };
auto second = executor.Submit("task.duplicate", [&]() -> std::optional<std::string> {
std::this_thread::sleep_for(20ms);
second_completed.store(true, std::memory_order_relaxed);
return std::string("second");
});
Expect(second.Wait(), "Second handle should finish");
Expect(first.Wait(), "First handle should finish");
executor.WaitAll();
Expect(first_cancelled.load(std::memory_order_relaxed), "First callback should report cancellation");
auto stats = executor.GetStatistics();
Expect(stats.completed == 1, "One task should complete successfully");
Expect(stats.cancelled >= 1, "At least one task should be cancelled");
Expect(second_completed.load(std::memory_order_relaxed), "Second task body should run");
});
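
    // Test 4: 32 tasks on 8 workers; every task should complete, every completion
    // callback should fire exactly once, and nothing should be cancelled or fail.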
suite.Add("Handles many concurrent submissions", [] {
constexpr int kTaskCount = 32;
lsp::scheduler::AsyncExecutor executor(8);
std::atomic<int> callback_count{ 0 };
for (int i = 0; i < kTaskCount; ++i)
{
executor.Submit("task." + std::to_string(i), [i]() -> std::optional<std::string> {
std::this_thread::sleep_for(2ms);
return std::to_string(i * i); }, [&](const std::optional<std::string>& result, bool cancelled) {
Expect(!cancelled, "Concurrent tasks should not be cancelled");
Expect(result.has_value(), "Concurrent task should produce a result");
callback_count.fetch_add(1, std::memory_order_relaxed); });
}
executor.WaitAll();
auto stats = executor.GetStatistics();
Expect(stats.completed == kTaskCount, "All concurrent tasks should complete");
Expect(stats.cancelled == 0 && stats.failed == 0, "No concurrent tasks should fail or cancel");
Expect(callback_count.load(std::memory_order_relaxed) == kTaskCount, "Callbacks should run for every task");
});

    const int failures = suite.RunAll();
    return failures == 0 ? 0 : 1;
}