7 #include <unordered_map>
9 #include "fml/concurrent_message_loop.h"
20 #include <sys/resource.h>
30 #include "flutter/fml/cpu_affinity.h"
31 #include "flutter/fml/trace_event.h"
47 VULKAN_HPP_DEFAULT_DISPATCH_LOADER_DYNAMIC_STORAGE
59 const vk::Instance& instance) {
60 for (
const auto& device : instance.enumeratePhysicalDevices().value) {
69 std::initializer_list<QueueIndexVK> queues) {
70 std::map<
size_t ,
size_t > family_index_map;
71 for (
const auto& queue : queues) {
72 family_index_map[queue.family] = 0;
74 for (
const auto& queue : queues) {
75 auto value = family_index_map[queue.family];
76 family_index_map[queue.family] = std::max(
value, queue.index);
79 static float kQueuePriority = 1.0f;
80 std::vector<vk::DeviceQueueCreateInfo> infos;
81 for (
const auto& item : family_index_map) {
82 vk::DeviceQueueCreateInfo info;
83 info.setQueueFamilyIndex(item.first);
84 info.setQueueCount(item.second + 1);
85 info.setQueuePriorities(kQueuePriority);
86 infos.push_back(info);
91 static std::optional<QueueIndexVK>
PickQueue(
const vk::PhysicalDevice& device,
92 vk::QueueFlagBits flags) {
95 const auto families = device.getQueueFamilyProperties();
96 for (
size_t i = 0u; i < families.size(); i++) {
97 if (!(families[i].queueFlags & flags)) {
106 auto context = std::shared_ptr<ContextVK>(
new ContextVK(settings.
flags));
107 context->Setup(std::move(settings));
108 if (!context->IsValid()) {
118 return std::clamp(hardware_concurrency / 2ull, 1ull, 4ull);
// Monotonic counter used to assign each ContextVK a process-unique hash.
static std::atomic_uint64_t context_count = 0;

// Returns a unique, monotonically increasing id per call. The pointer
// argument is ignored — the "hash" is purely the creation ordinal.
uint64_t CalculateHash(void* ptr) {
  // fetch_add is atomic, so concurrent context creation still yields unique
  // values; a uint64_t counter will not realistically overflow.
  return context_count.fetch_add(1);
}
128 ContextVK::ContextVK(
const Flags& flags)
129 : Context(flags), hash_(CalculateHash(this)) {}
132 if (device_holder_ && device_holder_->device) {
133 [[maybe_unused]]
auto result = device_holder_->device->waitIdle();
135 if (command_pool_recycler_) {
136 command_pool_recycler_->DestroyThreadLocalPools();
// NOTE(review): scrape fragment — presumably the initializer of the
// kImpellerEngineVersion constant declared on ContextVK (variant 0,
// version 2.0.0); the owning declaration is not visible here, confirm
// against the full source.
146 VK_MAKE_API_VERSION(0, 2, 0, 0);
148 void ContextVK::Setup(Settings settings) {
// NOTE(review): this block is a corrupted scrape of ContextVK::Setup — many
// source lines are missing and the leading numerals (148, 149, ...) are
// scrape artifacts, not code. The comments below label only what the visible
// lines establish; confirm details against the full source.
149 TRACE_EVENT0(
"impeller",
"ContextVK::Setup");
// Bail-out guard: a proc-address callback is required to bootstrap Vulkan.
151 if (!settings.proc_address_callback) {
// Initialize the default dynamic dispatcher from the embedder-supplied
// proc-address callback.
159 auto& dispatcher = VULKAN_HPP_DEFAULT_DISPATCHER;
160 dispatcher.init(settings.proc_address_callback);
// Collect any instance/device extensions handed in via embedder_data.
162 std::vector<std::string> embedder_instance_extensions;
163 std::vector<std::string> embedder_device_extensions;
164 if (settings.embedder_data.has_value()) {
165 embedder_instance_extensions = settings.embedder_data->instance_extensions;
166 embedder_device_extensions = settings.embedder_data->device_extensions;
// Build the capabilities object (the layers/extensions wrangler).
168 auto caps = std::shared_ptr<CapabilitiesVK>(
new CapabilitiesVK(
169 settings.enable_validation,
170 settings.fatal_missing_validations,
171 settings.embedder_data.has_value(),
172 embedder_instance_extensions,
173 embedder_device_extensions
176 if (!caps->IsValid()) {
// Resolve which layers and instance extensions to enable.
183 auto enabled_layers = caps->GetEnabledLayers();
184 auto enabled_extensions = caps->GetEnabledInstanceExtensions();
186 if (!enabled_layers.has_value() || !enabled_extensions.has_value()) {
// If VK_KHR_portability_enumeration is among the enabled extensions, opt
// into enumerating portability drivers on this instance.
191 vk::InstanceCreateFlags instance_flags = {};
193 if (std::find(enabled_extensions.value().begin(),
194 enabled_extensions.value().end(),
195 "VK_KHR_portability_enumeration") !=
196 enabled_extensions.value().end()) {
197 instance_flags |= vk::InstanceCreateFlagBits::eEnumeratePortabilityKHR;
// Lower the enabled layer/extension names to C strings for the create-info
// structs (the std::string storage above must outlive instance creation).
200 std::vector<const char*> enabled_layers_c;
201 std::vector<const char*> enabled_extensions_c;
203 for (
const auto& layer : enabled_layers.value()) {
204 enabled_layers_c.push_back(layer.c_str());
207 for (
const auto& ext : enabled_extensions.value()) {
208 enabled_extensions_c.push_back(ext.c_str());
// Application info: engine/app named "Impeller", targeting Vulkan API 1.1.
211 vk::ApplicationInfo application_info;
222 application_info.setApplicationVersion(VK_API_VERSION_1_0);
223 application_info.setApiVersion(VK_API_VERSION_1_1);
225 application_info.setPEngineName(
"Impeller");
226 application_info.setPApplicationName(
"Impeller");
// Chain validation features onto instance creation; the ValidationFeaturesEXT
// link is removed when validations are disabled.
228 vk::StructureChain<vk::InstanceCreateInfo, vk::ValidationFeaturesEXT>
231 if (!caps->AreValidationsEnabled()) {
232 instance_chain.unlink<vk::ValidationFeaturesEXT>();
235 std::vector<vk::ValidationFeatureEnableEXT> enabled_validations = {
236 vk::ValidationFeatureEnableEXT::eSynchronizationValidation,
239 auto validation = instance_chain.get<vk::ValidationFeaturesEXT>();
240 validation.setEnabledValidationFeatures(enabled_validations);
242 auto instance_info = instance_chain.get<vk::InstanceCreateInfo>();
243 instance_info.setPEnabledLayerNames(enabled_layers_c);
244 instance_info.setPEnabledExtensionNames(enabled_extensions_c);
245 instance_info.setPApplicationInfo(&application_info);
246 instance_info.setFlags(instance_flags);
// Create the vk::Instance, or adopt the embedder's instance — in the
// embedder case device_holder->owned is cleared so it is not destroyed here.
248 auto device_holder = std::make_shared<DeviceHolderImpl>();
249 if (!settings.embedder_data.has_value()) {
250 auto instance = vk::createInstanceUnique(instance_info);
251 if (instance.result != vk::Result::eSuccess) {
253 << vk::to_string(instance.result);
256 device_holder->instance = std::move(instance.value);
258 device_holder->instance.reset(settings.embedder_data->instance);
259 device_holder->owned =
false;
// Re-init the dispatcher with instance-level function pointers.
261 dispatcher.init(device_holder->instance.get());
// Debug reporting (validation messages).
270 std::make_unique<DebugReportVK>(*caps, device_holder->instance.get());
272 if (!debug_report->IsValid()) {
// Pick a physical device, or adopt the embedder-provided one.
280 if (!settings.embedder_data.has_value()) {
281 auto physical_device =
283 if (!physical_device.has_value()) {
287 device_holder->physical_device = physical_device.value();
289 device_holder->physical_device = settings.embedder_data->physical_device;
// Choose queue families. Graphics is mandatory; transfer falls back to the
// graphics queue when absent (the compute fallback body is not visible in
// this scrape).
295 auto graphics_queue =
296 PickQueue(device_holder->physical_device, vk::QueueFlagBits::eGraphics);
297 auto transfer_queue =
298 PickQueue(device_holder->physical_device, vk::QueueFlagBits::eTransfer);
300 PickQueue(device_holder->physical_device, vk::QueueFlagBits::eCompute);
302 if (!graphics_queue.has_value()) {
306 if (!transfer_queue.has_value()) {
307 transfer_queue = graphics_queue.value();
309 if (!compute_queue.has_value()) {
// Resolve device extensions and features, then create the vk::Device.
317 auto enabled_device_extensions =
318 caps->GetEnabledDeviceExtensions(device_holder->physical_device);
319 if (!enabled_device_extensions.has_value()) {
325 std::vector<const char*> enabled_device_extensions_c;
326 for (
const auto& ext : enabled_device_extensions.value()) {
327 enabled_device_extensions_c.push_back(ext.c_str());
331 {graphics_queue.value(), compute_queue.value(), transfer_queue.value()});
333 const auto enabled_features =
334 caps->GetEnabledDeviceFeatures(device_holder->physical_device);
335 if (!enabled_features.has_value()) {
341 vk::DeviceCreateInfo device_info;
343 device_info.setPNext(&enabled_features.value().get());
344 device_info.setQueueCreateInfos(queue_create_infos);
345 device_info.setPEnabledExtensionNames(enabled_device_extensions_c);
348 if (!settings.embedder_data.has_value()) {
350 device_holder->physical_device.createDeviceUnique(device_info);
351 if (device_result.result != vk::Result::eSuccess) {
355 device_holder->device = std::move(device_result.value);
357 device_holder->device.reset(settings.embedder_data->device);
// Let the capabilities object cache per-device limits/features.
360 if (!caps->SetPhysicalDevice(device_holder->physical_device,
361 *enabled_features)) {
// Create the allocator used for textures and buffers on the device.
369 auto allocator = std::shared_ptr<AllocatorVK>(
new AllocatorVK(
371 application_info.apiVersion,
372 device_holder->physical_device,
374 device_holder->instance.get(),
378 if (!allocator->IsValid()) {
// Pipeline, sampler and shader libraries.
386 auto pipeline_library = std::shared_ptr<PipelineLibraryVK>(
387 new PipelineLibraryVK(device_holder,
389 std::move(settings.cache_directory),
390 raster_message_loop_->GetTaskRunner()
393 if (!pipeline_library->IsValid()) {
398 auto sampler_library =
399 std::shared_ptr<SamplerLibraryVK>(
new SamplerLibraryVK(device_holder));
401 auto shader_library = std::shared_ptr<ShaderLibraryVK>(
402 new ShaderLibraryVK(device_holder,
403 settings.shader_libraries_data)
406 if (!shader_library->IsValid()) {
// Fence waiter and resource manager.
415 std::shared_ptr<FenceWaiterVK>(
new FenceWaiterVK(device_holder));
421 if (!resource_manager) {
// Recyclers for command pools and descriptor pools.
426 auto command_pool_recycler =
427 std::make_shared<CommandPoolRecyclerVK>(shared_from_this());
428 if (!command_pool_recycler) {
433 auto descriptor_pool_recycler =
434 std::make_shared<DescriptorPoolRecyclerVK>(weak_from_this());
435 if (!descriptor_pool_recycler) {
// Wrap the raw queues; the embedder path apparently supplies a single
// queue family index (see QueuesVK::FromEmbedderQueue).
444 if (!settings.embedder_data.has_value()) {
446 graphics_queue.value(),
447 compute_queue.value(),
448 transfer_queue.value()
453 settings.embedder_data->queue_family_index);
455 if (!queues.IsValid()) {
// Query device properties (used below for device_name_).
460 VkPhysicalDeviceProperties physical_device_properties;
461 dispatcher.vkGetPhysicalDeviceProperties(device_holder->physical_device,
462 &physical_device_properties);
// Driver info and driver-specific workarounds applied to caps and samplers.
470 std::make_unique<DriverInfoVK>(device_holder->physical_device);
472 caps->ApplyWorkarounds(workarounds_);
473 sampler_library->ApplyWorkarounds(workarounds_);
// Publish everything onto the context members.
475 device_holder_ = std::move(device_holder);
476 idle_waiter_vk_ = std::make_shared<IdleWaiterVK>(device_holder_);
477 driver_info_ = std::move(driver_info);
478 debug_report_ = std::move(debug_report);
479 allocator_ = std::move(allocator);
480 shader_library_ = std::move(shader_library);
481 sampler_library_ = std::move(sampler_library);
482 pipeline_library_ = std::move(pipeline_library);
483 yuv_conversion_library_ = std::shared_ptr<YUVConversionLibraryVK>(
484 new YUVConversionLibraryVK(device_holder_));
485 queues_ = std::move(queues);
486 device_capabilities_ = std::move(caps);
487 fence_waiter_ = std::move(fence_waiter);
488 resource_manager_ = std::move(resource_manager);
489 command_pool_recycler_ = std::move(command_pool_recycler);
490 descriptor_pool_recycler_ = std::move(descriptor_pool_recycler);
491 device_name_ = std::string(physical_device_properties.deviceName);
492 command_queue_vk_ = std::make_shared<CommandQueueVK>(weak_from_this());
493 should_enable_surface_control_ = settings.enable_surface_control;
// GPU tracing.
499 gpu_tracer_ = std::make_shared<GPUTracerVK>(weak_from_this(),
500 settings.enable_gpu_tracing);
501 gpu_tracer_->InitializeQueryPool(*
this);
// NOTE(review): the remainder of this scrape is a sequence of fragments from
// the small ContextVK accessors/helpers; most signatures and braces are
// missing, and the leading numerals are scrape artifacts. Each fragment
// below is labeled with the member it appears to belong to, based on the
// declaration index at the end of this file — confirm against full source.
// GetShaderLibrary()
528 return shader_library_;
// GetSamplerLibrary()
532 return sampler_library_;
// GetPipelineLibrary()
536 return pipeline_library_;
// CreateCommandBuffer(): fetch this thread's command pool from the recycler.
541 auto tls_pool = recycler->Get();
// Then a per-thread cached descriptor pool, guarded by desc_pool_mutex_.
548 std::shared_ptr<DescriptorPoolVK> descriptor_pool;
550 Lock lock(desc_pool_mutex_);
551 DescriptorPoolMap::iterator current_pool =
552 cached_descriptor_pool_.find(std::this_thread::get_id());
553 if (current_pool == cached_descriptor_pool_.end()) {
554 descriptor_pool = (cached_descriptor_pool_[std::this_thread::get_id()] =
555 descriptor_pool_recycler_->GetDescriptorPool());
557 descriptor_pool = current_pool->second;
// Bundle the pools into TrackedObjectsVK and begin a one-time-submit
// command buffer; the GPU probe records the buffer start.
561 auto tracked_objects = std::make_shared<TrackedObjectsVK>(
562 weak_from_this(), std::move(tls_pool), std::move(descriptor_pool),
566 if (!tracked_objects || !tracked_objects->IsValid() || !queue) {
570 vk::CommandBufferBeginInfo begin_info;
571 begin_info.flags = vk::CommandBufferUsageFlagBits::eOneTimeSubmit;
572 if (tracked_objects->GetCommandBuffer().begin(begin_info) !=
573 vk::Result::eSuccess) {
578 tracked_objects->GetGPUProbe().RecordCmdBufferStart(
579 tracked_objects->GetCommandBuffer());
584 std::move(tracked_objects)
// GetInstance()
589 return *device_holder_->instance;
// GetDevice()
593 return device_holder_->device.get();
// GetConcurrentWorkerTaskRunner()
596 const std::shared_ptr<fml::ConcurrentTaskRunner>
598 return raster_message_loop_->GetTaskRunner();
// Shutdown(): stop the fence waiter, drop the resource manager, and
// terminate the worker message loop.
607 fence_waiter_->Terminate();
608 resource_manager_.reset();
610 raster_message_loop_->Terminate();
// CreateSurfaceContext()
614 return std::make_shared<SurfaceContextVK>(shared_from_this());
// GetCapabilities()
618 return device_capabilities_;
// GetPhysicalDevice()
626 return device_holder_->physical_device;
// GetFenceWaiter()
630 return fence_waiter_;
// GetResourceManager()
634 return resource_manager_;
// GetCommandPoolRecycler()
639 return command_pool_recycler_;
// GetDescriptorPoolRecycler()
648 return descriptor_pool_recycler_;
// GetCommandQueue()
652 return command_queue_vk_;
// EnqueueCommandBuffer(): defers the buffer when batching is enabled.
656 std::shared_ptr<CommandBuffer> command_buffer) {
657 if (should_batch_cmd_buffers_) {
658 pending_command_buffers_.push_back(std::move(command_buffer));
// FlushCommandBuffers(): submits then clears any batched buffers.
666 if (pending_command_buffers_.empty()) {
670 if (should_batch_cmd_buffers_) {
671 bool result =
GetCommandQueue()->Submit(pending_command_buffers_).ok();
672 pending_command_buffers_.clear();
// Render-pass construction fragments: color/depth/stencil attachment
// descriptions are fed from each attachment's TextureDescriptor (format,
// sample_count) plus load/store actions — presumably via the
// RenderPassBuilderVK::Set*Attachment helpers declared in the index below;
// confirm in the full source.
693 attachment.
texture->GetTextureDescriptor().format,
694 attachment.
texture->GetTextureDescriptor().sample_count,
704 depth->texture->GetTextureDescriptor().format,
705 depth->texture->GetTextureDescriptor().sample_count,
710 stencil.has_value()) {
712 stencil->texture->GetTextureDescriptor().format,
713 stencil->texture->GetTextureDescriptor().sample_count,
714 stencil->load_action,
715 stencil->store_action
// DisposeThreadLocalCachedResources(): drop this thread's cached descriptor
// pool and dispose its command pools.
724 Lock lock(desc_pool_mutex_);
725 cached_descriptor_pool_.erase(std::this_thread::get_id());
727 command_pool_recycler_->Dispose();
// GetYUVConversionLibrary()
730 const std::shared_ptr<YUVConversionLibraryVK>&
732 return yuv_conversion_library_;
// GetShouldEnableSurfaceControlSwapchain(): fragment — the right-hand side
// of the conjunction is not visible here.
740 return should_enable_surface_control_ &&
static CapabilitiesVK & Cast(Capabilities &base)
The Vulkan layers and extensions wrangler.
bool SupportsExternalSemaphoreExtensions() const
void SetOffscreenFormat(PixelFormat pixel_format) const
std::optional< PhysicalDeviceFeatures > GetEnabledDeviceFeatures(const vk::PhysicalDevice &physical_device) const
void SetOffscreenFormat(PixelFormat pixel_format)
std::shared_ptr< Allocator > GetResourceAllocator() const override
Returns the allocator used to create textures and buffers on the device.
std::shared_ptr< ResourceManagerVK > GetResourceManager() const
vk::PhysicalDevice GetPhysicalDevice() const
const std::shared_ptr< YUVConversionLibraryVK > & GetYUVConversionLibrary() const
bool SetDebugName(T handle, std::string_view label) const
bool EnqueueCommandBuffer(std::shared_ptr< CommandBuffer > command_buffer) override
Enqueue command_buffer for submission by the end of the frame.
const vk::Device & GetDevice() const
bool FlushCommandBuffers() override
Flush all pending command buffers.
bool IsValid() const override
Determines if a context is valid. If the caller ever receives an invalid context, they must discard it and create a new context. There is no recovery mechanism to repair a bad context.
const std::unique_ptr< DriverInfoVK > & GetDriverInfo() const
void DisposeThreadLocalCachedResources() override
std::shared_ptr< CommandBuffer > CreateCommandBuffer() const override
Create a new command buffer. Command buffers can be used to encode graphics, blit, or compute commands to be submitted to the device for execution.
virtual bool SubmitOnscreen(std::shared_ptr< CommandBuffer > cmd_buffer) override
Submit the command buffer that renders to the onscreen surface.
std::shared_ptr< SamplerLibrary > GetSamplerLibrary() const override
Returns the library of combined image samplers used in shaders.
static std::shared_ptr< ContextVK > Create(Settings settings)
std::shared_ptr< PipelineLibrary > GetPipelineLibrary() const override
Returns the library of pipelines used by render or compute commands.
const std::shared_ptr< QueueVK > & GetGraphicsQueue() const
const std::shared_ptr< const Capabilities > & GetCapabilities() const override
Get the capabilities of Impeller context. All optionally supported feature of the platform,...
RuntimeStageBackend GetRuntimeStageBackend() const override
Retrieve the runtime stage for this context type.
std::shared_ptr< CommandPoolRecyclerVK > GetCommandPoolRecycler() const
std::shared_ptr< CommandQueue > GetCommandQueue() const override
Return the graphics queue for submitting command buffers.
void InitializeCommonlyUsedShadersIfNeeded() const override
std::shared_ptr< FenceWaiterVK > GetFenceWaiter() const
bool GetShouldEnableSurfaceControlSwapchain() const
Whether the Android Surface control based swapchain should be enabled.
std::shared_ptr< GPUTracerVK > GetGPUTracer() const
BackendType GetBackendType() const override
Get the graphics backend of an Impeller context.
std::string DescribeGpuModel() const override
const WorkaroundsVK & GetWorkarounds() const
const std::shared_ptr< fml::ConcurrentTaskRunner > GetConcurrentWorkerTaskRunner() const
static size_t ChooseThreadCountForWorkers(size_t hardware_concurrency)
std::shared_ptr< ShaderLibrary > GetShaderLibrary() const override
Returns the library of shaders used to specify the programmable stages of a pipeline.
vk::Instance GetInstance() const
std::shared_ptr< DeviceHolderVK > GetDeviceHolder() const
void Shutdown() override
Force all pending asynchronous work to finish. This is achieved by deleting all owned concurrent message loops.
std::shared_ptr< DescriptorPoolRecyclerVK > GetDescriptorPoolRecycler() const
std::shared_ptr< SurfaceContextVK > CreateSurfaceContext()
RenderPassBuilderVK & SetDepthStencilAttachment(PixelFormat format, SampleCount sample_count, LoadAction load_action, StoreAction store_action)
RenderPassBuilderVK & SetStencilAttachment(PixelFormat format, SampleCount sample_count, LoadAction load_action, StoreAction store_action)
RenderPassBuilderVK & SetColorAttachment(size_t index, PixelFormat format, SampleCount sample_count, LoadAction load_action, StoreAction store_action, vk::ImageLayout current_layout=vk::ImageLayout::eUndefined, bool is_swapchain=false)
vk::UniqueRenderPass Build(const vk::Device &device) const
a wrapper around the impeller [Allocator] instance that can be used to provide caching of allocated r...
virtual RenderTarget CreateOffscreenMSAA(const Context &context, ISize size, int mip_count, std::string_view label="Offscreen MSAA", RenderTarget::AttachmentConfigMSAA color_attachment_config=RenderTarget::kDefaultColorAttachmentConfigMSAA, std::optional< RenderTarget::AttachmentConfig > stencil_attachment_config=RenderTarget::kDefaultStencilAttachmentConfig, const std::shared_ptr< Texture > &existing_color_msaa_texture=nullptr, const std::shared_ptr< Texture > &existing_color_resolve_texture=nullptr, const std::shared_ptr< Texture > &existing_depth_stencil_texture=nullptr, std::optional< PixelFormat > target_pixel_format=std::nullopt)
bool IterateAllColorAttachments(const std::function< bool(size_t index, const ColorAttachment &attachment)> &iterator) const
const std::optional< DepthAttachment > & GetDepthAttachment() const
const std::optional< StencilAttachment > & GetStencilAttachment() const
static std::shared_ptr< ResourceManagerVK > Create()
Creates a shared resource manager (a dedicated thread).
ScopedObject< Object > Create(CtorArgs &&... args)
bool HasValidationLayers()
static std::optional< QueueIndexVK > PickQueue(const vk::PhysicalDevice &device, vk::QueueFlagBits flags)
static std::optional< vk::PhysicalDevice > PickPhysicalDevice(const CapabilitiesVK &caps, const vk::Instance &instance)
PixelFormat
The Pixel formats supported by Impeller. The naming convention denotes the usage of the component, the bit width of that component, and then one or more qualifiers to its representation.
static bool gHasValidationLayers
static constexpr uint32_t kImpellerEngineVersion
WorkaroundsVK GetWorkaroundsFromDriverInfo(DriverInfoVK &driver_info)
static std::vector< vk::DeviceQueueCreateInfo > GetQueueCreateInfos(std::initializer_list< QueueIndexVK > queues)
std::shared_ptr< Texture > texture
static QueuesVK FromEmbedderQueue(vk::Queue queue, uint32_t queue_family_index)
static QueuesVK FromQueueIndices(const vk::Device &device, QueueIndexVK graphics, QueueIndexVK compute, QueueIndexVK transfer)
std::shared_ptr< QueueVK > graphics_queue
A non-exhaustive set of driver specific workarounds.
bool batch_submit_command_buffer_timeout