Flutter Impeller — host_buffer.cc
Implementation of HostBuffer: a per-frame arena of host-visible device buffers
used for transient GPU uploads. (Extracted from the generated documentation of
this file.)
1 // Copyright 2013 The Flutter Authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
#include "impeller/core/host_buffer.h"

#include <cstring>
#include <tuple>

#include "flutter/fml/logging.h"
#include "impeller/base/validation.h"
#include "impeller/core/allocator.h"
#include "impeller/core/buffer_view.h"
#include "impeller/core/device_buffer.h"
#include "impeller/core/device_buffer_descriptor.h"
#include "impeller/core/formats.h"
17 namespace impeller {
18 
// Size of each arena block in bytes: 1,024,000 bytes (1024 KB, ~1 MB).
constexpr size_t kAllocatorBlockSize = 1024000;
20 
21 std::shared_ptr<HostBuffer> HostBuffer::Create(
22  const std::shared_ptr<Allocator>& allocator,
23  const std::shared_ptr<const IdleWaiter>& idle_waiter) {
24  return std::shared_ptr<HostBuffer>(new HostBuffer(allocator, idle_waiter));
25 }
26 
27 HostBuffer::HostBuffer(const std::shared_ptr<Allocator>& allocator,
28  const std::shared_ptr<const IdleWaiter>& idle_waiter)
29  : allocator_(allocator), idle_waiter_(idle_waiter) {
33  for (auto i = 0u; i < kHostBufferArenaSize; i++) {
34  std::shared_ptr<DeviceBuffer> device_buffer = allocator->CreateBuffer(desc);
35  FML_CHECK(device_buffer) << "Failed to allocate device buffer.";
36  device_buffers_[i].push_back(device_buffer);
37  }
38 }
39 
41  if (idle_waiter_) {
42  // Since we hold on to DeviceBuffers we should make sure they aren't being
43  // used while we are deleting the HostBuffer.
44  idle_waiter_->WaitIdle();
45  }
46 };
47 
48 BufferView HostBuffer::Emplace(const void* buffer,
49  size_t length,
50  size_t align) {
51  auto [range, device_buffer, raw_device_buffer] =
52  EmplaceInternal(buffer, length, align);
53  if (device_buffer) {
54  return BufferView(std::move(device_buffer), range);
55  } else if (raw_device_buffer) {
56  return BufferView(raw_device_buffer, range);
57  } else {
58  return {};
59  }
60 }
61 
62 BufferView HostBuffer::Emplace(const void* buffer, size_t length) {
63  auto [range, device_buffer, raw_device_buffer] =
64  EmplaceInternal(buffer, length);
65  if (device_buffer) {
66  return BufferView(std::move(device_buffer), range);
67  } else if (raw_device_buffer) {
68  return BufferView(raw_device_buffer, range);
69  } else {
70  return {};
71  }
72 }
73 
75  size_t align,
76  const EmplaceProc& cb) {
77  auto [range, device_buffer, raw_device_buffer] =
78  EmplaceInternal(length, align, cb);
79  if (device_buffer) {
80  return BufferView(std::move(device_buffer), range);
81  } else if (raw_device_buffer) {
82  return BufferView(raw_device_buffer, range);
83  } else {
84  return {};
85  }
86 }
87 
90  .current_frame = frame_index_,
91  .current_buffer = current_buffer_,
92  .total_buffer_count = device_buffers_[frame_index_].size(),
93  };
94 }
95 
96 bool HostBuffer::MaybeCreateNewBuffer() {
97  current_buffer_++;
98  if (current_buffer_ >= device_buffers_[frame_index_].size()) {
100  desc.size = kAllocatorBlockSize;
102  std::shared_ptr<DeviceBuffer> buffer = allocator_->CreateBuffer(desc);
103  if (!buffer) {
104  VALIDATION_LOG << "Failed to allocate host buffer of size " << desc.size;
105  return false;
106  }
107  device_buffers_[frame_index_].push_back(std::move(buffer));
108  }
109  offset_ = 0;
110  return true;
111 }
112 
113 std::tuple<Range, std::shared_ptr<DeviceBuffer>, DeviceBuffer*>
114 HostBuffer::EmplaceInternal(size_t length,
115  size_t align,
116  const EmplaceProc& cb) {
117  if (!cb) {
118  return {};
119  }
120 
121  // If the requested allocation is bigger than the block size, create a one-off
122  // device buffer and write to that.
123  if (length > kAllocatorBlockSize) {
124  DeviceBufferDescriptor desc;
125  desc.size = length;
126  desc.storage_mode = StorageMode::kHostVisible;
127  std::shared_ptr<DeviceBuffer> device_buffer =
128  allocator_->CreateBuffer(desc);
129  if (!device_buffer) {
130  return {};
131  }
132  if (cb) {
133  cb(device_buffer->OnGetContents());
134  device_buffer->Flush(Range{0, length});
135  }
136  return std::make_tuple(Range{0, length}, std::move(device_buffer), nullptr);
137  }
138 
139  size_t padding = 0;
140  if (align > 0 && offset_ % align) {
141  padding = align - (offset_ % align);
142  }
143  if (offset_ + padding + length > kAllocatorBlockSize) {
144  if (!MaybeCreateNewBuffer()) {
145  return {};
146  }
147  } else {
148  offset_ += padding;
149  }
150 
151  const std::shared_ptr<DeviceBuffer>& current_buffer = GetCurrentBuffer();
152  auto contents = current_buffer->OnGetContents();
153  cb(contents + offset_);
154  Range output_range(offset_, length);
155  current_buffer->Flush(output_range);
156 
157  offset_ += length;
158  return std::make_tuple(output_range, nullptr, current_buffer.get());
159 }
160 
161 std::tuple<Range, std::shared_ptr<DeviceBuffer>, DeviceBuffer*>
162 HostBuffer::EmplaceInternal(const void* buffer, size_t length) {
163  // If the requested allocation is bigger than the block size, create a one-off
164  // device buffer and write to that.
165  if (length > kAllocatorBlockSize) {
166  DeviceBufferDescriptor desc;
167  desc.size = length;
168  desc.storage_mode = StorageMode::kHostVisible;
169  std::shared_ptr<DeviceBuffer> device_buffer =
170  allocator_->CreateBuffer(desc);
171  if (!device_buffer) {
172  return {};
173  }
174  if (buffer) {
175  if (!device_buffer->CopyHostBuffer(static_cast<const uint8_t*>(buffer),
176  Range{0, length})) {
177  return {};
178  }
179  }
180  return std::make_tuple(Range{0, length}, std::move(device_buffer), nullptr);
181  }
182 
183  auto old_length = GetLength();
184  if (old_length + length > kAllocatorBlockSize) {
185  if (!MaybeCreateNewBuffer()) {
186  return {};
187  }
188  }
189  old_length = GetLength();
190 
191  const std::shared_ptr<DeviceBuffer>& current_buffer = GetCurrentBuffer();
192  auto contents = current_buffer->OnGetContents();
193  if (buffer) {
194  ::memmove(contents + old_length, buffer, length);
195  current_buffer->Flush(Range{old_length, length});
196  }
197  offset_ += length;
198  return std::make_tuple(Range{old_length, length}, nullptr,
199  current_buffer.get());
200 }
201 
202 std::tuple<Range, std::shared_ptr<DeviceBuffer>, DeviceBuffer*>
203 HostBuffer::EmplaceInternal(const void* buffer, size_t length, size_t align) {
204  if (align == 0 || (GetLength() % align) == 0) {
205  return EmplaceInternal(buffer, length);
206  }
207 
208  {
209  auto padding = align - (GetLength() % align);
210  if (offset_ + padding < kAllocatorBlockSize) {
211  offset_ += padding;
212  } else if (!MaybeCreateNewBuffer()) {
213  return {};
214  }
215  }
216 
217  return EmplaceInternal(buffer, length);
218 }
219 
220 const std::shared_ptr<DeviceBuffer>& HostBuffer::GetCurrentBuffer() const {
221  return device_buffers_[frame_index_][current_buffer_];
222 }
223 
225  // When resetting the host buffer state at the end of the frame, check if
226  // there are any unused buffers and remove them.
227  while (device_buffers_[frame_index_].size() > current_buffer_ + 1) {
228  device_buffers_[frame_index_].pop_back();
229  }
230 
231  offset_ = 0u;
232  current_buffer_ = 0u;
233  frame_index_ = (frame_index_ + 1) % kHostBufferArenaSize;
234 }
235 
236 } // namespace impeller
BufferView Emplace(const BufferType &buffer, size_t alignment=0)
Emplace non-uniform data (like contiguous vertices) onto the host buffer.
Definition: host_buffer.h:93
TestStateQuery GetStateForTest()
Retrieve internal buffer state for test expectations.
Definition: host_buffer.cc:88
std::function< void(uint8_t *buffer)> EmplaceProc
Definition: host_buffer.h:105
void Reset()
Resets the contents of the HostBuffer to nothing so it can be reused.
Definition: host_buffer.cc:224
static std::shared_ptr< HostBuffer > Create(const std::shared_ptr< Allocator > &allocator, const std::shared_ptr< const IdleWaiter > &idle_waiter)
Definition: host_buffer.cc:21
Vector2 padding
The halo padding in source space.
static constexpr const size_t kHostBufferArenaSize
Approximately the same size as the max frames in flight.
Definition: host_buffer.h:22
constexpr size_t kAllocatorBlockSize
Definition: host_buffer.cc:19
Test only internal state.
Definition: host_buffer.h:127
#define VALIDATION_LOG
Definition: validation.h:91