/*
 * Copyright (c) 2024 Huawei Device Co., Ltd.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef DEVICE_GPU_RESOURCE_MANAGER_BASE_INL
#define DEVICE_GPU_RESOURCE_MANAGER_BASE_INL

#include <algorithm>
#include <cinttypes>

#include <render/namespace.h>

#include "util/log.h"

RENDER_BEGIN_NAMESPACE()
template<typename ResourceType, typename CreateInfoType>
inline GpuResourceManagerTyped<ResourceType, CreateInfoType>::GpuResourceManagerTyped(Device& device) : device_(device)
{}

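// Returns the resource stored at the given index, or nullptr if the index is out of
// range or the slot is empty.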
template<typename ResourceType, typename CreateInfoType>
inline ResourceType* GpuResourceManagerTyped<ResourceType, CreateInfoType>::Get(const uint32_t index) const
{
    if (index < resources_.size()) {
        return resources_[index].get();
    } else {
        return nullptr;
    }
}

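// Creates (or re-creates) the resource at the given index. An existing resource in that slot is
// queued for deferred deallocation. If optionalResource is given it is stored as-is; otherwise a
// new buffer/image/sampler is created from desc (for GPU buffers, additionalDesc is used instead
// when useAdditionalDesc is true). An index past the current size appends to the storage.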
template<typename ResourceType, typename CreateInfoType>
template<typename AdditionalInfoType>
inline void GpuResourceManagerTyped<ResourceType, CreateInfoType>::Create(const uint32_t index,
    const CreateInfoType& desc, BASE_NS::unique_ptr<ResourceType> optionalResource, const bool useAdditionalDesc,
    const AdditionalInfoType& additionalDesc)
{
    if (index < static_cast<uint32_t>(resources_.size())) { // use existing location
        // add old for deallocation if found
        if (resources_[index]) {
            pendingDeallocations_.push_back({ move(resources_[index]), device_.GetFrameCount() });
        }

        if (optionalResource) {
            resources_[index] = move(optionalResource);
        } else {
            if constexpr (BASE_NS::is_same_v<ResourceType, GpuBuffer>) {
                if (useAdditionalDesc) {
                    resources_[index] = device_.CreateGpuBuffer(additionalDesc);
                } else {
                    resources_[index] = device_.CreateGpuBuffer(desc);
                }
            } else if constexpr (BASE_NS::is_same_v<ResourceType, GpuImage>) {
                resources_[index] = device_.CreateGpuImage(desc);
            } else if constexpr (BASE_NS::is_same_v<ResourceType, GpuSampler>) {
                resources_[index] = device_.CreateGpuSampler(desc);
            }
        }
    } else {
        if (optionalResource) {
            resources_.push_back(move(optionalResource));
        } else {
            if constexpr (BASE_NS::is_same_v<ResourceType, GpuBuffer>) {
                resources_.push_back(device_.CreateGpuBuffer(desc));
            } else if constexpr (BASE_NS::is_same_v<ResourceType, GpuImage>) {
                resources_.push_back(device_.CreateGpuImage(desc));
            } else if constexpr (BASE_NS::is_same_v<ResourceType, GpuSampler>) {
                resources_.push_back(device_.CreateGpuSampler(desc));
            }
        }
        PLUGIN_ASSERT(index == static_cast<uint32_t>(resources_.size() - 1u));
    }
}

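// Destroys queued resources that are old enough to no longer be referenced by in-flight
// command buffers, i.e. older than the command buffering count plus one frame.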
template<typename ResourceType, typename CreateInfoType>
inline void GpuResourceManagerTyped<ResourceType, CreateInfoType>::HandlePendingDeallocations()
{
    if (!pendingDeallocations_.empty()) {
        auto const minAge = device_.GetCommandBufferingCount() + 1;
        auto const ageLimit = (device_.GetFrameCount() < minAge) ? 0 : (device_.GetFrameCount() - minAge);

        auto const oldResources = std::partition(pendingDeallocations_.begin(), pendingDeallocations_.end(),
            [ageLimit](auto const& handleTime) { return handleTime.frameIndex >= ageLimit; });

        pendingDeallocations_.erase(oldResources, pendingDeallocations_.end());
    }
}

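// Destroys all queued resources immediately, regardless of their age.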
template<typename ResourceType, typename CreateInfoType>
inline void GpuResourceManagerTyped<ResourceType, CreateInfoType>::HandlePendingDeallocationsImmediate()
{
    pendingDeallocations_.clear();
}

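// Queues the resource at the given index for deferred deallocation and leaves the slot empty.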
template<typename ResourceType, typename CreateInfoType>
inline void GpuResourceManagerTyped<ResourceType, CreateInfoType>::Destroy(const uint32_t index)
{
    PLUGIN_ASSERT(index < static_cast<uint32_t>(resources_.size()));
    if (index < static_cast<uint32_t>(resources_.size())) {
        pendingDeallocations_.push_back({ move(resources_[index]), device_.GetFrameCount() });
    }
}

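// Destroys the resource at the given index immediately.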
template<typename ResourceType, typename CreateInfoType>
inline void GpuResourceManagerTyped<ResourceType, CreateInfoType>::DestroyImmediate(const uint32_t index)
{
    PLUGIN_ASSERT(index < static_cast<uint32_t>(resources_.size()));
    if (index < static_cast<uint32_t>(resources_.size())) {
        resources_[index].reset();
    }
}

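// Resizes the resource storage to hold maxSize entries.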
template<typename ResourceType, typename CreateInfoType>
inline void GpuResourceManagerTyped<ResourceType, CreateInfoType>::Resize(const size_t maxSize)
{
    // The current design does not allow reducing the stored space;
    // this assert documents that expectation.
    PLUGIN_ASSERT(maxSize >= resources_.size());
    resources_.resize(maxSize);
}

#if (RENDER_VALIDATION_ENABLED == 1)
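// Returns the number of non-empty resource slots (validation builds only).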
template<typename ResourceType, typename CreateInfoType>
size_t GpuResourceManagerTyped<ResourceType, CreateInfoType>::GetValidResourceCount() const
{
    size_t count = 0;
    for (const auto& res : resources_) {
        if (res) {
            count++;
        }
    }
    return count;
}
#endif
RENDER_END_NAMESPACE()

#endif // DEVICE_GPU_RESOURCE_MANAGER_BASE_INL