/*
 * Copyright 2015-2017 ARM Limited
 * SPDX-License-Identifier: Apache-2.0
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef SPIRV_CROSS_BARRIER_HPP
#define SPIRV_CROSS_BARRIER_HPP

#include <atomic>
#include <thread>

namespace spirv_cross
{
// A reusable spinning barrier: each call to wait() blocks (yielding the CPU)
// until the configured number of threads has arrived for the current iteration.
class Barrier
{
public:
	Barrier()
	{
		count.store(0);
		iteration.store(0);
	}

	// Set the number of threads which must call wait() before the barrier releases.
	void set_release_divisor(unsigned divisor)
	{
		this->divisor = divisor;
	}

	// Full memory fence; wait() itself only uses relaxed atomics.
	static inline void memoryBarrier()
	{
		std::atomic_thread_fence(std::memory_order_seq_cst);
	}

	void reset_counter()
	{
		count.store(0);
		iteration.store(0);
	}

	void wait()
	{
		unsigned target_iteration = iteration.load(std::memory_order_relaxed) + 1;
		// Overflows cleanly.
		unsigned target_count = divisor * target_iteration;

		// Barriers don't enforce memory ordering.
		// Be as relaxed about the barrier as we possibly can!
		unsigned c = count.fetch_add(1u, std::memory_order_relaxed);

		if (c + 1 == target_count)
		{
			iteration.store(target_iteration, std::memory_order_relaxed);
		}
		else
		{
			// If we have more threads than the CPU, don't hog the CPU for very long periods of time.
			while (iteration.load(std::memory_order_relaxed) != target_iteration)
				std::this_thread::yield();
		}
	}

private:
	unsigned divisor = 1;
	std::atomic<unsigned> count;
	std::atomic<unsigned> iteration;
};
}

#endif
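
// Usage sketch (illustration only, compiled out): the thread count, iteration
// count and run_worker helper below are assumptions, not part of SPIRV-Cross.
// It shows the intended pattern: set_release_divisor() to the number of
// participating threads, then have every thread call wait() once per iteration.
#if 0
#include <functional>
#include <thread>
#include <vector>

static void run_worker(spirv_cross::Barrier &barrier, unsigned iterations)
{
	for (unsigned i = 0; i < iterations; i++)
	{
		// ... per-thread work for this iteration ...

		// wait() itself is relaxed; issue a full fence first if the work
		// above must be visible to the other threads.
		spirv_cross::Barrier::memoryBarrier();
		barrier.wait();
	}
}

int main()
{
	// Arbitrary values chosen for the example.
	constexpr unsigned num_threads = 4;
	constexpr unsigned iterations = 16;

	spirv_cross::Barrier barrier;
	barrier.set_release_divisor(num_threads);

	std::vector<std::thread> pool;
	for (unsigned i = 0; i < num_threads; i++)
		pool.emplace_back(run_worker, std::ref(barrier), iterations);
	for (auto &t : pool)
		t.join();
}
#endif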