From c4b94b658160fefa932946d4cd7a272506ddf9f5 Mon Sep 17 00:00:00 2001
From: Bernhard Manfred Gruber
Date: Mon, 28 Feb 2022 22:54:32 +0100
Subject: [PATCH] fix some CUDA warnings

---
 include/llama/Array.hpp | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/include/llama/Array.hpp b/include/llama/Array.hpp
index 0c76ab2326..856235c9e2 100644
--- a/include/llama/Array.hpp
+++ b/include/llama/Array.hpp
@@ -78,13 +78,13 @@ namespace llama
         }
 
         template<std::size_t I>
-        constexpr auto get() -> T&
+        LLAMA_FN_HOST_ACC_INLINE constexpr auto get() -> T&
         {
             return element[I];
         }
 
         template<std::size_t I>
-        constexpr auto get() const -> const T&
+        LLAMA_FN_HOST_ACC_INLINE constexpr auto get() const -> const T&
         {
             return element[I];
         }
@@ -210,7 +210,7 @@ namespace llama
     }
 
     template<typename T, std::size_t N>
-    LLAMA_FN_HOST_ACC_INLINE constexpr auto dot(Array<T, N> a, Array<T, N> b) -> T
+    LLAMA_FN_HOST_ACC_INLINE constexpr auto dot([[maybe_unused]] Array<T, N> a, [[maybe_unused]] Array<T, N> b) -> T
     {
         T r = 0;
         if constexpr(N > 0)
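
Note (not part of the patch): the sketch below is an assumption-laden illustration of why these two changes silence nvcc warnings, not LLAMA's actual code. The macro FN_HOST_ACC_INLINE, the MiniArray type, and the dot() body are hypothetical stand-ins; LLAMA's real LLAMA_FN_HOST_ACC_INLINE is defined elsewhere in the library. The idea: annotating get() for host and device lets device code call the accessor without "calling a __host__ function from a __host__ __device__ function" warnings, and [[maybe_unused]] covers the N == 0 instantiation of dot(), where the if constexpr branch that reads a and b is discarded and the parameters would otherwise trigger unused-parameter warnings.

// Hypothetical, simplified stand-ins for illustration only (not LLAMA's real code).
#include <cstddef>

#if defined(__CUDACC__)
#    define FN_HOST_ACC_INLINE __host__ __device__ __forceinline__
#else
#    define FN_HOST_ACC_INLINE inline
#endif

template<typename T, std::size_t N>
struct MiniArray
{
    T element[N > 0 ? N : 1];

    template<std::size_t I>
    FN_HOST_ACC_INLINE constexpr auto get() -> T&
    {
        // the host/device annotation lets device code call this accessor without nvcc warnings
        return element[I];
    }
};

// For N == 0 the if constexpr branch is discarded, so a and b are never read;
// [[maybe_unused]] keeps that instantiation free of unused-parameter warnings.
template<typename T, std::size_t N>
FN_HOST_ACC_INLINE constexpr auto dot([[maybe_unused]] MiniArray<T, N> a, [[maybe_unused]] MiniArray<T, N> b) -> T
{
    T r = 0;
    if constexpr(N > 0)
        for(std::size_t i = 0; i < N; i++)
            r += a.element[i] * b.element[i];
    return r;
}

int main()
{
    MiniArray<int, 3> a{{1, 2, 3}};
    MiniArray<int, 3> b{{4, 5, 6}};
    return dot(a, b) == 32 ? 0 : 1; // 1*4 + 2*5 + 3*6 = 32
}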