Upgrade to clang-tidy-16
bernhardmgruber committed Aug 10, 2023
1 parent 75fee01 commit f4944ce
Showing 6 changed files with 18 additions and 15 deletions.
3 changes: 2 additions & 1 deletion .clang-tidy
@@ -63,7 +63,8 @@ Checks: >
   -hicpp-signed-bitwise,
   -cppcoreguidelines-pro-bounds-array-to-pointer-decay,
   -hicpp-no-array-decay,
-  -cppcoreguidelines-owning-memory
+  -cppcoreguidelines-owning-memory,
+  -cppcoreguidelines-avoid-const-or-ref-data-members
 WarningsAsErrors: '*'
 HeaderFilterRegex: ''
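The newly excluded check, cppcoreguidelines-avoid-const-or-ref-data-members, is new in clang-tidy 16; it flags const-qualified and reference data members because they make a type neither assignable nor movable. A minimal sketch of the kind of code it warns about (illustration only, not taken from this repository):

```cpp
// Illustration only: members that cppcoreguidelines-avoid-const-or-ref-data-members flags.
struct Measurement
{
    const int id;  // const data member: implicitly deletes copy/move assignment for Measurement
    double& value; // reference data member: cannot be rebound, also blocks assignment
};

int main()
{
    double v = 1.0;
    Measurement a{1, v};
    // Measurement b{2, v}; b = a; // would fail: assignment is implicitly deleted
    return a.id - 1;
}
```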
10 changes: 5 additions & 5 deletions .github/workflows/ci.yaml
@@ -39,7 +39,7 @@ jobs:
     needs: clang-format
     runs-on: ubuntu-22.04
     env:
-      CXX: clang++-15
+      CXX: clang++-16
     strategy:
       fail-fast: false
       matrix:
@@ -52,10 +52,10 @@
       - name: add LLVM apt repo
         run: |
           wget -O - https://apt.llvm.org/llvm-snapshot.gpg.key|sudo apt-key add -
-          sudo add-apt-repository 'deb http://apt.llvm.org/jammy/ llvm-toolchain-jammy-15 main'
-      - name: install clang-15
+          sudo add-apt-repository 'deb http://apt.llvm.org/jammy/ llvm-toolchain-jammy-16 main'
+      - name: install clang-16
         run: |
-          sudo apt install clang-15 libomp-15-dev clang-tidy-15
+          sudo apt install clang-16 libomp-16-dev clang-tidy-16
       - name: vcpkg install dependencies
         run: |
           eval $VCPKG_INSTALL
@@ -76,7 +76,7 @@ jobs:
         run: |
           cd build
           sed -i 's/\(-forward-unknown-to-host-compiler\|--generate-code=arch=[^ ]\+\|--expt-extended-lambda\|--extended-lambda\|--expt-relaxed-constexpr\|--use_fast_math\)//g' compile_commands.json # remove NVCC specific flags which clang cannot handle
-          run-clang-tidy-15 -j $THREADS -header-filter='(tests|include/llama|examples)' -extra-arg=--no-cuda-version-check -extra-arg=-nocudalib -extra-arg=-Wno-unused-command-line-argument ${{ matrix.files }}
+          run-clang-tidy-16 -j $THREADS -header-filter='(tests|include/llama|examples)' -extra-arg=--no-cuda-version-check -extra-arg=-nocudalib -extra-arg=-Wno-unused-command-line-argument ${{ matrix.files }}
 
   coverage:
     needs: clang-format
6 changes: 3 additions & 3 deletions examples/root/lhcb_analysis/lhcb.cpp
@@ -488,7 +488,7 @@ namespace
             clearHeatmap(view);
         }
 
-        std::chrono::microseconds sortTime{};
+        std::chrono::microseconds sortTime{}; // NOLINT(misc-const-correctness)
         if constexpr(Sort)
             sortTime = sortView(view);
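The NOLINT presumably silences misc-const-correctness: when Sort is false, the if constexpr branch is discarded, sortTime is never written after initialization, and the check would suggest making it const. A self-contained sketch of that pattern (sortView replaced by a hypothetical placeholder, not the code from this file):

```cpp
#include <chrono>

template<bool Sort>
auto sortTimeFor() -> std::chrono::microseconds
{
    // For the Sort == false instantiation this variable is never modified again,
    // so misc-const-correctness would ask for 'const'; the suppression keeps it
    // mutable for the Sort == true case.
    std::chrono::microseconds sortTime{}; // NOLINT(misc-const-correctness)
    if constexpr(Sort)
        sortTime = std::chrono::microseconds{42}; // placeholder for sortView(view)
    return sortTime;
}

int main()
{
    return static_cast<int>(sortTimeFor<false>().count());
}
```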

@@ -525,7 +525,7 @@
             "{:13} {:>9.3f} {:>9.3f} {:>9.3f} {:>4} {:>10.1f} {:>7} {:>6.1f} {:>6.1f} {:>6.1f} {:>6.3f} {:>8}\n",
             mappingName,
             conversionTime / 1000.0,
-            sortTime.count() / 1000.0,
+            static_cast<double>(sortTime.count()) / 1000.0,
             static_cast<double>(totalAnalysisTime.count()) / repetitions / 1000.0,
             repetitions,
             totalBlobSizes(view.mapping()) / 1024.0 / 1024.0,
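sortTime.count() returns a signed 64-bit integer; dividing it by 1000.0 converts it implicitly to double, which clang-tidy reports as a potentially lossy narrowing conversion (bugprone-narrowing-conversions / cppcoreguidelines-narrowing-conversions, assumed here to be the trigger). The explicit cast states the intent. A minimal illustration of the pattern:

```cpp
#include <chrono>

int main()
{
    const std::chrono::microseconds t{1234567};
    // double ms = t.count() / 1000.0; // implicit long long -> double conversion,
    //                                 // flagged because values above 2^53 lose precision
    const double ms = static_cast<double>(t.count()) / 1000.0; // explicit, warning-free
    return ms > 0.0 ? 0 : 1;
}
```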
@@ -550,7 +550,7 @@ auto main(int argc, const char* argv[]) -> int
     }
 
     const auto& inputFile = argv[1];
-    const auto treeName = argc == 3 ? argv[2] : "DecayTree";
+    const auto treeName = std::string_view(argc == 3 ? argv[2] : "DecayTree");
 
     gErrorIgnoreLevel = kWarning + 1; // TODO(bgruber): supress warnings that the RNTuple still uses a pre-released
                                       // format. Remove this once RNTuple hits production.
4 changes: 3 additions & 1 deletion include/llama/RecordRef.hpp
@@ -1015,7 +1015,8 @@ struct std::tuple_element<I, const llama::RecordRef<View, BoundRecordCoord, OwnV
 template<typename View, typename BoundRecordCoord, bool OwnView>
 struct std::hash<llama::RecordRef<View, BoundRecordCoord, OwnView>> // NOLINT(cert-dcl58-cpp)
 {
-    auto operator()(const llama::RecordRef<View, BoundRecordCoord, OwnView>& rr) const -> std::size_t
+    LLAMA_FN_HOST_ACC_INLINE auto operator()(const llama::RecordRef<View, BoundRecordCoord, OwnView>& rr) const
+        -> std::size_t
     {
         std::size_t acc = 0;
         llama::forEachLeaf(
@@ -1038,6 +1039,7 @@ template<
     template<class>
     class UQual>
 struct std::
+    // NOLINTNEXTLINE(cert-dcl58-cpp)
     basic_common_reference<llama::RecordRef<ViewA, BoundA, OwnA>, llama::RecordRef<ViewB, BoundB, OwnB>, TQual, UQual>
 {
     using type = std::enable_if_t<
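cert-dcl58-cpp warns about declarations added to namespace std. Specializing templates such as std::hash or std::basic_common_reference for program-defined types is permitted by the standard, so these NOLINT markers flag the additions as intentional. A minimal sketch of the same pattern with hypothetical names (MyKey is not from this repository):

```cpp
#include <cstddef>
#include <functional>

struct MyKey { int v; };

// Specializing std::hash for a program-defined type is allowed by the standard;
// the NOLINT mirrors the suppressions used above to mark the intentional
// addition to namespace std.
template<>
struct std::hash<MyKey> // NOLINT(cert-dcl58-cpp)
{
    auto operator()(const MyKey& k) const noexcept -> std::size_t { return std::hash<int>{}(k.v); }
};

int main() { return static_cast<int>(std::hash<MyKey>{}(MyKey{7}) & 1); }
```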
8 changes: 4 additions & 4 deletions include/llama/StructName.hpp
@@ -283,12 +283,12 @@ namespace llama
     {
         constexpr auto intToStrSize(std::size_t s)
         {
-            std::size_t len = 0;
-            do
+            std::size_t len = 1;
+            while(s >= 10)
             {
                 len++;
                 s /= 10;
-            } while(s != 0);
+            }
             return len;
         }
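The do-while loop is rewritten as a while loop, presumably to avoid clang-tidy 16's cppcoreguidelines-avoid-do-while check (the same check is suppressed with a NOLINT in the next hunk, where a do-while is kept). Both forms count the decimal digits of s. A quick compile-time check of the new form (a sketch, not part of the commit):

```cpp
#include <cstddef>

// New form from the hunk above: start at one digit and add one per division by 10.
constexpr auto intToStrSize(std::size_t s) -> std::size_t
{
    std::size_t len = 1;
    while(s >= 10)
    {
        len++;
        s /= 10;
    }
    return len;
}

static_assert(intToStrSize(0) == 1);
static_assert(intToStrSize(9) == 1);
static_assert(intToStrSize(10) == 2);
static_assert(intToStrSize(1234) == 4);

int main() {}
```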

@@ -335,7 +335,7 @@ namespace llama
             it++;
             it += intToStrSize(n);
             auto it2 = it; // take copy because we write number backward
-            do
+            do // NOLINT(cppcoreguidelines-avoid-do-while)
             {
                 it2--;
                 *it2 = '0' + n % 10;
2 changes: 1 addition & 1 deletion tests/arrayindexrange.cpp
@@ -145,7 +145,7 @@ TEST_CASE("ArrayIndexRange.concepts")
 
 TEST_CASE("ArrayIndexRange.3D.reverse")
 {
-    llama::ArrayExtentsDynamic<int, 3> extents{3, 3, 3};
+    const auto extents = llama::ArrayExtentsDynamic<int, 3>{3, 3, 3};
 
     std::vector<llama::ArrayIndex<int, 3>> indices;
     for(auto ai : llama::ArrayIndexRange{extents} | std::views::reverse)
