
Commit 8322a5d

Committed Mar 14, 2025
benchdnn: address clang-tidy complaints
1 parent cd977dc commit 8322a5d

26 files changed (+103, -90 lines)
 

tests/benchdnn/binary/binary.hpp (+5, -4)

@@ -72,9 +72,10 @@ struct prb_t : public prb_vdims_t {
 
     prb_t(const prb_vdims_t &prb_vdims,
             const std::vector<dnnl_data_type_t> &sdt, dnnl_data_type_t ddt,
-            const std::vector<std::string> &stag, std::string dtag, alg_t alg,
-            bool inplace, const attr_t &attr, const thr_ctx_t &ctx_init,
-            const thr_ctx_t &ctx_exe, const impl_filter_t &impl_filter)
+            const std::vector<std::string> &stag, const std::string &dtag,
+            alg_t alg, bool inplace, const attr_t &attr,
+            const thr_ctx_t &ctx_init, const thr_ctx_t &ctx_exe,
+            const impl_filter_t &impl_filter)
         : prb_vdims_t(prb_vdims)
         , sdt(sdt)
         , ddt(ddt)
@@ -109,7 +110,7 @@ struct prb_t : public prb_vdims_t {
 
     const char *str() const { return repro.c_str(); }
 
-    const bool is_ternary_op() const { return alg == alg_t::SELECT; }
+    bool is_ternary_op() const { return alg == alg_t::SELECT; }
 
 private:
     std::string repro;
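Both hunks are routine clang-tidy fare: passing dtag by const reference silences performance-unnecessary-value-param, and dropping the top-level const from the bool return addresses readability-const-return-type. A standalone sketch of the two checks (function name hypothetical, not benchdnn code):

    #include <string>

    // Flagged forms: the by-value string parameter is copied even though it is
    // only read, and a top-level const on a returned bool has no effect.
    //     const bool has_tag(std::string tag) { return !tag.empty(); }

    // Preferred form after applying both fixes:
    bool has_tag(const std::string &tag) { return !tag.empty(); }

    int main() { return has_tag("abx") ? 0 : 1; }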

tests/benchdnn/brgemm/brgemm.hpp (+1, -1)

@@ -68,7 +68,7 @@ struct settings_t : public base_settings_t {
     std::vector<std::string> batch_kind {"addr"};
 
     const char *perf_template_csv() const {
-        static const std::string args = "";
+        static const std::string args;
         return perf_template_csv_base(args);
     }
 

tests/benchdnn/common.hpp (+8, -3)

@@ -68,12 +68,12 @@ enum { CRIT = 1, WARN = 2 };
     do { \
         int status__ = (f); \
         if (status__ != OK) { \
-            if (s == CRIT || s == WARN) { \
+            if ((s) == CRIT || (s) == WARN) { \
                 BENCHDNN_PRINT(0, \
                         "Error: Function '%s' at (%s:%d) returned '%d'\n", \
                         __FUNCTION__, __FILE__, __LINE__, status__); \
                 fflush(0); \
-                if (s == CRIT) exit(1); \
+                if ((s) == CRIT) exit(1); \
             } \
             return status__; \
         } \
@@ -108,9 +108,14 @@ extern std::string driver_name;
         } \
     } while (0)
 
+//NOLINTBEGIN(bugprone-macro-parentheses)
+// dnnl_common.hpp:119:5: error: expected ';' at end of declaration list [clang-diagnostic-error]
+//   119 |     BENCHDNN_DISALLOW_COPY_AND_ASSIGN(stream_t);
+//       |     ^
 #define BENCHDNN_DISALLOW_COPY_AND_ASSIGN(T) \
     T(const T &) = delete; \
     T &operator=(const T &) = delete;
+//NOLINTEND(bugprone-macro-parentheses)
 
 /* perf */
 extern double max_ms_per_prb; // max time spend per prb in ms
@@ -168,7 +173,7 @@ const char *bool2str(bool value);
 bool match_regex(const char *str, const char *pattern);
 bool skip_start(res_t *res, int idx = benchdnn_stat.tests);
 
-typedef int (*bench_f)(int argc, char **argv);
+using bench_f = int (*)(int, char **);
 std::string locate_file(const std::string &fname);
 int batch(const char *fname, bench_f bench);
 
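The extra parentheses matter because the severity argument can be an arbitrary expression; that is what bugprone-macro-parentheses warns about. The BENCHDNN_DISALLOW_COPY_AND_ASSIGN macro is exempted with NOLINT because parenthesizing its parameter would not compile, as the quoted diagnostic shows. A standalone illustration of the hazard, with hypothetical macro names:

    #include <cstdio>

    enum { CRIT = 1, WARN = 2 }; // mirrors benchdnn's severity values

    #define IS_CRIT_BAD(s) (s == CRIT)   // unparenthesized parameter
    #define IS_CRIT_OK(s) ((s) == CRIT)  // parameter wrapped in parentheses

    int main() {
        bool relaxed = true;
        // Expands to (relaxed ? WARN : CRIT == CRIT): '==' binds tighter than
        // '?:', so this evaluates to WARN (2) instead of comparing the result.
        std::printf("bad: %d\n", IS_CRIT_BAD(relaxed ? WARN : CRIT));
        // Expands to ((relaxed ? WARN : CRIT) == CRIT) and prints 0 as intended.
        std::printf("ok:  %d\n", IS_CRIT_OK(relaxed ? WARN : CRIT));
        return 0;
    }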

tests/benchdnn/concat/concat.hpp (+1, -5)

@@ -85,11 +85,7 @@ struct prb_t : public prb_vdims_t {
         // If dst is omitted by `dtag = tag::undef`, omit `ddt` as well.
         if (dtag == tag::undef) this->ddt = dnnl_data_type_undef;
 
-        // Broadcast tag if needed
-        if (stag.size() == 1) {
-            const auto val = stag[0]; // Need a copy here.
-            this->stag.assign(prb_vdims.n_inputs(), val);
-        }
+        broadcast_vector(this->stag, prb_vdims.n_inputs());
 
         dst_dims[axis] = axis_size();
         repro = set_repro_line(); // must be last in ctor to collect right info
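The commit replaces the same copy-pasted broadcast pattern here, in gnorm, and in rnn with a broadcast_vector helper. Its definition is not part of this diff; judging from the removed code, it is roughly equivalent to the sketch below (assumed signature, the real helper may differ):

    #include <cstddef>
    #include <string>
    #include <vector>

    // Assumed shape of the helper: broadcast a single-element vector to n
    // identical entries, and leave vectors of any other size untouched.
    template <typename T>
    void broadcast_vector(std::vector<T> &v, std::size_t n) {
        if (v.size() != 1) return;
        const T val = v[0]; // copy first; assign() would invalidate v[0]
        v.assign(n, val);
    }

    int main() {
        std::vector<std::string> stag {"abx"};
        broadcast_vector(stag, 3); // stag becomes {"abx", "abx", "abx"}
        return static_cast<int>(stag.size()) - 3;
    }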

tests/benchdnn/dnn_types.hpp (+6, -8)

@@ -92,7 +92,7 @@ struct attr_t {
 
         bool is_def() const {
             return policy == COMMON && value == 0 && dt == dnnl_s32
-                    && groups.size() == 0;
+                    && groups.empty();
         }
 
         policy_t policy = COMMON;
@@ -141,7 +141,6 @@ struct attr_t {
                     arg, e.policy, prim_kind, ndims, has_groups);
         }
 
-        zero_points_t() : points() {} // needed for debug icc190 build;
         std::map<int, entry_t> points;
     };
 
@@ -156,7 +155,7 @@ struct attr_t {
 
         bool is_def() const {
             return policy == COMMON && scale == 1.f && dt == dnnl_f32
-                    && groups.size() == 0;
+                    && groups.empty();
         }
 
         policy_t policy = COMMON;
@@ -165,7 +164,7 @@ struct attr_t {
         std::vector<dnnl_dim_t> groups;
     };
 
-    void set(int arg, entry_t scale) { scales[arg] = scale; }
+    void set(int arg, const entry_t &scale) { scales[arg] = scale; }
 
     entry_t get(int arg) const {
         const auto &s = scales.find(arg);
@@ -194,8 +193,6 @@ struct attr_t {
         }
         int from_str(const std::string &s);
 
-        arg_scales_t() : scales() {} // needed for debug icc190 build;
-
         std::map<int, entry_t> scales;
     };
 
@@ -339,7 +336,7 @@ struct attr_t {
         bool is_prelu_kind() const;
     };
 
-    post_ops_t() : entry() {}
+    post_ops_t() = default;
 
     int len() const { return (int)entry.size(); }
     bool is_def() const { return len() == 0; }
@@ -460,7 +457,7 @@ struct isa_hints_t {
     cpu_hints_t hints_;
     isa_hints_t(cpu_hints_t hints) : hints_(hints) {}
 
-    cpu_hints_t get() { return hints_; }
+    cpu_hints_t get() const { return hints_; }
 
     static std::string hints2str(const isa_hints_t &isa_hints) {
         switch (isa_hints.hints_) {
@@ -540,6 +537,7 @@ struct sparse_options_t {
 
     std::vector<int> get_args() const {
        std::vector<int> args;
+        args.reserve(options_.size());
        for (const auto &opt : options_) {
            args.push_back(opt.first);
        }
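These hunks are likely driven by checks such as readability-container-size-empty (empty() over size() == 0) and performance-inefficient-vector-operation (reserve before push_back in a loop). A standalone sketch of the reserve pattern mirrored by get_args(), with hypothetical names:

    #include <map>
    #include <vector>

    // When the final size is known up front, reserving once avoids the repeated
    // geometric regrowth push_back would otherwise perform inside the loop.
    std::vector<int> keys_of(const std::map<int, float> &m) {
        std::vector<int> keys;
        keys.reserve(m.size()); // single allocation
        for (const auto &kv : m)
            keys.push_back(kv.first);
        return keys;
    }

    int main() {
        const std::map<int, float> m {{0, 1.f}, {2, 3.f}};
        return keys_of(m).size() == 2 ? 0 : 1;
    }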

tests/benchdnn/dnnl_common.hpp (+7, -7)

@@ -45,21 +45,21 @@
     do { \
         dnnl_status_t status__ = f; \
         if (status__ != dnnl_success) { \
-            if (s == CRIT || s == WARN) { \
+            if ((s) == CRIT || (s) == WARN) { \
                 BENCHDNN_PRINT(0, \
                         "Error: Function '%s' at (%s:%d) returned '%s'\n", \
                         __FUNCTION__, __FILE__, __LINE__, \
                         status2str(status__)); \
                 fflush(0); \
-                if (s == CRIT) exit(2); \
+                if ((s) == CRIT) exit(2); \
             } \
             return FAIL; \
         } \
     } while (0)
 
 #define DNN_SAFE_V(f) \
     do { \
-        dnnl_status_t status__ = f; \
+        dnnl_status_t status__ = (f); \
         if (status__ != dnnl_success) { \
             BENCHDNN_PRINT(0, \
                     "Error: Function '%s' at (%s:%d) returned '%s'\n", \
@@ -72,7 +72,7 @@
 // Unlike `DNN_SAFE` this one returns `dnnl_status_t`, not `OK/FAIL`.
 #define DNN_SAFE_STATUS(f) \
     do { \
-        dnnl_status_t status__ = f; \
+        dnnl_status_t status__ = (f); \
         if (status__ != dnnl_success) { return status__; } \
     } while (0)
 
@@ -499,6 +499,7 @@ int init_prim(benchdnn_dnnl_wrapper_t<dnnl_primitive_t> &user_prim,
     // Rationale: make sure that the primitive cache is robust in the case
     // where CPU and GPU engines are re-created because this is a commonly
     // used scenario in the frameworks.
+    // NOLINTNEXTLINE(performance-unnecessary-copy-initialization)
     engine_t engine(get_test_engine());
 
     // The first primitive creation using a temporary engine.
@@ -653,9 +654,8 @@ void check_correctness(const prb_t *prb, const std::vector<data_kind_t> &kinds,
     }
 }
 
-typedef std::function<dnnl_status_t(
-        const dnnl_stream_t &, const std::vector<dnnl_exec_arg_t> &)>
-        perf_function_t;
+using perf_function_t = std::function<dnnl_status_t(
+        const dnnl_stream_t &, const std::vector<dnnl_exec_arg_t> &)>;
 
 int execute_and_wait(perf_function_t &exec_func, const dnnl_engine_t &engine,
         const args_t &args, res_t *res = nullptr);
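This hunk and the bench_f change in common.hpp are the modernize-use-using spelling of the same declarations. A standalone sketch of the equivalence, with simplified stand-in types rather than the benchdnn headers:

    #include <functional>

    // Old spellings flagged by clang-tidy's modernize-use-using:
    //     typedef int (*callback_t)(int argc, char **argv);
    //     typedef std::function<int(int, char **)> callable_t;

    // Equivalent alias declarations:
    using callback_t = int (*)(int, char **);
    using callable_t = std::function<int(int, char **)>;

    int main() {
        callback_t cb = [](int, char **) { return 0; }; // capture-less lambda decays to a function pointer
        callable_t fn = cb;
        return fn(0, nullptr);
    }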

tests/benchdnn/dnnl_memory.cpp (+1, -1)

@@ -608,7 +608,7 @@ benchdnn_dnnl_wrapper_t<dnnl_memory_desc_t> dnn_mem_t::init_md(int ndims,
     const bool use_strides = !strides_.empty();
     // Ignore tag_ in case strides_ are explicitly provided
     if (use_strides) {
-        std::vector<dnnl_dim_t> strides(strides_);
+        const std::vector<dnnl_dim_t> &strides(strides_);
         DNN_SAFE_V(dnnl_memory_desc_create_with_strides(
                 &md, ndims, dims, data_type, strides.data()));
         return md;
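Binding a const reference keeps the later strides.data() call unchanged while dropping the copy flagged by performance-unnecessary-copy-initialization. A minimal sketch of the pattern with hypothetical names (consume stands in for the dnnl C API call):

    #include <cstdint>
    #include <cstdio>
    #include <vector>

    using dim_t = std::int64_t;

    // Stand-in for dnnl_memory_desc_create_with_strides: it only needs a raw
    // pointer, so an alias to the caller's vector is sufficient.
    static void consume(const dim_t *ptr, std::size_t n) {
        if (n) std::printf("first stride: %lld\n", static_cast<long long>(ptr[0]));
    }

    void use_strides(const std::vector<dim_t> &strides_) {
        // Before the fix: std::vector<dim_t> strides(strides_); // full copy
        const std::vector<dim_t> &strides = strides_; // reference, no allocation
        consume(strides.data(), strides.size());
    }

    int main() {
        use_strides({1, 2, 3});
        return 0;
    }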

tests/benchdnn/dnnl_memory.hpp (+1, -1)

@@ -202,7 +202,7 @@ struct dnn_mem_t {
     bool active_ = false;
 
     dnnl_engine_kind_t engine_kind_ = dnnl_any_engine;
-    dnnl_engine_t engine_ = NULL;
+    dnnl_engine_t engine_ = nullptr;
 
     mutable bool is_mapped_ = false;
     mutable std::vector<void *> mapped_ptrs_;

tests/benchdnn/gnorm/gnorm.hpp (+3, -11)

@@ -109,17 +109,9 @@ struct prb_t : public desc_t {
         , impl_filter(impl_filter) {
 
         if (mb) this->mb = mb;
-        // Broadcast data types if needed
-        if (dt.size() == 1) {
-            const auto val = dt[0]; // Need a copy here.
-            this->dt.assign(2, val);
-        }
-
-        // Broadcast tags if needed
-        if (tag.size() == 1) {
-            const auto val = tag[0];
-            this->tag.assign(2, val);
-        }
+
+        broadcast_vector(this->dt, 2);
+        broadcast_vector(this->tag, 2);
 
         repro = set_repro_line(); // must be last in ctor to collect right info
     }

tests/benchdnn/lnorm/lnorm.hpp (+5, -5)

@@ -104,19 +104,19 @@ struct prb_t : public prb_dims_t {
         , attr(attr)
         , ctx_init(ctx_init)
         , ctx_exe(ctx_exe)
-        , impl_filter(impl_filter) {
-        n = 1;
+        , impl_filter(impl_filter)
+        , n(1)
+        , c(dims[ndims - 1])
+        , eps(1.f / 16) {
         for (int d = 0; d < ndims - 1; d++)
             n *= dims[d];
-        c = dims[ndims - 1];
-        eps = 1.f / 16;
 
         // Broadcast data types if needed
         if (dt.size() == 1) {
             const auto val = dt[0]; // Need a copy here.
             this->dt.assign(2, val);
         }
-        if (tag.size() == 1) { this->tag.push_back(tag::any); }
+        if (tag.size() == 1) { this->tag.emplace_back(tag::any); }
         repro = set_repro_line(); // must be last in ctor to collect right info
     }
 

tests/benchdnn/lrn/lrn.hpp (-1)

@@ -101,7 +101,6 @@ struct prb_t : public desc_t {
         if (mb) this->mb = mb;
         repro = set_repro_line(); // must be last in ctor to collect right info
     }
-    ~prb_t() {}
 
     dir_t dir;
     dnnl_data_type_t dt;
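Dropping the empty destructor, presumably flagged by a check such as modernize-use-equals-default, is more than cosmetic: a user-declared destructor suppresses the implicitly generated move operations. A standalone illustration with hypothetical types, not benchdnn code:

    #include <utility>
    #include <vector>

    struct with_dtor {
        std::vector<int> data;
        ~with_dtor() {} // user-declared: implicit move ctor/assignment are not generated
    };

    struct rule_of_zero {
        std::vector<int> data; // all special members compiler-generated, moves included
    };

    int main() {
        with_dtor a {std::vector<int>(1000, 0)};
        with_dtor b = std::move(a); // falls back to a copy; a.data still holds 1000 elements

        rule_of_zero c {std::vector<int>(1000, 0)};
        rule_of_zero d = std::move(c); // real move; c.data is typically left empty
        return b.data.size() == d.data.size() ? 0 : 1;
    }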

tests/benchdnn/matmul/matmul.hpp (+2, -2)

@@ -33,7 +33,7 @@
 
 namespace matmul {
 
-typedef std::bitset<DNNL_MAX_NDIMS> dims_mask_t;
+using dims_mask_t = std::bitset<DNNL_MAX_NDIMS>;
 
 struct settings_t : public base_settings_t {
     settings_t() = default;
@@ -90,7 +90,7 @@ struct prb_t : public prb_vdims_t {
             dnnl_data_type_t bia_dt, int bia_mask,
             const std::vector<dims_mask_t> &rt_dims_masks,
 #ifdef DNNL_EXPERIMENTAL_SPARSE
-            sparse_options_t sparse_options,
+            const sparse_options_t &sparse_options,
 #endif
             const attr_t &attr, const thr_ctx_t &ctx_init,
             const thr_ctx_t &ctx_exe, const impl_filter_t &impl_filter)

tests/benchdnn/reorder/cfg.cpp (+5, -5)

@@ -1,5 +1,5 @@
 /*******************************************************************************
-* Copyright 2018-2024 Intel Corporation
+* Copyright 2018-2025 Intel Corporation
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
@@ -31,8 +31,8 @@ const float f16_max_exact = 1 << 11;
 const float f4_max_exact = 1 << 3;
 
 #define REG(dt, min, max) \
-    const dt_conf_s CONCAT2(_conf_, dt) = {CONCAT2(dnnl_, dt), min, max}; \
-    const dt_conf_t CONCAT2(conf_, dt) = &CONCAT2(_conf_, dt);
+    const dt_conf_t CONCAT2(_conf_, dt) = {CONCAT2(dnnl_, dt), min, max}; \
+    const dt_conf_t *CONCAT2(conf_, dt) = &CONCAT2(_conf_, dt);
 
 REG(f32, -int_max_exact, int_max_exact);
 REG(f64, -int_max_exact, int_max_exact);
@@ -53,7 +53,7 @@ REG(u4, 0, 15);
 
 #undef REG
 
-dt_conf_t dt2cfg(dnnl_data_type_t dt) {
+const dt_conf_t *dt2cfg(dnnl_data_type_t dt) {
 #define CASE(cfg) \
     if (CONCAT2(dnnl_, cfg) == dt) return CONCAT2(conf_, cfg)
     CASE(f32);
@@ -74,7 +74,7 @@ dt_conf_t dt2cfg(dnnl_data_type_t dt) {
     return conf_f32;
 }
 
-dnnl_data_type_t cfg2dt(dt_conf_t cfg) {
+dnnl_data_type_t cfg2dt(const dt_conf_t *cfg) {
 #define CASE(_cfg) \
     if (cfg == CONCAT2(conf_, _cfg)) return CONCAT2(dnnl_, _cfg)
     CASE(f32);
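With the pointer typedef gone (see the reorder.hpp hunk below), the struct itself is named dt_conf_t and configs are passed as const dt_conf_t *. Written out by hand, REG(f32, ...) now expands roughly as follows; the enum and the int_max_exact value here are placeholders, not the real oneDNN definitions:

    // Hand-expanded sketch of REG(f32, -int_max_exact, int_max_exact) after the rename.
    enum dnnl_data_type_t { dnnl_f32 };

    struct dt_conf_t {
        dnnl_data_type_t dt;
        float min;
        float max;
    };

    const float int_max_exact = 1 << 24; // placeholder value

    const dt_conf_t _conf_f32 = {dnnl_f32, -int_max_exact, int_max_exact};
    const dt_conf_t *conf_f32 = &_conf_f32;

    int main() { return conf_f32->dt == dnnl_f32 ? 0 : 1; }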

tests/benchdnn/reorder/reorder.hpp (+4, -5)

@@ -40,14 +40,13 @@ flag_t str2flag(const char *str);
 std::string flag2str(flag_bit_t flag);
 std::ostream &operator<<(std::ostream &s, const std::vector<flag_t> &oflag);
 
-struct dt_conf_s {
+struct dt_conf_t {
     dnnl_data_type_t dt;
     float min;
     float max;
 };
-typedef const dt_conf_s *dt_conf_t;
-dt_conf_t dt2cfg(dnnl_data_type_t dt);
-dnnl_data_type_t cfg2dt(dt_conf_t cfg);
+const dt_conf_t *dt2cfg(dnnl_data_type_t dt);
+dnnl_data_type_t cfg2dt(const dt_conf_t *cfg);
 
 enum cross_engine_t { NONE, CPU2GPU, GPU2CPU };
 cross_engine_t str2cross_engine(const char *str);
@@ -133,7 +132,7 @@ struct prb_t : public prb_dims_t {
     bool is_reorder_with_compensation(flag_bit_t flag) const;
     dims_t get_compensation_dims(flag_bit_t flag) const;
     int get_compensation_mask(flag_bit_t flag) const;
-    dt_conf_t get_conf(data_kind_t kind) const;
+    const dt_conf_t *get_conf(data_kind_t kind) const;
 
     // Used to construct memory desc when dimensions are runtime since such mds
     // can't be used directly from query and memory objects can't be constructed.

tests/benchdnn/reorder/reorder_aux.cpp (+1, -1)

@@ -126,7 +126,7 @@ int prb_t::get_compensation_mask(flag_bit_t flag) const {
     return mask;
 }
 
-dt_conf_t prb_t::get_conf(data_kind_t kind) const {
+const dt_conf_t *prb_t::get_conf(data_kind_t kind) const {
     switch (kind) {
         case SRC: return dt2cfg(sdt);
         case DST: return dt2cfg(ddt);

tests/benchdnn/rnn/rnn.hpp (+1, -5)

@@ -333,11 +333,7 @@ struct prb_t : public desc_t {
         if (mb) this->mb = mb;
         count_ops();
 
-        // Broadcast data types if needed
-        if (tag.size() == 1) {
-            const auto val = tag[0]; // Need a copy here.
-            this->tag.assign(3, val);
-        }
+        broadcast_vector(this->tag, 3);
 
         wei_scales = nullptr;
         wei_proj_scales = nullptr;
