Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

benchdnn: address clang-tidy complaints #2881

Merged
merged 1 commit into the base branch on
Mar 15, 2025
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
9 changes: 5 additions & 4 deletions tests/benchdnn/binary/binary.hpp
Original file line number Diff line number Diff line change
Expand Up @@ -72,9 +72,10 @@ struct prb_t : public prb_vdims_t {

prb_t(const prb_vdims_t &prb_vdims,
const std::vector<dnnl_data_type_t> &sdt, dnnl_data_type_t ddt,
const std::vector<std::string> &stag, std::string dtag, alg_t alg,
bool inplace, const attr_t &attr, const thr_ctx_t &ctx_init,
const thr_ctx_t &ctx_exe, const impl_filter_t &impl_filter)
const std::vector<std::string> &stag, const std::string &dtag,
alg_t alg, bool inplace, const attr_t &attr,
const thr_ctx_t &ctx_init, const thr_ctx_t &ctx_exe,
const impl_filter_t &impl_filter)
: prb_vdims_t(prb_vdims)
, sdt(sdt)
, ddt(ddt)
Expand Down Expand Up @@ -109,7 +110,7 @@ struct prb_t : public prb_vdims_t {

const char *str() const { return repro.c_str(); }

const bool is_ternary_op() const { return alg == alg_t::SELECT; }
bool is_ternary_op() const { return alg == alg_t::SELECT; }

private:
std::string repro;
Expand Down
2 changes: 1 addition & 1 deletion tests/benchdnn/brgemm/brgemm.hpp
Original file line number Diff line number Diff line change
Expand Up @@ -68,7 +68,7 @@ struct settings_t : public base_settings_t {
std::vector<std::string> batch_kind {"addr"};

const char *perf_template_csv() const {
static const std::string args = "";
static const std::string args;
return perf_template_csv_base(args);
}

Expand Down
11 changes: 8 additions & 3 deletions tests/benchdnn/common.hpp
Original file line number Diff line number Diff line change
Expand Up @@ -68,12 +68,12 @@ enum { CRIT = 1, WARN = 2 };
do { \
int status__ = (f); \
if (status__ != OK) { \
if (s == CRIT || s == WARN) { \
if ((s) == CRIT || (s) == WARN) { \
BENCHDNN_PRINT(0, \
"Error: Function '%s' at (%s:%d) returned '%d'\n", \
__FUNCTION__, __FILE__, __LINE__, status__); \
fflush(0); \
if (s == CRIT) exit(1); \
if ((s) == CRIT) exit(1); \
} \
return status__; \
} \
Expand Down Expand Up @@ -108,9 +108,14 @@ extern std::string driver_name;
} \
} while (0)

//NOLINTBEGIN(bugprone-macro-parentheses)
// dnnl_common.hpp:119:5: error: expected ';' at end of declaration list [clang-diagnostic-error]
// 119 | BENCHDNN_DISALLOW_COPY_AND_ASSIGN(stream_t);
// | ^
#define BENCHDNN_DISALLOW_COPY_AND_ASSIGN(T) \
T(const T &) = delete; \
T &operator=(const T &) = delete;
//NOLINTEND(bugprone-macro-parentheses)

/* perf */
extern double max_ms_per_prb; // max time spend per prb in ms
Expand Down Expand Up @@ -168,7 +173,7 @@ const char *bool2str(bool value);
bool match_regex(const char *str, const char *pattern);
bool skip_start(res_t *res, int idx = benchdnn_stat.tests);

typedef int (*bench_f)(int argc, char **argv);
using bench_f = int (*)(int, char **);
std::string locate_file(const std::string &fname);
int batch(const char *fname, bench_f bench);

Expand Down
6 changes: 1 addition & 5 deletions tests/benchdnn/concat/concat.hpp
Original file line number Diff line number Diff line change
Expand Up @@ -85,11 +85,7 @@ struct prb_t : public prb_vdims_t {
// If dst is omitted by `dtag = tag::undef`, omit `ddt` as well.
if (dtag == tag::undef) this->ddt = dnnl_data_type_undef;

// Broadcast tag if needed
if (stag.size() == 1) {
const auto val = stag[0]; // Need a copy here.
this->stag.assign(prb_vdims.n_inputs(), val);
}
broadcast_vector(this->stag, prb_vdims.n_inputs());

dst_dims[axis] = axis_size();
repro = set_repro_line(); // must be last in ctor to collect right info
Expand Down
14 changes: 6 additions & 8 deletions tests/benchdnn/dnn_types.hpp
Original file line number Diff line number Diff line change
Expand Up @@ -92,7 +92,7 @@ struct attr_t {

bool is_def() const {
return policy == COMMON && value == 0 && dt == dnnl_s32
&& groups.size() == 0;
&& groups.empty();
}

policy_t policy = COMMON;
Expand Down Expand Up @@ -141,7 +141,6 @@ struct attr_t {
arg, e.policy, prim_kind, ndims, has_groups);
}

zero_points_t() : points() {} // needed for debug icc190 build;
std::map<int, entry_t> points;
};

Expand All @@ -156,7 +155,7 @@ struct attr_t {

bool is_def() const {
return policy == COMMON && scale == 1.f && dt == dnnl_f32
&& groups.size() == 0;
&& groups.empty();
}

policy_t policy = COMMON;
Expand All @@ -165,7 +164,7 @@ struct attr_t {
std::vector<dnnl_dim_t> groups;
};

void set(int arg, entry_t scale) { scales[arg] = scale; }
void set(int arg, const entry_t &scale) { scales[arg] = scale; }

entry_t get(int arg) const {
const auto &s = scales.find(arg);
Expand Down Expand Up @@ -194,8 +193,6 @@ struct attr_t {
}
int from_str(const std::string &s);

arg_scales_t() : scales() {} // needed for debug icc190 build;

std::map<int, entry_t> scales;
};

Expand Down Expand Up @@ -339,7 +336,7 @@ struct attr_t {
bool is_prelu_kind() const;
};

post_ops_t() : entry() {}
post_ops_t() = default;

int len() const { return (int)entry.size(); }
bool is_def() const { return len() == 0; }
Expand Down Expand Up @@ -460,7 +457,7 @@ struct isa_hints_t {
cpu_hints_t hints_;
isa_hints_t(cpu_hints_t hints) : hints_(hints) {}

cpu_hints_t get() { return hints_; }
cpu_hints_t get() const { return hints_; }

static std::string hints2str(const isa_hints_t &isa_hints) {
switch (isa_hints.hints_) {
Expand Down Expand Up @@ -540,6 +537,7 @@ struct sparse_options_t {

std::vector<int> get_args() const {
std::vector<int> args;
args.reserve(options_.size());
for (const auto &opt : options_) {
args.push_back(opt.first);
}
Expand Down
14 changes: 7 additions & 7 deletions tests/benchdnn/dnnl_common.hpp
Original file line number Diff line number Diff line change
Expand Up @@ -45,21 +45,21 @@
do { \
dnnl_status_t status__ = f; \
if (status__ != dnnl_success) { \
if (s == CRIT || s == WARN) { \
if ((s) == CRIT || (s) == WARN) { \
BENCHDNN_PRINT(0, \
"Error: Function '%s' at (%s:%d) returned '%s'\n", \
__FUNCTION__, __FILE__, __LINE__, \
status2str(status__)); \
fflush(0); \
if (s == CRIT) exit(2); \
if ((s) == CRIT) exit(2); \
} \
return FAIL; \
} \
} while (0)

#define DNN_SAFE_V(f) \
do { \
dnnl_status_t status__ = f; \
dnnl_status_t status__ = (f); \
if (status__ != dnnl_success) { \
BENCHDNN_PRINT(0, \
"Error: Function '%s' at (%s:%d) returned '%s'\n", \
Expand All @@ -72,7 +72,7 @@
// Unlike `DNN_SAFE` this one returns `dnnl_status_t`, not `OK/FAIL`.
#define DNN_SAFE_STATUS(f) \
do { \
dnnl_status_t status__ = f; \
dnnl_status_t status__ = (f); \
if (status__ != dnnl_success) { return status__; } \
} while (0)

Expand Down Expand Up @@ -499,6 +499,7 @@ int init_prim(benchdnn_dnnl_wrapper_t<dnnl_primitive_t> &user_prim,
// Rationale: make sure that the primitive cache is robust in the case
// where CPU and GPU engines are re-created because this is a commonly
// used scenario in the frameworks.
// NOLINTNEXTLINE(performance-unnecessary-copy-initialization)
engine_t engine(get_test_engine());

// The first primitive creation using a temporary engine.
Expand Down Expand Up @@ -653,9 +654,8 @@ void check_correctness(const prb_t *prb, const std::vector<data_kind_t> &kinds,
}
}

typedef std::function<dnnl_status_t(
const dnnl_stream_t &, const std::vector<dnnl_exec_arg_t> &)>
perf_function_t;
using perf_function_t = std::function<dnnl_status_t(
const dnnl_stream_t &, const std::vector<dnnl_exec_arg_t> &)>;

int execute_and_wait(perf_function_t &exec_func, const dnnl_engine_t &engine,
const args_t &args, res_t *res = nullptr);
Expand Down
2 changes: 1 addition & 1 deletion tests/benchdnn/dnnl_memory.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -608,7 +608,7 @@ benchdnn_dnnl_wrapper_t<dnnl_memory_desc_t> dnn_mem_t::init_md(int ndims,
const bool use_strides = !strides_.empty();
// Ignore tag_ in case strides_ are explicitly provided
if (use_strides) {
std::vector<dnnl_dim_t> strides(strides_);
const std::vector<dnnl_dim_t> &strides(strides_);
DNN_SAFE_V(dnnl_memory_desc_create_with_strides(
&md, ndims, dims, data_type, strides.data()));
return md;
Expand Down
2 changes: 1 addition & 1 deletion tests/benchdnn/dnnl_memory.hpp
Original file line number Diff line number Diff line change
Expand Up @@ -202,7 +202,7 @@ struct dnn_mem_t {
bool active_ = false;

dnnl_engine_kind_t engine_kind_ = dnnl_any_engine;
dnnl_engine_t engine_ = NULL;
dnnl_engine_t engine_ = nullptr;

mutable bool is_mapped_ = false;
mutable std::vector<void *> mapped_ptrs_;
Expand Down
14 changes: 3 additions & 11 deletions tests/benchdnn/gnorm/gnorm.hpp
Original file line number Diff line number Diff line change
Expand Up @@ -109,17 +109,9 @@ struct prb_t : public desc_t {
, impl_filter(impl_filter) {

if (mb) this->mb = mb;
// Broadcast data types if needed
if (dt.size() == 1) {
const auto val = dt[0]; // Need a copy here.
this->dt.assign(2, val);
}

// Broadcast tags if needed
if (tag.size() == 1) {
const auto val = tag[0];
this->tag.assign(2, val);
}

broadcast_vector(this->dt, 2);
broadcast_vector(this->tag, 2);

repro = set_repro_line(); // must be last in ctor to collect right info
}
Expand Down
10 changes: 5 additions & 5 deletions tests/benchdnn/lnorm/lnorm.hpp
Original file line number Diff line number Diff line change
Expand Up @@ -104,19 +104,19 @@ struct prb_t : public prb_dims_t {
, attr(attr)
, ctx_init(ctx_init)
, ctx_exe(ctx_exe)
, impl_filter(impl_filter) {
n = 1;
, impl_filter(impl_filter)
, n(1)
, c(dims[ndims - 1])
, eps(1.f / 16) {
for (int d = 0; d < ndims - 1; d++)
n *= dims[d];
c = dims[ndims - 1];
eps = 1.f / 16;

// Broadcast data types if needed
if (dt.size() == 1) {
const auto val = dt[0]; // Need a copy here.
this->dt.assign(2, val);
}
if (tag.size() == 1) { this->tag.push_back(tag::any); }
if (tag.size() == 1) { this->tag.emplace_back(tag::any); }
repro = set_repro_line(); // must be last in ctor to collect right info
}

Expand Down
1 change: 0 additions & 1 deletion tests/benchdnn/lrn/lrn.hpp
Original file line number Diff line number Diff line change
Expand Up @@ -101,7 +101,6 @@ struct prb_t : public desc_t {
if (mb) this->mb = mb;
repro = set_repro_line(); // must be last in ctor to collect right info
}
~prb_t() {}

dir_t dir;
dnnl_data_type_t dt;
Expand Down
4 changes: 2 additions & 2 deletions tests/benchdnn/matmul/matmul.hpp
Original file line number Diff line number Diff line change
Expand Up @@ -33,7 +33,7 @@

namespace matmul {

typedef std::bitset<DNNL_MAX_NDIMS> dims_mask_t;
using dims_mask_t = std::bitset<DNNL_MAX_NDIMS>;

struct settings_t : public base_settings_t {
settings_t() = default;
Expand Down Expand Up @@ -90,7 +90,7 @@ struct prb_t : public prb_vdims_t {
dnnl_data_type_t bia_dt, int bia_mask,
const std::vector<dims_mask_t> &rt_dims_masks,
#ifdef DNNL_EXPERIMENTAL_SPARSE
sparse_options_t sparse_options,
const sparse_options_t &sparse_options,
#endif
const attr_t &attr, const thr_ctx_t &ctx_init,
const thr_ctx_t &ctx_exe, const impl_filter_t &impl_filter)
Expand Down
10 changes: 5 additions & 5 deletions tests/benchdnn/reorder/cfg.cpp
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
/*******************************************************************************
* Copyright 2018-2024 Intel Corporation
* Copyright 2018-2025 Intel Corporation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
Expand Down Expand Up @@ -31,8 +31,8 @@ const float f16_max_exact = 1 << 11;
const float f4_max_exact = 1 << 3;

#define REG(dt, min, max) \
const dt_conf_s CONCAT2(_conf_, dt) = {CONCAT2(dnnl_, dt), min, max}; \
const dt_conf_t CONCAT2(conf_, dt) = &CONCAT2(_conf_, dt);
const dt_conf_t CONCAT2(_conf_, dt) = {CONCAT2(dnnl_, dt), min, max}; \
const dt_conf_t *CONCAT2(conf_, dt) = &CONCAT2(_conf_, dt);

REG(f32, -int_max_exact, int_max_exact);
REG(f64, -int_max_exact, int_max_exact);
Expand All @@ -53,7 +53,7 @@ REG(u4, 0, 15);

#undef REG

dt_conf_t dt2cfg(dnnl_data_type_t dt) {
const dt_conf_t *dt2cfg(dnnl_data_type_t dt) {
#define CASE(cfg) \
if (CONCAT2(dnnl_, cfg) == dt) return CONCAT2(conf_, cfg)
CASE(f32);
Expand All @@ -74,7 +74,7 @@ dt_conf_t dt2cfg(dnnl_data_type_t dt) {
return conf_f32;
}

dnnl_data_type_t cfg2dt(dt_conf_t cfg) {
dnnl_data_type_t cfg2dt(const dt_conf_t *cfg) {
#define CASE(_cfg) \
if (cfg == CONCAT2(conf_, _cfg)) return CONCAT2(dnnl_, _cfg)
CASE(f32);
Expand Down
9 changes: 4 additions & 5 deletions tests/benchdnn/reorder/reorder.hpp
Original file line number Diff line number Diff line change
Expand Up @@ -40,14 +40,13 @@ flag_t str2flag(const char *str);
std::string flag2str(flag_bit_t flag);
std::ostream &operator<<(std::ostream &s, const std::vector<flag_t> &oflag);

struct dt_conf_s {
struct dt_conf_t {
dnnl_data_type_t dt;
float min;
float max;
};
typedef const dt_conf_s *dt_conf_t;
dt_conf_t dt2cfg(dnnl_data_type_t dt);
dnnl_data_type_t cfg2dt(dt_conf_t cfg);
const dt_conf_t *dt2cfg(dnnl_data_type_t dt);
dnnl_data_type_t cfg2dt(const dt_conf_t *cfg);

enum cross_engine_t { NONE, CPU2GPU, GPU2CPU };
cross_engine_t str2cross_engine(const char *str);
Expand Down Expand Up @@ -133,7 +132,7 @@ struct prb_t : public prb_dims_t {
bool is_reorder_with_compensation(flag_bit_t flag) const;
dims_t get_compensation_dims(flag_bit_t flag) const;
int get_compensation_mask(flag_bit_t flag) const;
dt_conf_t get_conf(data_kind_t kind) const;
const dt_conf_t *get_conf(data_kind_t kind) const;

// Used to construct memory desc when dimensions are runtime since such mds
// can't be used directly from query and memory objects can't be constructed.
Expand Down
2 changes: 1 addition & 1 deletion tests/benchdnn/reorder/reorder_aux.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -126,7 +126,7 @@ int prb_t::get_compensation_mask(flag_bit_t flag) const {
return mask;
}

dt_conf_t prb_t::get_conf(data_kind_t kind) const {
const dt_conf_t *prb_t::get_conf(data_kind_t kind) const {
switch (kind) {
case SRC: return dt2cfg(sdt);
case DST: return dt2cfg(ddt);
Expand Down
6 changes: 1 addition & 5 deletions tests/benchdnn/rnn/rnn.hpp
Original file line number Diff line number Diff line change
Expand Up @@ -333,11 +333,7 @@ struct prb_t : public desc_t {
if (mb) this->mb = mb;
count_ops();

// Broadcast data types if needed
if (tag.size() == 1) {
const auto val = tag[0]; // Need a copy here.
this->tag.assign(3, val);
}
broadcast_vector(this->tag, 3);

wei_scales = nullptr;
wei_proj_scales = nullptr;
Expand Down
Loading