18 | 18 | #include <set>
19 | 19 | #include <vector>
20 | 20 |
21 |    | -#include "oneapi/dnnl/dnnl_graph.h"
22 |    | -
23 | 21 | #include "cpu/platform.hpp"
24 |    | -#ifdef DNNL_WITH_SYCL
25 |    | -#include "dnnl_sycl.hpp"
26 |    | -#endif
27 |    | -
28 |    | -#if DNNL_GPU_RUNTIME == DNNL_RUNTIME_OCL
29 |    | -#include "oneapi/dnnl/dnnl_graph_ocl.hpp"
30 |    | -#endif
31 |    | -
32 | 22 | #include "utils.hpp"
33 | 23 | #include "utils/timer.hpp"
34 | 24 |
@@ -343,17 +333,6 @@ sycl::queue &get_queue() {
343 | 333 | }
344 | 334 | #endif // DNNL_WITH_SYCL
345 | 335 |
346 |     | -bool is_sycl_engine() {
347 |     | -#if DNNL_CPU_RUNTIME == DNNL_RUNTIME_SYCL
348 |     | -    if (is_cpu()) return true;
349 |     | -#endif
350 |     | -
351 |     | -#if DNNL_GPU_RUNTIME == DNNL_RUNTIME_SYCL
352 |     | -    if (!is_cpu()) return true;
353 |     | -#endif
354 |     | -    return false;
355 |     | -}
356 |     | -
357 | 336 | dnnl::graph::op::kind opstr2kind(const std::string &kind) {
358 | 337 |     const std::unordered_map<std::string, dnnl::graph::op::kind> op_map = {
359 | 338 |             {"Abs", dnnl::graph::op::kind::Abs},
@@ -672,46 +651,6 @@ dnnl_driver_t opkind2driver(const dnnl::graph::op::kind &kind) {
672 | 651 |     return dnnl_driver_t::others;
673 | 652 | }
674 | 653 |
675 |     | -bool is_nxc_lt_arg(const std::string &kind, const int exec_arg) {
676 |     | -    // Mapping from the op kind to a set that indicates which input arg needs
677 |     | -    // reorder
678 |     | -    static const std::unordered_map<std::string, std::unordered_set<int>>
679 |     | -            input_arg_for_reorder = {
680 |     | -                    {"AvgPool", {DNNL_ARG_SRC}},
681 |     | -                    {"AvgPoolBackward", {DNNL_ARG_DIFF_DST}},
682 |     | -                    {"BatchNormInference", {DNNL_ARG_SRC}},
683 |     | -                    {"BatchNormForwardTraining", {DNNL_ARG_SRC}},
684 |     | -                    {"BiasAddBackward", {DNNL_ARG_SRC}},
685 |     | -                    {"Interpolate", {DNNL_ARG_SRC}},
686 |     | -                    {"MaxPool", {DNNL_ARG_SRC}},
687 |     | -                    {"Convolution", {DNNL_ARG_SRC}},
688 |     | -                    {"ConvolutionBackwardData", {DNNL_ARG_DIFF_DST}},
689 |     | -                    {"ConvTranspose", {DNNL_ARG_SRC}},
690 |     | -                    {"ConvTransposeBackwardData", {DNNL_ARG_DIFF_DST}},
691 |     | -                    {"BatchNormTrainingBackward",
692 |     | -                            {DNNL_ARG_SRC, DNNL_ARG_DIFF_DST}},
693 |     | -                    {"BiasAdd", {DNNL_ARG_SRC_0, DNNL_ARG_SRC_1}},
694 |     | -                    {"InterpolateBackward", {DNNL_ARG_DIFF_DST}},
695 |     | -                    {"MaxPoolBackward", {DNNL_ARG_SRC, DNNL_ARG_DIFF_DST}},
696 |     | -                    {"ConvolutionBackwardWeights",
697 |     | -                            {DNNL_ARG_SRC, DNNL_ARG_DIFF_DST}},
698 |     | -                    {"ConvTransposeBackwardWeights",
699 |     | -                            {DNNL_ARG_SRC, DNNL_ARG_DIFF_DST}},
700 |     | -                    {"PReLU", {DNNL_ARG_SRC, DNNL_ARG_WEIGHTS}},
701 |     | -                    {"PReLUBackward",
702 |     | -                            {DNNL_ARG_SRC, DNNL_ARG_WEIGHTS,
703 |     | -                                    DNNL_ARG_DIFF_DST}},
704 |     | -            };
705 |     | -
706 |     | -    const auto iter = input_arg_for_reorder.find(kind);
707 |     | -    if (iter != input_arg_for_reorder.end()) {
708 |     | -        const auto &args_to_reorder = iter->second;
709 |     | -        return args_to_reorder.find(exec_arg) != args_to_reorder.end();
710 |     | -    } else {
711 |     | -        return false;
712 |     | -    }
713 |     | -}
714 |     | -
715 | 654 | // when length is 3, return "abc", when length is 5, return "abcde"
716 | 655 | std::string get_default_tag(size_t length) {
717 | 656 |     std::string mtag;
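
For reference, the removed is_nxc_lt_arg() above is a plain table lookup: it maps an op kind to the set of execution arguments whose logical tensor comes in NXC (channels-last) layout and therefore needs a reorder. A minimal standalone sketch of the same lookup pattern follows; the function name needs_nxc_reorder and the trimmed-down table are illustrative only and are not part of the patch.

// Sketch of the lookup pattern used by the removed is_nxc_lt_arg(); the name
// needs_nxc_reorder and the reduced table are illustrative, not from the patch.
#include <iostream>
#include <string>
#include <unordered_map>
#include <unordered_set>

#include "oneapi/dnnl/dnnl_types.h" // DNNL_ARG_SRC, DNNL_ARG_DIFF_DST, ...

// True when `exec_arg` of op `kind` is given in NXC layout and needs a reorder.
bool needs_nxc_reorder(const std::string &kind, int exec_arg) {
    static const std::unordered_map<std::string, std::unordered_set<int>>
            table = {
                    {"Convolution", {DNNL_ARG_SRC}},
                    {"ConvolutionBackwardData", {DNNL_ARG_DIFF_DST}},
                    {"MaxPoolBackward", {DNNL_ARG_SRC, DNNL_ARG_DIFF_DST}},
            };
    const auto it = table.find(kind);
    return it != table.end() && it->second.count(exec_arg) > 0;
}

int main() {
    std::cout << needs_nxc_reorder("Convolution", DNNL_ARG_SRC) << '\n'; // 1
    std::cout << needs_nxc_reorder("Convolution", DNNL_ARG_WEIGHTS) << '\n'; // 0
    return 0;
}
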
@@ -846,29 +785,6 @@ void permute_md(dnn_mem_t &mem, std::vector<int64_t> permutation) {
846 | 785 |     assert(st == dnnl_success);
847 | 786 |     mem.md_ = clone_md(permuted_md.get());
848 | 787 | }
849 |     | -void reshape_md(dnn_mem_t &mem, const dnnl::memory::dims &reshaped_dims,
850 |     | -        const dnnl::memory::dims &reshaped_strides) {
851 |     | -
852 |     | -    const auto data_type = static_cast<dnnl::memory::data_type>(mem.dt());
853 |     | -    dnnl::memory::desc md(reshaped_dims, data_type, reshaped_strides);
854 |     | -    // Dirty hack to replace md with another one.
855 |     | -    // TODO: replace it with a better solution.
856 |     | -    auto st = dnnl_memory_desc_destroy(mem.md_);
857 |     | -    (void)st;
858 |     | -    assert(st == dnnl_success);
859 |     | -    mem.md_ = clone_md(md.get());
860 |     | -}
861 |     | -
862 |     | -void reshape_md(dnn_mem_t &mem, const dnnl::memory::dims &reshaped_dims) {
863 |     | -    dnnl::memory::desc md(clone_md(mem.md_));
864 |     | -    dnnl::memory::desc reshaped_md = md.reshape(reshaped_dims);
865 |     | -    // Dirty hack to replace md with another one.
866 |     | -    // TODO: replace it with a better solution.
867 |     | -    auto st = dnnl_memory_desc_destroy(mem.md_);
868 |     | -    (void)st;
869 |     | -    assert(st == dnnl_success);
870 |     | -    mem.md_ = clone_md(reshaped_md.get());
871 |     | -}
872 | 788 |
873 | 789 | int get_prim_arg_name_from_graph_op_output_offset(
874 | 790 |         dnnl::graph::op::kind op_kind, size_t output_offset) {
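
Both removed reshape_md() overloads built a new dnnl::memory::desc (from explicit dims and strides, or via dnnl::memory::desc::reshape()) and then swapped it into the dnn_mem_t by destroying and re-cloning the underlying C descriptor. Below is a small standalone sketch of just the descriptor-reshaping step, without benchdnn's dnn_mem_t; the dims and variable names are illustrative, not from the patch.

// Sketch of reshaping a oneDNN memory descriptor with the C++ API, the same
// call the removed reshape_md(dnn_mem_t &, const dims &) overload relied on.
#include <iostream>

#include "oneapi/dnnl/dnnl.hpp"

int main() {
    // A dense 2x3x4 f32 descriptor in plain "abc" layout (illustrative dims).
    dnnl::memory::desc md({2, 3, 4}, dnnl::memory::data_type::f32,
            dnnl::memory::format_tag::abc);
    // reshape() returns a new descriptor with the same total number of
    // elements; it throws dnnl::error if the layout cannot be reshaped.
    dnnl::memory::desc reshaped = md.reshape({6, 4});
    std::cout << "size in bytes after reshape: " << reshaped.get_size() << '\n';
    return 0;
}
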