Skip to content

Commit 98e056a

Browse files
committed
Revert "use static variable to do cache instead of thread local in thread frequent switching case (PaddlePaddle#18428)"
This reverts commit ce38bb5. test=develop
1 parent 52c1431 commit 98e056a

2 files changed: 6 additions & 79 deletions

File tree

paddle/fluid/framework/transfer_scope_cache.cc

Lines changed: 0 additions & 49 deletions
Original file line numberDiff line numberDiff line change
@@ -17,61 +17,12 @@
1717
namespace paddle {
1818
namespace framework {
1919

20-
#ifdef PADDLE_WITH_MKLDNN
21-
using transfer_data_cache_map = std::unordered_map<size_t, Scope*>;
22-
using transfer_scope_cache_map = std::unordered_set<Scope*>;
23-
static std::unordered_map<size_t, transfer_data_cache_map*>
24-
static_transfer_data_caches;
25-
static std::unordered_map<size_t, transfer_scope_cache_map*>
26-
static_transfer_scope_caches;
27-
#endif
28-
2920
std::unordered_map<size_t, Scope*>& global_transfer_data_cache() {
30-
#ifdef PADDLE_WITH_MKLDNN
31-
size_t sid = platform::get_cur_mkldnn_session_id();
32-
33-
// if there is specific mkldnn tid setting from user.
34-
if (sid != platform::kMKLDNNSessionID_Default) {
35-
sid = std::hash<std::thread::id>()(std::this_thread::get_id());
36-
37-
static std::mutex acquire_barrier;
38-
std::lock_guard<std::mutex> block_until_finish_this_job(acquire_barrier);
39-
40-
auto map_it = static_transfer_data_caches.find(sid);
41-
if (map_it == static_transfer_data_caches.end()) {
42-
auto* x = new transfer_data_cache_map;
43-
static_transfer_data_caches[sid] = x;
44-
return *x;
45-
} else {
46-
return *static_transfer_data_caches[sid];
47-
}
48-
}
49-
#endif
5021
thread_local auto* x = new std::unordered_map<size_t, Scope*>;
5122
return *x;
5223
}
5324

5425
std::unordered_set<Scope*>& global_transfer_scope_cache() {
55-
#ifdef PADDLE_WITH_MKLDNN
56-
size_t sid = platform::get_cur_mkldnn_session_id();
57-
58-
// if there is specific mkldnn session id setting from user.
59-
if (sid != platform::kMKLDNNSessionID_Default) {
60-
sid = std::hash<std::thread::id>()(std::this_thread::get_id());
61-
62-
static std::mutex acquire_barrier;
63-
std::lock_guard<std::mutex> block_until_finish_this_job(acquire_barrier);
64-
65-
auto map_it = static_transfer_scope_caches.find(sid);
66-
if (map_it == static_transfer_scope_caches.end()) {
67-
auto* x = new transfer_scope_cache_map;
68-
static_transfer_scope_caches[sid] = x;
69-
return *x;
70-
} else {
71-
return *static_transfer_scope_caches[sid];
72-
}
73-
}
74-
#endif
7526
thread_local auto* x = new std::unordered_set<Scope*>;
7627
return *x;
7728
}

paddle/fluid/inference/tests/api/analyzer_bert_tester.cc

Lines changed: 6 additions & 30 deletions
Original file line numberDiff line numberDiff line change
@@ -230,7 +230,7 @@ TEST(Analyzer_bert, compare_determine) {
230230
inputs);
231231
}
232232

233-
void verify_transfer_scope_cache(bool is_static = false) {
233+
TEST(Analyzer_bert, transfer_scope_cache) {
234234
AnalysisConfig config;
235235
SetConfig(&config);
236236

@@ -251,11 +251,6 @@ void verify_transfer_scope_cache(bool is_static = false) {
251251
threads.emplace_back([&, i]() {
252252
std::getline(fin, line);
253253
ParseLine(line, &input);
254-
#ifdef PADDLE_WITH_MKLDNN
255-
// Use static method to handle transfer_scope_cache()
256-
// TODO(intel) explicit session id setting will be deprecated.
257-
if (is_static) platform::set_cur_mkldnn_session_id(1);
258-
#endif
259254
predictor->Run(input, &output, FLAGS_batch_size);
260255
global_transfer_scope_cache.insert(
261256
&paddle::framework::global_transfer_scope_cache());
@@ -266,31 +261,12 @@ void verify_transfer_scope_cache(bool is_static = false) {
266261
threads.clear();
267262
std::vector<PaddleTensor>().swap(input);
268263
}
269-
#ifdef PADDLE_WITH_MKLDNN
270-
if (is_static) {
271-
// Use static method to do transfer_scope_cache() instead of thread_local
272-
// so paddle::framework::global_transfer_data_cache() should be 1
273-
PADDLE_ENFORCE(global_transfer_scope_cache.size(), 1);
274-
PADDLE_ENFORCE(global_transfer_data_cache.size(), 1);
275-
} else {
276-
#endif
277-
// Since paddle::framework::global_transfer_scope_cache() and
278-
// paddle::framework::global_transfer_data_cache() are thread_local,
279-
// their pointer should be different among different thread id.
280-
PADDLE_ENFORCE(global_transfer_scope_cache.size(), threads_num);
281-
PADDLE_ENFORCE(global_transfer_data_cache.size(), threads_num);
282-
#ifdef PADDLE_WITH_MKLDNN
283-
}
284-
#endif
264+
// Since paddle::framework::global_transfer_scope_cache() and
265+
// paddle::framework::global_transfer_data_cache() are thread_local,
266+
// their pointer should be different among different thread id.
267+
PADDLE_ENFORCE(global_transfer_scope_cache.size(), threads_num);
268+
PADDLE_ENFORCE(global_transfer_data_cache.size(), threads_num);
285269
}
286270

287-
TEST(Analyzer_bert, threadlocal_transfer_scope_cache) {
288-
verify_transfer_scope_cache();
289-
}
290-
#ifdef PADDLE_WITH_MKLDNN
291-
TEST(Analyzer_bert, static_transfer_scope_cache) {
292-
verify_transfer_scope_cache(true);
293-
}
294-
#endif
295271
} // namespace inference
296272
} // namespace paddle

0 commit comments

Comments (0)