Merge pull request #1325 from Sajohn-CH/state_read_write_sets
state.read_and_write_sets() ignores reads directly after writes
alexnick83 authored Jul 21, 2023
2 parents aba8de8 + f5f4454 commit 182ef86
Showing 4 changed files with 45 additions and 2 deletions.
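In practical terms, a data container that is written and then immediately read back within the same state is now reported only in the write set. The snippet below is an illustrative sketch of that behavior using the standard SDFG API (array and tasklet names such as 'inp', 'tmp', 'out' are invented, not taken from this commit); the new test added at the bottom of the diff exercises the same pattern.

import dace

# Illustrative SDFG: 'tmp' is written by one tasklet and immediately read by
# another within the same state, so it should not appear in the read set.
sdfg = dace.SDFG('rw_example')
sdfg.add_array('inp', [1], dace.float64)
sdfg.add_array('tmp', [1], dace.float64)
sdfg.add_array('out', [1], dace.float64)
state = sdfg.add_state()

t1 = state.add_tasklet('produce', {'a'}, {'b'}, 'b = a + 1')
t2 = state.add_tasklet('consume', {'b'}, {'c'}, 'c = b * 2')
tmp_node = state.add_access('tmp')
state.add_edge(state.add_read('inp'), None, t1, 'a', dace.Memlet('inp[0]'))
state.add_edge(t1, 'b', tmp_node, None, dace.Memlet('tmp[0]'))
state.add_edge(tmp_node, None, t2, 'b', dace.Memlet('tmp[0]'))
state.add_edge(t2, 'c', state.add_write('out'), None, dace.Memlet('out[0]'))

reads, writes = state.read_and_write_sets()
assert 'inp' in reads and 'out' in writes
# With this change, the read of 'tmp' is covered by the preceding write,
# so 'tmp' is reported only as written.
assert 'tmp' in writes and 'tmp' not in reads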
1 change: 1 addition & 0 deletions AUTHORS
@@ -36,5 +36,6 @@ Reid Wahl
 Yihang Luo
 Alexandru Calotoiu
 Phillip Lane
+Samuel Martin
 
 and other contributors listed in https://github.com/spcl/dace/graphs/contributors
13 changes: 11 additions & 2 deletions dace/sdfg/state.py
@@ -502,13 +502,22 @@ def _read_and_write_sets(self) -> Tuple[Dict[AnyStr, List[Subset]], Dict[AnyStr,
             # is read is not counted in the read set
             for n in utils.dfs_topological_sort(sg, sources=sg.source_nodes()):
                 if isinstance(n, nd.AccessNode):
-                    for e in sg.in_edges(n):
+                    in_edges = sg.in_edges(n)
+                    out_edges = sg.out_edges(n)
+                    # Filter out memlets which go out but the same data is written to the AccessNode by another memlet
+                    for out_edge in list(out_edges):
+                        for in_edge in list(in_edges):
+                            if (in_edge.data.data == out_edge.data.data and
+                                    in_edge.data.dst_subset.covers(out_edge.data.src_subset)):
+                                out_edges.remove(out_edge)
+
+                    for e in in_edges:
                         # skip empty memlets
                         if e.data.is_empty():
                             continue
                         # Store all subsets that have been written
                         ws[n.data].append(e.data.subset)
-                    for e in sg.out_edges(n):
+                    for e in out_edges:
                         # skip empty memlets
                         if e.data.is_empty():
                             continue
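The filter above hinges on Subset.covers(): an outgoing read is dropped only when an incoming write to the same data container fully covers its subset. A minimal sketch of that predicate, with ranges chosen purely for illustration:

from dace import subsets

whole = subsets.Range.from_string('0:10')
part = subsets.Range.from_string('2:5')
point = subsets.Range.from_string('2')

assert whole.covers(part)      # a write to 0:10 covers a read of 2:5
assert whole.covers(point)     # ... and a read of the single element 2
assert not part.covers(whole)  # a partial write does not cover a full read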
9 changes: 9 additions & 0 deletions dace/transformation/interstate/sdfg_nesting.py
@@ -925,7 +925,12 @@ def _candidates(
                     continue
 
                 # For now we only detect one element
+                read_set, write_set = nstate.read_and_write_sets()
                 for e in nstate.in_edges(dnode):
+                    if e.data.data not in write_set:
+                        # Skip data which is not in the read and write set of the state -> there also won't be a
+                        # connector
+                        continue
                     # If more than one unique element detected, remove from
                     # candidates
                     if e.data.data in out_candidates:
@@ -941,6 +946,10 @@
                         continue
                     out_candidates[e.data.data] = (e.data, nstate, set(range(len(e.data.subset))))
                 for e in nstate.out_edges(dnode):
+                    if e.data.data not in read_set:
+                        # Skip data which is not in the read and write set of the state -> there also won't be a
+                        # connector
+                        continue
                     # If more than one unique element detected, remove from
                     # candidates
                     if e.data.data in in_candidates:
24 changes: 24 additions & 0 deletions tests/sdfg/state_test.py
@@ -0,0 +1,24 @@
+# Copyright 2019-2023 ETH Zurich and the DaCe authors. All rights reserved.
+import dace
+
+
+def test_read_write_set():
+    sdfg = dace.SDFG('graph')
+    A = sdfg.add_array('A', [10], dace.float64)
+    B = sdfg.add_array('B', [10], dace.float64)
+    C = sdfg.add_array('C', [10], dace.float64)
+    state = sdfg.add_state('state')
+    task1 = state.add_tasklet('work1', {'A'}, {'B'}, 'B = A + 1')
+    task2 = state.add_tasklet('work2', {'B'}, {'C'}, 'C = B + 1')
+    read_a = state.add_access('A')
+    rw_b = state.add_access('B')
+    write_c = state.add_access('C')
+    state.add_memlet_path(read_a, task1, dst_conn='A', memlet=dace.Memlet('A[2]'))
+    state.add_memlet_path(task1, rw_b, src_conn='B', memlet=dace.Memlet('B[2]'))
+    state.add_memlet_path(rw_b, task2, dst_conn='B', memlet=dace.Memlet('B[2]'))
+    state.add_memlet_path(task2, write_c, src_conn='C', memlet=dace.Memlet('C[2]'))
+
+    assert 'B' not in state.read_and_write_sets()[0]
+
+if __name__ == '__main__':
+    test_read_write_set()
