在并行区域中使用 fortran 保存属性

Ima*_*ary 3 parallel-processing fortran openmp

我的 Fortran 代码中有一个并行区域,它使用 OpenMP 并调用使用作用域内带有 save 属性的变量的子例程。这导致了一个问题,因为它们在线程之间共享,所以我的问题是是否有一种方法可以使这些变量成为私有变量,同时仍然在子例程调用之间保存,或者我是否需要输入和输出它们?

谢谢

Ian*_*ush 5

您可以使用 threadprivate 来完成此操作——下面的代码显示了几种略有不同的方法。但请注意

a) 仅当并行区域使用相同数量的线程时,才保证在并行区域之间保留这些值

b) 请仔细考虑是否真的需要 save——save 属性与并行编程很少能很好地共存。它确实有一两个很好的用途(例如,请参阅"带有子例程和函数的 Fortran OpenMP"),但如果有另一种方法可以完成您想做的事情(例如,通过参数列表传递),从长远来看这几乎肯定会让您少受很多苦

(由于某种原因,使用规范的列表排版会破坏下面代码的格式——如果有人知道如何解决这个问题,请告诉我,谢谢!)

ian@eris:~/work/stack$ cat threadprivate.f90
!> Demonstration: saved local variables made per-thread with the
!> OpenMP threadprivate directive, in two configurations:
!>   1) the parallel region is opened inside the subroutine,
!>   2) the subroutine is called from an enclosing parallel region.
program test

  implicit none

  ! Serial calls; each call opens its own parallel region internally.
  call do_something
  call do_something
  call do_something

  write( *, * )

  ! Here the caller owns the parallel region, so every thread makes
  ! the three calls itself.
  !$omp parallel
  call do_something_else
  call do_something_else
  call do_something_else
  !$omp end parallel

contains

  !> Saved locals where the parallel region lives inside the routine.
  !> `calls` stays a plain (shared) save variable because it is only
  !> updated in serial context; the per-thread state is threadprivate.
  subroutine do_something

    use omp_lib

    implicit none

    ! Invocation counter, shared by all threads (serial update only).
    integer, save :: calls = 0

    ! Per-thread running counter, retained between parallel regions.
    integer, save :: per_thread_count

    ! Per-thread flag: has this thread initialised its counter yet?
    logical, save :: first_time = .true.
    !$omp threadprivate( first_time, per_thread_count )

    calls = calls + 1

    ! Shouldn't scope threadprivate variables - they are already private
    !$omp parallel default( none ) shared( calls )
    if( .not. first_time ) then
       per_thread_count = per_thread_count + 1
    else
       first_time = .false.
       per_thread_count = omp_get_thread_num()
    end if
    write( *, '( 3( a, 1x, i2, 1x ) )' ) 'do something call ', calls, &
         'thread = ', omp_get_thread_num(), 'stuff = ', per_thread_count
    !$omp end parallel

  end subroutine do_something

  !> Saved locals where the routine is invoked from inside a parallel
  !> region: here even the call counter must be threadprivate, since
  !> every thread increments its own copy.
  subroutine do_something_else

    use omp_lib

    implicit none

    ! Per-thread invocation counter.
    integer, save :: calls = 0

    ! Per-thread running counter, retained between calls.
    integer, save :: per_thread_count

    ! Per-thread flag: has this thread initialised its counter yet?
    logical, save :: first_time = .true.
    !$omp threadprivate( first_time, per_thread_count, calls )

    calls = calls + 1

    if( .not. first_time ) then
       per_thread_count = per_thread_count + 1
    else
       first_time = .false.
       per_thread_count = omp_get_thread_num()
    end if
    write( *, '( 3( a, 1x, i2, 1x ) )' ) 'do something else call ', calls, &
         'thread = ', omp_get_thread_num(), 'stuff = ', per_thread_count

  end subroutine do_something_else

end program test
ian@eris:~/work/stack$ gfortran -std=f2008 -Wall -Wextra -O -g -fcheck=all -pedantic -fopenmp threadprivate.f90 
ian@eris:~/work/stack$ export OMP_NUM_THREADS=2
ian@eris:~/work/stack$ ./a.out
do something call   1 thread =   0 stuff =   0
do something call   1 thread =   1 stuff =   1
do something call   2 thread =   1 stuff =   2
do something call   2 thread =   0 stuff =   1
do something call   3 thread =   1 stuff =   3
do something call   3 thread =   0 stuff =   2

do something else call   1 thread =   1 stuff =   1
do something else call   2 thread =   1 stuff =   2
do something else call   3 thread =   1 stuff =   3
do something else call   1 thread =   0 stuff =   0
do something else call   2 thread =   0 stuff =   1
do something else call   3 thread =   0 stuff =   2
ian@eris:~/work/stack$ export OMP_NUM_THREADS=4
ian@eris:~/work/stack$ ./a.out
do something call   1 thread =   3 stuff =   3
do something call   1 thread =   2 stuff =   2
do something call   1 thread =   1 stuff =   1
do something call   1 thread =   0 stuff =   0
do something call   2 thread =   1 stuff =   2
do something call   2 thread =   3 stuff =   4
do something call   2 thread =   0 stuff =   1
do something call   2 thread =   2 stuff =   3
do something call   3 thread =   3 stuff =   5
do something call   3 thread =   1 stuff =   3
do something call   3 thread =   0 stuff =   2
do something call   3 thread =   2 stuff =   4

do something else call   1 thread =   3 stuff =   3
do something else call   2 thread =   3 stuff =   4
do something else call   3 thread =   3 stuff =   5
do something else call   1 thread =   1 stuff =   1
do something else call   2 thread =   1 stuff =   2
do something else call   3 thread =   1 stuff =   3
do something else call   1 thread =   0 stuff =   0
do something else call   2 thread =   0 stuff =   1
do something else call   3 thread =   0 stuff =   2
do something else call   1 thread =   2 stuff =   2
do something else call   2 thread =   2 stuff =   3
do something else call   3 thread =   2 stuff =   4
ian@eris:~/work/stack$ 
Run Code Online (Sandbox Code Playgroud)