15.9.0 compiler: pgf901 TERMINATED by signal 11

I’m getting a very strange error with the following compile line:

ftn -module …/module -I…/module -Mallocatable=03 -c junk.f90

The compiler crashes with:

14:08:10 wsawyer@daint03:/scratch/daint/wsawyer/DAINT/PGI/refactoring_2016_01/build/x86_64-unknown-linux-gnu/src $ ftn -module …/module -I…/module -Mallocatable=03 -c junk.f90
pgf90-Fatal-/opt/pgi/15.9.0/linux86-64/15.9/bin/pgf901 TERMINATED by signal 11
Arguments to /opt/pgi/15.9.0/linux86-64/15.9/bin/pgf901
/opt/pgi/15.9.0/linux86-64/15.9/bin/pgf901 junk.f90 -opt 1 -terse 1 -inform warn -nohpf -nostatic -x 19 0x400000 -quad -x 59 4 -x 15 2 -x 49 0x400004 -x 51 0x20 -x 57 0x4c -x 58 0x10000 -x 124 0x1000 -tp sandybridge -x 57 0xfb0000 -x 58 0x78031040 -x 47 0x08 -x 48 4608 -x 49 0x100 -x 120 0x200 -stdinc /opt/pgi/15.9.0/linux86-64/15.9/include-gcc43:/opt/pgi/15.9.0/linux86-64/15.9/include:/usr/local/include:/usr/lib64/gcc/x86_64-suse-linux/4.3/include:/usr/lib64/gcc/x86_64-suse-linux/4.3/include-fixed:/usr/lib64/gcc/x86_64-suse-linux/4.3/…/…/…/…/x86_64-suse-linux/include:/usr/include -cmdline ‘+pgf90 junk.f90 -tp=sandybridge -Bstatic -D__CRAYXC -D__CRAY_SANDYBRIDGE -D__CRAYXT_COMPUTE_LINUX_TARGET -module …/module -I…/module -Mallocatable=03 -c -I/opt/cray/hdf5/1.8.14/PGI/15.3/include -I/opt/cray/netcdf/4.3.3.1/PGI/15.3/include -I/opt/cray/mpt/7.2.2/gni/mpich2-pgi/15.3/include -I/opt/cray/rca/1.0.0-2.0502.60530.1.62.ari/include -I/opt/cray/pmi/5.0.7-1.0000.10678.155.25.ari/include -I/opt/cray/xpmem/0.1-2.0502.64982.5.3.ari/include -I/opt/cray/gni-headers/4.0-1.0502.10859.7.8.ari/include -I/opt/cray/ugni/6.0-1.0502.10863.8.29.ari/include -I/opt/cray/udreg/2.3.2-1.0502.10518.2.17.ari/include -I/opt/cray/alps/5.2.4-2.0502.9822.32.1.ari/include -I/opt/cray/wlm_detect/1.0-1.0502.64649.2.1.ari/include -I/opt/cray/krca/1.0.0-2.0502.63139.4.31.ari/include -I/opt/cray-hss-devel/7.2.0/include’ -def unix -def __unix -def unix -def linux -def __linux -def linux -def __NO_MATH_INLINES -def LP64 -def __x86_64 -def x86_64 -def LONG_MAX=9223372036854775807L -def ‘SIZE_TYPE=unsigned long int’ -def ‘PTRDIFF_TYPE=long int’ -def __THROW= -def extension= -def amd_64__amd64 -def __k8 -def k8 -def SSE -def MMX -def SSE2 -def SSE3 -idir …/module -idir /opt/cray/hdf5/1.8.14/PGI/15.3/include -idir /opt/cray/netcdf/4.3.3.1/PGI/15.3/include -idir /opt/cray/mpt/7.2.2/gni/mpich2-pgi/15.3/include -idir /opt/cray/rca/1.0.0-2.0502.60530.1.62.ari/include -idir /opt/cray/pmi/5.0.7-1.0000.10678.155.25.ari/include -idir /opt/cray/xpmem/0.1-2.0502.64982.5.3.ari/include -idir /opt/cray/gni-headers/4.0-1.0502.10859.7.8.ari/include -idir /opt/cray/ugni/6.0-1.0502.10863.8.29.ari/include -idir /opt/cray/udreg/2.3.2-1.0502.10518.2.17.ari/include -idir /opt/cray/alps/5.2.4-2.0502.9822.32.1.ari/include -idir /opt/cray/wlm_detect/1.0-1.0502.64649.2.1.ari/include -idir /opt/cray/krca/1.0.0-2.0502.63139.4.31.ari/include -idir /opt/cray-hss-devel/7.2.0/include -def __CRAYXC -def __CRAY_SANDYBRIDGE -def __CRAYXT_COMPUTE_LINUX_TARGET -freeform -vect 48 -x 54 1 -x 70 0x40000000 -y 163 0xc0000000 -x 189 0x10 -moddir …/module -modexport /tmp/pgf90JUhhPBjZcoNt.cmod -modindex /tmp/pgf907UhhX40PV2Lj.cmdx -output /tmp/pgf90ZUhhz8T2v2sQ.ilm

The slimmed-down code (from a very complicated ICON module) is:

MODULE junk

! PGI 15.9.0 BUG: THIS USE STATEMENT HELPS PROVOKE THE BUG
  USE mo_interface_icoham_echam, ONLY: interface_icoham_echam

  IMPLICIT NONE
  PRIVATE

  PUBLIC :: process_grid


  INTEGER, PARAMETER :: n_dom=3
  INTEGER, PARAMETER :: wp = SELECTED_REAL_KIND(12,307)
  !>
  !! Derived data type for building pointer arrays
  !!
  TYPE t_ptr3d
    REAL(wp),POINTER :: p(:,:,:)  ! pointer to 3D (spatial) array
  END TYPE t_ptr3d

  !>
  !!--------------------------------------------------------------------------
  !! Derived data type for prognostic variables. The same type is used for
  !! tendencies

  TYPE t_hydro_atm_prog

    REAL(wp), POINTER ::  &
    & pres_sfc(:,  :),  &!< surface pressure [Pa]        (nproma,     nblks_c)
    &       vn(:,:,:),  &!< normal wind [m/s]            (nproma,nlev,nblks_e)
    &     temp(:,:,:),  &!< temperature [K]              (nproma,nlev,nblks_c)
    &    theta(:,:,:),  &!< potential temperature [K]    (nproma,nlev,nblks_c)
    &   tracer(:,:,:,:)  !< tracer concentration [kg/kg] (nproma,nlev,nblks_c,ntracer)

    TYPE(t_ptr3d),ALLOCATABLE :: tracer_ptr(:)  !< pointer array: one pointer for each tracer

  END TYPE t_hydro_atm_prog

  !>
  !!--------------------------------------------------------------------------
  !! Derived data type for diagnostic variables

  TYPE t_hydro_atm_diag

    REAL(wp), POINTER ::  &
    &          qx(:,:,:),   &!< total concentration of hydrometeors (nproma,nlev,nblks_c)
    &           u(:,:,:),   &!< zonal wind (nproma,nlev,nblks_c)
    &           v(:,:,:),   &!< meridional wind (nproma,nlev,nblks_c)
    &          vt(:,:,:),   &!< tangential wind (nproma,nlev,nblks_e)
    &    rel_vort(:,:,:),   &!< relative vorticity at dual point (nproma,nlev,nblks_v)
    &  rel_vort_e(:,:,:),   &!< needed for hexagonal model
    &  rel_vort_c(:,:,:),   &!< relative vorticity at cell centers, diagnosed for physics 
    &         div(:,:,:),   &!< wind divergence (only output) (nproma,nlev,nblks_c)
    &       e_kin(:,:,:),   &!< specific kinetic energy (nproma,nlev,nblks_c)
    &      geo_ic(:,:,:),   &!< half level geopotential (nproma,nlevp1,nblks_c)
    &      geo_mc(:,:,:),   &!< full level geopotential (nproma,nlev,nblks_c)
    &    wpres_mc(:,:,:),   &!< vert. vel. in pres. coord. at full levels (nproma,nlev,nblks_c)
    &    wpres_ic(:,:,:),   &!< vert. vel. in pres. coord. at half levels (nproma,nlevp1,nblks_c)
    &        weta(:,:,:),   &!< vert. vel. in $\eta$ coord. times dpdeta (nproma,nlevp1,nblks_c)
                             !< i.e. mass flux in $\eta$ coord. divided by delta $\eta$
    &     pres_ic(:,:,:),   &!< half level pressure (nproma,nlevp1,nblks_c)
    & pres_ic_new(:,:,:),   &!< ditto, but at timestep n+1
    &     pres_mc(:,:,:),   &!< full level pressure (nproma,nlev,nblks_c)
    &       exner(:,:,:),   &!< exner function (for theta advection; nproma,nlev,nblks_c)
    &   virt_incr(:,:,:),   &!< virtual temperature increment (nproma,nlev,nblks_c)
    &       tempv(:,:,:),   &!< virtual temperature (nproma,nlev,nblks_c)
    &      delp_c(:,:,:),   &!< layer thickness at cell centers (nproma,nlev,nblks_c)
    &  delp_c_new(:,:,:),   &!< ditto, but at timestep n+1
    &     rdelp_c(:,:,:),   &!< reciprocal layer thickness at cell centers (nproma,nlev,nblks_c)
    & rdelp_c_new(:,:,:),   &!< ditto, but at timestep n+1
    &      delp_e(:,:,:),   &!< layer thickness at edges (nproma,nlev,nblks_e)
    &      delp_v(:,:,:),   &!< layer thickness at dual point (nproma,nlev,nblks_v)
    & hfl_tracer(:,:,:,:),  &!< horizontal tracer flux at edges (nproma,nlev,nblks_e,ntracer)
    & vfl_tracer(:,:,:,:),  &!< vertical tracer flux at cells (nproma,nlevp1,nblks_c,ntracer)
    &    rdlnpr_c(:,:,:),   &!< Rd * ln(p(k+.5)/p(k-.5)),shape:(nproma,nlev,nblks_c)
    &   rdalpha_c(:,:,:),   &!< Rd * alpha              ,shape:(nproma,nlev,nblks_c)
    &      lnp_ic(:,:,:),   &!< ln(p),shape:(nproma,nlevp1,nblks_c)
    & mass_flux_e(:,:,:)     !< mass flux at edges (nproma,nlev,nblks_e)

    TYPE(t_ptr3d),ALLOCATABLE :: hfl_tracer_ptr(:)  !< pointer array: one pointer for each tracer
    TYPE(t_ptr3d),ALLOCATABLE :: vfl_tracer_ptr(:)  !< pointer array: one pointer for each tracer

  END TYPE t_hydro_atm_diag

  !>
  !!--------------------------------------------------------------------------
  !! Derived data type for the hydrostatic state vector on a single grid level.
  !! This type is in fact a wrapper which was necessary in revisions up to 1916
  !! because the actual variable used to be defined in the main program and
  !! passed through various subroutine interfaces.

  TYPE t_hydro_atm

    TYPE(t_hydro_atm_prog),ALLOCATABLE :: prog(:)  !< shape: (nTimeLevel)
    TYPE(t_hydro_atm_diag)             :: diag     !< diagnostic variables
    TYPE(t_hydro_atm_prog)             :: tend_dyn !< tendency due to dynamics
    TYPE(t_hydro_atm_prog)             :: tend_phy !< tendency due to physics

    TYPE(t_hydro_atm_prog) :: prog_out  !< for output
    TYPE(t_hydro_atm_diag) :: diag_out  !< for output

  END TYPE t_hydro_atm

CONTAINS

  RECURSIVE SUBROUTINE process_grid( p_hydro_state, jg )
    TYPE(t_hydro_atm),  TARGET,INTENT(INOUT)   :: p_hydro_state(n_dom)
    INTEGER, INTENT(IN)    :: jg           ! current grid level
!!!  -Mallocatable=03  BUG IS HERE
              p_hydro_state(jg)%tend_dyn%tracer = 0._wp   ! Q&D. Not appropriate for refinement
!!!!

  END SUBROUTINE process_grid


END MODULE junk

The TERMINATED error can be avoided by (1) removing the -Mallocatable=03 option, or (2) removing the USE statement:

  USE mo_interface_icoham_echam, ONLY: interface_icoham_echam

in the code. In other words, there is some strange interaction between the two modules, EVEN THOUGH interface_icoham_echam is never actually referenced in the code!
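
For reference, -Mallocatable=03 selects the Fortran 2003 semantics for intrinsic assignment to allocatable arrays, i.e. the left-hand side is automatically (re)allocated to the shape of the right-hand side. A minimal sketch (independent of the ICON code above) of the behaviour the option enables:

PROGRAM alloc03_demo
  IMPLICIT NONE
  REAL, ALLOCATABLE :: a(:)
  REAL :: b(5) = (/ 1.0, 2.0, 3.0, 4.0, 5.0 /)
  ! With -Mallocatable=03 (F2003 semantics) this assignment allocates a
  ! to the shape of b automatically; with the older default semantics an
  ! explicit ALLOCATE(a(5)) would be needed before the assignment.
  a = b
  PRINT *, SIZE(a), a
END PROGRAM alloc03_demo

Note that tracer is declared POINTER, not ALLOCATABLE, so as far as I understand the option should not even change the meaning of the assignment marked above, which is another reason I suspect a compiler problem rather than an error in the source.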

For completeness I’ve included mo_interface_icoham_echam.f90 below. Since removing -Mallocatable=03 is an acceptable workaround, I did not go to the effort of slimming it down, so it still has numerous dependencies on other modules. But maybe there is enough information here for you to locate the bug.

Thanks, --Will

!>
!! @brief Interface between ICOHAM dynamics+transport and ECHAM physics
!!
!! @author Kristina Froehlich (DWD)
!! @author Marco Giorgetta (MPI-M)
!! @author Hui Wan (MPI-M)
!!
!! @par Revision History
!!
!! @par Copyright and License
!!
!! This code is subject to the DWD and MPI-M-Software-License-Agreement in
!! its most recent form.
!! Please see the file LICENSE in the root of the source tree for this code.
!! Where software is supplied by third parties, it is indicated in the
!! headers of the routines.
!!

!----------------------------
#include "omp_definitions.inc"
!----------------------------

MODULE mo_interface_icoham_echam

  USE mo_kind                  ,ONLY: wp
  USE mo_exception             ,ONLY: finish !, message, message_text, print_value

  USE mo_impl_constants        ,ONLY: min_rlcell_int
  USE mo_impl_constants_grf    ,ONLY: grf_bdywidth_e, grf_bdywidth_c

  USE mo_coupling_config       ,ONLY: is_coupled_run
  USE mo_parallel_config       ,ONLY: nproma
  USE mo_run_config            ,ONLY: nlev, ntracer, ltimer
  USE mo_echam_phy_config      ,ONLY: echam_phy_config

  USE mo_model_domain          ,ONLY: t_patch
  USE mo_intp_data_strc        ,ONLY: t_int_state
  USE mo_intp_rbf              ,ONLY: rbf_vec_interpol_cell

  USE mo_loopindices           ,ONLY: get_indices_c, get_indices_e
  USE mo_sync                  ,ONLY: sync_c, sync_e, sync_patch_array, sync_patch_array_mult

  USE mo_icoham_dyn_types      ,ONLY: t_hydro_atm_prog, t_hydro_atm_diag
  USE mo_eta_coord_diag        ,ONLY: half_level_pressure, full_level_pressure

  USE mo_datetime              ,ONLY: t_datetime
  USE mo_echam_phy_memory      ,ONLY: prm_field, prm_tend
  USE mo_echam_phy_bcs         ,ONLY: echam_phy_bcs_global
  USE mo_echam_phy_main        ,ONLY: echam_phy_main
  USE mo_interface_echam_ocean ,ONLY: interface_echam_ocean

  USE mo_timer                 ,ONLY: timer_start, timer_stop,        &
    &                                 timer_dyn2phy, timer_phy2dyn,   &
    &                                 timer_echam_phy, timer_coupling

  IMPLICIT NONE

  PRIVATE

  PUBLIC :: interface_icoham_echam

  CHARACTER(len=*), PARAMETER :: thismodule = 'mo_interface_icoham_echam'

CONTAINS
  !>
  !! SUBROUTINE echam_physics -- the Interface between ICON dynamics and
  !! ECHAM physics
  !!
  !! This subroutine is called in the time loop of the ICOHAM model.
  !! It takes the following as input:
  !! <ol>
  !! <li> prognostic and diagnostic variables of the dynamical core;
  !! <li> tendency of the prognostic variables induced by adiabatic dynamics;
  !! <li> time step;
  !! <li> information about the dynamics grid;
  !! <li> interpolation coefficients.
  !! </ol>
  !!
  !! The output includes tendencies of the prognostic variables caused by
  !! the parameterisations.
  !!
  !! Note that each call of this subroutine deals with a single grid level
  !! rather than the entire grid tree.

  SUBROUTINE interface_icoham_echam( pdtime, psteplen ,& !in
    &                                datetime         ,& !in
    &                                patch            ,& !in
    &                                pt_int_state     ,& !in
    &                                dyn_prog_old     ,& !in
    &                                dyn_diag_old     ,& !in
    &                                dyn_prog_new     ,& !in
    &                                dyn_tend         )  !inout

    !
    !> Arguments:
    !
    REAL(wp)              , INTENT(in)            :: pdtime          !< time step
    REAL(wp)              , INTENT(in)            :: psteplen        !< 2*time step in case of leapfrog
    TYPE(t_datetime)      , INTENT(in)            :: datetime

    TYPE(t_patch)         , INTENT(in)   , TARGET :: patch           !< grid/patch info
    TYPE(t_int_state)     , INTENT(in)   , TARGET :: pt_int_state    !< interpolation state

    TYPE(t_hydro_atm_prog), INTENT(inout)         :: dyn_prog_old
    TYPE(t_hydro_atm_diag), INTENT(in)            :: dyn_diag_old
    TYPE(t_hydro_atm_prog), INTENT(in)            :: dyn_prog_new

    TYPE(t_hydro_atm_prog), INTENT(inout)         :: dyn_tend

    ! Local array bounds

    INTEGER  :: i_nchdom             !< number of child patches
    INTEGER  :: i_startblk, i_endblk
    INTEGER  :: rl_start, rl_end
    INTEGER  :: jg                   !< grid index
    INTEGER  :: jcs, jce             !< start and end cell indices
    INTEGER  :: jes, jee, je         !< start and end edge indices
    INTEGER  :: jk                   !< level in column index
    INTEGER  :: jb, jbs, jbe         !< row in block index, start and end indices
    INTEGER  :: jcn,jbn              !< jc and jb of neighbor cells sharing an edge je

    ! Local variables

    REAL(wp) :: zvn1, zvn2
    REAL(wp), POINTER :: zdudt(:,:,:), zdvdt(:,:,:)

    LOGICAL  :: any_uv_tend
    LOGICAL  :: ltrig_rad
    TYPE(t_datetime)   :: datetime_radtran !< date and time for radiative transfer calculation

    INTEGER  :: return_status

    ! Local parameters

    CHARACTER(*), PARAMETER :: method_name = "interface_icoham_echam"

    !-------------------------------------------------------------------------------------

    IF (ltimer) CALL timer_start(timer_dyn2phy)

    ! Inquire current grid level and the total number of grid cells
    i_nchdom = MAX(1,patch%n_childdom)
    rl_start = grf_bdywidth_c+1
    rl_end   = min_rlcell_int

    i_startblk = patch%cells%start_blk(rl_start,1)
    i_endblk   = patch%cells%end_blk(rl_end,i_nchdom)

    jg    = patch%id

    !-------------------------------------------------------------------------
    ! Dynamics to physics: remap dynamics variables to physics grid
    !-------------------------------------------------------------------------
    ! Currently this includes
    !  - reconstructing of u- and v-wind and their tendencies at cell centers
    !  - copying scalar fields from the dynamics state and from the tendency
    !    state to the physics state and physics tendencies, respectively.
    !  - computing pressure values at the "new" time step
    ! Once a physics grid of different resolution is introduced, conservative
    ! re-mapping will be called here.


    ! LL The physics runs only on the owned cells
    !  but the following rbf_vec_interpol_cell may use the halos(?)
    !
    ! - prm_field(jg)%u
    ! - prm_field(jg)%v
    CALL sync_patch_array( SYNC_E, patch, dyn_prog_old%vn )

    CALL rbf_vec_interpol_cell( dyn_prog_old%vn       ,&! in
      &                         patch                 ,&! in
      &                         pt_int_state          ,&! in
      &                         prm_field(jg)%u       ,&! out
      &                         prm_field(jg)%v       ,&! out
      &                         opt_rlstart=rl_start  ,&! in
      &                         opt_rlend=rl_end      ) ! in

!$OMP PARALLEL WORKSHARE

    ! Fill the physics state variables, which are used by echam:
    !
    prm_field(jg)%      geom(:,:,:)   = dyn_diag_old%     geo_mc(:,:,:)
    !
    prm_field(jg)%       vor(:,:,:)   = dyn_diag_old% rel_vort_c(:,:,:)
    !
    prm_field(jg)%      temp(:,:,:)   = dyn_prog_old%       temp(:,:,:)
    prm_field(jg)%        tv(:,:,:)   = dyn_diag_old%      tempv(:,:,:)
    !
    prm_field(jg)% presm_old(:,:,:)   = dyn_diag_old%    pres_mc(:,:,:)
    !
    prm_field(jg)%         q(:,:,:,:) = dyn_prog_old%     tracer(:,:,:,:)
    !
    ! cloud water+ice
    prm_field(jg)%        qx(:,:,:)   = dyn_diag_old%         qx(:,:,:)
    !
    ! vertical velocity in p-system
    prm_field(jg)%     omega(:,:,:)   = dyn_diag_old%   wpres_mc(:,:,:)
    !
    prm_field(jg)%      geoi(:,:,:)   = dyn_diag_old%     geo_ic(:,:,:)
    !
    prm_field(jg)% presi_old(:,:,:)   = dyn_diag_old%    pres_ic(:,:,:)

!$OMP END PARALLEL WORKSHARE

    !---------------------------------
    ! Additional diagnostic variables

!$OMP PARALLEL
!$OMP DO PRIVATE(jb,jcs,jce) ICON_OMP_DEFAULT_SCHEDULE
    DO jb = i_startblk,i_endblk
      CALL get_indices_c( patch, jb,i_startblk,i_endblk, jcs,jce, rl_start, rl_end)

      ! Pressure at time step "new" (i.e., n+1)

      CALL half_level_pressure( dyn_prog_new%pres_sfc(:,jb),     nproma, jce, &! in
        &                       prm_field(jg)%presi_new(:,:,jb)               )! out

      CALL full_level_pressure( prm_field(jg)%presi_new(:,:,jb), nproma, jce, &! in
        &                       prm_field(jg)%presm_new(:,:,jb)               )! out
    END DO
!$OMP END DO NOWAIT
!$OMP END PARALLEL

    !--------------------------------
    ! transfer tendencies


    ! LL The physics runs only on the owned cells
    !    but the following rbf_vec_interpol_cell may use the halos(?)
    CALL sync_patch_array( SYNC_E, patch, dyn_tend%vn )

    CALL rbf_vec_interpol_cell( dyn_tend%vn,          &! in
      &                         patch, pt_int_state,  &! in
      &                         prm_tend(jg)%u,       &! out
      &                         prm_tend(jg)%v,       &! out
      &   opt_rlstart=rl_start, opt_rlend=rl_end     ) ! in

!$OMP PARALLEL
!$OMP DO PRIVATE(jb,jcs,jce) ICON_OMP_DEFAULT_SCHEDULE
    DO jb = i_startblk,i_endblk
      CALL get_indices_c( patch, jb,i_startblk,i_endblk, jcs,jce, rl_start, rl_end)
      prm_tend(jg)% temp(jcs:jce,:,jb)   = dyn_tend%   temp(jcs:jce,:,jb)
      prm_tend(jg)%    q(jcs:jce,:,jb,:) = dyn_tend% tracer(jcs:jce,:,jb,:)
      prm_tend(jg)%q_dyn(jcs:jce,:,jb,:) = dyn_tend% tracer(jcs:jce,:,jb,:)
    END DO
!$OMP END DO NOWAIT
!$OMP END PARALLEL

    !
    !=====================================================================================

    IF (ltimer) THEN
      CALL timer_stop (timer_dyn2phy)
      CALL timer_start(timer_echam_phy)
    END IF

    !=====================================================================================
    !
    ! (3) Prepare boundary conditions for ECHAM physics
    !
    CALL echam_phy_bcs_global( datetime     ,&! in
      &                        jg           ,&! in
      &                        patch        ,&! in
      &                        pdtime       ,&! in
      &                        ltrig_rad    ,&! out
      &                        datetime_radtran ) ! out
    !
    !=====================================================================================

    !=====================================================================================
    !
    ! (4) Call echam physics and compute the total physics tendencies.
    !     This includes the atmospheric processes (proper ECHAM) and
    !     the land processes, which are vertically implicitly coupled
    !     to the parameterization of vertical turbulent fluxes.
    !
!$OMP PARALLEL
!$OMP DO PRIVATE(jb,jcs,jce),  ICON_OMP_GUIDED_SCHEDULE

    DO jb = i_startblk,i_endblk
      CALL get_indices_c(patch, jb,i_startblk,i_endblk, jcs,jce, rl_start, rl_end)

      ! Like in ECHAM, the subroutine *echam_phy_main* has direct access to the memory
      ! buffers prm_field and prm_tend. In addition it can also directly access
      ! the grid/patch information on which the computations are performed.
      ! Thus the argument list contains only
      ! - jg: the grid index in the grid hierarchy
      ! - jb: the row index in the block
      ! - jcs and jce: start and end indices of columns in a row
      ! - nproma: the block length
      ! - a few other globally valid arguments

      CALL echam_phy_main( jg           ,&! in
        &                  jb           ,&! in
        &                  jcs          ,&! in
        &                  jce          ,&! in
        &                  nproma       ,&! in
        &                  datetime     ,&! in
        &                  pdtime       ,&! in
        &                  psteplen     ,&! in
        &                  ltrig_rad    ,&! in
        &                  datetime_radtran ) ! in

    END DO
!$OMP END DO NOWAIT
!$OMP END PARALLEL
    !
    !=====================================================================================

    IF (ltimer) CALL timer_stop(timer_echam_phy)

    !=====================================================================================
    !
    ! (5) Couple to ocean surface if an ocean is present and this is a coupling time step.
    !
    !
    IF ( is_coupled_run() ) THEN
      IF (ltimer) CALL timer_start(timer_coupling)

      CALL interface_echam_ocean( jg, patch )

      IF (ltimer) CALL timer_stop(timer_coupling)
    END IF
    !
    !=====================================================================================

    IF (ltimer) CALL timer_start(timer_phy2dyn)

    !=====================================================================================
    !
    !     Copy physics tendencies in temp. and tracers from the physics to the dynamics
    !
!$OMP PARALLEL
!$OMP DO PRIVATE(jb,jcs,jce) ICON_OMP_DEFAULT_SCHEDULE
    DO jb = i_startblk,i_endblk
      CALL get_indices_c( patch, jb,i_startblk,i_endblk, jcs,jce, rl_start, rl_end)
      dyn_tend%   temp(jcs:jce,:,jb)   = prm_tend(jg)% temp(jcs:jce,:,jb)
      dyn_tend% tracer(jcs:jce,:,jb,:) = prm_tend(jg)%    q(jcs:jce,:,jb,:)
    END DO
!$OMP END DO NOWAIT
!$OMP END PARALLEL
    !
    CALL sync_patch_array( SYNC_C, patch, dyn_tend%temp )
    CALL sync_patch_array_mult(SYNC_C, patch, ntracer, f4din=dyn_tend% tracer)
    !
    !=====================================================================================


    !=====================================================================================
    !
    ! (6) Convert physics tendencies in the wind components (u,v) to tendencies in
    !     normal wind vn.
    !
    !
    any_uv_tend = echam_phy_config%lconv     .OR. &
      &           echam_phy_config%lvdiff    .OR. &
      &           echam_phy_config%lgw_hines .OR. &
      &           echam_phy_config%lssodrag

    IF (any_uv_tend) THEN

      ALLOCATE(zdudt(nproma,nlev,patch%nblks_c), &
        &      zdvdt(nproma,nlev,patch%nblks_c), &
        &      stat=return_status)
      IF (return_status > 0) THEN
        CALL finish (method_name, 'ALLOCATE(zdudt,zdvdt)')
      END IF
      zdudt(:,:,:) = 0.0_wp
      zdvdt(:,:,:) = 0.0_wp

!$OMP PARALLEL
!$OMP DO PRIVATE(jb,jcs,jce) ICON_OMP_DEFAULT_SCHEDULE
      DO jb = i_startblk,i_endblk
        CALL get_indices_c(patch, jb,i_startblk,i_endblk, jcs,jce, rl_start, rl_end)
        zdudt(jcs:jce,:,jb) = prm_tend(jg)% u_phy(jcs:jce,:,jb)
        zdvdt(jcs:jce,:,jb) = prm_tend(jg)% v_phy(jcs:jce,:,jb)
      END DO
!$OMP END DO NOWAIT
!$OMP END PARALLEL

      ! Now derive the physics-induced normal wind tendency, and add it to the
      ! total tendency.
      CALL sync_patch_array_mult(SYNC_C, patch, 2, zdudt, zdvdt)

      jbs   = patch%edges%start_blk(grf_bdywidth_e+1,1)
      jbe   = patch%nblks_e

!$OMP PARALLEL
!$OMP DO PRIVATE(jb,jk,je,jes,jee,jcn,jbn,zvn1,zvn2) ICON_OMP_DEFAULT_SCHEDULE
      DO jb = jbs,jbe
        CALL get_indices_e(patch, jb,jbs,jbe, jes,jee, grf_bdywidth_e+1)

        DO jk = 1,nlev

          DO je = jes,jee
            jcn  =   patch%edges%cell_idx(je,jb,1)
            jbn  =   patch%edges%cell_blk(je,jb,1)
            zvn1 =   zdudt(jcn,jk,jbn)*patch%edges%primal_normal_cell(je,jb,1)%v1 &
              &    + zdvdt(jcn,jk,jbn)*patch%edges%primal_normal_cell(je,jb,1)%v2

            jcn  =   patch%edges%cell_idx(je,jb,2)
            jbn  =   patch%edges%cell_blk(je,jb,2)
            zvn2 =   zdudt(jcn,jk,jbn)*patch%edges%primal_normal_cell(je,jb,2)%v1 &
              &    + zdvdt(jcn,jk,jbn)*patch%edges%primal_normal_cell(je,jb,2)%v2

            dyn_tend%vn(je,jk,jb)        =   dyn_tend%vn(je,jk,jb)              &
              &                            + pt_int_state%c_lin_e(je,1,jb)*zvn1 &
              &                            + pt_int_state%c_lin_e(je,2,jb)*zvn2

          END DO ! je
        END DO ! jk
      END DO ! jb
!$OMP END DO NOWAIT
!$OMP END PARALLEL

      DEALLOCATE(zdudt, zdvdt)

    END IF ! any_uv_tend
    !
    !=====================================================================================

    IF (ltimer) CALL timer_stop(timer_phy2dyn)

  END SUBROUTINE interface_icoham_echam
  !----------------------------------------------------------------------------

END MODULE mo_interface_icoham_echam




Hi Will,

Could you package up the source and all dependent module files and send them to PGI Customer Service (trs@pgroup.com)?

I tried removing the module dependencies in “mo_interface_icoham_echam” but wasn’t able to recreate the error.

Thanks,
Mat

Hi Mat,

Thanks for the reply. I’m not sure this example can be packaged up sensibly without a lot of effort: mo_interface_icoham_echam has a lot of dependencies, which in turn have their own dependencies. I’ll see if I can find some time to shave it down to a minimal example.
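
One idea I may try (untested, just a sketch): replace the real module with a stub that exports nothing but the symbol junk.f90 imports, so the reproducer can be compiled without the rest of the ICON dependency chain. Whether such a stub still triggers the crash is an open question, since the problem seems to depend on the real module.

MODULE mo_interface_icoham_echam
  IMPLICIT NONE
  PRIVATE
  PUBLIC :: interface_icoham_echam
CONTAINS
  ! Empty body: junk.f90 only imports this name and never calls it,
  ! so the argument list does not need to match the real routine.
  SUBROUTINE interface_icoham_echam()
  END SUBROUTINE interface_icoham_echam
END MODULE mo_interface_icoham_echam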

Cheers, --Will