SUBROUTINE v3d10_nl_arruda_boyce_damping(coor, matcstet, lmcstet, r_in, d, &
       s11, s22, s33, s12, s23, s13, &
       numnp, nstart, nend, numcstet, numat_vol, &
       mu, kappa, rho, cd_fastest, detfold, velo, dt)
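!
!   Computes the internal force vector r_in for 10-node tetrahedral elements
!   (elements nstart..nend) with an Arruda-Boyce hyperelastic material plus an
!   artificial viscous damping stress, using 4-point Gauss quadrature.
!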
    REAL*8, DIMENSION(1:3,1:numnp) :: coor
    REAL*8, DIMENSION(1:3*numnp) :: r_in
    REAL*8, DIMENSION(1:3*numnp) :: d
    REAL*8, DIMENSION(1:4,1:numcstet) :: s11, s22, s33, s12, s23, s13
    INTEGER, DIMENSION(1:10,1:numcstet) :: lmcstet
    INTEGER, DIMENSION(1:numcstet) :: matcstet
    INTEGER :: n1,n2,n3,n4,n5,n6,n7,n8,n9,n10
    REAL*8 :: u1,u2,u3,u4,u5,u6,u7,u8,u9,u10
    REAL*8 :: v1,v2,v3,v4,v5,v6,v7,v8,v9,v10
    REAL*8 :: w1,w2,w3,w4,w5,w6,w7,w8,w9,w10
    REAL*8 :: b1,b2,b3,b4,b5,b6,b7,b8,b9,b10
    REAL*8 :: b11,b12,b13,b14,b15,b16,b17,b18,b19,b20
    REAL*8 :: b21,b22,b23,b24,b25,b26,b27,b28,b29,b30
    REAL*8 :: dudx,dvdy,dwdz,dudy,dvdx,dvdz,dwdy,dudz,dwdx
    REAL*8 :: e11,e22,e33,e12,e23,e13
    REAL*8 :: x1,x2,x3,x4,x5,x6,x7,x8,x9,x10
    REAL*8 :: y1,y2,y3,y4,y5,y6,y7,y8,y9,y10
    REAL*8 :: z1,z2,z3,z4,z5,z6,z7,z8,z9,z10
    INTEGER :: i, j, nstart, nend
    REAL*8 :: aux1,aux2,aux3,aux4,aux5,aux6,aux7,aux8,aux9,aux10,aux11,aux12
    REAL*8 :: r1,r2,r3,r4,r5,r6,r7,r8,r9,r10,r11,r12,r13,r14,r15,r16,r17,r18
    REAL*8 :: r19,r20,r21,r22,r23,r24,r25,r26,r27,r28,r29,r30
    REAL*8 :: g1, g2, g3, g4
    REAL*8 :: xn1, xn2, xn3, xn4
    INTEGER :: k1n1,k1n2,k1n3,k1n4,k1n5,k1n6,k1n7,k1n8,k1n9,k1n10
    INTEGER :: k2n1,k2n2,k2n3,k2n4,k2n5,k2n6,k2n7,k2n8,k2n9,k2n10
    INTEGER :: k3n1,k3n2,k3n3,k3n4,k3n5,k3n6,k3n7,k3n8,k3n9,k3n10
    REAL*8 :: f11, f12, f13, f21, f22, f23, f31, f32, f33
    REAL*8 :: x14, x24, x34, y14, y24, y34, z14, z24, z34
    REAL*8 :: x12, x13, y12, y13, z12, z13
    REAL*8 :: val11, val21, val31
    REAL*8, DIMENSION(1:numat_vol) :: mu, kappa, rho
    REAL*8, DIMENSION(1:4,1:numcstet) :: detfold
    REAL*8, DIMENSION(1:3*numnp) :: velo
    REAL*8 :: strssvisco(3,3)

    REAL*8 :: vel_u1,vel_u2,vel_u3,vel_u4,vel_u5,vel_u6,vel_u7,vel_u8,vel_u9,vel_u10
    REAL*8 :: vel_v1,vel_v2,vel_v3,vel_v4,vel_v5,vel_v6,vel_v7,vel_v8,vel_v9,vel_v10
    REAL*8 :: vel_w1,vel_w2,vel_w3,vel_w4,vel_w5,vel_w6,vel_w7,vel_w8,vel_w9,vel_w10

    REAL*8 :: f(3,3), fdot(3,3)
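    ! Barycentric coordinates of the standard 4-point Gauss rule for a
    ! tetrahedron (one row per integration point)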
    REAL*8, DIMENSION(1:4,1:4) :: gaussintpt = reshape( &
         (/0.58541020d0,0.13819660d0,0.13819660d0,0.13819660d0, &
           0.13819660d0,0.58541020d0,0.13819660d0,0.13819660d0, &
           0.13819660d0,0.13819660d0,0.58541020d0,0.13819660d0, &
           0.13819660d0,0.13819660d0,0.13819660d0,0.58541020d0/),(/4,4/) )
    REAL*8, DIMENSION(1:3,1:3) :: fij, cij
    vel_u10 = velo(k1n10)
    vel_v10 = velo(k2n10)
    vel_w10 = velo(k3n10)
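    ! Six times the element volume (vx6) via the scalar triple product of the
    ! corner edge vectors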
    val11 =  y24*z34 - z24*y34
    val21 = -( x24*z34 - z24*x34 )
    val31 =  x24*y34 - y24*x34

    vx6 = -( x14*val11 + y14*val21 + z14*val31 )
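    ! Cofactor terms: aux1..aux12 are the constant gradients of the four
    ! corner (linear) shape functions, scaled by 6V; they build the quadratic
    ! B-matrix entries below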
    aux1 = -(y3*z4 - y4*z3 - y2*z4 + y2*z3 + z2*y4 - z2*y3)
    aux2 =  (x3*z4 - x4*z3 - x2*z4 + x2*z3 + z2*x4 - z2*x3)
    aux3 = -(x3*y4 - x4*y3 - x2*y4 + x2*y3 + y2*x4 - y2*x3)
    aux4 =  (y3*z4 - y4*z3 - y1*z4 + y1*z3 + z1*y4 - z1*y3)
    aux5 = -(x3*z4 - x4*z3 - x1*z4 + x1*z3 + z1*x4 - z1*x3)
    aux6 =  (x3*y4 - x4*y3 - x1*y4 + x1*y3 + y1*x4 - y1*x3)
    aux7 = -(y2*z4 - z2*y4 - y1*z4 + y1*z2 + z1*y4 - z1*y2)
    aux8 =  (x2*z4 - z2*x4 - x1*z4 + x1*z2 + z1*x4 - z1*x2)
    aux9 = -(x2*y4 - y2*x4 - x1*y4 + x1*y2 + y1*x4 - y1*x2)
    aux10 =  (y2*z3 - z2*y3 - y1*z3 + y1*z2 + z1*y3 - z1*y2)
    aux11 = -(x2*z3 - z2*x3 - x1*z3 + x1*z2 + z1*x3 - z1*x2)
    aux12 =  (x2*y3 - y2*x3 - x1*y3 + x1*y2 + y1*x3 - y1*x2)
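    ! g1..g4 are the barycentric coordinates of Gauss point igpt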
    g1 = gaussintpt(igpt,1)
    g2 = gaussintpt(igpt,2)
    g3 = gaussintpt(igpt,3)
    g4 = gaussintpt(igpt,4)
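    ! Derivatives of the mid-side (quadratic) shape functions,
    ! dN = 4*(L_a*dL_b + L_b*dL_a), still carrying the 6V scaling of aux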
    b13 = 4.d0*(g2*aux1 + g1*aux4)
    b14 = 4.d0*(g2*aux2 + g1*aux5)
    b15 = 4.d0*(g2*aux3 + g1*aux6)

    b16 = 4.d0*(g3*aux4 + g2*aux7)
    b17 = 4.d0*(g3*aux5 + g2*aux8)
    b18 = 4.d0*(g3*aux6 + g2*aux9)

    b19 = 4.d0*(g1*aux7 + g3*aux1)
    b20 = 4.d0*(g1*aux8 + g3*aux2)
    b21 = 4.d0*(g1*aux9 + g3*aux3)

    b22 = 4.d0*(g4*aux1 + g1*aux10)
    b23 = 4.d0*(g4*aux2 + g1*aux11)
    b24 = 4.d0*(g4*aux3 + g1*aux12)

    b25 = 4.d0*(g4*aux4 + g2*aux10)
    b26 = 4.d0*(g4*aux5 + g2*aux11)
    b27 = 4.d0*(g4*aux6 + g2*aux12)

    b28 = 4.d0*(g4*aux7 + g3*aux10)
    b29 = 4.d0*(g4*aux8 + g3*aux11)
    b30 = 4.d0*(g4*aux9 + g3*aux12)
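    ! Displacement gradient at the Gauss point; the 6V factor carried by the
    ! b coefficients is removed by vx6inv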
    dudx = (b1*u1 + b4*u2 + b7*u3 + b10*u4 + b13*u5 + b16*u6 + b19*u7 + b22*u8 + b25*u9 + b28*u10)*vx6inv
    dvdy = (b2*v1 + b5*v2 + b8*v3 + b11*v4 + b14*v5 + b17*v6 + b20*v7 + b23*v8 + b26*v9 + b29*v10)*vx6inv
    dwdz = (b3*w1 + b6*w2 + b9*w3 + b12*w4 + b15*w5 + b18*w6 + b21*w7 + b24*w8 + b27*w9 + b30*w10)*vx6inv
    dudy = (b2*u1 + b5*u2 + b8*u3 + b11*u4 + b14*u5 + b17*u6 + b20*u7 + b23*u8 + b26*u9 + b29*u10)*vx6inv
    dvdx = (b1*v1 + b4*v2 + b7*v3 + b10*v4 + b13*v5 + b16*v6 + b19*v7 + b22*v8 + b25*v9 + b28*v10)*vx6inv
    dvdz = (b3*v1 + b6*v2 + b9*v3 + b12*v4 + b15*v5 + b18*v6 + b21*v7 + b24*v8 + b27*v9 + b30*v10)*vx6inv
    dwdy = (b2*w1 + b5*w2 + b8*w3 + b11*w4 + b14*w5 + b17*w6 + b20*w7 + b23*w8 + b26*w9 + b29*w10)*vx6inv
    dudz = (b3*u1 + b6*u2 + b9*u3 + b12*u4 + b15*u5 + b18*u6 + b21*u7 + b24*u8 + b27*u9 + b30*u10)*vx6inv
    dwdx = (b1*w1 + b4*w2 + b7*w3 + b10*w4 + b13*w5 + b16*w6 + b19*w7 + b22*w8 + b25*w9 + b28*w10)*vx6inv
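    ! Deformation gradient F = I + grad(u)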
    f11 = 1.d0 + dudx
    f22 = 1.d0 + dvdy
    f33 = 1.d0 + dwdz
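    ! Rate of the deformation gradient, fdot = grad(v), from the nodal velocities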
    fdot(1,1) = (b1*vel_u1 + b4*vel_u2 + b7*vel_u3 + b10*vel_u4 + b13*vel_u5 + &
         b16*vel_u6 + b19*vel_u7 + b22*vel_u8 + b25*vel_u9 + b28*vel_u10)*vx6inv
    fdot(2,2) = (b2*vel_v1 + b5*vel_v2 + b8*vel_v3 + b11*vel_v4 + b14*vel_v5 + &
         b17*vel_v6 + b20*vel_v7 + b23*vel_v8 + b26*vel_v9 + b29*vel_v10)*vx6inv
    fdot(3,3) = (b3*vel_w1 + b6*vel_w2 + b9*vel_w3 + b12*vel_w4 + b15*vel_w5 + &
         b18*vel_w6 + b21*vel_w7 + b24*vel_w8 + b27*vel_w9 + b30*vel_w10)*vx6inv
    fdot(1,2) = (b2*vel_u1 + b5*vel_u2 + b8*vel_u3 + b11*vel_u4 + b14*vel_u5 + &
         b17*vel_u6 + b20*vel_u7 + b23*vel_u8 + b26*vel_u9 + b29*vel_u10)*vx6inv
    fdot(2,1) = (b1*vel_v1 + b4*vel_v2 + b7*vel_v3 + b10*vel_v4 + b13*vel_v5 + &
         b16*vel_v6 + b19*vel_v7 + b22*vel_v8 + b25*vel_v9 + b28*vel_v10)*vx6inv
    fdot(2,3) = (b3*vel_v1 + b6*vel_v2 + b9*vel_v3 + b12*vel_v4 + b15*vel_v5 + &
         b18*vel_v6 + b21*vel_v7 + b24*vel_v8 + b27*vel_v9 + b30*vel_v10)*vx6inv
    fdot(3,2) = (b2*vel_w1 + b5*vel_w2 + b8*vel_w3 + b11*vel_w4 + b14*vel_w5 + &
         b17*vel_w6 + b20*vel_w7 + b23*vel_w8 + b26*vel_w9 + b29*vel_w10)*vx6inv
    fdot(1,3) = (b3*vel_u1 + b6*vel_u2 + b9*vel_u3 + b12*vel_u4 + b15*vel_u5 + &
         b18*vel_u6 + b21*vel_u7 + b24*vel_u8 + b27*vel_u9 + b30*vel_u10)*vx6inv
    fdot(3,1) = (b1*vel_w1 + b4*vel_w2 + b7*vel_w3 + b10*vel_w4 + b13*vel_w5 + &
         b16*vel_w6 + b19*vel_w7 + b22*vel_w8 + b25*vel_w9 + b28*vel_w10)*vx6inv
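    ! Artificial viscous damping stress for this Gauss point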
    CALL artificialdamping(numcstet, numnp, rho(j), cd_fastest, detfold(igpt,i), &
         dt, f, fdot, vx6, strssvisco)
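    ! Right Cauchy-Green tensor cij = F^T F (summed over mm)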
    cij(kk,ll) = cij(kk,ll) + f(mm,kk)*f(mm,ll)
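    ! Material (Arruda-Boyce) stress at this Gauss point, evaluated from cij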
    CALL arruda_boyce(cij, &
         s11(igpt,i), s22(igpt,i), s33(igpt,i), s12(igpt,i), s23(igpt,i), s13(igpt,i), i, &
         mu(j), kappa(j))
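    ! Add the artificial viscous stress to the material stress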
    s11(igpt,i) = s11(igpt,i) + strssvisco(1,1)
    s22(igpt,i) = s22(igpt,i) + strssvisco(2,2)
    s33(igpt,i) = s33(igpt,i) + strssvisco(3,3)
    s12(igpt,i) = s12(igpt,i) + strssvisco(1,2)
    s13(igpt,i) = s13(igpt,i) + strssvisco(1,3)
    s23(igpt,i) = s23(igpt,i) + strssvisco(2,3)
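    ! Internal force contributions r1..r30 (B^T * stress, accumulated over the
    ! Gauss points); three force components per local node

    ! local node 1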
    r1 = r1 + &
         ( s11(igpt,i)*b1*(1.d0+dudx) + s22(igpt,i)*b2*dudy + s33(igpt,i)*b3*dudz &
         + s12(igpt,i)*( b2*(1.d0+dudx) + b1*dudy ) &
         + s23(igpt,i)*( b3*dudy + b2*dudz ) &
         + s13(igpt,i)*( b3*(1.d0+dudx) + b1*dudz ) )
    r2 = r2 + &
         ( s11(igpt,i)*b1*dvdx + s22(igpt,i)*b2*(1.d0+dvdy) + s33(igpt,i)*b3*dvdz &
         + s12(igpt,i)*( b1*(1.d0+dvdy) + b2*dvdx ) &
         + s23(igpt,i)*( b3*(1.d0+dvdy) + b2*dvdz ) &
         + s13(igpt,i)*( b3*dvdx + b1*dvdz ) )
    r3 = r3 + &
         ( s11(igpt,i)*b1*dwdx + s22(igpt,i)*b2*dwdy + s33(igpt,i)*b3*(1.d0+dwdz) &
         + s12(igpt,i)*( b2*dwdx + b1*dwdy ) &
         + s23(igpt,i)*( b3*dwdy + b2*(1.d0+dwdz) ) &
         + s13(igpt,i)*( b3*dwdx + b1*(1.d0+dwdz) ) )
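
    ! local node 2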
    r4 = r4 + &
         ( s11(igpt,i)*b4*(1.d0+dudx) + s22(igpt,i)*b5*dudy + s33(igpt,i)*b6*dudz &
         + s12(igpt,i)*( b5*(1.d0+dudx) + b4*dudy ) &
         + s23(igpt,i)*( b6*dudy + b5*dudz ) &
         + s13(igpt,i)*( b6*(1.d0+dudx) + b4*dudz ) )
    r5 = r5 + &
         ( s11(igpt,i)*b4*dvdx + s22(igpt,i)*b5*(1.d0+dvdy) + s33(igpt,i)*b6*dvdz &
         + s12(igpt,i)*( b4*(1.d0+dvdy) + b5*dvdx ) &
         + s23(igpt,i)*( b6*(1.d0+dvdy) + b5*dvdz ) &
         + s13(igpt,i)*( b6*dvdx + b4*dvdz ) )
    r6 = r6 + &
         ( s11(igpt,i)*b4*dwdx + s22(igpt,i)*b5*dwdy + s33(igpt,i)*b6*(1.d0+dwdz) &
         + s12(igpt,i)*( b5*dwdx + b4*dwdy ) &
         + s23(igpt,i)*( b6*dwdy + b5*(1.d0+dwdz) ) &
         + s13(igpt,i)*( b6*dwdx + b4*(1.d0+dwdz) ) )
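
    ! local node 3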
    r7 = r7 + &
         ( s11(igpt,i)*b7*(1.d0+dudx) + s22(igpt,i)*b8*dudy + s33(igpt,i)*b9*dudz &
         + s12(igpt,i)*( b8*(1.d0+dudx) + b7*dudy ) &
         + s23(igpt,i)*( b9*dudy + b8*dudz ) &
         + s13(igpt,i)*( b9*(1.d0+dudx) + b7*dudz ) )
    r8 = r8 + &
         ( s11(igpt,i)*b7*dvdx + s22(igpt,i)*b8*(1.d0+dvdy) + s33(igpt,i)*b9*dvdz &
         + s12(igpt,i)*( b7*(1.d0+dvdy) + b8*dvdx ) &
         + s23(igpt,i)*( b9*(1.d0+dvdy) + b8*dvdz ) &
         + s13(igpt,i)*( b9*dvdx + b7*dvdz ) )
    r9 = r9 + &
         ( s11(igpt,i)*b7*dwdx + s22(igpt,i)*b8*dwdy + s33(igpt,i)*b9*(1.d0+dwdz) &
         + s12(igpt,i)*( b8*dwdx + b7*dwdy ) &
         + s23(igpt,i)*( b9*dwdy + b8*(1.d0+dwdz) ) &
         + s13(igpt,i)*( b9*dwdx + b7*(1.d0+dwdz) ) )
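
    ! local node 4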
    r10 = r10 + &
         ( s11(igpt,i)*b10*(1.d0+dudx) + s22(igpt,i)*b11*dudy + s33(igpt,i)*b12*dudz &
         + s12(igpt,i)*( b11*(1.d0+dudx) + b10*dudy ) &
         + s23(igpt,i)*( b12*dudy + b11*dudz ) &
         + s13(igpt,i)*( b12*(1.d0+dudx) + b10*dudz ) )
    r11 = r11 + &
         ( s11(igpt,i)*b10*dvdx + s22(igpt,i)*b11*(1.d0+dvdy) + s33(igpt,i)*b12*dvdz &
         + s12(igpt,i)*( b10*(1.d0+dvdy) + b11*dvdx ) &
         + s23(igpt,i)*( b12*(1.d0+dvdy) + b11*dvdz ) &
         + s13(igpt,i)*( b12*dvdx + b10*dvdz ) )
    r12 = r12 + &
         ( s11(igpt,i)*b10*dwdx + s22(igpt,i)*b11*dwdy + s33(igpt,i)*b12*(1.d0+dwdz) &
         + s12(igpt,i)*( b11*dwdx + b10*dwdy ) &
         + s23(igpt,i)*( b12*dwdy + b11*(1.d0+dwdz) ) &
         + s13(igpt,i)*( b12*dwdx + b10*(1.d0+dwdz) ) )
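
    ! local node 5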
    r13 = r13 + &
         ( s11(igpt,i)*b13*(1.d0+dudx) + s22(igpt,i)*b14*dudy + s33(igpt,i)*b15*dudz &
         + s12(igpt,i)*( b14*(1.d0+dudx) + b13*dudy ) &
         + s23(igpt,i)*( b15*dudy + b14*dudz ) &
         + s13(igpt,i)*( b15*(1.d0+dudx) + b13*dudz ) )
    r14 = r14 + &
         ( s11(igpt,i)*b13*dvdx + s22(igpt,i)*b14*(1.d0+dvdy) + s33(igpt,i)*b15*dvdz &
         + s12(igpt,i)*( b13*(1.d0+dvdy) + b14*dvdx ) &
         + s23(igpt,i)*( b15*(1.d0+dvdy) + b14*dvdz ) &
         + s13(igpt,i)*( b15*dvdx + b13*dvdz ) )
    r15 = r15 + &
         ( s11(igpt,i)*b13*dwdx + s22(igpt,i)*b14*dwdy + s33(igpt,i)*b15*(1.d0+dwdz) &
         + s12(igpt,i)*( b14*dwdx + b13*dwdy ) &
         + s23(igpt,i)*( b15*dwdy + b14*(1.d0+dwdz) ) &
         + s13(igpt,i)*( b15*dwdx + b13*(1.d0+dwdz) ) )
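
    ! local node 6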
    r16 = r16 + &
         ( s11(igpt,i)*b16*(1.d0+dudx) + s22(igpt,i)*b17*dudy + s33(igpt,i)*b18*dudz &
         + s12(igpt,i)*( b17*(1.d0+dudx) + b16*dudy ) &
         + s23(igpt,i)*( b18*dudy + b17*dudz ) &
         + s13(igpt,i)*( b18*(1.d0+dudx) + b16*dudz ) )
    r17 = r17 + &
         ( s11(igpt,i)*b16*dvdx + s22(igpt,i)*b17*(1.d0+dvdy) + s33(igpt,i)*b18*dvdz &
         + s12(igpt,i)*( b16*(1.d0+dvdy) + b17*dvdx ) &
         + s23(igpt,i)*( b18*(1.d0+dvdy) + b17*dvdz ) &
         + s13(igpt,i)*( b18*dvdx + b16*dvdz ) )
    r18 = r18 + &
         ( s11(igpt,i)*b16*dwdx + s22(igpt,i)*b17*dwdy + s33(igpt,i)*b18*(1.d0+dwdz) &
         + s12(igpt,i)*( b17*dwdx + b16*dwdy ) &
         + s23(igpt,i)*( b18*dwdy + b17*(1.d0+dwdz) ) &
         + s13(igpt,i)*( b18*dwdx + b16*(1.d0+dwdz) ) )
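
    ! local node 7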
    r19 = r19 + &
         ( s11(igpt,i)*b19*(1.d0+dudx) + s22(igpt,i)*b20*dudy + s33(igpt,i)*b21*dudz &
         + s12(igpt,i)*( b20*(1.d0+dudx) + b19*dudy ) &
         + s23(igpt,i)*( b21*dudy + b20*dudz ) &
         + s13(igpt,i)*( b21*(1.d0+dudx) + b19*dudz ) )
    r20 = r20 + &
         ( s11(igpt,i)*b19*dvdx + s22(igpt,i)*b20*(1.d0+dvdy) + s33(igpt,i)*b21*dvdz &
         + s12(igpt,i)*( b19*(1.d0+dvdy) + b20*dvdx ) &
         + s23(igpt,i)*( b21*(1.d0+dvdy) + b20*dvdz ) &
         + s13(igpt,i)*( b21*dvdx + b19*dvdz ) )
    r21 = r21 + &
         ( s11(igpt,i)*b19*dwdx + s22(igpt,i)*b20*dwdy + s33(igpt,i)*b21*(1.d0+dwdz) &
         + s12(igpt,i)*( b20*dwdx + b19*dwdy ) &
         + s23(igpt,i)*( b21*dwdy + b20*(1.d0+dwdz) ) &
         + s13(igpt,i)*( b21*dwdx + b19*(1.d0+dwdz) ) )
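
    ! local node 8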
    r22 = r22 + &
         ( s11(igpt,i)*b22*(1.d0+dudx) + s22(igpt,i)*b23*dudy + s33(igpt,i)*b24*dudz &
         + s12(igpt,i)*( b23*(1.d0+dudx) + b22*dudy ) &
         + s23(igpt,i)*( b24*dudy + b23*dudz ) &
         + s13(igpt,i)*( b24*(1.d0+dudx) + b22*dudz ) )
    r23 = r23 + &
         ( s11(igpt,i)*b22*dvdx + s22(igpt,i)*b23*(1.d0+dvdy) + s33(igpt,i)*b24*dvdz &
         + s12(igpt,i)*( b22*(1.d0+dvdy) + b23*dvdx ) &
         + s23(igpt,i)*( b24*(1.d0+dvdy) + b23*dvdz ) &
         + s13(igpt,i)*( b24*dvdx + b22*dvdz ) )
    r24 = r24 + &
         ( s11(igpt,i)*b22*dwdx + s22(igpt,i)*b23*dwdy + s33(igpt,i)*b24*(1.d0+dwdz) &
         + s12(igpt,i)*( b23*dwdx + b22*dwdy ) &
         + s23(igpt,i)*( b24*dwdy + b23*(1.d0+dwdz) ) &
         + s13(igpt,i)*( b24*dwdx + b22*(1.d0+dwdz) ) )
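
    ! local node 9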
    r25 = r25 + &
         ( s11(igpt,i)*b25*(1.d0+dudx) + s22(igpt,i)*b26*dudy + s33(igpt,i)*b27*dudz &
         + s12(igpt,i)*( b26*(1.d0+dudx) + b25*dudy ) &
         + s23(igpt,i)*( b27*dudy + b26*dudz ) &
         + s13(igpt,i)*( b27*(1.d0+dudx) + b25*dudz ) )
    r26 = r26 + &
         ( s11(igpt,i)*b25*dvdx + s22(igpt,i)*b26*(1.d0+dvdy) + s33(igpt,i)*b27*dvdz &
         + s12(igpt,i)*( b25*(1.d0+dvdy) + b26*dvdx ) &
         + s23(igpt,i)*( b27*(1.d0+dvdy) + b26*dvdz ) &
         + s13(igpt,i)*( b27*dvdx + b25*dvdz ) )
    r27 = r27 + &
         ( s11(igpt,i)*b25*dwdx + s22(igpt,i)*b26*dwdy + s33(igpt,i)*b27*(1.d0+dwdz) &
         + s12(igpt,i)*( b26*dwdx + b25*dwdy ) &
         + s23(igpt,i)*( b27*dwdy + b26*(1.d0+dwdz) ) &
         + s13(igpt,i)*( b27*dwdx + b25*(1.d0+dwdz) ) )
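
    ! local node 10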
    r28 = r28 + &
         ( s11(igpt,i)*b28*(1.d0+dudx) + s22(igpt,i)*b29*dudy + s33(igpt,i)*b30*dudz &
         + s12(igpt,i)*( b29*(1.d0+dudx) + b28*dudy ) &
         + s23(igpt,i)*( b30*dudy + b29*dudz ) &
         + s13(igpt,i)*( b30*(1.d0+dudx) + b28*dudz ) )
    r29 = r29 + &
         ( s11(igpt,i)*b28*dvdx + s22(igpt,i)*b29*(1.d0+dvdy) + s33(igpt,i)*b30*dvdz &
         + s12(igpt,i)*( b28*(1.d0+dvdy) + b29*dvdx ) &
         + s23(igpt,i)*( b30*(1.d0+dvdy) + b29*dvdz ) &
         + s13(igpt,i)*( b30*dvdx + b28*dvdz ) )
    r30 = r30 + &
         ( s11(igpt,i)*b28*dwdx + s22(igpt,i)*b29*dwdy + s33(igpt,i)*b30*(1.d0+dwdz) &
         + s12(igpt,i)*( b29*dwdx + b28*dwdy ) &
         + s23(igpt,i)*( b30*dwdy + b29*(1.d0+dwdz) ) &
         + s13(igpt,i)*( b30*dwdx + b28*(1.d0+dwdz) ) )
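
    ! Scatter into the global internal force vector; the factor
    ! 0.04166666666666667 = 1/24 combines the Gauss weight (1/4) with the
    ! 1/6 from the 6V scaling carried by the b coefficients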
    r_in(k1n1) = r_in(k1n1) - r1*0.04166666666666667d0
    r_in(k2n1) = r_in(k2n1) - r2*0.04166666666666667d0
    r_in(k3n1) = r_in(k3n1) - r3*0.04166666666666667d0

    r_in(k1n2) = r_in(k1n2) - r4*0.04166666666666667d0
    r_in(k2n2) = r_in(k2n2) - r5*0.04166666666666667d0
    r_in(k3n2) = r_in(k3n2) - r6*0.04166666666666667d0

    r_in(k1n3) = r_in(k1n3) - r7*0.04166666666666667d0
    r_in(k2n3) = r_in(k2n3) - r8*0.04166666666666667d0
    r_in(k3n3) = r_in(k3n3) - r9*0.04166666666666667d0

    r_in(k1n4) = r_in(k1n4) - r10*0.04166666666666667d0
    r_in(k2n4) = r_in(k2n4) - r11*0.04166666666666667d0
    r_in(k3n4) = r_in(k3n4) - r12*0.04166666666666667d0

    r_in(k1n5) = r_in(k1n5) - r13*0.04166666666666667d0
    r_in(k2n5) = r_in(k2n5) - r14*0.04166666666666667d0
    r_in(k3n5) = r_in(k3n5) - r15*0.04166666666666667d0

    r_in(k1n6) = r_in(k1n6) - r16*0.04166666666666667d0
    r_in(k2n6) = r_in(k2n6) - r17*0.04166666666666667d0
    r_in(k3n6) = r_in(k3n6) - r18*0.04166666666666667d0

    r_in(k1n7) = r_in(k1n7) - r19*0.04166666666666667d0
    r_in(k2n7) = r_in(k2n7) - r20*0.04166666666666667d0
    r_in(k3n7) = r_in(k3n7) - r21*0.04166666666666667d0

    r_in(k1n8) = r_in(k1n8) - r22*0.04166666666666667d0
    r_in(k2n8) = r_in(k2n8) - r23*0.04166666666666667d0
    r_in(k3n8) = r_in(k3n8) - r24*0.04166666666666667d0

    r_in(k1n9) = r_in(k1n9) - r25*0.04166666666666667d0
    r_in(k2n9) = r_in(k2n9) - r26*0.04166666666666667d0
    r_in(k3n9) = r_in(k3n9) - r27*0.04166666666666667d0

    r_in(k1n10) = r_in(k1n10) - r28*0.04166666666666667d0
    r_in(k2n10) = r_in(k2n10) - r29*0.04166666666666667d0
    r_in(k3n10) = r_in(k3n10) - r30*0.04166666666666667d0
!
!   Interfaces of the subroutines called above:
!
!     SUBROUTINE artificialdamping(numcstet, numnp, rho, cd_fastest, DetFold, dt, F, Fdot, Vx6, StrssVisco)
!     SUBROUTINE arruda_boyce(Cij, S11, S22, S33, S12, S23, S13, ielem, mu, kappa)