unet generator: structure dumps and per-layer profiling for the unet-256 and unet-128 variants (1-channel 1280x2048 input, ngf=32, InstanceNorm2d)

unet-256 structure

UnetSkipConnectionBlock(
  (model): Sequential(
    (0): Conv2d(1, 32, kernel_size=(4, 4), stride=(2, 2), padding=(1, 1))
    (1): UnetSkipConnectionBlock(
      (model): Sequential(
        (0): LeakyReLU(negative_slope=0.2, inplace=True)
        (1): Conv2d(32, 64, kernel_size=(4, 4), stride=(2, 2), padding=(1, 1))
        (2): InstanceNorm2d(64, eps=1e-05, momentum=0.1, affine=False, track_running_stats=False)
        (3): UnetSkipConnectionBlock(
          (model): Sequential(
            (0): LeakyReLU(negative_slope=0.2, inplace=True)
            (1): Conv2d(64, 128, kernel_size=(4, 4), stride=(2, 2), padding=(1, 1))
            (2): InstanceNorm2d(128, eps=1e-05, momentum=0.1, affine=False, track_running_stats=False)
            (3): UnetSkipConnectionBlock(
              (model): Sequential(
                (0): LeakyReLU(negative_slope=0.2, inplace=True)
                (1): Conv2d(128, 256, kernel_size=(4, 4), stride=(2, 2), padding=(1, 1))
                (2): InstanceNorm2d(256, eps=1e-05, momentum=0.1, affine=False, track_running_stats=False)
                (3): UnetSkipConnectionBlock(
                  (model): Sequential(
                    (0): LeakyReLU(negative_slope=0.2, inplace=True)
                    (1): Conv2d(256, 256, kernel_size=(4, 4), stride=(2, 2), padding=(1, 1))
                    (2): InstanceNorm2d(256, eps=1e-05, momentum=0.1, affine=False, track_running_stats=False)
                    (3): UnetSkipConnectionBlock(
                      (model): Sequential(
                        (0): LeakyReLU(negative_slope=0.2, inplace=True)
                        (1): Conv2d(256, 256, kernel_size=(4, 4), stride=(2, 2), padding=(1, 1))
                        (2): InstanceNorm2d(256, eps=1e-05, momentum=0.1, affine=False, track_running_stats=False)
                        (3): UnetSkipConnectionBlock(
                          (model): Sequential(
                            (0): LeakyReLU(negative_slope=0.2, inplace=True)
                            (1): Conv2d(256, 256, kernel_size=(4, 4), stride=(2, 2), padding=(1, 1))
                            (2): InstanceNorm2d(256, eps=1e-05, momentum=0.1, affine=False, track_running_stats=False)
                            (3): UnetSkipConnectionBlock(
                              (model): Sequential(
                                (0): LeakyReLU(negative_slope=0.2, inplace=True)
                                (1): Conv2d(256, 256, kernel_size=(4, 4), stride=(2, 2), padding=(1, 1))
                                (2): ReLU(inplace=True)
                                (3): ConvTranspose2d(256, 256, kernel_size=(4, 4), stride=(2, 2), padding=(1, 1))
                                (4): InstanceNorm2d(256, eps=1e-05, momentum=0.1, affine=False, track_running_stats=False)
                              )
                            )
                            (4): ReLU(inplace=True)
                            (5): ConvTranspose2d(512, 256, kernel_size=(4, 4), stride=(2, 2), padding=(1, 1))
                            (6): InstanceNorm2d(256, eps=1e-05, momentum=0.1, affine=False, track_running_stats=False)
                          )
                        )
                        (4): ReLU(inplace=True)
                        (5): ConvTranspose2d(512, 256, kernel_size=(4, 4), stride=(2, 2), padding=(1, 1))
                        (6): InstanceNorm2d(256, eps=1e-05, momentum=0.1, affine=False, track_running_stats=False)
                      )
                    )
                    (4): ReLU(inplace=True)
                    (5): ConvTranspose2d(512, 256, kernel_size=(4, 4), stride=(2, 2), padding=(1, 1))
                    (6): InstanceNorm2d(256, eps=1e-05, momentum=0.1, affine=False, track_running_stats=False)
                  )
                )
                (4): ReLU(inplace=True)
                (5): ConvTranspose2d(512, 128, kernel_size=(4, 4), stride=(2, 2), padding=(1, 1))
                (6): InstanceNorm2d(128, eps=1e-05, momentum=0.1, affine=False, track_running_stats=False)
              )
            )
            (4): ReLU(inplace=True)
            (5): ConvTranspose2d(256, 64, kernel_size=(4, 4), stride=(2, 2), padding=(1, 1))
            (6): InstanceNorm2d(64, eps=1e-05, momentum=0.1, affine=False, track_running_stats=False)
          )
        )
        (4): ReLU(inplace=True)
        (5): ConvTranspose2d(128, 32, kernel_size=(4, 4), stride=(2, 2), padding=(1, 1))
        (6): InstanceNorm2d(32, eps=1e-05, momentum=0.1, affine=False, track_running_stats=False)
      )
    )
    (2): ReLU(inplace=True)
    (3): ConvTranspose2d(64, 1, kernel_size=(4, 4), stride=(2, 2), padding=(1, 1))
    (4): Tanh()
  )
)
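
This layout is the classic pix2pix-style recursive U-Net generator: strided 4x4 convolutions with LeakyReLU going down, transposed 4x4 convolutions with ReLU going up, InstanceNorm2d throughout, and skip connections implemented by channel concatenation (which is why every up-convolution except the innermost sees twice its level's channel count, e.g. ConvTranspose2d(512, 256)). Below is a minimal self-contained sketch that reproduces this module layout; it is not the original repository code, and the build_unet helper is hypothetical:

# Minimal sketch (assumptions: pix2pix-style recursion, build_unet is hypothetical)
import torch
import torch.nn as nn


class UnetSkipConnectionBlock(nn.Module):
    def __init__(self, outer_nc, inner_nc, input_nc=None,
                 submodule=None, outermost=False, innermost=False):
        super().__init__()
        self.outermost = outermost
        if input_nc is None:
            input_nc = outer_nc
        norm = nn.InstanceNorm2d  # defaults: affine=False, track_running_stats=False, as in the dump

        downconv = nn.Conv2d(input_nc, inner_nc, kernel_size=4, stride=2, padding=1)
        downrelu = nn.LeakyReLU(0.2, inplace=True)
        uprelu = nn.ReLU(inplace=True)

        if outermost:
            # outermost: no norm on the way down, Tanh (no norm) on the way up
            upconv = nn.ConvTranspose2d(inner_nc * 2, outer_nc, 4, 2, 1)
            model = [downconv, submodule, uprelu, upconv, nn.Tanh()]
        elif innermost:
            # bottleneck: no submodule, so the deconv sees inner_nc (no skip concat below it)
            upconv = nn.ConvTranspose2d(inner_nc, outer_nc, 4, 2, 1)
            model = [downrelu, downconv, uprelu, upconv, norm(outer_nc)]
        else:
            # the deconv sees inner_nc * 2 because the submodule concatenates its skip
            upconv = nn.ConvTranspose2d(inner_nc * 2, outer_nc, 4, 2, 1)
            model = [downrelu, downconv, norm(inner_nc),
                     submodule, uprelu, upconv, norm(outer_nc)]
        self.model = nn.Sequential(*model)

    def forward(self, x):
        if self.outermost:
            return self.model(x)
        return torch.cat([x, self.model(x)], 1)  # skip connection by concatenation


def build_unet(input_nc=1, output_nc=1, ngf=32, num_downs=8):
    """num_downs=8 gives the unet-256 layout above, num_downs=7 the unet-128 layout."""
    block = UnetSkipConnectionBlock(ngf * 8, ngf * 8, innermost=True)      # 256 <-> 256 bottleneck
    for _ in range(num_downs - 5):                                         # extra 256 <-> 256 levels
        block = UnetSkipConnectionBlock(ngf * 8, ngf * 8, submodule=block)
    block = UnetSkipConnectionBlock(ngf * 4, ngf * 8, submodule=block)     # 128 <-> 256
    block = UnetSkipConnectionBlock(ngf * 2, ngf * 4, submodule=block)     # 64  <-> 128
    block = UnetSkipConnectionBlock(ngf, ngf * 2, submodule=block)         # 32  <-> 64
    return UnetSkipConnectionBlock(output_nc, ngf, input_nc=input_nc,
                                   submodule=block, outermost=True)        # 1   <-> 32


if __name__ == "__main__":
    net = build_unet(num_downs=8)
    print(net)                               # prints a structure like the dump above
    y = net(torch.randn(1, 1, 256, 256))     # any H, W divisible by 2**num_downs
    print(y.shape)                           # torch.Size([1, 1, 256, 256])

With num_downs=8 this prints the unet-256 structure above; num_downs=7 gives the unet-128 structure in the next section. The outermost block skips the concatenation and ends in Tanh, and the innermost block applies no norm after its down-convolution, presumably because the bottleneck feature map is very small (5x8 for the 1280x2048 input profiled below).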

unet-128 structure

OrderedDict([('model', UnetSkipConnectionBlock(
  (model): Sequential(
    (0): Conv2d(1, 32, kernel_size=(4, 4), stride=(2, 2), padding=(1, 1))
    (1): UnetSkipConnectionBlock(
      (model): Sequential(
        (0): LeakyReLU(negative_slope=0.2, inplace=True)
        (1): Conv2d(32, 64, kernel_size=(4, 4), stride=(2, 2), padding=(1, 1))
        (2): InstanceNorm2d(64, eps=1e-05, momentum=0.1, affine=False, track_running_stats=False)
        (3): UnetSkipConnectionBlock(
          (model): Sequential(
            (0): LeakyReLU(negative_slope=0.2, inplace=True)
            (1): Conv2d(64, 128, kernel_size=(4, 4), stride=(2, 2), padding=(1, 1))
            (2): InstanceNorm2d(128, eps=1e-05, momentum=0.1, affine=False, track_running_stats=False)
            (3): UnetSkipConnectionBlock(
              (model): Sequential(
                (0): LeakyReLU(negative_slope=0.2, inplace=True)
                (1): Conv2d(128, 256, kernel_size=(4, 4), stride=(2, 2), padding=(1, 1))
                (2): InstanceNorm2d(256, eps=1e-05, momentum=0.1, affine=False, track_running_stats=False)
                (3): UnetSkipConnectionBlock(
                  (model): Sequential(
                    (0): LeakyReLU(negative_slope=0.2, inplace=True)
                    (1): Conv2d(256, 256, kernel_size=(4, 4), stride=(2, 2), padding=(1, 1))
                    (2): InstanceNorm2d(256, eps=1e-05, momentum=0.1, affine=False, track_running_stats=False)
                    (3): UnetSkipConnectionBlock(
                      (model): Sequential(
                        (0): LeakyReLU(negative_slope=0.2, inplace=True)
                        (1): Conv2d(256, 256, kernel_size=(4, 4), stride=(2, 2), padding=(1, 1))
                        (2): InstanceNorm2d(256, eps=1e-05, momentum=0.1, affine=False, track_running_stats=False)
                        (3): UnetSkipConnectionBlock(
                          (model): Sequential(
                            (0): LeakyReLU(negative_slope=0.2, inplace=True)
                            (1): Conv2d(256, 256, kernel_size=(4, 4), stride=(2, 2), padding=(1, 1))
                            (2): ReLU(inplace=True)
                            (3): ConvTranspose2d(256, 256, kernel_size=(4, 4), stride=(2, 2), padding=(1, 1))
                            (4): InstanceNorm2d(256, eps=1e-05, momentum=0.1, affine=False, track_running_stats=False)
                          )
                        )
                        (4): ReLU(inplace=True)
                        (5): ConvTranspose2d(512, 256, kernel_size=(4, 4), stride=(2, 2), padding=(1, 1))
                        (6): InstanceNorm2d(256, eps=1e-05, momentum=0.1, affine=False, track_running_stats=False)
                      )
                    )
                    (4): ReLU(inplace=True)
                    (5): ConvTranspose2d(512, 256, kernel_size=(4, 4), stride=(2, 2), padding=(1, 1))
                    (6): InstanceNorm2d(256, eps=1e-05, momentum=0.1, affine=False, track_running_stats=False)
                  )
                )
                (4): ReLU(inplace=True)
                (5): ConvTranspose2d(512, 128, kernel_size=(4, 4), stride=(2, 2), padding=(1, 1))
                (6): InstanceNorm2d(128, eps=1e-05, momentum=0.1, affine=False, track_running_stats=False)
              )
            )
            (4): ReLU(inplace=True)
            (5): ConvTranspose2d(256, 64, kernel_size=(4, 4), stride=(2, 2), padding=(1, 1))
            (6): InstanceNorm2d(64, eps=1e-05, momentum=0.1, affine=False, track_running_stats=False)
          )
        )
        (4): ReLU(inplace=True)
        (5): ConvTranspose2d(128, 32, kernel_size=(4, 4), stride=(2, 2), padding=(1, 1))
        (6): InstanceNorm2d(32, eps=1e-05, momentum=0.1, affine=False, track_running_stats=False)
      )
    )
    (2): ReLU(inplace=True)
    (3): ConvTranspose2d(64, 1, kernel_size=(4, 4), stride=(2, 2), padding=(1, 1))
    (4): Tanh()
  )
))])
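
The OrderedDict([('model', ...)]) wrapper is only a printing detail: this dump shows a container's _modules dict, keyed by 'model', rather than the module itself. Architecturally, unet-128 differs from unet-256 only by one fewer 256-channel level, i.e. 7 downsamplings instead of 8. Using the hypothetical build_unet sketch above:

net_128 = build_unet(input_nc=1, output_nc=1, ngf=32, num_downs=7)
print(net_128)   # same layout as this dump, minus the OrderedDict wrapper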

unet-128 time

                                             module name    input shape   output shape      params memory(MB)               MAdd             Flops   MemRead(B)  MemWrite(B) duration[%]     MemR+W(B)
0                                          model.model.0    1 1280 2048    32 640 1024       544.0      80.00      671,088,640.0     356,515,840.0   10487936.0   83886080.0       2.58%  9.437402e+07
1                                  model.model.1.model.0    32 640 1024    32 640 1024         0.0      80.00                0.0      20,971,520.0   83886080.0   83886080.0       2.71%  1.677722e+08
2                                  model.model.1.model.1    32 640 1024     64 320 512     32832.0      40.00   10,737,418,240.0   5,379,194,880.0   84017408.0   41943040.0       5.83%  1.259604e+08
3                                  model.model.1.model.2     64 320 512     64 320 512         0.0      40.00                0.0               0.0          0.0          0.0       0.76%  0.000000e+00
4                          model.model.1.model.3.model.0     64 320 512     64 320 512         0.0      40.00                0.0      10,485,760.0   41943040.0   41943040.0       2.79%  8.388608e+07
5                          model.model.1.model.3.model.1     64 320 512    128 160 256    131200.0      20.00   10,737,418,240.0   5,373,952,000.0   42467840.0   20971520.0       3.74%  6.343936e+07
6                          model.model.1.model.3.model.2    128 160 256    128 160 256         0.0      20.00                0.0               0.0          0.0          0.0       2.12%  0.000000e+00
7                  model.model.1.model.3.model.3.model.0    128 160 256    128 160 256         0.0      20.00                0.0       5,242,880.0   20971520.0   20971520.0       1.93%  4.194304e+07
8                  model.model.1.model.3.model.3.model.1    128 160 256    256  80 128    524544.0      10.00   10,737,418,240.0   5,371,330,560.0   23069696.0   10485760.0       3.04%  3.355546e+07
9                  model.model.1.model.3.model.3.model.2    256  80 128    256  80 128         0.0      10.00                0.0               0.0          0.0          0.0       0.29%  0.000000e+00
10         model.model.1.model.3.model.3.model.3.model.0    256  80 128    256  80 128         0.0      10.00                0.0       2,621,440.0   10485760.0   10485760.0       0.69%  2.097152e+07
11         model.model.1.model.3.model.3.model.3.model.1    256  80 128    256  40  64   1048832.0       2.50    5,368,709,120.0   2,685,009,920.0   14681088.0    2621440.0       3.09%  1.730253e+07
12         model.model.1.model.3.model.3.model.3.model.2    256  40  64    256  40  64         0.0       2.50                0.0               0.0          0.0          0.0       0.52%  0.000000e+00
13     model.model.1.model.3.model.3.model.3.model.3....    256  40  64    256  40  64         0.0       2.50                0.0         655,360.0    2621440.0    2621440.0       0.16%  5.242880e+06
14     model.model.1.model.3.model.3.model.3.model.3....    256  40  64    256  20  32   1048832.0       0.62    1,342,177,280.0     671,252,480.0    6816768.0     655360.0       2.84%  7.472128e+06
15     model.model.1.model.3.model.3.model.3.model.3....    256  20  32    256  20  32         0.0       0.62                0.0               0.0          0.0          0.0       0.05%  0.000000e+00
16     model.model.1.model.3.model.3.model.3.model.3....    256  20  32    256  20  32         0.0       0.62                0.0         163,840.0     655360.0     655360.0       0.04%  1.310720e+06
17     model.model.1.model.3.model.3.model.3.model.3....    256  20  32    256  10  16   1048832.0       0.16      335,544,320.0     167,813,120.0    4850688.0     163840.0       2.90%  5.014528e+06
18     model.model.1.model.3.model.3.model.3.model.3....    256  10  16    256  10  16         0.0       0.16           40,960.0          40,960.0     163840.0     163840.0       0.01%  3.276800e+05
19     model.model.1.model.3.model.3.model.3.model.3....    256  10  16    256  20  32   1048832.0       0.62      335,544,320.0               0.0          0.0          0.0       0.49%  0.000000e+00
20     model.model.1.model.3.model.3.model.3.model.3....    256  20  32    256  20  32         0.0       0.62                0.0               0.0          0.0          0.0       0.09%  0.000000e+00
21     model.model.1.model.3.model.3.model.3.model.3....    512  20  32    512  20  32         0.0       1.25          327,680.0         327,680.0    1310720.0    1310720.0       0.01%  2.621440e+06
22     model.model.1.model.3.model.3.model.3.model.3....    512  20  32    256  40  64   2097408.0       2.50    2,684,354,560.0               0.0          0.0          0.0       2.11%  0.000000e+00
23     model.model.1.model.3.model.3.model.3.model.3....    256  40  64    256  40  64         0.0       2.50                0.0               0.0          0.0          0.0       0.08%  0.000000e+00
24         model.model.1.model.3.model.3.model.3.model.4    512  40  64    512  40  64         0.0       5.00        1,310,720.0       1,310,720.0    5242880.0    5242880.0       0.63%  1.048576e+07
25         model.model.1.model.3.model.3.model.3.model.5    512  40  64    256  80 128   2097408.0      10.00   10,737,418,240.0               0.0          0.0          0.0       7.47%  0.000000e+00
26         model.model.1.model.3.model.3.model.3.model.6    256  80 128    256  80 128         0.0      10.00                0.0               0.0          0.0          0.0       0.30%  0.000000e+00
27                 model.model.1.model.3.model.3.model.4    512  80 128    512  80 128         0.0      20.00        5,242,880.0       5,242,880.0   20971520.0   20971520.0       0.54%  4.194304e+07
28                 model.model.1.model.3.model.3.model.5    512  80 128    128 160 256   1048704.0      20.00   21,474,836,480.0               0.0          0.0          0.0       9.57%  0.000000e+00
29                 model.model.1.model.3.model.3.model.6    128 160 256    128 160 256         0.0      20.00                0.0               0.0          0.0          0.0       0.38%  0.000000e+00
30                         model.model.1.model.3.model.4    256 160 256    256 160 256         0.0      40.00       10,485,760.0      10,485,760.0   41943040.0   41943040.0       0.19%  8.388608e+07
31                         model.model.1.model.3.model.5    256 160 256     64 320 512    262208.0      40.00   21,474,836,480.0               0.0          0.0          0.0      14.56%  0.000000e+00
32                         model.model.1.model.3.model.6     64 320 512     64 320 512         0.0      40.00                0.0               0.0          0.0          0.0       1.50%  0.000000e+00
33                                 model.model.1.model.4    128 320 512    128 320 512         0.0      80.00       20,971,520.0      20,971,520.0   83886080.0   83886080.0       0.38%  1.677722e+08
34                                 model.model.1.model.5    128 320 512    32 640 1024     65568.0      80.00   21,474,836,480.0               0.0          0.0          0.0      19.28%  0.000000e+00
35                                 model.model.1.model.6    32 640 1024    32 640 1024         0.0      80.00                0.0               0.0          0.0          0.0       2.15%  0.000000e+00
36                                         model.model.2    64 640 1024    64 640 1024         0.0     160.00       41,943,040.0      41,943,040.0  167772160.0  167772160.0       0.76%  3.355443e+08
37                                         model.model.3    64 640 1024    1 1280 2048      1025.0      10.00    1,342,177,280.0               0.0          0.0          0.0       3.10%  0.000000e+00
38                                         model.model.4    1 1280 2048    1 1280 2048         0.0      10.00                0.0               0.0          0.0          0.0       0.32%  0.000000e+00
total                                                                                   10456769.0    1012.19  119,534,100,480.0  20,125,532,160.0          0.0          0.0     100.00%  1.310825e+09
======================================================================================================================================================================================================
Total params: 10,456,769
------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------
Total memory: 1012.19MB
Total MAdd: 119.53GMAdd
Total Flops: 20.13GFlops
Total MemR+W: 1.22GB
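
The column set above (params, memory(MB), MAdd, Flops, MemRead/MemWrite, duration[%], MemR+W) matches what the torchstat package prints. Assuming that is the tool behind these tables, a minimal sketch for reproducing the unet-128 profile on a 1x1280x2048 input, reusing the hypothetical build_unet from the sketch above:

# Minimal sketch, assuming the tables were produced with torchstat (pip install torchstat)
from torchstat import stat

net_128 = build_unet(input_nc=1, output_nc=1, ngf=32, num_downs=7)
stat(net_128, (1, 1280, 2048))   # (channels, height, width); prints the per-layer table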

unet-256 time

                                             module name    input shape   output shape      params memory(MB)               MAdd             Flops   MemRead(B)  MemWrite(B) duration[%]     MemR+W(B)
0                                          model.model.0    1 1280 2048    32 640 1024       544.0      80.00      671,088,640.0     356,515,840.0   10487936.0   83886080.0       5.41%  9.437402e+07
1                                  model.model.1.model.0    32 640 1024    32 640 1024         0.0      80.00                0.0      20,971,520.0   83886080.0   83886080.0       2.45%  1.677722e+08
2                                  model.model.1.model.1    32 640 1024     64 320 512     32832.0      40.00   10,737,418,240.0   5,379,194,880.0   84017408.0   41943040.0       5.17%  1.259604e+08
3                                  model.model.1.model.2     64 320 512     64 320 512         0.0      40.00                0.0               0.0          0.0          0.0       1.71%  0.000000e+00
4                          model.model.1.model.3.model.0     64 320 512     64 320 512         0.0      40.00                0.0      10,485,760.0   41943040.0   41943040.0       3.34%  8.388608e+07
5                          model.model.1.model.3.model.1     64 320 512    128 160 256    131200.0      20.00   10,737,418,240.0   5,373,952,000.0   42467840.0   20971520.0       3.67%  6.343936e+07
6                          model.model.1.model.3.model.2    128 160 256    128 160 256         0.0      20.00                0.0               0.0          0.0          0.0       1.46%  0.000000e+00
7                  model.model.1.model.3.model.3.model.0    128 160 256    128 160 256         0.0      20.00                0.0       5,242,880.0   20971520.0   20971520.0       1.16%  4.194304e+07
8                  model.model.1.model.3.model.3.model.1    128 160 256    256  80 128    524544.0      10.00   10,737,418,240.0   5,371,330,560.0   23069696.0   10485760.0       4.97%  3.355546e+07
9                  model.model.1.model.3.model.3.model.2    256  80 128    256  80 128         0.0      10.00                0.0               0.0          0.0          0.0       1.06%  0.000000e+00
10         model.model.1.model.3.model.3.model.3.model.0    256  80 128    256  80 128         0.0      10.00                0.0       2,621,440.0   10485760.0   10485760.0       0.54%  2.097152e+07
11         model.model.1.model.3.model.3.model.3.model.1    256  80 128    256  40  64   1048832.0       2.50    5,368,709,120.0   2,685,009,920.0   14681088.0    2621440.0       2.26%  1.730253e+07
12         model.model.1.model.3.model.3.model.3.model.2    256  40  64    256  40  64         0.0       2.50                0.0               0.0          0.0          0.0       0.09%  0.000000e+00
13     model.model.1.model.3.model.3.model.3.model.3....    256  40  64    256  40  64         0.0       2.50                0.0         655,360.0    2621440.0    2621440.0       0.16%  5.242880e+06
14     model.model.1.model.3.model.3.model.3.model.3....    256  40  64    256  20  32   1048832.0       0.62    1,342,177,280.0     671,252,480.0    6816768.0     655360.0       1.25%  7.472128e+06
15     model.model.1.model.3.model.3.model.3.model.3....    256  20  32    256  20  32         0.0       0.62                0.0               0.0          0.0          0.0       0.06%  0.000000e+00
16     model.model.1.model.3.model.3.model.3.model.3....    256  20  32    256  20  32         0.0       0.62                0.0         163,840.0     655360.0     655360.0       0.03%  1.310720e+06
17     model.model.1.model.3.model.3.model.3.model.3....    256  20  32    256  10  16   1048832.0       0.16      335,544,320.0     167,813,120.0    4850688.0     163840.0       0.21%  5.014528e+06
18     model.model.1.model.3.model.3.model.3.model.3....    256  10  16    256  10  16         0.0       0.16                0.0               0.0          0.0          0.0       0.04%  0.000000e+00
19     model.model.1.model.3.model.3.model.3.model.3....    256  10  16    256  10  16         0.0       0.16                0.0          40,960.0     163840.0     163840.0       0.01%  3.276800e+05
20     model.model.1.model.3.model.3.model.3.model.3....    256  10  16    256   5   8   1048832.0       0.04       83,886,080.0      41,953,280.0    4359168.0      40960.0       0.53%  4.400128e+06
21     model.model.1.model.3.model.3.model.3.model.3....    256   5   8    256   5   8         0.0       0.04           10,240.0          10,240.0      40960.0      40960.0       0.00%  8.192000e+04
22     model.model.1.model.3.model.3.model.3.model.3....    256   5   8    256  10  16   1048832.0       0.16       83,886,080.0               0.0          0.0          0.0       2.28%  0.000000e+00
23     model.model.1.model.3.model.3.model.3.model.3....    256  10  16    256  10  16         0.0       0.16                0.0               0.0          0.0          0.0       0.04%  0.000000e+00
24     model.model.1.model.3.model.3.model.3.model.3....    512  10  16    512  10  16         0.0       0.31           81,920.0          81,920.0     327680.0     327680.0       0.85%  6.553600e+05
25     model.model.1.model.3.model.3.model.3.model.3....    512  10  16    256  20  32   2097408.0       0.62      671,088,640.0               0.0          0.0          0.0       0.34%  0.000000e+00
26     model.model.1.model.3.model.3.model.3.model.3....    256  20  32    256  20  32         0.0       0.62                0.0               0.0          0.0          0.0       0.05%  0.000000e+00
27     model.model.1.model.3.model.3.model.3.model.3....    512  20  32    512  20  32         0.0       1.25          327,680.0         327,680.0    1310720.0    1310720.0       0.01%  2.621440e+06
28     model.model.1.model.3.model.3.model.3.model.3....    512  20  32    256  40  64   2097408.0       2.50    2,684,354,560.0               0.0          0.0          0.0       2.55%  0.000000e+00
29     model.model.1.model.3.model.3.model.3.model.3....    256  40  64    256  40  64         0.0       2.50                0.0               0.0          0.0          0.0       0.64%  0.000000e+00
30         model.model.1.model.3.model.3.model.3.model.4    512  40  64    512  40  64         0.0       5.00        1,310,720.0       1,310,720.0    5242880.0    5242880.0       0.03%  1.048576e+07
31         model.model.1.model.3.model.3.model.3.model.5    512  40  64    256  80 128   2097408.0      10.00   10,737,418,240.0               0.0          0.0          0.0       6.23%  0.000000e+00
32         model.model.1.model.3.model.3.model.3.model.6    256  80 128    256  80 128         0.0      10.00                0.0               0.0          0.0          0.0       0.59%  0.000000e+00
33                 model.model.1.model.3.model.3.model.4    512  80 128    512  80 128         0.0      20.00        5,242,880.0       5,242,880.0   20971520.0   20971520.0       0.49%  4.194304e+07
34                 model.model.1.model.3.model.3.model.5    512  80 128    128 160 256   1048704.0      20.00   21,474,836,480.0               0.0          0.0          0.0      12.07%  0.000000e+00
35                 model.model.1.model.3.model.3.model.6    128 160 256    128 160 256         0.0      20.00                0.0               0.0          0.0          0.0       0.97%  0.000000e+00
36                         model.model.1.model.3.model.4    256 160 256    256 160 256         0.0      40.00       10,485,760.0      10,485,760.0   41943040.0   41943040.0       0.16%  8.388608e+07
37                         model.model.1.model.3.model.5    256 160 256     64 320 512    262208.0      40.00   21,474,836,480.0               0.0          0.0          0.0      11.37%  0.000000e+00
38                         model.model.1.model.3.model.6     64 320 512     64 320 512         0.0      40.00                0.0               0.0          0.0          0.0       1.15%  0.000000e+00
39                                 model.model.1.model.4    128 320 512    128 320 512         0.0      80.00       20,971,520.0      20,971,520.0   83886080.0   83886080.0       0.39%  1.677722e+08
40                                 model.model.1.model.5    128 320 512    32 640 1024     65568.0      80.00   21,474,836,480.0               0.0          0.0          0.0      18.57%  0.000000e+00
41                                 model.model.1.model.6    32 640 1024    32 640 1024         0.0      80.00                0.0               0.0          0.0          0.0       1.92%  0.000000e+00
42                                         model.model.2    64 640 1024    64 640 1024         0.0     160.00       41,943,040.0      41,943,040.0  167772160.0  167772160.0       1.09%  3.355443e+08
43                                         model.model.3    64 640 1024    1 1280 2048      1025.0      10.00    1,342,177,280.0               0.0          0.0          0.0       2.54%  0.000000e+00
44                                         model.model.4    1 1280 2048    1 1280 2048         0.0      10.00                0.0               0.0          0.0          0.0       0.09%  0.000000e+00
total                                                                                   13603009.0    1013.05  120,037,468,160.0  20,167,577,600.0          0.0          0.0     100.00%  1.315963e+09
======================================================================================================================================================================================================
Total params: 13,603,009
------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------
Total memory: 1013.05MB
Total MAdd: 120.04GMAdd
Total Flops: 20.17GFlops
Total MemR+W: 1.23GB
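
Sanity check on the two parameter totals: unet-256 has exactly one more 256-channel level than unet-128, i.e. one extra Conv2d(256, 256, 4x4) going down and one extra ConvTranspose2d(512, 256, 4x4) going up (the added InstanceNorm2d layers carry no parameters), and these two layers account for the entire difference of 13,603,009 - 10,456,769 = 3,146,240 parameters:

# parameters of the extra level in unet-256 (out_ch * in_ch * k * k + bias)
conv_down = 256 * 256 * 4 * 4 + 256     # Conv2d(256, 256, 4x4)          = 1,048,832
conv_up   = 512 * 256 * 4 * 4 + 256     # ConvTranspose2d(512, 256, 4x4) = 2,097,408
assert conv_down + conv_up == 13_603_009 - 10_456_769    # 3,146,240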