import paddle
import paddle.nn as nn
import paddle.nn.functional as F
import numpy as np
from PIL import Image
from models.networks import get_pad, ConvWithActivation, DeConvWithActivation
from models.idr import AIDR
def img2photo(imgs):
    """Convert a batch of [-1, 1]-normalized NCHW tensors to NHWC numpy images.

    Maps values from [-1, 1] to [0, 255] and moves channels last so the result
    can be fed to ``PIL.Image.fromarray`` (after casting to uint8).

    Fix: the original used torch-style pairwise ``.transpose(1, 2).transpose(2, 3)``,
    but Paddle's ``Tensor.transpose`` takes a full permutation list, so the
    NCHW -> NHWC move must be a single ``transpose([0, 2, 3, 1])``.
    """
    return ((imgs + 1) * 127.5).transpose([0, 2, 3, 1]).detach().cpu().numpy()
def visual(imgs):
    """Display the first image of a batch in the system image viewer."""
    photos = img2photo(imgs)
    first = photos[0].astype(np.uint8)
    Image.fromarray(first).show()
class Residual(nn.Layer):
    """Basic residual block: two 3x3 convolutions plus a skip connection.

    With ``same_shape=True`` the block keeps spatial size and channel count.
    With ``same_shape=False`` it downsamples by stride 2 and projects the
    identity path through a 1x1 convolution so shapes match at the addition.
    """

    def __init__(self, in_channels, out_channels, same_shape=True, **kwargs):
        super(Residual, self).__init__()
        self.same_shape = same_shape
        stride = 1 if same_shape else 2
        self.conv1 = nn.Conv2D(in_channels, in_channels, kernel_size=3,
                               padding=1, stride=stride)
        self.conv2 = nn.Conv2D(in_channels, out_channels, kernel_size=3,
                               padding=1)
        if not same_shape:
            # 1x1 projection shortcut: matches channels/stride of the main path.
            self.conv3 = nn.Conv2D(in_channels, out_channels, kernel_size=1,
                                   stride=stride)
        self.batch_norm2d = nn.BatchNorm2D(out_channels)

    def forward(self, x):
        shortcut = x
        out = F.relu(self.conv1(x))
        out = self.conv2(out)
        if not self.same_shape:
            shortcut = self.conv3(shortcut)
        # BatchNorm is applied to the *sum*, then a final ReLU.
        out = self.batch_norm2d(out + shortcut)
        return F.relu(out)
class ASPP(nn.Layer):
    """Atrous Spatial Pyramid Pooling (DeepLab-style) head.

    Combines a global-average-pooled context branch with four parallel
    branches (a 1x1 conv and three 3x3 atrous convs at rates 6/12/18),
    concatenates them, and fuses with a 1x1 convolution.

    Args:
        in_channel (int): channels of the input feature map.
        depth (int): channels of each branch and of the fused output.
    """

    def __init__(self, in_channel=512, depth=256):
        super(ASPP, self).__init__()
        # Global-context branch: GAP -> 1x1 conv -> bilinear upsample.
        self.mean = nn.AdaptiveAvgPool2D((1, 1))
        self.conv = nn.Conv2D(in_channel, depth, 1, 1)
        # k=1 s=1 no pad
        self.atrous_block1 = nn.Conv2D(in_channel, depth, 1, 1)
        self.atrous_block6 = nn.Conv2D(in_channel, depth, 3, 1, padding=6, dilation=6)
        self.atrous_block12 = nn.Conv2D(in_channel, depth, 3, 1, padding=12, dilation=12)
        self.atrous_block18 = nn.Conv2D(in_channel, depth, 3, 1, padding=18, dilation=18)
        # Fuse the 5 concatenated branches (5 * depth channels) back to `depth`.
        self.conv_1x1_output = nn.Conv2D(depth * 5, depth, 1, 1)

    def forward(self, x):
        size = x.shape[2:]
        image_features = self.mean(x)
        image_features = self.conv(image_features)
        # Fix: F.upsample is deprecated in Paddle; F.interpolate is the
        # documented replacement with identical behavior and defaults.
        image_features = F.interpolate(image_features, size=size, mode='bilinear')
        atrous_block1 = self.atrous_block1(x)
        atrous_block6 = self.atrous_block6(x)
        atrous_block12 = self.atrous_block12(x)
        atrous_block18 = self.atrous_block18(x)
        net = self.conv_1x1_output(paddle.concat(
            [image_features, atrous_block1, atrous_block6,
             atrous_block12, atrous_block18], axis=1))
        return net
class STRAIDR(nn.Layer):
def __init__(self, n_in_channel=3, num_c=48):
    """Build the STRAIDR text-erasing generator.

    Architecture (as visible here): a residual U-Net (coarse erasing) with
    lateral connections, a mask-prediction decoder branch, and an AIDR
    refinement sub-network.

    Args:
        n_in_channel (int): channels of the input image. Fix: the original
            ignored this parameter and hard-coded 3 in ``conv1``; it is now
            honored (default unchanged, so existing callers are unaffected).
        num_c (int): base channel count forwarded to the AIDR refinement net.
    """
    super(STRAIDR, self).__init__()
    #### U-Net ####
    # downsample (encoder)
    self.conv1 = ConvWithActivation(n_in_channel, 32, kernel_size=4, stride=2, padding=1)
    self.conva = ConvWithActivation(32, 32, kernel_size=3, stride=1, padding=1)
    self.convb = ConvWithActivation(32, 64, kernel_size=4, stride=2, padding=1)
    self.res1 = Residual(64, 64)
    self.res2 = Residual(64, 64)
    self.res3 = Residual(64, 128, same_shape=False)   # downsample to h/8
    self.res4 = Residual(128, 128)
    self.res5 = Residual(128, 256, same_shape=False)  # downsample to h/16
    self.res6 = Residual(256, 256)
    self.res7 = Residual(256, 512, same_shape=False)  # downsample to h/32
    self.res8 = Residual(512, 512)
    self.conv2 = ConvWithActivation(512, 512, kernel_size=1)
    # upsample (decoder); *2 input channels where skip features are concatenated
    self.deconv1 = DeConvWithActivation(512, 256, kernel_size=3, padding=1, stride=2)
    self.deconv2 = DeConvWithActivation(256 * 2, 128, kernel_size=3, padding=1, stride=2)
    self.deconv3 = DeConvWithActivation(128 * 2, 64, kernel_size=3, padding=1, stride=2)
    self.deconv4 = DeConvWithActivation(64 * 2, 32, kernel_size=3, padding=1, stride=2)
    self.deconv5 = DeConvWithActivation(64, 3, kernel_size=3, padding=1, stride=2)
    # lateral connections: 1x1 -> 3x3 -> 3x3 -> 1x1 bottleneck on each skip path
    self.lateral_connection1 = nn.Sequential(
        nn.Conv2D(256, 256, kernel_size=1, padding=0, stride=1),
        nn.Conv2D(256, 512, kernel_size=3, padding=1, stride=1),
        nn.Conv2D(512, 512, kernel_size=3, padding=1, stride=1),
        nn.Conv2D(512, 256, kernel_size=1, padding=0, stride=1), )
    self.lateral_connection2 = nn.Sequential(
        nn.Conv2D(128, 128, kernel_size=1, padding=0, stride=1),
        nn.Conv2D(128, 256, kernel_size=3, padding=1, stride=1),
        nn.Conv2D(256, 256, kernel_size=3, padding=1, stride=1),
        nn.Conv2D(256, 128, kernel_size=1, padding=0, stride=1), )
    self.lateral_connection3 = nn.Sequential(
        nn.Conv2D(64, 64, kernel_size=1, padding=0, stride=1),
        nn.Conv2D(64, 128, kernel_size=3, padding=1, stride=1),
        nn.Conv2D(128, 128, kernel_size=3, padding=1, stride=1),
        nn.Conv2D(128, 64, kernel_size=1, padding=0, stride=1), )
    self.lateral_connection4 = nn.Sequential(
        nn.Conv2D(32, 32, kernel_size=1, padding=0, stride=1),
        nn.Conv2D(32, 64, kernel_size=3, padding=1, stride=1),
        nn.Conv2D(64, 64, kernel_size=3, padding=1, stride=1),
        nn.Conv2D(64, 32, kernel_size=1, padding=0, stride=1), )
    # intermediate 3-channel output heads (deep supervision at two scales)
    self.conv_o1 = nn.Conv2D(64, 3, kernel_size=1)
    self.conv_o2 = nn.Conv2D(32, 3, kernel_size=1)
    ##### U-Net #####
    ### mask branch decoder ###
    self.mask_deconv_a = DeConvWithActivation(512, 256, kernel_size=3, padding=1, stride=2)
    self.mask_conv_a = ConvWithActivation(256, 128, kernel_size=3, padding=1, stride=1)
    self.mask_deconv_b = DeConvWithActivation(256, 128, kernel_size=3, padding=1, stride=2)
    self.mask_conv_b = ConvWithActivation(128, 64, kernel_size=3, padding=1, stride=1)
    self.mask_deconv_c = DeConvWithActivation(128, 64, kernel_size=3, padding=1, stride=2)
    self.mask_conv_c = ConvWithActivation(64, 32, kernel_size=3, padding=1, stride=1)
    self.mask_deconv_d = DeConvWithActivation(64, 32, kernel_size=3, padding=1, stride=2)
    self.mask_conv_d = nn.Conv2D(32, 3, kernel_size=1)
    ### mask branch ###
    ##### Refine sub-network ######
    self.refine = AIDR(num_c=num_c)
    # 1x1 channel expanders feeding skip features into the refine net
    self.c1 = nn.Conv2D(32, 64, kernel_size=1)
    self.c2 = nn.Conv2D(64, 128, kernel_size=1)
    self.sig = nn.Sigmoid()
def forward(self, x):
# x: 3, h, w
# downsample
x = self.conv1(x) # 32, h/2,w/2
x = self.conva(x) # 32, h/2,w/2
con_x1 = x
# print('con_x1: ',con_x1.shape)
# import pdb;pdb.set_trace()
x = self.convb(x) # 64, h/4,w/4
x = self.res1(x) # 64, h/4,w/4
con_x2 = x
# print('con_x2: ', con_x2.shape)
x = self.res2(x) # 64, h/4,w/4
x = self.res3(x) # 128, h/8,w/8
con_x3 = x
# print('con_x3: ', con_x3.shape)
x = self.res4(x) # 128, h/8,w/8
x = self.res5(x) # 256, h/16,w/16
con_x4 = x
# print('con_x4: ', con_x4.shape)
x = self.res6(x) # 256, h/16,w/16
# x_mask = self.nn(con_x4) ### for mask branch aspp
# x_mask = self.aspp(x_mask) ### for mask branch aspp
x_mask = x ### no aspp
# print('x_mask: ', x_mask.shape)
# import pdb;pdb.set_trace()
x = self.res7(x) # 512, h/32,w/32
x = self.res8(x) # 512, h/32,w/32
x = self.conv2(x) # 512, h/32,w/32
# upsample
x = self.deconv1(x) # 256, h/16,w/16
# print(x.shape,con_x4.shape, self.lateral_connection1(con_x4).s
没有合适的资源?快使用搜索试试~ 我知道了~
手写文字擦除第1名方案python源码+数据+模型.zip
共40个文件
pyc:19个
py:15个
sh:3个
1.该资源内容由用户上传,如若侵权请联系客服进行举报
2.虚拟产品一经售出概不退款(资源遇到问题,请及时私信上传者)
版权申诉
0 下载量 59 浏览量
2024-04-30
13:23:43
上传
评论
收藏 150.62MB ZIP 举报
温馨提示
1、该资源包括项目的全部源码,下载可以直接使用! 2、本项目适合作为计算机、数学、电子信息等专业的课程设计、期末大作业和毕设项目,作为参考资料学习借鉴。 3、本资源作为“参考资料”如果需要实现其他功能,需要能看懂代码,并且热爱钻研,自行调试。
资源推荐
资源详情
资源评论
收起资源包目录
手写文字擦除第1名方案python源码+数据+模型.zip (40个子文件)
work
utils.py 4KB
loss
Loss.py 5KB
__pycache__
Loss.cpython-38.pyc 2KB
Loss.cpython-37.pyc 2KB
losses.py 887B
STE_idr_best.pdparams 86.76MB
data
dataloader.py 5KB
__pycache__
dataloader.cpython-37.pyc 5KB
dataloader.cpython-38.pyc 5KB
dataloader.cpython-36.pyc 5KB
test_image_STE.py 5KB
STE_str_best.pdparams 75.51MB
compute_mask.py 1KB
test.sh 525B
train.sh 604B
zip.sh 71B
models
sa_gan.py 9KB
non_local.py 9KB
sa_aidr.py 9KB
networks.py 2KB
idr.py 5KB
Model.py 8KB
discriminator.py 2KB
__pycache__
sa_gan.cpython-37.pyc 6KB
discriminator.cpython-38.pyc 2KB
sa_aidr.cpython-38.pyc 5KB
sa_gan.cpython-38.pyc 6KB
discriminator.cpython-37.pyc 2KB
Model.cpython-38.pyc 1KB
non_local.cpython-38.pyc 7KB
networks.cpython-37.pyc 2KB
Model.cpython-37.pyc 1KB
idr.cpython-37.pyc 4KB
idr.cpython-38.pyc 3KB
networks.cpython-36.pyc 2KB
sa_gan.cpython-36.pyc 6KB
networks.cpython-38.pyc 2KB
train_STE.py 6KB
submit_dehw.zip 60KB
gauss.py 2KB
共 40 条
- 1
资源评论
FL1768317420
- 粉丝: 3844
- 资源: 4551
上传资源 快速赚钱
- 我的内容管理 展开
- 我的资源 快来上传第一个资源
- 我的收益 登录查看自己的收益
- 我的积分 登录查看自己的积分
- 我的C币 登录后查看C币余额
- 我的收藏
- 我的下载
- 下载帮助
安全验证
文档复制为VIP权益,开通VIP直接复制
信息提交成功