From f24eb753dbfe20b5ee1995b3ba08038ebb4d4f22 Mon Sep 17 00:00:00 2001
From: Isaac Zaydens
Date: Sat, 11 Dec 2021 15:31:17 -0800
Subject: [PATCH 1/4] for gcp

Signed-off-by: Isaac Zaydens
---
 pointnet/model.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/pointnet/model.py b/pointnet/model.py
index 8931fe8..685ff7b 100644
--- a/pointnet/model.py
+++ b/pointnet/model.py
@@ -90,7 +90,7 @@ def forward(self, x):
         return x
 
 class AttnEncoderBlock(nn.Module):
-    def __init__(self, device, embed_dim=64, num_heads=2, norm='batch1d', dim_ff=128):
+    def __init__(self, device, embed_dim=64, num_heads=1, norm='batch1d', dim_ff=128):
         super(AttnEncoderBlock, self).__init__()
         self.device = device
         self.num_heads = num_heads

From 6d964702eacb81f199fa3adf2b31180d6954629e Mon Sep 17 00:00:00 2001
From: Isaac Zaydens
Date: Sat, 11 Dec 2021 21:12:52 -0800
Subject: [PATCH 2/4] 2block2head no transform

Signed-off-by: Isaac Zaydens
---
 pointnet/model.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/pointnet/model.py b/pointnet/model.py
index 685ff7b..e036f0b 100644
--- a/pointnet/model.py
+++ b/pointnet/model.py
@@ -90,7 +90,7 @@ def forward(self, x):
         return x
 
 class AttnEncoderBlock(nn.Module):
-    def __init__(self, device, embed_dim=64, num_heads=1, norm='batch1d', dim_ff=128):
+    def __init__(self, device, embed_dim=64, num_heads=2, norm='batch1d', dim_ff=128):
         super(AttnEncoderBlock, self).__init__()
         self.device = device
         self.num_heads = num_heads
@@ -272,7 +272,7 @@ def feature_transform_regularizer(trans, device):
 
 # https://github.com/princeton-vl/SimpleView/blob/master/models/pointnet.py
 class PointNet(nn.Module):
-    def __init__(self, dataset, task, device, feature_transform=True, attention=True):
+    def __init__(self, dataset, task, device, feature_transform=False, attention=True):
         super().__init__()
         self.task = task
         self.device = device

From 8e7c28d0e23390772bd4a5c33d7d9a09bd3f684d Mon Sep 17 00:00:00 2001
From: Isaac Zaydens
Date: Sat, 11 Dec 2021 22:05:57 -0800
Subject: [PATCH 3/4] 2block2head no transform

Signed-off-by: Isaac Zaydens
---
 pointnet/model.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/pointnet/model.py b/pointnet/model.py
index e036f0b..594e796 100644
--- a/pointnet/model.py
+++ b/pointnet/model.py
@@ -96,7 +96,7 @@ def __init__(self, device, embed_dim=64, num_heads=2, norm='batch1d', dim_ff=128
         self.num_heads = num_heads
         self.embed_dim = embed_dim
         self.dim_ff = dim_ff
-        self.attn = nn.MultiheadAttention(embed_dim=self.embed_dim, num_heads=self.num_heads, batch_first=True)
+        self.attn = nn. (embed_dim=self.embed_dim, num_heads=self.num_heads, batch_first=True)
         self.ff1 = nn.Linear(64, self.dim_ff)
         self.ff2 = nn.Linear(self.dim_ff, 64)
         self.relu = nn.ReLU()

From 54cf998e83d81b0302fe346d3728d6b368f59d63 Mon Sep 17 00:00:00 2001
From: Isaac Zaydens
Date: Sat, 11 Dec 2021 22:07:00 -0800
Subject: [PATCH 4/4] 2block2head no transform

Signed-off-by: Isaac Zaydens
---
 pointnet/model.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/pointnet/model.py b/pointnet/model.py
index 594e796..e036f0b 100644
--- a/pointnet/model.py
+++ b/pointnet/model.py
@@ -96,7 +96,7 @@ def __init__(self, device, embed_dim=64, num_heads=2, norm='batch1d', dim_ff=128
         self.num_heads = num_heads
         self.embed_dim = embed_dim
         self.dim_ff = dim_ff
-        self.attn = nn. (embed_dim=self.embed_dim, num_heads=self.num_heads, batch_first=True)
+        self.attn = nn.MultiheadAttention(embed_dim=self.embed_dim, num_heads=self.num_heads, batch_first=True)
         self.ff1 = nn.Linear(64, self.dim_ff)
         self.ff2 = nn.Linear(self.dim_ff, 64)
         self.relu = nn.ReLU()
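
For context on the module these patches keep toggling, below is a minimal, runnable sketch of an AttnEncoderBlock built around nn.MultiheadAttention with batch_first=True. The fields visible in the hunks above (attn, ff1, ff2, relu) and the defaults (embed_dim=64, num_heads=2, dim_ff=128) come from the diffs; the forward pass (self-attention and feed-forward sublayers, each followed by a residual add and BatchNorm1d, matching norm='batch1d') is an assumption for illustration, not the repository's confirmed code, and the device argument is dropped for brevity.

# Sketch of the AttnEncoderBlock touched by patches 1-4. __init__ fields
# mirror the diff hunks; the forward pass is an assumed transformer-style
# encoder layer, not copied from pointnet/model.py.
import torch
import torch.nn as nn

class AttnEncoderBlock(nn.Module):
    def __init__(self, embed_dim=64, num_heads=2, dim_ff=128):
        super().__init__()
        # batch_first=True: inputs/outputs are (batch, num_points, embed_dim).
        # embed_dim must be divisible by num_heads (64 / 2 = 32 per head).
        self.attn = nn.MultiheadAttention(embed_dim=embed_dim,
                                          num_heads=num_heads,
                                          batch_first=True)
        self.ff1 = nn.Linear(embed_dim, dim_ff)
        self.ff2 = nn.Linear(dim_ff, embed_dim)
        self.relu = nn.ReLU()
        # BatchNorm1d normalizes over the channel dim, so we transpose
        # (B, N, C) -> (B, C, N) around each norm call.
        self.norm1 = nn.BatchNorm1d(embed_dim)
        self.norm2 = nn.BatchNorm1d(embed_dim)

    def forward(self, x):                     # x: (B, N, 64)
        attn_out, _ = self.attn(x, x, x)      # self-attention over the points
        x = x + attn_out                      # residual connection
        x = self.norm1(x.transpose(1, 2)).transpose(1, 2)
        ff_out = self.ff2(self.relu(self.ff1(x)))
        x = x + ff_out                        # residual connection
        return self.norm2(x.transpose(1, 2)).transpose(1, 2)

# Usage: 1024 points per cloud, each already embedded to 64 dims.
block = AttnEncoderBlock()
points = torch.randn(8, 1024, 64)
out = block(points)                           # shape: (8, 1024, 64)

Note that batch_first=True requires PyTorch >= 1.9; on older versions nn.MultiheadAttention expects (num_points, batch, embed_dim) and the inputs would need transposing instead.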