liuganghuggingface committed on
Commit
f00cec9
1 Parent(s): 8c463a9

Update graph_decoder/transformer.py

Browse files
Files changed (1) hide show
  1. graph_decoder/transformer.py +33 -2
graph_decoder/transformer.py CHANGED
@@ -4,6 +4,38 @@ from .layers import Attention, MLP
4
  from .conditions import TimestepEmbedder, ConditionEmbedder
5
  # from .diffusion_utils import PlaceHolder
6
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
7
def modulate(x, shift, scale):
    """Apply an adaptive scale-and-shift to x.

    shift/scale are per-sample vectors; unsqueezing dim 1 broadcasts them
    across the sequence dimension of x. Returns x * (1 + scale) + shift.
    """
    gain = 1 + scale.unsqueeze(1)
    offset = shift.unsqueeze(1)
    return x * gain + offset
9
 
@@ -98,8 +130,7 @@ class Transformer(nn.Module):
98
 
99
  # X: B * N * dx, E: B * N * N * de
100
  X, E = self.output_layer(X, X_in, E_in, c, t, node_mask)
101
- # return PlaceHolder(X=X, E=E, y=None).mask(node_mask)
102
- return X, E
103
 
104
  class Block(nn.Module):
105
  def __init__(self, hidden_size, num_heads, mlp_ratio=4.0, **block_kwargs):
 
4
  from .conditions import TimestepEmbedder, ConditionEmbedder
5
  # from .diffusion_utils import PlaceHolder
6
 
7
#### graph utils
class PlaceHolder:
    """Container for one batch of graph tensors.

    Holds node features X (bs, n, dx), edge features E (bs, n, n, de) and an
    optional graph-level tensor y, and offers in-place dtype/device casting
    and padding-mask utilities. Methods return self for chaining.
    """

    def __init__(self, X, E, y):
        self.X = X  # node features
        self.E = E  # edge features
        self.y = y  # graph-level features/target (may be None)

    def type_as(self, x: torch.Tensor, categorical: bool = False):
        """Changes the device and dtype of X, E, y."""
        self.X, self.E = self.X.type_as(x), self.E.type_as(x)
        if categorical:
            # y is only cast on request; it may be None otherwise.
            self.y = self.y.type_as(x)
        return self

    def mask(self, node_mask, collapse=False):
        """Mask out padded nodes/edges.

        With collapse=False, padded entries are zeroed. With collapse=True,
        X and E are argmax-collapsed to class indices first, and padded
        entries are marked with -1 instead.
        """
        node_col = node_mask.unsqueeze(-1)      # bs, n, 1
        row_mask = node_col.unsqueeze(2)        # bs, n, 1, 1
        col_mask = node_col.unsqueeze(1)        # bs, 1, n, 1

        if not collapse:
            self.X = self.X * node_col
            self.E = self.E * row_mask * col_mask
            # E must stay symmetric across the two node dimensions.
            assert torch.allclose(self.E, torch.transpose(self.E, 1, 2))
        else:
            self.X = torch.argmax(self.X, dim=-1)
            self.E = torch.argmax(self.E, dim=-1)
            self.X[node_mask == 0] = -1
            self.E[(row_mask * col_mask).squeeze(-1) == 0] = -1
        return self
39
def modulate(x, shift, scale):
    """Broadcast per-sample shift/scale over dim 1 and modulate x as
    x * (1 + scale) + shift (AdaLN-style conditioning)."""
    return x * (1 + scale.unsqueeze(1)) + shift.unsqueeze(1)
41
 
 
130
 
131
  # X: B * N * dx, E: B * N * N * de
132
  X, E = self.output_layer(X, X_in, E_in, c, t, node_mask)
133
+ return PlaceHolder(X=X, E=E, y=None).mask(node_mask)
 
134
 
135
  class Block(nn.Module):
136
  def __init__(self, hidden_size, num_heads, mlp_ratio=4.0, **block_kwargs):