Skip to content

Commit 7e7930f

Browse files
committed
cont: remove all training related code
1 parent f750695 commit 7e7930f

File tree

4 files changed: +3 additions, −128 deletions

configs/open_model.yaml

Lines changed: 0 additions & 5 deletions

@@ -17,7 +17,6 @@ shape_model:
   encoder_with_cls_token: true
   num_encoder_latents: 512
   num_decoder_latents: 0
-  encoder_dynamic_latents: true
   embed_dim: 32
   width: 768
   num_heads: 12
@@ -31,9 +30,5 @@ shape_model:
   num_decoder_layers: 24
   dropout: 0.0
   num_codes: 16384
-  vq_beta: 0.8
-  sync_nu: 0.1
-  replace_freq: 0
-  skip_p: 0.5

 text_model_pretrained_model_name_or_path: "openai/clip-vit-large-patch14"

src/cube/model/autoencoder/one_d_autoencoder.py

Lines changed: 3 additions & 5 deletions

@@ -15,7 +15,6 @@
     generate_dense_grid_points,
     marching_cubes_with_warp,
 )
-from cube.model.autoencoder.sinkhorn_vq import SinkhornVectorQuantizer
 from cube.model.autoencoder.spherical_vq import SphericalVectorQuantizer
 from cube.model.transformers.attention import (
     EncoderCrossAttentionLayer,
@@ -265,6 +264,7 @@ class Config:
         num_decoder_layers: int = 23

         encoder_with_cls_token: bool = True
+        num_codes: int = 16384
         dropout: float = 0.0

@@ -289,13 +289,11 @@ def __init__(self, cfg: Config) -> None:
             eps=self.cfg.eps,
         )

-        block = SinkhornVectorQuantizer(
+        block = SphericalVectorQuantizer(
             self.cfg.embed_dim,
             self.cfg.num_codes,
             self.cfg.width,
-            self.cfg.vq_beta,
-            self.cfg.sync_nu,
-            self.cfg.skip_p,
+            codebook_regularization="kl",
         )
         self.bottleneck = OneDBottleNeck(block=block)
src/cube/model/autoencoder/sinkhorn_vq.py

Lines changed: 0 additions & 114 deletions
This file was deleted.

src/cube/model/autoencoder/spherical_vq.py

Lines changed: 0 additions & 4 deletions

@@ -13,8 +13,6 @@ def __init__(
         embed_dim: int,
         num_codes: int,
         width: Optional[int] = None,
-        beta: float = 0.8,
-        sync_nu: float = 0.0,
         codebook_regularization: Literal["batch_norm", "kl"] = "batch_norm",
     ):
         """
@@ -23,14 +21,12 @@
             embed_dim (int): The dimensionality of the embeddings.
             num_codes (int): The number of codes in the codebook.
             width (Optional[int], optional): The width of the input. Defaults to None.
-            sync_nu (float, optional): Synchronization parameter. Defaults to 0.0.
         Raises:
             ValueError: If beta is not in the range [0, 1].
         """
         super().__init__()

         self.num_codes = num_codes
-        self.sync_nu = sync_nu

         self.codebook = nn.Embedding(num_codes, embed_dim)
         self.codebook.weight.data.uniform_(-1.0 / num_codes, 1.0 / num_codes)

Comments: 0