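# NOTE: fragment of a fit-style training routine; torch, torch.optim (as optim) and the
# project's BGMTransformer, Sampler, Cond, Generator and Discriminator classes are
# assumed to be imported at module level.
# Fit the transformer on the raw training data (given the categorical and ordinal
# column lists), then encode the data into its transformed representation.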
self.transformer = BGMTransformer()
self.transformer.fit(train_data, categoricals, ordinals)
train_data = self.transformer.transform(train_data)
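# Wrap the transformed rows in a Sampler built from the transformer's per-column
# output_info (presumably used to draw training batches later in the loop).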
data_sampler = Sampler(train_data, self.transformer.output_info)
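# data_dim is the width of a transformed row; Cond builds the conditional vectors,
# whose n_opt entries are appended to the network inputs below.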
data_dim = self.transformer.output_dim
self.cond_generator = Cond(train_data, self.transformer.output_info)
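# Generator: takes the noise embedding concatenated with the conditional vector
# and produces a fake row of width data_dim.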
self.generator = Generator(
    self.embedding_dim + self.cond_generator.n_opt,
    self.gen_dim,
    data_dim).to(self.device)
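# Discriminator: scores a row together with its conditional vector, hence the
# data_dim + n_opt input width.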
discriminator = Discriminator(
    data_dim + self.cond_generator.n_opt,
    self.dis_dim).to(self.device)
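# Adam optimizers for both networks; only the generator uses L2 weight decay (l2scale).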
optimizerG = optim.Adam(
    self.generator.parameters(), lr=2e-4, betas=(0.5, 0.9), weight_decay=self.l2scale)
optimizerD = optim.Adam(discriminator.parameters(), lr=2e-4, betas=(0.5, 0.9))
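# The batch size must be even. mean/std define a standard normal N(0, 1) over
# embedding_dim-dimensional noise vectors, one per batch row.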
assert self.batch_size % 2 == 0
mean = torch.zeros(self.batch_size, self.embedding_dim, device=self.device)
std = mean + 1
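# Training loop: each epoch runs len(train_data) // batch_size steps, and every
# step starts by sampling a fresh batch of Gaussian noise, fakez.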
steps_per_epoch = len(train_data) // self.batch_size
for i in range(self.epochs):
    for id_ in range(steps_per_epoch):
        fakez = torch.normal(mean=mean, std=std)