# Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.
import numpy as np
import tensorflow as tf

# Fix both the TF and NumPy global RNGs so search runs are reproducible
# (TF 1.x API: `tf.set_random_seed`).
tf.set_random_seed(1000003)
np.random.seed(1000003)

# NOTE(review): `os` and `sys` are used below but not imported in this
# fragment — presumably imported earlier in the file; verify.
HERE = os.path.dirname(os.path.abspath(__file__)) # search dir
top = os.path.dirname(os.path.dirname(HERE)) # directory containing deephyper
sys.path.append(top)  # make the deephyper package importable when run as a script

import deephyper.model.arch as a
from deephyper.evaluators import evaluate
from deephyper.search import util
from deephyper.search.nas.policy.tf import NASCellPolicyV5
from deephyper.search.nas.reinforce.tf import BasicReinforceV5

logger = util.conf_logger('deephyper.search.run_nas')

import subprocess as sp
# NOTE(review): side effect inside a log statement — this spawns
# `which mpirun` even when debug logging is disabled; consider guarding.
logger.debug(f'ddd {sp.Popen("which mpirun".split())}')
logger.debug(f'python exe : {sys.executable}')

from balsam.launcher import dag
from balsam.launcher import worker

SERVICE_PERIOD = 2 # Delay (seconds) between main loop iterations
class Search:
    """Driver object for a neural-architecture search run."""

    def __init__(self, cfg):
        """Store the search configuration and build the NAS evaluator.

        Args:
            cfg: search configuration object; must expose a ``.config``
                attribute and be accepted by
                ``evaluate.create_evaluator_nas``.
        """
        self.opt_config = cfg
        self.config = cfg.config
        # Bookkeeping: evaluated model description -> observed reward.
        self.map_model_reward = {}
        # Evaluator that dispatches candidate models for training/scoring.
        self.evaluator = evaluate.create_evaluator_nas(cfg)
import json
import os.path as osp
import numpy as np
import tensorflow as tf
from mpi4py import MPI

import deephyper.search.nas.utils.common.tf_util as U
from deephyper.evaluator import Evaluator
from deephyper.search.nas.env import NasEnv
from deephyper.search.nas.utils import bench, logger
from deephyper.search.nas.utils.common import set_global_seeds
from deephyper.search import util
from deephyper.search.nas.utils._logging import JsonMessage as jm

# Module-level logger for the random NAS agent.
dh_logger = util.conf_logger('deephyper.search.nas.agent.nas_random')
def traj_segment_generator(env, horizon):
    """Roll out *env* and collect trajectory segments.

    Args:
        env: Gym-style environment; must provide ``action_space.sample()``
            and ``reset()``.
        horizon: not used in the visible fragment — presumably the length
            of each yielded segment. TODO confirm.

    NOTE(review): this fragment is truncated — no loop or ``yield`` is
    visible here, so the generator body continues beyond this view.
    """
    t = 0  # global timestep counter
    ac = env.action_space.sample() # not used, just so we have the datatype
    new = True # marks if we're on first timestep of an episode
    ob = env.reset()  # initial observation
    cur_ep_ret = 0 # return in current episode
    cur_ep_len = 0 # len of current episode
    ep_rets = [] # returns of completed episodes in this segment
    ep_lens = [] # lengths of completed episodes in this segment
    ts_i2n_ep = {}  # NOTE(review): purpose not visible here — timestep -> episode mapping? verify
    # Initialize history arrays
import tensorflow as tf
import numpy as np
from sklearn.model_selection import cross_val_score
from sklearn.model_selection import KFold
from sklearn.preprocessing import StandardScaler
from sklearn.pipeline import Pipeline

import deephyper.search.nas.model.arch as a
import deephyper.search.nas.model.train_utils as U
from deephyper.search import util

# Module-level logger shared by the trainer classes below.
logger = util.conf_logger('deephyper.model.trainer')
class KerasTrainerRegressorKfold:
    """Keras regression trainer using k-fold cross-validation (fragment)."""

    def __init__(self, config, model):
        """Cache the model plus the data/hyperparameter sections of *config*.

        Args:
            config: mapping holding the data section (``a.data``) and the
                hyperparameter section (``a.hyperparameters``).
            model: Keras model to be trained.
        """
        self.config = config
        self.model = model
        self.callbacks = []
        self.data = config[a.data]
        # Unpack the hyperparameter section once instead of re-indexing it.
        hp = config[a.hyperparameters]
        self.config_hp = hp
        self.optimizer_name = hp[a.optimizer]
        self.loss_metric_name = hp[a.loss_metric]
        self.metrics_name = hp[a.metrics]
        self.batch_size = hp[a.batch_size]
        self.learning_rate = hp[a.learning_rate]
        self.num_epochs = hp[a.num_epochs]
import os
import collections
import numpy as np
import json

from deephyper.search import util
from deephyper.search.nas import NeuralArchitectureSearch
from deephyper.core.parser import add_arguments_from_signature
from deephyper.core.logs.logging import JsonMessage as jm
from deephyper.evaluator.evaluate import Encoder

dhlogger = util.conf_logger(
    'deephyper.search.nas.regevo')

# Commented-out helper kept for reference: serialized an architecture
# sequence to a stable JSON key. Consider deleting if it stays unused.
# def key(d):
#     return json.dumps(dict(arch_seq=d['arch_seq']), cls=Encoder)
class RegularizedEvolution(NeuralArchitectureSearch):
    """Regularized (aging) evolution search.

    https://arxiv.org/abs/1802.01548

    Args:
        problem (str): Module path to the Problem instance you want to use for the search (e.g. deephyper.benchmark.nas.linearReg.Problem).
        run (str): Module path to the run function you want to use for the search (e.g. deephyper.search.nas.model.run.quick).
        evaluator (str): value in ['balsam', 'subprocess', 'processPool', 'threadPool'].
        population_size (int, optional): the number of individuals to keep in the population. Defaults to 100.
        sample_size (int, optional): the number of individuals that should participate in each tournament. Defaults to 10.
from sys import float_info

from skopt import Optimizer as SkOptimizer
from skopt.learning import RandomForestRegressor, ExtraTreesRegressor, GradientBoostingQuantileRegressor
#from numpy import inf
import numpy as np

from deephyper.search import util
from deephyper.benchmark import HpProblem

# Module-level logger for the hyperparameter-search optimizer.
logger = util.conf_logger('deephyper.search.hps.optimizer.optimizer')
class Optimizer:
    """Wrapper around a scikit-optimize optimizer for hyperparameter search.

    NOTE(review): fragment truncated — ``base_estimator`` is built here but
    the rest of ``__init__`` (presumably constructing ``SkOptimizer``) is
    not visible in this view.
    """
    SEED = 12345  # fixed RNG seed — presumably passed to the optimizer; verify
    KAPPA = 1.96  # presumably the acquisition exploration parameter (95% bound) — TODO confirm usage

    def __init__(self, problem, num_workers, learner='RF', acq_func='gp_hedge', liar_strategy='cl_max', n_jobs=-1, **kwargs):
        # NOTE(review): `assert` is stripped under `python -O`; prefer raising
        # ValueError for input validation.
        assert learner in ["RF", "ET", "GBRT", "GP", "DUMMY"], f"Unknown scikit-optimize base_estimator: {learner}"
        # Map the learner name to a concrete surrogate-model estimator.
        if learner == "RF":
            base_estimator = RandomForestRegressor(n_jobs=n_jobs)
        elif learner == "ET":
            base_estimator = ExtraTreesRegressor(n_jobs=n_jobs)
        elif learner == "GBRT":
            base_estimator = GradientBoostingQuantileRegressor(n_jobs=n_jobs)
        else:
            # "GP" and "DUMMY" fall through: the learner *name* itself is used —
            # presumably scikit-optimize accepts these estimator strings; verify.
            base_estimator = learner
import tensorflow as tf
import numpy as np
import math
import traceback
from sklearn.metrics import mean_squared_error

import deephyper.search.nas.model.arch as a
import deephyper.search.nas.model.train_utils as U
from deephyper.search import util
from deephyper.core.logs.logging import JsonMessage as jm
from deephyper.search.nas.model.trainer.train_valid import TrainerTrainValid

# Module-level logger shared by the trainer classes below.
logger = util.conf_logger('deephyper.model.trainer')
class TrainerRegressorTrainValid(TrainerTrainValid):
    """Regression trainer using a train/validation split.

    Thin specialization of ``TrainerTrainValid``; the visible fragment only
    fixes NumPy's global RNG for reproducible training runs.
    """

    def __init__(self, config, model, seed=1000003):
        """Initialize the base trainer and seed NumPy's global RNG.

        Args:
            config: trainer configuration (see ``TrainerTrainValid``).
            model: model object to train.
            seed (int, optional): NumPy RNG seed. Defaults to 1000003 — the
                value previously hard-coded — so existing callers see
                identical behavior. Parameterized for consistency with the
                ``KerasTrainerRegressorKfold`` trainer, which already takes
                a ``seed`` argument.
        """
        super().__init__(config, model)
        # Seed after base-class init to preserve the original call order.
        np.random.seed(seed)
import time

# NOTE(review): `os` and `sys` are used below but not imported in this
# fragment — presumably imported earlier in the file; verify.
HERE = os.path.dirname(os.path.abspath(__file__)) # search dir
top = os.path.dirname(os.path.dirname(HERE)) # directory containing deephyper
sys.path.append(top)  # make the deephyper package importable when run as a script

import deephyper.model.arch as a
from deephyper.model.builder.tf import BasicBuilder
from deephyper.model.trainer.tf import BasicTrainer
from deephyper.model.utilities.conversions import action2dict_v2
from deephyper.search import util
from deephyper.search.nas.reinforce.tf import BasicReinforce
from deephyper.model.utilities.nas_cmdline import create_parser

logger = util.conf_logger('deephyper.search.nas')
def run(param_dict):
    """Entry point for one NAS evaluation (fragment — truncated below).

    Args:
        param_dict (dict): run configuration; the visible code reads
            'global_step', 'state_space', 'load_data_module_name' and
            'hyperparameters' (with a 'batch_size' key).

    NOTE(review): this fragment ends mid-function; the model build/train/
    return logic is not visible here.
    """
    config = param_dict
    logger.debug(f'[STEP] global_step = {config["global_step"]}')
    logger.debug('[PARAM] Creating StateSpace')
    config['state_space'] = a.StateSpace(config['state_space'])
    logger.debug('[PARAM] StateSpace created')
    logger.debug('[PARAM] Loading data')
    # NOTE(review): `import_module` is not imported in this fragment —
    # presumably `from importlib import import_module` earlier; verify.
    load_data = import_module(param_dict['load_data_module_name']).load_data
    # Loading data
    config['num_steps'] = 10  # NOTE(review): hard-coded, overrides any configured value
    # NOTE(review): hard-coded absolute user path — should come from config or env.
    data_cfg = {'num_steps':config['num_steps'], 'batch_size':config['hyperparameters']['batch_size'], 'dest':'/Users/Dipendra/Projects/deephyper/benchmarks/ptbNas/DATA'}
    (t_X, t_y), (v_X, v_y), (test_X, test_y), vocab = load_data(data_cfg)
import tensorflow as tf
import numpy as np
from sklearn.model_selection import cross_val_score
from sklearn.model_selection import KFold
from sklearn.preprocessing import StandardScaler
from sklearn.pipeline import Pipeline

import deephyper.search.nas.model.arch as a
import deephyper.search.nas.model.train_utils as U
from deephyper.search import util

# Module-level logger shared by the trainer classes below.
logger = util.conf_logger('deephyper.model.trainer')
class KerasTrainerRegressorKfold:
    """K-fold Keras regression trainer (seeded variant; fragment)."""

    def __init__(self, config, model, seed=2018):
        """Seed NumPy's RNG and unpack the training configuration.

        Args:
            config: mapping holding the data section (``a.data``) and the
                hyperparameter section (``a.hyperparameters``).
            model: Keras model to be trained.
            seed (int, optional): NumPy RNG seed. Defaults to 2018.
        """
        self.seed = seed
        # Seed first so every later random operation is reproducible.
        np.random.seed(seed)
        self.config = config
        self.model = model
        self.data = config[a.data]
        # Alias the hyperparameter section instead of re-indexing config.
        hyper = config[a.hyperparameters]
        self.config_hp = hyper
        self.optimizer_name = hyper[a.optimizer]
        self.loss_metric_name = hyper[a.loss_metric]
        self.metrics_name = hyper[a.metrics]
        self.batch_size = hyper[a.batch_size]
        self.learning_rate = hyper[a.learning_rate]
import tensorflow as tf
import numpy as np

from deephyper.search.nas.model.trainer.train_valid import TrainerTrainValid
import deephyper.search.nas.model.arch as a
import deephyper.search.nas.model.train_utils as U
from deephyper.search import util
from deephyper.core.logs.logging import JsonMessage as jm

# Module-level logger shared by the trainer classes below.
logger = util.conf_logger('deephyper.model.trainer')
class TrainerClassifierTrainValid(TrainerTrainValid):
    """Classification trainer using a train/validation split.

    Thin subclass of ``TrainerTrainValid``; the visible fragment adds no
    behavior beyond delegating construction to the base class.
    """
    def __init__(self, config, model):
        """Forward *config* and *model* to ``TrainerTrainValid``."""
        super().__init__(config, model)