How to use pyglm - common examples

To help you get started, we’ve selected a few pyglm examples based on the ways it is commonly used in public projects.

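Most of the excerpts below share one pattern: construct a Population model, generate or load a spike train, and inspect the effective weights. The sketch below distills that pattern from the synthetic-data example further down; the import path for Population, the placeholder sizes, and the reliance on default hyperparameters are assumptions rather than anything taken verbatim from the excerpts.

import numpy as np
from pyglm.models import Population  # assumed import path; the excerpts import other model classes from pyglm.models

# Placeholder problem sizes (the examples below use values such as N=27, B=5)
N, B, T = 10, 5, 1000
dt, dt_max = 1.0, 10.0

# Gaussian prior on the weights, as in the synthetic-data example below;
# other hyperparameters are assumed to have usable defaults
network_hypers = {"Sigma_0": 10 * np.eye(B)}

true_model = Population(N=N, dt=dt, dt_max=dt_max, B=B,
                        network_hypers=network_hypers)

# Sample a spike train of length T from the generative model
S, Psi = true_model.generate(T=T, keep=True, return_Psi=True)
print("Number of generated spikes:", S.sum(0))

# Effective connectivity: weights summed over the B basis functions
W_eff = true_model.weight_model.W_effective.sum(2)

From this starting point the excerpts diverge: some fit models with Gibbs sampling or MAP inference, while others only visualize the sampled connectivity.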

From slinderman/pyglm: experiments/rgc/inferred_connectivity.py (view on GitHub)
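This plotting helper loads the data and saved inference results for a given dataset and run, then forms an average connectivity estimate: for BFGS results it sums the weight tensor over the basis dimension, and for Gibbs results it collects the effective weight matrices from the second half of the samples (presumably discarding the first half as burn-in).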
def plot_connectivity(dataset, run, algs,):
    # Load the data and results
    train, test, true_model = load_data(dataset)
    res_dir = os.path.join("results", dataset, "run%03d" % run)
    results = load_results(dataset, run, algs)

    # samples = results["gibbs"][0]

    ###########################################################
    # Get the average connectivity
    ###########################################################
    if "bfgs" in algs:
        W_mean = results["bfgs"].W.sum(2)
    elif "gibbs" in algs:
        W_samples = [smpl.weight_model.W_effective
                         for smpl in results["gibbs"][0]]

        offset = len(W_samples) // 2
        W_samples = np.array(W_samples[offset:])
From slinderman/pyglm: experiments/rgc/inferred_connectivity.py (view on GitHub)
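Here a BernoulliEigenmodelPopulation is constructed so its basis can map sampled weights to impulse responses; the mean effective weights from the second half of the Gibbs samples are then sorted to pick out the three strongest inhibitory and three strongest excitatory synapses.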
def plot_impulse_responses(dataset, run):
    # Load the data and results
    from pyglm.models import BernoulliEigenmodelPopulation
    model = BernoulliEigenmodelPopulation(
        N=27, dt=1.0, dt_max=10.0, B=5)
    res_dir = os.path.join("results", dataset, "run%03d" % run)
    results = load_results(dataset, run, ["gibbs"])
    samples = results["gibbs"][0]

    ###########################################################
    # Get the average connectivity
    ###########################################################
    W_samples = [smpl.weight_model.W_effective for smpl in samples]
    offset = len(W_samples) // 2
    W_samples = np.array(W_samples[offset:])
    W_lim = np.amax(abs(W_samples))
    imp_lim = W_lim * np.amax(abs(model.basis.basis))

    syn_sort = np.argsort(W_samples.mean(0).sum(2).ravel())
    # Strongest inhibitory and excitatory synapses as (pre, post) index pairs
    i_synapses = list(zip(*np.unravel_index(syn_sort[:3], (27, 27))))
    e_synapses = list(zip(*np.unravel_index(syn_sort[-3:], (27, 27))))
    synapses = i_synapses + e_synapses
From slinderman/pyglm: examples/generate_synthetic_data.py (view on GitHub)
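This excerpt from the synthetic-data generator builds a ground-truth Population (the commented-out block shows an alternative cluster-structured network prior), samples a spike train of length T from it, and plots the resulting effective connectivity matrix.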
    ###########################################################
    #   Network hyperparameters
    ###########################################################
    # c = np.arange(C).repeat((N // C))                       # Neuron to cluster assignments
    # p = 0.5 * np.ones((C,C))                                      # Probability of connection for each pair of clusters
    # # p = 0.9 * np.eye(C) + 0.05 * (1-np.eye(C))              # Probability of connection for each pair of clusters
    # mu = np.zeros((C,C,B))                                  # Mean weight for each pair of clusters
    # Sigma = np.tile( 3**2 * np.eye(B)[None,None,:,:], (C,C,1,1))    # Covariance of weight for each pair of clusters
    # network_hypers = {'C': C, 'c': c, 'p': p, 'mu': mu, 'Sigma': Sigma}

    ###########################################################
    # Create the model with these parameters
    ###########################################################
    network_hypers = {"Sigma_0": 10*np.eye(B)}
    true_model = Population(N=N, dt=dt, dt_max=dt_max, B=B,
                            bias_hypers=bias_hypers,
                            network_hypers=network_hypers)

    ###########################################################
    # Sample from the true model
    ###########################################################
    S, Psi_hat = true_model.generate(T=T, keep=True, return_Psi=True)
    print "Number of generated spikes:\t", S.sum(0)

    ###########################################################
    #  Plot the network, the spike train and mean rate
    ###########################################################
    plt.figure()
    plt.imshow(true_model.weight_model.W_effective.sum(2), vmin=-1.0, vmax=1.0, interpolation="none", cmap="RdGy")

    plt.figure()
From slinderman/theano_pyglm: test/synth_map_with_xv.py (view on GitHub)
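This cross-validation harness builds a family of candidate models from a base model, splits the synthetic data into training and validation segments, preprocesses both with the population object, and initializes the bookkeeping used to track the best model (the fitting loop itself is truncated in this excerpt).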
def run_synth_test():
    """ Run a test with synthetic data and MAP inference with cross validation
    """
    options, popn, data, popn_true, x_true = initialize_test_harness()
    
    # Get the list of models for cross validation
    base_model = make_model(options.model, N=data['N'], dt=0.001)
    models = get_xv_models(base_model)

    # TODO Segment data into training and cross validation sets
    train_frac = 0.75
    T_split = data['T'] * train_frac
    train_data = segment_data(data, (0,T_split))
    xv_data = segment_data(data, (T_split,data['T']))

    # Preprocess the data sequences
    train_data = popn.preprocess_data(train_data)
    xv_data = popn.preprocess_data(xv_data)

    # Sample random initial state
    x0 = popn.sample()

    # Track the best model and parameters
    best_ind = -1
    best_xv_ll = -np.Inf
    best_x = x0
    best_model = None

    # Fit each model using the optimum of the previous models
    train_lls = np.zeros(len(models))
From slinderman/theano_pyglm: test/synth_map.py (view on GitHub)
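This synthetic test samples a random initial state, runs one iteration of coordinate descent to obtain a MAP estimate, prints the log probability before and after, then pickles and plots the results.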
def run_synth_test():
    """ Run a test with synthetic data and MCMC inference
    """
    options, popn, data, popn_true, x_true = initialize_test_harness()

    # Sample random initial state
    x0 = popn.sample()
    ll0 = popn.compute_log_p(x0)
    print "LL0: %f" % ll0

    # Perform inference
    x_inf = coord_descent(popn, x0=x0, maxiter=1)
    ll_inf = popn.compute_log_p(x_inf)
    print "LL_inf: %f" % ll_inf

    # Save results
    results_file = os.path.join(options.resultsDir, 'results.pkl')
    print "Saving results to %s" % results_file
    with open(results_file, 'w') as f:
        cPickle.dump(x_inf, f, protocol=-1)

    # Plot results
    plot_results(popn, x_inf, popn_true, x_true, resdir=options.resultsDir)
From slinderman/pyglm: test/debug_collapsed_resample_with_temp.py (view on GitHub)
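A small helper from a collapsed-resampling debug script: it resamples a binary variable from a Bernoulli whose probability is the logistic of the activation scaled by a temperature T.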
def resample_x(z):
    # Resample x from its (scaled) Bernoulli lkhd
    p = logistic(z / T)
    x_smpl = np.random.rand() < p
    return x_smpl
From slinderman/pyglm: test/test_lds.py (view on GitHub)
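One update step for a latent linear dynamical system test: resample the model, reconstruct the activation from the inferred state sequence and emission matrix C, pass it through the logistic function to get spike probabilities, and return them along with the log likelihood.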
def update(model):
    model.resample_model()
    z_inf = model.states_list[0].stateseq
    C_inf = model.C
    psi_inf = z_inf.dot(C_inf.T)
    p_inf = logistic(psi_inf)
    return model.log_likelihood(), p_inf
From slinderman/pyglm: test/profile_vb.py (view on GitHub)
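This profiling script copies the true model's dimensions and hyperparameters into a new negative-binomial spike-and-slab population, initializes it from a previously fit standard model loaded from a gzipped pickle, and convolves the held-out spike train with the basis so held-out likelihoods can be evaluated quickly during Gibbs sampling.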
    B      = true_model.B
    dt     = true_model.dt
    dt_max = true_model.dt_max

    ###########################################################
    # Create and fit a standard model for initialization
    ###########################################################
    with gzip.open(init_path, 'rb') as f:
        init_model = pickle.load(f)

    ###########################################################
    # Create a test spike-and-slab model
    ###########################################################

    # Copy the network hypers.
    test_model = NegativeBinomialEigenmodelPopulation(N=N, dt=dt, dt_max=dt_max, B=B,
                            basis_hypers=true_model.basis_hypers,
                            observation_hypers=true_model.observation_hypers,
                            activation_hypers=true_model.activation_hypers,
                            weight_hypers=true_model.weight_hypers,
                            bias_hypers=true_model.bias_hypers,
                            network_hypers=true_model.network_hypers)
    test_model.add_data(S)

    # Initialize with the standard model
    test_model.initialize_with_standard_model(init_model)

    # Convolve the test data for fast heldout likelihood calculations
    F_test = test_model.basis.convolve_with_basis(S_test)

    ###########################################################
    # Fit the test model with Gibbs sampling