How to use the elephant.conversion.BinnedSpikeTrain function in elephant

To help you get started, we’ve selected a few elephant examples, based on popular ways it is used in public projects.

Secure your code as it's written. Use Snyk Code to scan source code in minutes — no build needed — and fix issues immediately.

Source: NeuralEnsemble/elephant — elephant/sta.py (view on GitHub)
>>> plt.plot(freqs, sfc[:,1])
    >>> plt.xlabel('Frequency [Hz]')
    >>> plt.ylabel('SFC')
    >>> plt.xlim((0, 60))
    >>> plt.show()
    """

    if not hasattr(scipy.signal, 'coherence'):
        raise AttributeError('scipy.signal.coherence is not available. The sfc '
                             'function uses scipy.signal.coherence for '
                             'the coherence calculation. This function is '
                             'available for scipy version 0.16 or newer. '
                             'Please update you scipy version.')

    # spiketrains type check
    if not isinstance(spiketrain, (SpikeTrain, BinnedSpikeTrain)):
        raise TypeError(
            "spiketrain must be of type SpikeTrain or BinnedSpikeTrain, "
            "not %s." % type(spiketrain))

    # checks on analogsignal
    if not isinstance(signal, AnalogSignal):
        raise TypeError(
            "Signal must be an AnalogSignal, not %s." % type(signal))
    if len(signal.shape) > 1:
        # num_signals: number of individual traces in the analog signal
        num_signals = signal.shape[1]
    elif len(signal.shape) == 1:
        num_signals = 1
    else:
        raise ValueError("Empty analog signal.")
    len_signals = signal.shape[0]
Source: simetenn/uncertainpy — src/uncertainpy/features/network_features.py (view on GitHub)
simulation_end : float
            The simulation end time.
        neo_spiketrains : list
            A list of Neo spiketrains.

        Returns
        -------
        time : None
        values : 2D array
            The pairwise Pearson's correlation coefficients.
        """
        if len(spiketrains) == 0:
            return None, None


        binned_sts = elephant.conversion.BinnedSpikeTrain(spiketrains,
                                                          binsize=self.corrcoef_bin_size*self.units)
        corrcoef = elephant.spike_train_correlation.corrcoef(binned_sts)

        return None, corrcoef
Source: NeuralEnsemble/elephant — elephant/asset.py (view on GitHub)
_quantities_almost_equal(st.t_stop, t_stop_max)):
            msg = 'SpikeTrain %d is shorter than the required time ' % i + \
                  'span: t_stop (%s) < %s' % (st.t_stop, t_stop_max)
            raise ValueError(msg)

    # For both x and y axis, cut all SpikeTrains between t_start and t_stop
    sts_x = [st.time_slice(t_start=t_start_x, t_stop=t_stop_x)
             for st in spiketrains]
    sts_y = [st.time_slice(t_start=t_start_y, t_stop=t_stop_y)
             for st in spiketrains]

    # Compute imat either by matrix multiplication (~20x faster) or by
    # nested for loops (more memory efficient)
    try:  # try the fast version
        # Compute the binned spike train matrices, along both time axes
        bsts_x = conv.BinnedSpikeTrain(
            sts_x, binsize=binsize,
            t_start=t_start_x, t_stop=t_stop_x).to_bool_array()
        bsts_y = conv.BinnedSpikeTrain(
            sts_y, binsize=binsize,
            t_start=t_start_y, t_stop=t_stop_y).to_bool_array()

        # Compute the number of spikes in each bin, for both time axes
        spikes_per_bin_x = bsts_x.sum(axis=0)
        spikes_per_bin_y = bsts_y.sum(axis=0)

        # Compute the intersection matrix imat
        N_bins = len(spikes_per_bin_x)
        imat = np.zeros((N_bins, N_bins), dtype=float)
        for ii in range(N_bins):
            # Compute the ii-th row of imat
            bin_ii = bsts_x[:, ii].reshape(-1, 1)
Source: NeuralEnsemble/elephant — elephant/asset.py (view on GitHub)
raise ValueError(msg)

    # For both x and y axis, cut all SpikeTrains between t_start and t_stop
    sts_x = [st.time_slice(t_start=t_start_x, t_stop=t_stop_x)
             for st in spiketrains]
    sts_y = [st.time_slice(t_start=t_start_y, t_stop=t_stop_y)
             for st in spiketrains]

    # Compute imat either by matrix multiplication (~20x faster) or by
    # nested for loops (more memory efficient)
    try:  # try the fast version
        # Compute the binned spike train matrices, along both time axes
        bsts_x = conv.BinnedSpikeTrain(
            sts_x, binsize=binsize,
            t_start=t_start_x, t_stop=t_stop_x).to_bool_array()
        bsts_y = conv.BinnedSpikeTrain(
            sts_y, binsize=binsize,
            t_start=t_start_y, t_stop=t_stop_y).to_bool_array()

        # Compute the number of spikes in each bin, for both time axes
        spikes_per_bin_x = bsts_x.sum(axis=0)
        spikes_per_bin_y = bsts_y.sum(axis=0)

        # Compute the intersection matrix imat
        N_bins = len(spikes_per_bin_x)
        imat = np.zeros((N_bins, N_bins), dtype=float)
        for ii in range(N_bins):
            # Compute the ii-th row of imat
            bin_ii = bsts_x[:, ii].reshape(-1, 1)
            imat[ii, :] = (bin_ii * bsts_y).sum(axis=0)
            # Normalize the row according to the specified normalization
            if norm == 0 or norm is None or bin_ii.sum() == 0:
Source: NeuralEnsemble/elephant — elephant/statistics.py (view on GitHub)
warnings.warn(
                    "Spiketrains have different t_stop values -- "
                    "using minimum t_stop as t_stop.")
        else:
            min_tstop = conv._get_start_stop_from_input(spiketrains)[1]
            t_stop = min_tstop
            if not all([min_tstop == t.t_stop for t in spiketrains]):
                warnings.warn(
                    "Spiketrains have different t_stop values -- "
                    "using minimum t_stop as t_stop.")

    sts_cut = [st.time_slice(t_start=t_start, t_stop=t_stop) for st in
               spiketrains]

    # Bin the spike trains and sum across columns
    bs = conv.BinnedSpikeTrain(sts_cut, t_start=t_start, t_stop=t_stop,
                               binsize=binsize)

    if binary:
        bin_hist = bs.to_sparse_bool_array().sum(axis=0)
    else:
        bin_hist = bs.to_sparse_array().sum(axis=0)
    # Flatten array
    bin_hist = np.ravel(bin_hist)
    # Renormalise the histogram
    if output == 'counts':
        # Raw
        bin_hist = bin_hist * pq.dimensionless
    elif output == 'mean':
        # Divide by number of input spike trains
        bin_hist = bin_hist * 1. / len(spiketrains) * pq.dimensionless
    elif output == 'rate':