From 5f3f6fef5ffb3cb87a0c860229d5ecdae8470fe9 Mon Sep 17 00:00:00 2001
From: Ramona <efish@verliernix.com>
Date: Wed, 21 Nov 2018 15:54:29 +0100
Subject: [PATCH] beautiful pics: plot firing rate in Hz with a normalized Gaussian smoothing kernel

---
 code/spikes_analysis.py | 35 +++++++++++++++++++++++++++++------
 code/utility.py         | 14 ++++++--------
 2 files changed, 35 insertions(+), 14 deletions(-)

diff --git a/code/spikes_analysis.py b/code/spikes_analysis.py
index 2d4a329..b96c6cf 100644
--- a/code/spikes_analysis.py
+++ b/code/spikes_analysis.py
@@ -10,8 +10,10 @@ data_dir = "../data"
 dataset = "2018-11-09-ad-invivo-1"
 # parameters for binning, smoothing and plotting
 num_bin = 12
-window = sampling_rate
+window = 1  # sigma of the Gaussian smoothing kernel in ms
 time_axis = np.arange(-50, 50, 1/sampling_rate)
+bin_size = 1  # bin width in ms
+spike_bins = np.arange(-50, 50+bin_size, bin_size)  # bin edges over the -50..50 ms window
 
 # read data from files
 spikes = read_chirp_spikes(os.path.join(data_dir, dataset))
@@ -60,22 +62,43 @@ for deltaf in df_map.keys():
                         df_phase_time[deltaf][idx] = [spikes_cut]
                         df_phase_binary[deltaf][idx] = binary_spikes
 
-# for plotting iterate over delta f and phases
+# iterate over delta f and phases for the rate calculation and plotting
 for df in df_phase_time.keys():
     for phase in df_phase_time[df].keys():
+
+        # plot
         plot_trials = df_phase_time[df][phase]
         plot_trials_binary = np.mean(df_phase_binary[df][phase], axis=0)
 
-        smoothed_spikes = smooth(plot_trials_binary, window)
+        # calculation
+        overall_spikerate = np.mean(plot_trials_binary) * sampling_rate * 1000  # mean spikes per sample -> Hz (sampling_rate is per ms)
+
+        '''
+        # disabled check: binned firing rate across the -50..50 ms window
+        spike_rate = np.zeros(len(spike_bins)-1)
+        for idx in range(len(spike_bins)-1):
+            bin_start = int((spike_bins[idx] + 50) * sampling_rate)
+            bin_end = int((spike_bins[idx+1] + 50) * sampling_rate)
+            spike_rate[idx] = np.sum(plot_trials_binary[bin_start:bin_end]) / bin_size * 1000  # Hz
+
+        print(np.std(spike_rate))
+        plt.plot(spike_rate)
+        plt.show()
+        embed()
+        exit()
+        '''
+
+        smoothed_spikes = smooth(plot_trials_binary, window, 1./sampling_rate)  # Gaussian kernel, sigma = window (ms)
 
-        fig, ax = plt.subplots(2, 1)
+        fig, ax = plt.subplots(2, 1, sharex=True)
         for i, trial in enumerate(plot_trials):
             ax[0].scatter(trial, np.ones(len(trial))+i, marker='|', color='k')
-        ax[1].plot(time_axis, smoothed_spikes)
+        ax[1].plot(time_axis, smoothed_spikes*1000)  # spikes per ms -> Hz
 
         ax[0].set_title(df)
         ax[0].set_ylabel('repetition', fontsize=12)
 
         ax[1].set_xlabel('time [ms]', fontsize=12)
-        ax[1].set_ylabel('firing rate [?]', fontsize=12)
+        ax[1].set_ylabel('firing rate [Hz]', fontsize=12)
+        print(overall_spikerate)
         plt.show()
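
Note on the rate scaling above: plot_trials_binary is the trial-averaged binary spike
train of the -50..50 ms cut-out, sampled at sampling_rate points per ms, so its mean
times sampling_rate*1000 is a rate in Hz. A minimal, self-contained sketch of that
unit logic (the sampling rate and spike train below are made up, not taken from the
recordings):

    import numpy as np

    sampling_rate = 40                        # samples per ms (hypothetical value)
    binary = np.zeros(100 * sampling_rate)    # a 100 ms window like the cut-outs above
    binary[::10 * sampling_rate] = 1          # one spike every 10 ms -> 100 Hz

    rate_hz = np.mean(binary) * sampling_rate * 1000   # spikes/sample -> spikes/ms -> Hz
    print(rate_hz)                            # 100.0
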
diff --git a/code/utility.py b/code/utility.py
index 41f75c9..7cc105a 100644
--- a/code/utility.py
+++ b/code/utility.py
@@ -20,18 +20,16 @@ def vector_strength(spike_times, eod_durations):
 	return vs
 
 
-def gaussian(x, mu, sig):
-	y = np.exp(-np.power(x - mu, 2.) / (2 * np.power(sig, 2.)))
+def gaussian(x, sig):
+	y = np.exp(-0.5 * (x/sig)**2) / (np.sqrt(2 * np.pi) * sig)  # normal density, zero mean
 	return y
 
 
-def smooth(data, window):
-	mu = 1
+def smooth(data, window, dt):
 	sigma = window
-	time_gauss = np.arange(-4 * sigma, 4 * sigma, 1)
-	gauss = gaussian(time_gauss, mu, sigma)
-	gauss_norm = gauss/(np.sum(gauss)/len(gauss))
-	smoothed_data = np.convolve(data, gauss_norm, 'same')
+	time_gauss = np.arange(-4 * sigma, 4 * sigma, dt)  # kernel support of +-4 sigma, sampled at dt
+	gauss = gaussian(time_gauss, sigma)  # integrates to ~1, so the convolution yields a rate
+	smoothed_data = np.convolve(data, gauss, 'same')
 	return smoothed_data
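
Note on the smoothing change: gaussian() is now a unit-area density and smooth()
samples it on the data's grid (step dt), so convolving a binary spike train with it
gives a rate per time unit of the kernel axis (ms here); that is why spikes_analysis.py
multiplies the smoothed trace by 1000 to plot Hz. A small standalone check of that
behaviour (sampling rate and spike times below are made up):

    import numpy as np

    def gaussian(x, sig):
        return np.exp(-0.5 * (x / sig)**2) / (np.sqrt(2 * np.pi) * sig)

    def smooth(data, window, dt):
        sigma = window
        time_gauss = np.arange(-4 * sigma, 4 * sigma, dt)
        return np.convolve(data, gaussian(time_gauss, sigma), 'same')

    sampling_rate = 40                            # samples per ms (hypothetical value)
    dt = 1. / sampling_rate                       # ms per sample
    binary = np.zeros(100 * sampling_rate)        # 100 ms of samples
    binary[5 * sampling_rate::10 * sampling_rate] = 1   # spikes at 5, 15, ..., 95 ms

    rate = smooth(binary, 1, dt)                  # smoothed rate in spikes per ms
    print(np.sum(rate) * dt)                      # ~10, recovers the spike count
    print(np.mean(rate) * 1000)                   # ~100 Hz, matching the plotted scaling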