temp update (currently has issues)

master
michael 2021-08-04 16:22:15 -07:00
parent 22b7fe7333
commit 6c3c900570
7 changed files with 1274 additions and 545 deletions

app.py
View File

@@ -9,7 +9,6 @@ import pathlib
class AppWindow(QtWidgets.QMainWindow, Ui_MainWindow):
csv_selected = QtCore.pyqtSignal()
correlation_complete = QtCore.pyqtSignal()
fitting_complete = QtCore.pyqtSignal()
@@ -17,14 +16,6 @@ class AppWindow(QtWidgets.QMainWindow, Ui_MainWindow):
super(AppWindow, self).__init__()
self.setupUi(self)
# Graphing actions
self.csv_selected.connect(self.raw_data_graph.plot)
def corr_act():
self.groups_graph.plot()
self.graph_tabs.setCurrentIndex(1)
self.correlation_complete.connect(corr_act)
# Helpers
def display_warning(message: str):
@@ -52,15 +43,24 @@ class AppWindow(QtWidgets.QMainWindow, Ui_MainWindow):
return
mem['x_data'] = data.transpose()[0]
mem['y_data'] = data.transpose()[1]
mem['timestep'] = mem['x_data'][1] - mem['x_data'][0]
self.raw_data_graph.plot() # Graph new stuff
# self.groups_graph.clear() # Clear old stuff
self.voltage_graph.clear()
self.added_peaks_graph.clear()
# self.tau_graph.clear()
mem['ymin'], mem['ymax'] = crds_calc.minmax(mem['y_data'])
try:
mem['v_data'] = data.transpose()[2]
self.voltage_graph.plot()
self.graph_tabs.setCurrentIndex(1)
except IndexError:
display_warning('No voltage column detected. VThreshold algo will not work.')
self.groups_graph.clear()
self.tau_graph.clear()
self.graph_tabs.setCurrentIndex(0)
self.csv_selected.emit()
self.voltage.setVisible(False)
self.graph_tabs.setCurrentIndex(0)
# Universal Actions stuff
@@ -68,6 +68,11 @@ class AppWindow(QtWidgets.QMainWindow, Ui_MainWindow):
self.actionOpen_CSV_File.triggered.connect(select_csv)
self.actionGithub_Repository.triggered.connect(lambda: QtGui.QDesktopServices.openUrl(QtCore.QUrl('https://github.com/turtlebasket/crds_analyze')))
# NOTE: Do later, use QDialog
# def check_if_quit():
# <stuff here>
self.actionQuit_2.triggered.connect(sys.exit)
# Inputs
def switch_grouping_algo():
@@ -86,40 +91,121 @@ class AppWindow(QtWidgets.QMainWindow, Ui_MainWindow):
if self.check_custom_end.isChecked():
self.spin_end_time.setDisabled(False)
else:
# mem['end_time'] = self.spin_end_time.value()
self.spin_end_time.setDisabled(True)
self.check_custom_end.stateChanged.connect(set_end_time)
# Sync up peak detection settings between input locations
self.spin_min_peakheight.valueChanged.connect(lambda x: self.spin_min_peakheight_2.setValue(x))
self.spin_min_peakheight_2.valueChanged.connect(lambda x: self.spin_min_peakheight.setValue(x))
self.spin_min_peakprominence.valueChanged.connect(lambda x: self.spin_min_peakprominence_2.setValue(x))
self.spin_min_peakprominence_2.valueChanged.connect(lambda x: self.spin_min_peakprominence.setValue(x))
self.spin_moving_average_denom.valueChanged.connect(lambda x: self.spin_moving_average_denom_2.setValue(x))
self.spin_moving_average_denom_2.valueChanged.connect(lambda x: self.spin_moving_average_denom.setValue(x))
# Make advanced peak detection optional
def update_advanced_peak_detection_setting():
enabled = self.check_advanced_peak_detection.isChecked()
self.spin_min_peakheight_2.setEnabled(enabled)
self.spin_min_peakprominence_2.setEnabled(enabled)
self.spin_moving_average_denom_2.setEnabled(enabled)
self.check_advanced_peak_detection.stateChanged.connect(update_advanced_peak_detection_setting)
def init_correlate():
groups_raw = None
algo = self.combo_grouping_algo.currentIndex()
try:
if algo == 0:
groups_raw = crds_calc.vthreshold(
mem['x_data'],
mem['y_data'],
mem['v_data'],
self.spin_min_voltage.value(),
self.spin_max_voltage.value(),
mirrored=False if self.check_skip_groups.checkState() == 0 else True,
start=self.spin_start_time.value() if self.check_custom_start.isChecked() else None,
end=self.spin_end_time.value() if self.check_custom_end.isChecked() else None
)
# display_error('VThreshold not yet implemented.')
# return
elif algo == 1:
try:
groups_raw = crds_calc.spaced_groups(
mem['x_data'],
mem['y_data'],
self.spin_group_len.value(),
self.spin_min_peakheight.value(),
self.spin_min_peakprominence.value(),
self.spin_moving_average_denom.value(),
mirrored=False if self.check_skip_groups.checkState() == 0 else True,
start=self.spin_start_time.value() if self.check_custom_start.isChecked() else None,
end=self.spin_end_time.value() if self.check_custom_end.isChecked() else None
)
except (ValueError, TypeError):
display_error('Failed to correlate. This could be because no groups are being detected.')
if groups_raw is None or len(groups_raw) < 1:
display_error("No groups were detected. Try adjusting grouping parameters.")
return
mem['groups_correlated'] = crds_calc.correlate_groups(groups_raw)
self.correlation_complete.emit()
# Graphing action
self.groups_graph.plot()
self.graph_tabs.setCurrentIndex(2)
except KeyError:
display_error('Failed to correlate. Did you import a data file & set parameters?')
self.correlate_button.pressed.connect(init_correlate)
def init_add_simple():
try:
mem['added_peaks'] = crds_calc.add_peaks_only(mem['groups_correlated'])
self.added_peaks_graph.set_params(None, shift_over=None)
self.added_peaks_graph.plot()
self.graph_tabs.setCurrentIndex(3)
except KeyError:
display_error("Correlated groups not found. Group peaks first.")
self.peak_add_button.pressed.connect(init_add_simple)
def init_add():
try:
mem['added_peaks'], mem['peak_indices'], mem['isolated_peaks'] = crds_calc.isolate_peaks(
mem['groups_correlated'],
self.spin_peak_overlap.value(),
self.spin_moving_average_denom.value(),
peak_minheight=self.spin_min_peak_height_added.value(),
peak_prominence=self.spin_peak_prominence_added.value(),
shift_over=self.spin_shift_over.value()
)
self.added_peaks_graph.set_params(self.spin_peak_overlap.value(), shift_over=self.spin_shift_over.value())
self.added_peaks_graph.plot()
self.graph_tabs.setCurrentIndex(3)
except KeyError:
display_error("Correlated groups not found. Group peaks first.")
self.isolate_button.pressed.connect(init_add)
def init_fit():
if 'isolated_peaks' not in mem:
display_error('Peaks not yet isolated.')
return
mem['fit_equations'] = crds_calc.fit_peaks(
mem['isolated_peaks'],
mem['peak_indices'],
self.spin_min_peakheight_2.value(),
self.spin_min_peakprominence_2.value(),
self.spin_moving_average_denom_2.value(),
self.spin_var_a.value(),
self.spin_var_tau.value(),
self.spin_var_y0.value(),
self.spin_shift_over_fit.value(),
self.check_advanced_peak_detection.isChecked()
)
# print(mem['fit_equations'])
self.peak_fits_graph.plot()
self.fit_button.pressed.connect(init_fit)
# Show equation
pix = QtGui.QPixmap(f"{pathlib.Path(__file__).parent.resolve()}/assets/eq3.png")

View File

@@ -3,13 +3,26 @@
set compileUI=0
set build=0
IF "%~1" == "" (
echo --------------
echo BUILD SCRIPT
echo --------------
echo -compileUI Generate main window class from .UI file
echo -build Build standalone desktop app with embedded python runtime
exit
)
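REM Example invocation (script filename assumed): build.bat -compileUI -build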
FOR %%A IN (%*) DO (
IF "%%A"=="-compileUI" set compileUI=1
IF "%%A"=="-build" set build=1
)
if %compileUI%==1 (
pyuic5.exe -x .\ui\mainwin.ui -o mainwin.py
REM check the pyuic5 exit code at run time ("if errorlevel" works inside a block)
if errorlevel 1 (echo Generation failed.) else (echo Generated mainwin.py.)
)
if %build%==1 pyinstaller --onefile --windowed --icon=favicon.ico app.py
exit

View File

@@ -1,7 +1,14 @@
import numpy as np
from scipy.signal import find_peaks, correlate
from scipy.optimize import curve_fit
from memdb import mem
def minmax(data):
return np.min(data), np.max(data)
def exp_func(x, x0, a, y0, tau): # NOTE: X is not something we pass in 🤦‍♂️
return y0 + a*np.exp(-(x-x0)/tau)
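# exp_func models a single exponential decay, y(x) = y0 + a*exp(-(x - x0)/tau):
# y0 is the baseline offset, a the amplitude at the onset x0, and tau the decay constant fitted by fit_peaks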
def spaced_groups(
x_data: np.array,
y_data: np.array,
@@ -16,7 +23,9 @@ def spaced_groups(
"""
Use SpacedGroups algo to separate groups
Returns
-------
2D array of raw data; every other group
"""
# Helpers
@@ -107,11 +116,44 @@ def spaced_groups(
return groups_raw
def vthreshold(
x_data: np.array,
y_data: np.array,
v_data: np.array,
vmin: float,
vmax: float,
mirrored: bool=True,
start=None,
end=None
):
"""
Voltage-threshold grouping algorithm
Returns
-------
A `list` of all peak groups
"""
# Helpers
def t2i(t):
delta_t = abs(x_data[0] - t)
timestep = abs(x_data[1] - x_data[0])
return int(delta_t / timestep)
def t2i_range(t):
timestep = abs(x_data[1] - x_data[0])
return int(t / timestep)
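# e.g. (hypothetical values): with x_data starting at 0.0 s and a 1 us timestep,
# t2i(0.005) -> index 5000 and t2i_range(0.001) -> 1000 samples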
groups_raw = []
return groups_raw
def correlate_groups(groups_raw):
"""
Overlay groups using `scipy.correlate`.
Returns
-------
2D array of overlayed groups
"""
# Compare groups (scipy correlate)
@@ -135,19 +177,121 @@ def correlate_groups(groups_raw):
return groups_adjusted
def add_peaks_only(groups_adjusted: list):
def unequal_add_truncation(a,b): # Instead of padding with 0, truncate
if len(a) < len(b):
c = b.copy()
c = c[:len(a)]
c += a
else:
c = a.copy()
c = c[:len(b)]
c += b
return(c)
added_peaks = np.array(groups_adjusted[0])
for g in groups_adjusted[1:]:
g1 = np.array(g)
g0 = added_peaks
added_peaks = unequal_add_truncation(g0, g1)
return added_peaks
def isolate_peaks(
groups_adjusted: list,
peak_width: int,
sma_denom: int,
peak_minheight: int = None,
peak_prominence: int = None,
shift_over: int = 0
):
def unequal_add(a,b): # NOTE: See https://www.delftstack.com/howto/numpy/vector-addition-in-numpy/
if len(a) < len(b):
c = b.copy()
c[:len(a)] += a
else:
c = a.copy()
c[:len(b)] += b
return(c)
def unequal_add_truncation(a,b): # Instead of padding with 0, truncate
if len(a) < len(b):
c = b.copy()
c = c[:len(a)]
c += a
else:
c = a.copy()
c = c[:len(b)]
c += b
return(c)
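# e.g. with a = np.array([1, 2, 3]) and b = np.array([4, 5]):
# unequal_add(a, b) -> [5, 7, 3] (shorter input is effectively zero-padded),
# unequal_add_truncation(a, b) -> [5, 7] (result cut to the shorter length)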
def moving_average(x, w):
return np.convolve(x, np.ones(w), 'valid') / w
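# 'valid' convolution: the smoothed output has len(x) - w + 1 points,
# e.g. moving_average(np.array([1, 2, 3, 4]), 2) -> [1.5, 2.5, 3.5]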
added_peaks = np.array(groups_adjusted[0])
for g in groups_adjusted[1:]:
g1 = np.array(g)
g0 = added_peaks
added_peaks = unequal_add_truncation(g0, g1)
added_peaks_av = moving_average(added_peaks, sma_denom)
peak_indices = find_peaks(added_peaks_av, height=peak_minheight, prominence=peak_prominence, distance=peak_width/2)[0] # Get indices of all peaks
isolated_peaks = []
delta = peak_width/2
for g in groups_adjusted:
peaks_cut = []
for i in peak_indices:
peak = g[int(i-delta+shift_over):int(i+delta+shift_over)]
peaks_cut.append(peak)
isolated_peaks.append(peaks_cut)
return added_peaks, peak_indices, isolated_peaks
def fit_peaks(
isolated_peaks: list,
peak_indices: list,
min_peak_height: float,
min_peak_prominence: float,
moving_avg_size: int,
a: float,
tau: float,
y0: float,
shift_over: int,
use_advanced: bool
):
"""
Returns
-------
Peak fit equations. Linked to `mem['isolated_peaks']`.
"""
print(f'{use_advanced=}')
params_guess = (0.0000, a, y0, tau)
equations = []
for peaks_cut in isolated_peaks:
row = []
for peak_data in peaks_cut:
x_data = np.arange(len(peak_data)) # just placeholder indices
if not use_advanced:
peak_index = np.argmax(peak_data, axis=0)
else:
peak_index = find_peaks(peak_data, height=min_peak_height, prominence=min_peak_prominence)[0][0]
# print(peak_index)
params_guess = (peak_index+shift_over, a, y0, tau)
x_data_target = x_data[peak_index+shift_over:]
peak_data_target = peak_data[peak_index+shift_over:]
# popt, pcov = curve_fit(exp_func, x_data_target, peak_data_target, bounds=([-np.inf, 0.0, -np.inf, 0.0], np.inf))
popt, pcov = curve_fit(exp_func, x_data_target, peak_data_target, bounds=([-np.inf, 0.0, -np.inf, 0.0], np.inf), p0=params_guess, maxfev=10000000)
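# popt holds the fitted (x0, a, y0, tau) in exp_func's argument order; pcov is the parameter covariance matrix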
row.append({'popt': popt, 'pcov': pcov})
equations.append(row)
return equations # list linked with isolated_peaks

View File

@@ -1,4 +1,5 @@
from sqlitedict import SqliteDict
from varname.core import nameof
class ModSqliteDict(SqliteDict):
def __init__(self):
@@ -7,4 +8,8 @@ class ModSqliteDict(SqliteDict):
self.filename = ':memory:'
super().__init__()
def set_key(self, item):
name = nameof(item)
self[name] = item
mem = ModSqliteDict()

View File

@@ -3,3 +3,5 @@ pandas
numpy
pyinstaller==4.4
sqlitedict
scipy
varname

File diff suppressed because it is too large

View File

@@ -1,17 +1,32 @@
import numpy as np
from PyQt5 import QtWidgets
import matplotlib
matplotlib.use('Qt5Agg')
from matplotlib.backends.backend_qt5agg import FigureCanvasQTAgg, NavigationToolbar2QT as NavigationToolbar
from matplotlib.figure import Figure
from memdb import mem
from crds_calc import exp_func
class MplCanvas(FigureCanvasQTAgg):
def __init__(self, parent=None, width=5, height=4, dpi=100):
fig = Figure(figsize=(width, height), dpi=dpi)
self.axes = fig.add_subplot(111)
fig.tight_layout()
self.fig_width = width
self.fig_height = height
self.fig_dpi = dpi
super(MplCanvas, self).__init__(fig)
def reset(self):
self.axes.remove()
self.axes = self.figure.add_subplot(111)
self.figure.tight_layout()
# fig = Figure(figsize=(self.fig_width, self.fig_height), dpi=self.fig_dpi)
# self.axes = fig.add_subplot(111)
# self.figure.clear()
# self.figure.axes.remove()
# fig.tight_layout()
class BaseGraph(QtWidgets.QWidget):
"""
Widget with embedded matplotlib graph & navigation toolbar
@@ -31,27 +46,87 @@
layout.addWidget(toolbar)
layout.addWidget(self.canv)
self.setLayout(layout)
def plot_data(self):
self.canv.axes.plot(mem['x_data'], mem['y_data'])
def plot(self):
try:
self.canv.axes.clear()
except AttributeError:
pass
self.plot_data()
self.canv.draw()
print("attempted plot")
def clear(self):
self.canv.axes.clear()
self.canv.draw()
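# Subclasses below override plot_data() with their own dataset from mem; plot() and clear() are inherited from BaseGraph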
class RawDataGraph(BaseGraph):
pass
class VoltageGraph(BaseGraph):
def plot_data(self):
if mem['v_data'][0] is not None:
self.canv.axes.plot(mem['x_data'], mem['v_data'], color='orange')
class PeaksGraph(BaseGraph):
def plot_data(self):
for i in mem['groups_correlated']:
self.canv.axes.plot(i)
class AddedPeaksGraph(BaseGraph):
params = {
'peak_width': None,
'shift_over': None
}
def set_params(self, peak_width, shift_over=0):
self.params['peak_width'] = peak_width
self.params['shift_over'] = shift_over
def plot_data(self):
self.canv.axes.plot(mem['added_peaks'], color='green') # plot added peaks
if self.params['peak_width'] is not None: # plot peak indices
for i in mem['peak_indices']:
self.canv.axes.axvspan(int(i-self.params['peak_width']/2+self.params['shift_over']), int(i+self.params['peak_width']/2+self.params['shift_over']), color='red', alpha=0.4)
class FitsGraph(BaseGraph):
def __init__(self, x):
super(FitsGraph, self).__init__(x)
def plot_data(self):
for g_i in range(len(mem['isolated_peaks'])):
for p_i in range(len(mem['isolated_peaks'][g_i])):
peak = mem['isolated_peaks'][g_i][p_i]
x_data = np.arange(len(peak))
popt = mem['fit_equations'][g_i][p_i]['popt']
self.canv.axes.plot(peak)
self.canv.axes.plot(x_data, exp_func(x_data, *popt), color='red')
# def plot_data(self):
# try:
# self.canv.axes.remove()
# except AttributeError:
# pass
# subplots_stacked = len(mem['isolated_peaks'][0]) # should all be same length
# axes = self.canv.figure.subplots(subplots_stacked, 1, sharex=True)
# for g_i in range(len(mem['isolated_peaks'])):
# for p_i in range(subplots_stacked):
# peak = mem['isolated_peaks'][g_i][p_i]
# axes[p_i].plot(peak)
# x_data = np.arange(len(peak))
# popt = mem['fit_equations'][g_i][p_i]['popt']
# axes[p_i].plot(x_data, exp_func(x_data, *popt), color='red')
# for ax in axs.flat:
# ax.set(xlabel='x-label', ylabel='y-label')
class TimeConstantGraph(BaseGraph):
pass # no modifications thus far