# forked from awendt/pyrelacs
# minipyrelacs/pyrelacs/dataio/daq_producer.py
# last touched: 2024-10-25 18:03:23 +02:00
import time
import faulthandler
import uldaq
import numpy as np
from IPython import embed
import matplotlib.pyplot as plt
from pyrelacs.dataio.circbuffer import CircBuffer
from pyrelacs.util.logging import config_logging
log = config_logging()
faulthandler.enable()
class DaqProducer:
    """Stream analog input from a uldaq DAQ device into a circular buffer.

    Runs a continuous hardware scan and copies the interleaved samples the
    driver writes into its own buffer over to the shared ``CircBuffer``, one
    chunk at a time, until ``stop_aquisition()`` is called.
    """

    def __init__(
        self, buffer: CircBuffer, device: uldaq.DaqDevice, channels: list[int]
    ):
        """
        Parameters
        ----------
        buffer
            Target circular buffer; its ``channel_count`` must match the
            number of scanned DAQ channels.
        device
            Already-connected uldaq DAQ device.
        channels
            Two-element ``[first, last]`` analog-input channel range
            (inclusive); equal entries mean a single channel.
        """
        self.buffer = buffer
        self.device = device
        self.ai_device = self.device.get_ai_device()
        self.channels = channels
        # Set by stop_aquisition() (typically from another thread) to end
        # the read loop in read_analog_continously().
        self.stop = False

    def read_analog_continously(
        self,
        *args,
        **kwargs,
    ):
        """Acquire analog data continuously until ``self.stop`` is set.

        Starts a continuous scan, waits until the driver reports it running,
        then repeatedly copies full chunks (one tenth of the driver buffer)
        into ``self.buffer``. When the loop ends, the remaining partial
        chunk is flushed.

        Returns
        -------
        str
            The literal string ``"Done. "``.

        Raises
        ------
        ValueError
            If the scanned channel count does not match the buffer's
            channel count.
        """
        log.debug("starting acquisition")
        if self.channels[0] == self.channels[1]:
            channel_range = np.arange(1)
        else:
            channel_range = np.arange(self.channels[0], self.channels[1] + 1)
        # Explicit raise instead of `assert` so the check survives `python -O`
        # (the original `assert cond, ValueError(...)` never raised ValueError).
        if channel_range.size != self.buffer.channel_count:
            raise ValueError(
                f"Missmatch in channel count,\n daq_channel: "
                f"{channel_range.size}\n buffer_channel: {self.buffer.channel_count}"
            )
        # Let the buffer for the daq device hold 30 seconds of data per
        # channel (the original comment said 5 s; the code multiplies by 30).
        daq_buffer_size = int(self.buffer.samplerate * 30)
        data_in = uldaq.create_float_buffer(channel_range.size, daq_buffer_size)
        log.debug(f"Buffersize for daq {len(data_in)}")
        log.debug(f"Buffersize {self.buffer.size}")
        self.ai_device.a_in_scan(
            self.channels[0],
            self.channels[1],
            uldaq.AiInputMode.SINGLE_ENDED,
            uldaq.Range.BIP10VOLTS,
            daq_buffer_size,
            self.buffer.samplerate,
            uldaq.ScanOption.CONTINUOUS,
            uldaq.AInScanFlag.DEFAULT,
            data=data_in,
        )
        # Copy data out in chunks of one tenth of the driver buffer.
        chunk_size = int(daq_buffer_size / 10)
        # Busy-wait until the driver reports the scan as running.
        while self.ai_device.get_scan_status()[0] == uldaq.ScanStatus.IDLE:
            pass
        prev_count = 0  # total samples already copied out of data_in
        prev_index = 0  # next read position inside data_in
        while not self.stop:
            try:
                _, transfer_status = self.ai_device.get_scan_status()
            except uldaq.ul_exception.ULException:
                log.error("Could not get the scan status")
                log.error("Stopping writting")
                break
            # Total samples (all channels) transferred since the scan started.
            total_samples = transfer_status.current_total_count
            new_data_count = total_samples - prev_count
            # More than one whole driver buffer arrived since the last copy:
            # unread data has been overwritten, so stop the acquisition.
            if new_data_count > len(data_in):
                self.ai_device.scan_stop()
                log.error("A Buffer overrun occurred")
                break
            if new_data_count > chunk_size:
                self._append_chunk(data_in, prev_index, chunk_size)
                prev_count += chunk_size
                # BUGFIX: wrap at len(data_in) (the full interleaved buffer)
                # rather than daq_buffer_size (samples per channel) — every
                # slice above and transfer_status.current_index address the
                # full buffer.
                prev_index = (prev_index + chunk_size) % len(data_in)
        # Loop ended (stop requested or error): halt the scan and flush
        # whatever the driver produced since the last full chunk.
        self.ai_device.scan_stop()
        daq_status, transfer_status = self.ai_device.get_scan_status()
        log.debug(daq_status)
        last_chunk = transfer_status.current_total_count - prev_count
        log.debug(f"DAQ current index {transfer_status.current_index}")
        log.debug(f"DAQ total count {transfer_status.current_total_count}")
        log.debug(
            f"DAQ Samples per channel {transfer_status.current_scan_count}"
        )
        log.debug("Appending last chunk")
        self._append_chunk(data_in, prev_index, last_chunk)
        log.info("stopping")
        log.debug(self.buffer.totalcount())
        log.debug(self.ai_device.get_scan_status())
        return "Done. "

    def _append_chunk(self, data_in, start, count):
        # Copy `count` interleaved samples from data_in[start:] into the
        # circular buffer, splitting the copy in two when it runs past the
        # end of data_in (the driver buffer is circular).
        if start + count > len(data_in) - 1:
            log.debug("Chunk wraps around buffersize")
            head = len(data_in) - start
            self._copy_samples(data_in, start, head)
            self._copy_samples(data_in, 0, count - head)
        else:
            log.debug("Writing chunk to buffer")
            self._copy_samples(data_in, start, count)

    def _copy_samples(self, data_in, start, count):
        # De-interleave `count` samples starting at `start` and append them
        # to the circular buffer: even offsets -> channel 0, odd -> channel 1.
        # NOTE(review): like the original inline code this assumes exactly
        # two interleaved channels and an even `start`/`count` — confirm
        # before using with a single-channel scan.
        first_channel = data_in[start : start + count : 2]
        second_channel = data_in[start + 1 : start + count : 2]
        pairs = count // 2
        for i in range(pairs):
            self.buffer.append(first_channel[i], channel=0)
        for i in range(pairs):
            self.buffer.append(second_channel[i], channel=1)

    def stop_aquisition(self):
        """Request that the acquisition loop in read_analog_continously stop."""
        self.stop = True
# if __name__ == "__main__":
# devices = uldaq.get_daq_device_inventory(uldaq.InterfaceType.USB)
# log.debug(f"Found daq devices {len(devices)}, connecting to the first one")
# try:
# daq_device = uldaq.DaqDevice(devices[0])
# except uldaq.ul_exception.ULException as e:
# log.error("Did not found daq devices, please connect one")
# raise e
# daq_device.connect()
#
# buf = CircBuffer(size=1_000_000, samplerate=100)
# producer = DaqProducer(buf, daq_device, [1, 1])
# producer.read_analog_continously()