[dataio/daq] now writes two channels into the ring buffer

This commit is contained in:
wendtalexander 2024-10-24 11:50:00 +02:00
parent 8f6c9b1e5e
commit 3d3d9a0cdd

View File

@ -70,7 +70,12 @@ class DaqProducer:
prev_count = 0
prev_index = 0
while not self.stop:
daq_status, transfer_status = self.ai_device.get_scan_status()
try:
daq_status, transfer_status = self.ai_device.get_scan_status()
except uldaq.ul_exception.ULException as e:
log.error("Could not get the scan status")
log.error("Stopping writting")
break
# The index into the data buffer immediately following the last sample transferred.
current_index = transfer_status.current_index
# total samples since start of the scan
@ -92,23 +97,56 @@ class DaqProducer:
if prev_index + chunk_size > len(data_in) - 1:
log.debug("Chunk wraps around buffersize")
first_chunk = len(data_in) - prev_index
data_first_channel = data_in[
prev_index : prev_index + first_chunk : 2
]
data_second_channel = data_in[
prev_index + 1 : prev_index + first_chunk : 2
]
[
self.buffer.append(data_in[prev_index + i])
for i in range(first_chunk)
self.buffer.append(data_first_channel[i], channel=0)
for i in range(int(first_chunk / 2))
]
[
self.buffer.append(data_second_channel[i], channel=1)
for i in range(int(first_chunk / 2))
]
second_chunk = chunk_size - first_chunk
data_first_channel = data_in[0:second_chunk:2]
data_second_channel = data_in[1:second_chunk:2]
[
self.buffer.append(data_in[i])
for i in range(second_chunk)
self.buffer.append(data_first_channel[i], channel=0)
for i in range(int(second_chunk / 2))
]
[
self.buffer.append(data_second_channel[i], channel=1)
for i in range(int(second_chunk / 2))
]
else:
log.debug("Writing chunk to buffer")
# appending to the first channel
data_first_channel = data_in[
prev_index : prev_index + chunk_size : 2
]
data_second_channel = data_in[
prev_index + 1 : prev_index + chunk_size : 2
]
embed()
exit()
[
self.buffer.append(data_in[prev_index + i])
for i in range(chunk_size)
self.buffer.append(data_first_channel[i], channel=0)
for i in range(int(chunk_size / 2))
]
self.buffer.append(data_in[current_index])
[
self.buffer.append(data_second_channel[i], channel=1)
for i in range(int(chunk_size / 2))
]
if total_samples - prev_count > len(data_in):
self.ai_device.scan_stop()
@ -124,33 +162,67 @@ class DaqProducer:
self.ai_device.scan_stop()
daq_status, transfer_status = self.ai_device.get_scan_status()
current_index = transfer_status.current_index
log.debug(daq_status)
chunk_size = transfer_status.current_total_count - prev_count
log.debug(transfer_status.current_index)
log.debug(transfer_status.current_total_count)
log.debug(transfer_status.current_scan_count)
log.debug(self.buffer.totalcount())
log.debug("Appending last chunk")
if prev_index + chunk_size > len(data_in) - 1:
log.debug("Chunk wraps around buffersize")
first_chunk = len(data_in) - prev_index
data_first_channel = data_in[
prev_index : prev_index + first_chunk : 2
]
data_second_channel = data_in[
prev_index + 1 : prev_index + first_chunk : 2
]
[
self.buffer.append(data_in[prev_index + i])
for i in range(first_chunk)
self.buffer.append(data_first_channel[i], channel=0)
for i in range(int(first_chunk / 2))
]
[
self.buffer.append(data_second_channel[i], channel=1)
for i in range(int(first_chunk / 2))
]
second_chunk = chunk_size - first_chunk
[self.buffer.append(data_in[i]) for i in range(second_chunk)]
data_first_channel = data_in[0:second_chunk:2]
data_second_channel = data_in[1:second_chunk:2]
[
self.buffer.append(data_first_channel[i], channel=0)
for i in range(int(second_chunk / 2))
]
[
self.buffer.append(data_second_channel[i], channel=1)
for i in range(int(second_chunk / 2))
]
else:
log.debug("Writing chunk to buffer")
# appending to the first channel
data_first_channel = data_in[
prev_index : prev_index + chunk_size : 2
]
data_second_channel = data_in[
prev_index + 1 : prev_index + chunk_size : 2
]
[
self.buffer.append(data_in[prev_index + i])
for i in range(chunk_size)
self.buffer.append(data_first_channel[i], channel=0)
for i in range(int(chunk_size / 2))
]
self.buffer.append(data_in[current_index])
[
self.buffer.append(data_second_channel[i], channel=1)
for i in range(int(chunk_size / 2))
]
log.info("stopping")
log.debug(self.buffer.totalcount())
break
break