I have a detector which generates frames of 3072 pixels by 64 lines. It works like this: the detector acquires 64 lines of 3072 pixels each and then sends that frame to my computer.
My job is to take these lines and append them to an image of about 5000 lines (5248 in the code below).
Each time the detector generates a frame (64 lines) I append it to the Qt graphics view and display it. When I reach the full image height I reset the graphics view, the image and everything else, and start from the beginning to display a new image.
I also have multiple threads for this: the main thread, in which my GUI runs; a thread which listens to the detector for incoming messages; and a display thread which processes the input from the detector and passes it to the GUI.
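For reference, this is roughly the geometry I am working with. The 16-bit pixel depth is an assumption taken from the Format_Grayscale16 image in my Display code, the per-frame byte count ignores any descriptor bytes, and the kXxx names exist only for this sketch:
// Frame/image geometry (sketch only, assuming 16-bit pixels, descriptor bytes not counted)
constexpr int kWidth          = 3072;  // pixels per line
constexpr int kFrameHeight    = 64;    // lines per detector frame
constexpr int kTotalLines     = 5248;  // lines accumulated before the view is reset
constexpr int kBytesPerPixel  = 2;     // Format_Grayscale16
constexpr int kBytesPerFrame  = kWidth * kFrameHeight * kBytesPerPixel; // 393216 bytes per frame
constexpr int kFramesPerImage = kTotalLines / kFrameHeight;             // 82 frames per full image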
My concern is: am I resetting all the objects properly?
It seems that when I hard-code scaledValue = 255; inside an if condition, the white band appears at the exact same spot every time, but when I read real data from the detector the image is shifted. Sometimes part of one image carries over into the next, and sometimes the next image starts inside the current picture.
// GUI class implementation
GUI::GUI(QWidget *parent)
    : QMainWindow(parent)
    , ui(new Ui::GUI)
{
    ui->setupUi(this);
    m_try = new Try(this);

    // Initialize the graphics view
    int totalFrames = 5248;
    scene = new QGraphicsScene();
    scene->setSceneRect(0, 0, 3072, totalFrames);
    scene->setBackgroundBrush(Qt::black);
    ui->graphicsView->setScene(scene);
    ui->graphicsView->rotate(270);
    ui->graphicsView->fitInView(scene->sceneRect(), Qt::KeepAspectRatio);
}
void GUI::on_start_clicked()
{
    qInfo() << "button clicked";
    qInfo() << "should be main" << QThread::currentThread();

    // Detector thread: listens to the detector for incoming frames
    m_detector = new Detector();
    QThread *detectorThread = new QThread();
    detectorThread->setObjectName("detector Thread");
    m_detector->totalNumberOfProjections = ui->total_no_of_projections->value();
    m_detector->framePerProjection = ui->frames_per_projections->value();
    m_detector->lineRate = ui->line_rate->value();
    m_detector->moveToThread(detectorThread);
    connect(m_detector, &Detector::progress, this, &GUI::updateProgress);
    connect(detectorThread, &QThread::started, m_detector, &Detector::run);

    // Display thread: processes incoming frames and hands them to the GUI
    QThread *displayThread = new QThread();
    Display *processor = new Display();
    processor->moveToThread(displayThread);
    displayThread->setObjectName("Display Thread");
    connect(m_detector, &Detector::transferFrame, processor, &Display::processFrameBuffer, Qt::QueuedConnection);
    connect(displayThread, &QThread::started, processor, &Display::run);
    connect(processor, &Display::pixelProcessed, this, &GUI::updateScene, Qt::QueuedConnection);
    connect(processor, &Display::frameProcessed, this, &GUI::resetCurrentLine, Qt::QueuedConnection);

    displayThread->start();
    detectorThread->start();
}
void GUI::resetCurrentLine(const QImage &image) {
    int frameHeight = 64;
    QPixmap pixmap = QPixmap::fromImage(image);
    QRectF updateRect(0, currentLinegui, 3072, frameHeight);
    scene->invalidate(updateRect, QGraphicsScene::ForegroundLayer);
    QGraphicsPixmapItem *pixmapItem = scene->addPixmap(pixmap.copy(0, currentLinegui, 3072, frameHeight));
    pixmapItem->setPos(0, currentLinegui);
    currentLinegui += frameHeight;
    ui->graphicsView->fitInView(scene->sceneRect(), Qt::KeepAspectRatio);

    if (currentLinegui > 5245) {
        qDebug() << "doing reset" << "breset" << currentLinegui;
        currentLinegui = 0;
        scene->clear();
        scene->setBackgroundBrush(Qt::black);
        ui->graphicsView->setScene(scene);
        ui->graphicsView->fitInView(scene->sceneRect(), Qt::KeepAspectRatio);
    }
}
// Display class implementation
Display::Display(QObject *parent)
    : QThread{parent}, image(3072, 5248, QImage::Format_Grayscale16), currentLine(0) {}

void Display::processFrameBuffer(char *framebuf) {
    QMutex mutex;
    QMutexLocker locker(&mutex);
    int width = 3072;
    int frameHeight = 64;
    int totalLines = 5248;
    if (currentLine >= totalLines) {
        qWarning("All lines have been processed");
        return;
    }
    const ushort *rawPixels = reinterpret_cast<const ushort*>(framebuf);
    for (int y = 0; y < frameHeight; y++) {
        for (int x = 0; x < width; x++) {
            // Index into the incoming frame buffer, offset by 3072 * currentLine
            int bufIndex = (y * width + x) + 3072 * currentLine;
            ushort pixelValue = rawPixels[bufIndex];
            if (static_cast<int>(static_cast<double>(pixelValue)) > 1000)
                pixelValue = 990;
            int scaledValue = static_cast<int>(static_cast<double>(pixelValue) * 255.0 / 1000);
            // Hard-coded test band: force a white stripe to check where it lands
            if (currentLine > 2500 && currentLine < 2600)
                scaledValue = 255;
            image.setPixelColor(x, currentLine + y, QColor(scaledValue, scaledValue, scaledValue));
        }
    }
    currentLine += frameHeight;
    emit frameProcessed(image);
    if (currentLine > 5245) {
        qDebug() << "breset" << currentLine;
        currentLine = 0;
        image = QImage(3072, 5248, QImage::Format_Grayscale16);
    }
}
// Detector thread snippet (don't care about this part much)
char *framebuf = NULL;
framebuf = new char[iImageByteSizeWithDescriptor];
while (true)
{
    j = 0;
    memset(framebuf, 0, ImageByteSize);
    // Keep reading until a full frame (including descriptor) has arrived
    while (j < iImageByteSizeWithDescriptor)
    {
        i = recv(ClientSocket, framebuf + j, iImageByteSizeWithDescriptor - j, 0);
        if (i <= 0) throw std::runtime_error("Transmission error: no data received");
        j += i;
    }
    memcpy(images + (recv_frames * ImageByteSize), framebuf, ImageByteSize);
    std::cout << "Frame num " << recv_frames << "\n";
    recv_frames++;
    if (recv_frames >= nrImages)
    {
        break;
    }
    emit transferFrame(framebuf);
}