Cluster convolution for GATE: Difference between revisions

From pCT
Hpe090 (talk | contribs)
Made page
 
Hpe090 (talk | contribs)
Added code to take edep arguments
 
(3 intermediate revisions by the same user not shown)
Line 1: Line 1:
In her MSc project, Silje Grimstad made a filtered library of the clusters from the summer beamtest in Heidelberg.
In her MSc project, Silje Grimstad made a filtered library of the clusters from the summer beamtest in Heidelberg.
The result is ~20k Helium clusters stored as a x/y array in a ROOT file:
 
The result is ~15k Helium clusters stored as x/y arrays in a [[:File:database_final.zip|ROOT file included here]].


{| class="wikitable"
{| class="wikitable"
Line 36: Line 37:
[[File:original_vs_convoluted_hitmap.PNG|1000px]]
[[File:original_vs_convoluted_hitmap.PNG|1000px]]


The needed cluster database for this is located at [https://github.com/HelgeEgil/focal/blob/master/DTCToolkit/Data/ClusterSizes/ALPIDE/database_combined_clusters_filter.root GitHub].
Below is example code to perform the diffusion on-fly using a Hits container class with Hit objects (x,y,layer,edep,eventID).
For more information about the implementation see [https://github.com/HelgeEgil/focal GitHub].


<source lang="cpp">
<source lang="cpp">
#include <TTree.h>
#include <TTree.h>
#include <TFile.h>
#include <TFile.h>
#include <TRandom.h>
#include <TRandom3.h>
#include <list>
#include <list>
#include <vector>
#include <vector>
Line 47: Line 49:
using namespace std;
using namespace std;


void convert_root_file() {
void           makeSortedClusterDatabase();
  TFile        *fCluster = new TFile("database_combined_clusters_filter.root", "READ");
Hits         * diffuseHits(TRandom3 * gRandom, Hits * singlePixelHits);
  TFile         *fSimulationIn = new TFile("simulationToBeClustered.root", "READ");
  TFile        *fSimulationOut = new TFile("simulationToBeClustered_clustered.root", "recreate");
  TTree        *treeCluster = (TTree*) fCluster->Get("database");
  TTree        *treeSimulationIn  = (TTree*) fSimulationIn->Get("Hits");
  TTree        *treeSimulationOut = new TTree("Hits", "Clustered GATE tree");


  Double_t      x_mean, y_mean;
TTree        * CDB_treeCluster;
  Int_t          randomClusterIdx, binPos, idx_x;
list<Int_t> * CDB_hit_array = 0;
  list<Int_t>   *hit_array = 0;
TBranch     * CDB_b_hit_array;
  TBranch       *b_hit_array;
TTreeIndex  * CDB_index;
  TRandom3      *gRand = new TRandom3(0);
Int_t          CDB_clusterSize;
  Float_t        pixel_size = 0.02929;
Double_t      CDB_x_mean, CDB_y_mean;
 
Int_t          CDB_sortIndex[50];
  Float_t        allZ = 0;
TFile        * CDB_fCluster;  
  Int_t          fillN = 0;
  Float_t        x,y,z,edep,outX, outY, outZ;
  Int_t          parentID, eventID, outEventID, outParentID, nClusters, timeInClockSpeed, PDGEncoding;
  Char_t        processName[17];


  TCanvas      *c = new TCanvas();
void makeSortedClusterDatabase() {
  TH2C          *hOrig = new TH2C("hOrig", "Original hitmap",1024,-15,15,512,-7.5,7.5);
   // From GlobalConstants/MaterialConstants.C / .h
   TH2C          *hNew  = new TH2C("hNew",  "New hitmap",    1024,-15,15,512,-7.5,7.5);
  Int_t          palette[2] = {kWhite, kBlack};
 
  c->Divide(2,1);
  gStyle->SetPalette(2, palette);


   treeSimulationIn->SetBranchAddress("posX",&x);
   Int_t lastClusterSize = -1;
   treeSimulationIn->SetBranchAddress("posY",&y);
   CDB_fCluster = new TFile("Data/ClusterSizes/ALPIDE/database_final.root", "READ");
   treeSimulationIn->SetBranchAddress("posZ",&z);
   CDB_treeCluster = (TTree*) CDB_fCluster->Get("database");
   treeSimulationIn->SetBranchAddress("edep",&edep);
   CDB_treeCluster->SetBranchAddress("size", &CDB_clusterSize);
   treeSimulationIn->SetBranchAddress("eventID",&eventID);
   CDB_treeCluster->SetBranchAddress("x_mean", &CDB_x_mean);
   treeSimulationIn->SetBranchAddress("parentID",&parentID);
   CDB_treeCluster->SetBranchAddress("y_mean", &CDB_y_mean);
   treeSimulationIn->SetBranchAddress("PDGEncoding", &PDGEncoding);
   CDB_treeCluster->SetBranchAddress("hit_array", &CDB_hit_array, &CDB_b_hit_array);


   treeSimulationOut->Branch("posX", &outX, "posX/F");
   // Sort clusters based on cluster size
   treeSimulationOut->Branch("posY", &outY, "posY/F");
  for (int i=0; i<50; i++) CDB_sortIndex[i] = -1;
   treeSimulationOut->Branch("posZ", &outZ, "posZ/F");
   CDB_treeCluster->BuildIndex("size");
   treeSimulationOut->Branch("eventID", &outEventID, "eventID/I");
   CDB_index = (TTreeIndex*) CDB_treeCluster->GetTreeIndex();
  treeSimulationOut->Branch("parentID", &outParentID, "parentID/I");
   for (int i=0; i<CDB_index->GetN()-1; i++) {
  treeSimulationOut->Branch("clockTime", &timeInClockSpeed, "clockTime/I");
      Long64_t local = CDB_treeCluster->LoadTree(CDB_index->GetIndex()[i]);
      CDB_treeCluster->GetEntry(local);
      if (lastClusterSize < 0 || lastClusterSize != CDB_clusterSize) {
        CDB_sortIndex[CDB_clusterSize] = i;
        lastClusterSize = CDB_clusterSize;
      }
  }
}


  treeCluster->SetBranchAddress("x_mean", &x_mean);
Hits * diffuseHits(TRandom3 *gRandom, Hits * singlePixelHits) {
  treeCluster->SetBranchAddress("y_mean", &y_mean);
   // From HelperTools/getTracks.C
   treeCluster->SetBranchAddress("hit_array", &hit_array, &b_hit_array);


   nClusters = treeCluster->GetEntriesFast();
   Int_t          nHits = singlePixelHits->GetEntriesFast();
   randomClusterIdx = gRand->Integer(nClusters);
   Hits        * diffusedHits = new Hits();
   treeCluster->GetEntry(randomClusterIdx);
   Int_t          x, y, outX, outY, layer, cs, eventID, idx_x, binPos;
  Float_t        edep;
  Int_t          randomClusterIdx;


   for (Int_t i=0, N = treeSimulationIn->GetEntries(); i<N; ++i) {
   for (Int_t h=0; h<nHits; h++) {
       treeSimulationIn->GetEntry(i);
       x = singlePixelHits->getX(h);
        
       y = singlePixelHits->getY(h);
      timeInClockSpeed = 400 * eventID;
       layer = singlePixelHits->getLayer(h);
       outParentID = parentID;
       edep = singlePixelHits->getEdep(h);
       outEventID = eventID;
       eventID = singlePixelHits->getEventID(h);
       outZ = z;


       if (z<2) hOrig->Fill(x,y);
      // The correspondence between CS and Edep [keV/um] is calculated from Ganesh's NIM A Proceedings article.
      cs = 4.2267 * pow(edep, 0.6500) + 0.5;
   
      // To keep any outliers within the tabulated clusters in the database
       if (cs<2) cs=2;
      if (cs>=27) cs=26;


       randomClusterIdx = gRand->Integer(nClusters);
       randomClusterIdx = gRandom->Integer(CDB_sortIndex[cs+1] - CDB_sortIndex[cs]) + CDB_sortIndex[cs];
       treeCluster->GetEntry(randomClusterIdx);
       CDB_treeCluster->GetEntry(CDB_treeCluster->LoadTree(CDB_index->GetIndex()[randomClusterIdx]));


       idx_x = 0;
       idx_x = 0;
       if (abs(PDGEncoding) == 2212) {
       Int_t thisCS = 0;
        for (Int_t n : *hit_array) { // loop x
      for (Int_t n : *CDB_hit_array) {
            for (int binPosPow = 0; binPosPow < 10; binPosPow++) { // loop y
        for (Int_t binPosPow = 0; binPosPow < 10; binPosPow++) {
              binPos = pow(2, binPosPow);
            binPos = pow(2, binPosPow);
              if (binPos & n) {
            if (binPos & n) {
                  outX = x + (idx_x - x_mean) * pixel_size;
              outX = x + (idx_x - CDB_x_mean) + 0.5;
                  outY = y + (binPosPow - y_mean) * pixel_size;
              outY = y + (binPosPow - CDB_y_mean) + 0.5;
                  treeSimulationOut->Fill();
              diffusedHits->appendPoint(outX, outY, layer, eventID, edep/CDB_clusterSize);  
                  if (z<2) hNew->Fill(outX, outY);
              }
             }
             }
            idx_x++;
         }
         }
      idx_x++;
       }
       }
   }
   }
  return diffusedHits;
}


  treeSimulationOut->Write();
  c->cd(1);
  hOrig->Draw("COL");
  c->cd(2);
  hNew->Draw("COL");
  /*
  delete fSimulationIn;
  delete fSimulationOut;
  delete fCluster;
  */
}
</source>
</source>

Latest revision as of 11:56, 1 April 2019

In her MSc project, Silje Grimstad made a filtered library of the clusters from the summer beamtest in Heidelberg.

The result is ~15k Helium clusters stored as x/y arrays in a ROOT file included here.

x_mean y_mean size hit_array height width x_start y_start i_event combined
float float int list<int> int int int int int bool

A random sample of 9 clusters is shown below:

We want to convolute a GATE simulation with clusters from this library. For each hit in the MC data, we pick a random cluster from the library and draw it around the hit position. To do this, use the code at the bottom, with the following result:

Below is example code to perform the diffusion on-fly using a Hits container class with Hit objects (x,y,layer,edep,eventID). For more information about the implementation see GitHub.

<source lang="cpp">

#include <TTree.h>
#include <TFile.h>
#include <TRandom3.h>
#include <list>
#include <vector>

using namespace std;

// Forward declarations: first build the size-sorted cluster database,
// then use it to diffuse single-pixel hits into realistic pixel clusters.
void makeSortedClusterDatabase(); Hits * diffuseHits(TRandom3 * gRandom, Hits * singlePixelHits);

// Globals shared by the two functions (CDB = Cluster DataBase):
// the open ROOT file and tree, branch buffers holding one cluster at a time
// (size, mean position, pixel bit pattern), the size-sorted tree index, and
// CDB_sortIndex[s] = position of the first sorted entry with cluster size s
// (-1 when no cluster of that size was recorded).
TTree * CDB_treeCluster; list<Int_t> * CDB_hit_array = 0; TBranch * CDB_b_hit_array; TTreeIndex * CDB_index; Int_t CDB_clusterSize; Double_t CDB_x_mean, CDB_y_mean; Int_t CDB_sortIndex[50]; TFile * CDB_fCluster;

// Open the cluster database and build a lookup table that maps a cluster
// size to the first entry of that size in the size-sorted tree.
//
// After this call:
//   * CDB_treeCluster holds the measured clusters (branches: size, x_mean,
//     y_mean, hit_array),
//   * CDB_index is the tree index sorted on the "size" branch,
//   * CDB_sortIndex[s] is the sorted position of the first cluster of
//     size s, or -1 when no cluster of that size exists.
//
// NOTE(review): sizes absent from the database leave -1 gaps in
// CDB_sortIndex; diffuseHits() assumes every size it can draw (2..27) is
// present — confirm against the database contents.
void makeSortedClusterDatabase() {

   // From GlobalConstants/MaterialConstants.C / .h
   Int_t lastClusterSize = -1;
   CDB_fCluster = new TFile("Data/ClusterSizes/ALPIDE/database_final.root", "READ");
   CDB_treeCluster = (TTree*) CDB_fCluster->Get("database");
   CDB_treeCluster->SetBranchAddress("size", &CDB_clusterSize);
   CDB_treeCluster->SetBranchAddress("x_mean", &CDB_x_mean);
   CDB_treeCluster->SetBranchAddress("y_mean", &CDB_y_mean);
   CDB_treeCluster->SetBranchAddress("hit_array", &CDB_hit_array, &CDB_b_hit_array);

   // Sort clusters based on cluster size and record where each size starts.
   for (int i = 0; i < 50; i++) CDB_sortIndex[i] = -1;
   CDB_treeCluster->BuildIndex("size");
   CDB_index = (TTreeIndex*) CDB_treeCluster->GetTreeIndex();

   // BUG FIX: the loop previously stopped at GetN()-1 and never visited the
   // last sorted entry, so the start of the largest cluster size could be
   // missed entirely.
   for (int i = 0; i < CDB_index->GetN(); i++) {
      Long64_t local = CDB_treeCluster->LoadTree(CDB_index->GetIndex()[i]);
      CDB_treeCluster->GetEntry(local);
      if (lastClusterSize < 0 || lastClusterSize != CDB_clusterSize) {
         CDB_sortIndex[CDB_clusterSize] = i;
         lastClusterSize = CDB_clusterSize;
      }
   }

   // End sentinel: diffuseHits() draws uniformly from the half-open range
   // [CDB_sortIndex[cs], CDB_sortIndex[cs+1]), so the slot one past the
   // largest size must point just past the final entry.
   if (lastClusterSize >= 0 && lastClusterSize + 1 < 50)
      CDB_sortIndex[lastClusterSize + 1] = CDB_index->GetN();
}

// Convolute a container of single-pixel MC hits with measured cluster shapes.
//
// For each input hit the deposited energy is converted to an expected
// cluster size (CS), a random database cluster of that size is drawn
// (database prepared by makeSortedClusterDatabase) and its pixel pattern
// is stamped around the hit position. The hit's edep is split evenly over
// the cluster pixels.
//
// gRandom          generator used for the cluster draw (shadows ROOT's global)
// singlePixelHits  input hits with (x, y, layer, edep, eventID)
// returns          a newly allocated Hits container; the caller takes ownership
Hits * diffuseHits(TRandom3 *gRandom, Hits * singlePixelHits) {

   // From HelperTools/getTracks.C
   Int_t          nHits = singlePixelHits->GetEntriesFast();
   Hits         * diffusedHits = new Hits();
   Int_t          x, y, outX, outY, layer, cs, eventID, idx_x, binPos;
   Float_t        edep;
   Int_t          randomClusterIdx;

   for (Int_t h = 0; h < nHits; h++) {
      x       = singlePixelHits->getX(h);
      y       = singlePixelHits->getY(h);
      layer   = singlePixelHits->getLayer(h);
      edep    = singlePixelHits->getEdep(h);
      eventID = singlePixelHits->getEventID(h);

      // The correspondence between CS and Edep [keV/um] is calculated from
      // Ganesh's NIM A Proceedings article; +0.5 rounds to nearest integer.
      cs = 4.2267 * pow(edep, 0.6500) + 0.5;

      // Keep any outliers within the tabulated clusters in the database.
      if (cs < 2)   cs = 2;
      if (cs >= 27) cs = 26;

      // Uniform draw among the database clusters of size cs, i.e. sorted
      // positions CDB_sortIndex[cs] .. CDB_sortIndex[cs+1]-1.
      randomClusterIdx = gRandom->Integer(CDB_sortIndex[cs+1] - CDB_sortIndex[cs]) + CDB_sortIndex[cs];
      CDB_treeCluster->GetEntry(CDB_treeCluster->LoadTree(CDB_index->GetIndex()[randomClusterIdx]));

      // Each element of hit_array appears to be a 10-bit mask: element
      // idx_x encodes which of 10 rows fire in column idx_x of the cluster.
      idx_x = 0;
      for (Int_t n : *CDB_hit_array) {
         for (Int_t binPosPow = 0; binPosPow < 10; binPosPow++) {
            // FIX: integer shift instead of floating-point pow(2, ...) —
            // exact, cheaper, and avoids a double->int round-trip.
            binPos = 1 << binPosPow;
            if (binPos & n) {
               // Re-center the cluster on the hit; +0.5 rounds the
               // mean-offset to the nearest pixel.
               outX = x + (idx_x - CDB_x_mean) + 0.5;
               outY = y + (binPosPow - CDB_y_mean) + 0.5;
               diffusedHits->appendPoint(outX, outY, layer, eventID, edep/CDB_clusterSize);
            }
         }
         idx_x++;
      }
   }
   return diffusedHits;
}

</source>