Rivet is hosted by Hepforge, IPPP Durham
MC_ParticleAnalysis.cc
Go to the documentation of this file.
00001 // -*- C++ -*-
00002 #include "Rivet/Analyses/MC_ParticleAnalysis.hh"
00003 
00004 namespace Rivet {
00005 
00006   
00007 
00008 
00009   MC_ParticleAnalysis::MC_ParticleAnalysis(const string& name,
00010                                            size_t nparticles,
00011                                            const string& particle_name)
00012     : Analysis(name),
00013       _nparts(nparticles), _pname(particle_name),
00014       _h_pt(nparticles),
00015       _h_eta(nparticles), _h_eta_plus(nparticles), _h_eta_minus(nparticles),
00016       _h_rap(nparticles), _h_rap_plus(nparticles), _h_rap_minus(nparticles)
00017   {
00018     setNeedsCrossSection(true); // legitimate use, since a base class has no .info file!
00019   }
00020 
00021 
00022 
00023   // Book histograms
00024   void MC_ParticleAnalysis::init() {
00025 
00026     for (size_t i = 0; i < _nparts; ++i) {
00027       const string ptname = _pname + "_pt_" + to_str(i+1);
00028       const double ptmax = 1.0/(double(i)+2.0) * sqrtS()/GeV/2.0;
00029       const int nbins_pt = 100/(i+1);
00030       _h_pt[i] = bookHisto1D(ptname, logspace(nbins_pt, 1.0, ptmax));
00031 
00032       const string etaname = _pname + "_eta_" + to_str(i+1);
00033       _h_eta[i] = bookHisto1D(etaname, i > 1 ? 25 : 50, -5.0, 5.0);
00034       _h_eta_plus[i].reset(new Histo1D(i > 1 ? 15 : 25, 0, 5));
00035       _h_eta_minus[i].reset(new Histo1D(i > 1 ? 15 : 25, 0, 5));
00036 
00037       const string rapname = _pname + "_y_" + to_str(i+1);
00038       _h_rap[i] = bookHisto1D(rapname, i > 1 ? 25 : 50, -5.0, 5.0);
00039       _h_rap_plus[i].reset(new Histo1D(i > 1 ? 15 : 25, 0, 5));
00040       _h_rap_minus[i].reset(new Histo1D(i > 1 ? 15 : 25, 0, 5));
00041 
00042       for (size_t j = i+1; j < min(size_t(3), _nparts); ++j) {
00043         const pair<size_t, size_t> ij = std::make_pair(i, j);
00044 
00045         string detaname = _pname + "s_deta_" + to_str(i+1) + to_str(j+1);
00046         _h_deta.insert(make_pair(ij, bookHisto1D(detaname, 25, -5.0, 5.0)));
00047 
00048         string dphiname = _pname + "s_dphi_" + to_str(i+1) + to_str(j+1);
00049         _h_dphi.insert(make_pair(ij, bookHisto1D(dphiname, 25, 0.0, M_PI)));
00050 
00051         string dRname = _pname + "s_dR_" + to_str(i+1) + to_str(j+1);
00052         _h_dR.insert(make_pair(ij, bookHisto1D(dRname, 25, 0.0, 5.0)));
00053       }
00054     }
00055 
00056     _h_multi_exclusive = bookHisto1D(_pname + "_multi_exclusive", _nparts+3, -0.5, _nparts+3-0.5);
00057     _h_multi_inclusive = bookHisto1D(_pname + "_multi_inclusive", _nparts+3, -0.5, _nparts+3-0.5);
00058     _h_multi_ratio = bookScatter2D(_pname + "_multi_ratio");
00059   }
00060 
00061 
00062   // Do the analysis
00063   void MC_ParticleAnalysis::_analyze(const Event& event, const Particles& particles) {
00064     const double weight = event.weight();
00065     Particles promptparticles;
00066     foreach (const Particle& p, particles)
00067       if (!p.fromDecay()) promptparticles += p;
00068 
00069     for (size_t i = 0; i < _nparts; ++i) {
00070       if (particles.size() < i+1) continue;
00071       _h_pt[i]->fill(particles[i].pt()/GeV, weight);
00072 
00073       // Eta
00074       const double eta_i = particles[i].eta();
00075       _h_eta[i]->fill(eta_i, weight);
00076       (eta_i > 0.0 ? _h_eta_plus : _h_eta_minus)[i]->fill(eta_i, weight);
00077 
00078       // Rapidity
00079       const double rap_i = particles[i].rapidity();
00080       _h_rap[i]->fill(rap_i, weight);
00081       (rap_i > 0.0 ? _h_rap_plus : _h_rap_minus)[i]->fill(rap_i, weight);
00082 
00083       // Inter-particle properties
00084       for (size_t j = i+1; j < min(size_t(3),_nparts); ++j) {
00085         if (particles.size() < j+1) continue;
00086         std::pair<size_t, size_t> ij = std::make_pair(i, j);
00087         double deta = particles[i].eta() - particles[j].eta();
00088         double dphi = deltaPhi(particles[i].momentum(), particles[j].momentum());
00089         double dR = deltaR(particles[i].momentum(), particles[j].momentum());
00090         _h_deta[ij]->fill(deta, weight);
00091         _h_dphi[ij]->fill(dphi, weight);
00092         _h_dR[ij]->fill(dR, weight);
00093       }
00094     }
00095 
00096     // Multiplicities
00097     _h_multi_exclusive->fill(particles.size(), weight);
00098     _h_multi_exclusive_prompt->fill(promptparticles.size(), weight);
00099     for (size_t i = 0; i < _nparts+2; ++i) {
00100       if (particles.size() >= i) _h_multi_inclusive->fill(i, weight);
00101       if (promptparticles.size() >= i) _h_multi_inclusive_prompt->fill(i, weight);
00102     }
00103 
00104   }
00105 
00106 
00107   // Finalize
00108   void MC_ParticleAnalysis::finalize() {
00109     for (size_t i = 0; i < _nparts; ++i) {
00110       scale(_h_pt[i], crossSection()/sumOfWeights());
00111       scale(_h_eta[i], crossSection()/sumOfWeights());
00112       scale(_h_rap[i], crossSection()/sumOfWeights());
00113 
00114       // Create eta/rapidity ratio plots
00115       divide(*_h_eta_plus[i], *_h_eta_minus[i], bookScatter2D(_pname + "_eta_pmratio_" + to_str(i+1)));
00116       divide(*_h_rap_plus[i], *_h_rap_minus[i], bookScatter2D(_pname + "_y_pmratio_" + to_str(i+1)));
00117     }
00118 
00119     // Scale the d{eta,phi,R} histograms
00120     typedef map<pair<size_t, size_t>, Histo1DPtr> HistMap;
00121     foreach (HistMap::value_type& it, _h_deta) scale(it.second, crossSection()/sumOfWeights());
00122     foreach (HistMap::value_type& it, _h_dphi) scale(it.second, crossSection()/sumOfWeights());
00123     foreach (HistMap::value_type& it, _h_dR) scale(it.second, crossSection()/sumOfWeights());
00124 
00125     // Fill inclusive multi ratios
00126     for (size_t i = 0; i < _h_multi_inclusive->numBins()-1; ++i) {
00127       _h_multi_ratio->addPoint(i+1, 0, 0.5, 0);
00128       if (_h_multi_inclusive->bin(i).sumW() > 0.0) {
00129         const double ratio = _h_multi_inclusive->bin(i+1).sumW() / _h_multi_inclusive->bin(i).sumW();
00130         const double relerr_i = _h_multi_inclusive->bin(i).relErr();
00131         const double relerr_j = _h_multi_inclusive->bin(i+1).relErr();
00132         const double err = ratio * (relerr_i + relerr_j);
00133         _h_multi_ratio->point(i).setY(ratio, err);
00134       }
00135     }
00136     for (size_t i = 0; i < _h_multi_inclusive_prompt->numBins()-1; ++i) {
00137       _h_multi_ratio_prompt->addPoint(i+1, 0, 0.5, 0);
00138       if (_h_multi_inclusive_prompt->bin(i).sumW() > 0.0) {
00139         const double ratio = _h_multi_inclusive_prompt->bin(i+1).sumW() / _h_multi_inclusive_prompt->bin(i).sumW();
00140         const double relerr_i = _h_multi_inclusive_prompt->bin(i).relErr();
00141         const double relerr_j = _h_multi_inclusive_prompt->bin(i+1).relErr();
00142         const double err = ratio * (relerr_i + relerr_j);
00143         _h_multi_ratio_prompt->point(i).setY(ratio, err);
00144       }
00145     }
00146 
00147     scale(_h_multi_exclusive, crossSection()/sumOfWeights());
00148     scale(_h_multi_exclusive_prompt, crossSection()/sumOfWeights());
00149     scale(_h_multi_inclusive, crossSection()/sumOfWeights());
00150     scale(_h_multi_inclusive_prompt, crossSection()/sumOfWeights());
00151   }
00152 
00153 
00154 }