MC_ParticleAnalysis.cc
// -*- C++ -*-
#include "Rivet/Analyses/MC_ParticleAnalysis.hh"

namespace Rivet {


  MC_ParticleAnalysis::MC_ParticleAnalysis(const string& name,
                                           size_t nparticles,
                                           const string& particle_name)
    : Analysis(name),
      _nparts(nparticles), _pname(particle_name),
      _h_pt(nparticles),
      _h_eta(nparticles), _h_eta_plus(nparticles), _h_eta_minus(nparticles),
      _h_rap(nparticles), _h_rap_plus(nparticles), _h_rap_minus(nparticles)
  {
    setNeedsCrossSection(true); // legitimate use, since a base class has no .info file!
  }


  // Book histograms
  void MC_ParticleAnalysis::init() {

    for (size_t i = 0; i < _nparts; ++i) {
      const string ptname = _pname + "_pt_" + to_str(i+1);
      const double ptmax = 1.0/(double(i)+2.0) * (sqrtS()>0.?sqrtS():14000.)/GeV/2.0;
      const int nbins_pt = 100/(i+1);
      _h_pt[i] = bookHisto1D(ptname, logspace(nbins_pt, 1.0, ptmax));

      const string etaname = _pname + "_eta_" + to_str(i+1);
      _h_eta[i] = bookHisto1D(etaname, i > 1 ? 25 : 50, -5.0, 5.0);
      _h_eta_plus[i].reset(new Histo1D(i > 1 ? 15 : 25, 0, 5));
      _h_eta_minus[i].reset(new Histo1D(i > 1 ? 15 : 25, 0, 5));

      const string rapname = _pname + "_y_" + to_str(i+1);
      _h_rap[i] = bookHisto1D(rapname, i > 1 ? 25 : 50, -5.0, 5.0);
      _h_rap_plus[i].reset(new Histo1D(i > 1 ? 15 : 25, 0, 5));
      _h_rap_minus[i].reset(new Histo1D(i > 1 ? 15 : 25, 0, 5));

      for (size_t j = i+1; j < min(size_t(3), _nparts); ++j) {
        const pair<size_t, size_t> ij = std::make_pair(i, j);

        string detaname = _pname + "s_deta_" + to_str(i+1) + to_str(j+1);
        _h_deta.insert(make_pair(ij, bookHisto1D(detaname, 25, -5.0, 5.0)));

        string dphiname = _pname + "s_dphi_" + to_str(i+1) + to_str(j+1);
        _h_dphi.insert(make_pair(ij, bookHisto1D(dphiname, 25, 0.0, M_PI)));

        string dRname = _pname + "s_dR_" + to_str(i+1) + to_str(j+1);
        _h_dR.insert(make_pair(ij, bookHisto1D(dRname, 25, 0.0, 5.0)));
      }
    }

    _h_multi_exclusive = bookHisto1D(_pname + "_multi_exclusive", _nparts+3, -0.5, _nparts+3-0.5);
    _h_multi_inclusive = bookHisto1D(_pname + "_multi_inclusive", _nparts+3, -0.5, _nparts+3-0.5);
    _h_multi_ratio = bookScatter2D(_pname + "_multi_ratio");

    _h_multi_exclusive_prompt = bookHisto1D(_pname + "_multi_exclusive_prompt", _nparts+3, -0.5, _nparts+3-0.5);
    _h_multi_inclusive_prompt = bookHisto1D(_pname + "_multi_inclusive_prompt", _nparts+3, -0.5, _nparts+3-0.5);
    _h_multi_ratio_prompt = bookScatter2D(_pname + "_multi_ratio_prompt");
  }


  // Do the analysis
  void MC_ParticleAnalysis::_analyze(const Event& event, const Particles& particles) {
    const double weight = event.weight();
    Particles promptparticles;
    foreach (const Particle& p, particles)
      if (!p.fromDecay()) promptparticles += p;

    for (size_t i = 0; i < _nparts; ++i) {
      if (particles.size() < i+1) continue;
      _h_pt[i]->fill(particles[i].pt()/GeV, weight);

      // Eta
      const double eta_i = particles[i].eta();
      _h_eta[i]->fill(eta_i, weight);
      (eta_i > 0.0 ? _h_eta_plus : _h_eta_minus)[i]->fill(fabs(eta_i), weight);

      // Rapidity
      const double rap_i = particles[i].rapidity();
      _h_rap[i]->fill(rap_i, weight);
      (rap_i > 0.0 ? _h_rap_plus : _h_rap_minus)[i]->fill(fabs(rap_i), weight);

      // Inter-particle properties
      for (size_t j = i+1; j < min(size_t(3), _nparts); ++j) {
        if (particles.size() < j+1) continue;
        std::pair<size_t, size_t> ij = std::make_pair(i, j);
        double deta = particles[i].eta() - particles[j].eta();
        double dphi = deltaPhi(particles[i].momentum(), particles[j].momentum());
        double dR = deltaR(particles[i].momentum(), particles[j].momentum());
        _h_deta[ij]->fill(deta, weight);
        _h_dphi[ij]->fill(dphi, weight);
        _h_dR[ij]->fill(dR, weight);
      }
    }

    // Multiplicities
    _h_multi_exclusive->fill(particles.size(), weight);
    _h_multi_exclusive_prompt->fill(promptparticles.size(), weight);
    for (size_t i = 0; i < _nparts+2; ++i) {
      if (particles.size() >= i) _h_multi_inclusive->fill(i, weight);
      if (promptparticles.size() >= i) _h_multi_inclusive_prompt->fill(i, weight);
    }

  }


  // Finalize
  void MC_ParticleAnalysis::finalize() {
    for (size_t i = 0; i < _nparts; ++i) {
      scale(_h_pt[i], crossSection()/sumOfWeights());
      scale(_h_eta[i], crossSection()/sumOfWeights());
      scale(_h_rap[i], crossSection()/sumOfWeights());

      // Create eta/rapidity ratio plots
      divide(*_h_eta_plus[i], *_h_eta_minus[i], bookScatter2D(_pname + "_eta_pmratio_" + to_str(i+1)));
      divide(*_h_rap_plus[i], *_h_rap_minus[i], bookScatter2D(_pname + "_y_pmratio_" + to_str(i+1)));
    }

    // Scale the d{eta,phi,R} histograms
    typedef map<pair<size_t, size_t>, Histo1DPtr> HistMap;
    foreach (HistMap::value_type& it, _h_deta) scale(it.second, crossSection()/sumOfWeights());
    foreach (HistMap::value_type& it, _h_dphi) scale(it.second, crossSection()/sumOfWeights());
    foreach (HistMap::value_type& it, _h_dR) scale(it.second, crossSection()/sumOfWeights());

    // Fill inclusive multi ratios
    for (size_t i = 0; i < _h_multi_inclusive->numBins()-1; ++i) {
      _h_multi_ratio->addPoint(i+1, 0, 0.5, 0);
      if (_h_multi_inclusive->bin(i).sumW() > 0.0) {
        const double ratio = _h_multi_inclusive->bin(i+1).sumW() / _h_multi_inclusive->bin(i).sumW();
        const double relerr_i = _h_multi_inclusive->bin(i).relErr();
        const double relerr_j = _h_multi_inclusive->bin(i+1).relErr();
        const double err = ratio * (relerr_i + relerr_j);
        _h_multi_ratio->point(i).setY(ratio, err);
      }
    }
    for (size_t i = 0; i < _h_multi_inclusive_prompt->numBins()-1; ++i) {
      _h_multi_ratio_prompt->addPoint(i+1, 0, 0.5, 0);
      if (_h_multi_inclusive_prompt->bin(i).sumW() > 0.0) {
        const double ratio = _h_multi_inclusive_prompt->bin(i+1).sumW() / _h_multi_inclusive_prompt->bin(i).sumW();
        const double relerr_i = _h_multi_inclusive_prompt->bin(i).relErr();
        const double relerr_j = _h_multi_inclusive_prompt->bin(i+1).relErr();
        const double err = ratio * (relerr_i + relerr_j);
        _h_multi_ratio_prompt->point(i).setY(ratio, err);
      }
    }

    scale(_h_multi_exclusive, crossSection()/sumOfWeights());
    scale(_h_multi_exclusive_prompt, crossSection()/sumOfWeights());
    scale(_h_multi_inclusive, crossSection()/sumOfWeights());
    scale(_h_multi_inclusive_prompt, crossSection()/sumOfWeights());
  }


}
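MC_ParticleAnalysis is a base class: a concrete analysis supplies the particle collection and forwards it to _analyze(), while init() and finalize() are reused or wrapped. The sketch below shows one way a derived analysis could look under the Rivet 2.x projection API. It is a minimal illustration, not part of this file: the name MC_PHOTONS_SKETCH and the choice of projection and particle type are assumptions for the example.

// -*- C++ -*-
// Illustrative sketch only: a hypothetical derived analysis built on MC_ParticleAnalysis.
#include "Rivet/Analyses/MC_ParticleAnalysis.hh"
#include "Rivet/Projections/IdentifiedFinalState.hh"

namespace Rivet {

  class MC_PHOTONS_SKETCH : public MC_ParticleAnalysis {
  public:

    // Histogram the two hardest photons; "photon" becomes the histogram name prefix
    MC_PHOTONS_SKETCH()
      : MC_ParticleAnalysis("MC_PHOTONS_SKETCH", 2, "photon")
    {  }

    void init() {
      // Select final-state photons, then book the base-class histograms
      IdentifiedFinalState photons;
      photons.acceptId(PID::PHOTON);
      addProjection(photons, "Photons");
      MC_ParticleAnalysis::init();
    }

    void analyze(const Event& event) {
      // Hand the pT-ordered photons to the generic _analyze() implementation
      const Particles ps = applyProjection<IdentifiedFinalState>(event, "Photons").particlesByPt();
      _analyze(event, ps);
    }

    // finalize() is inherited unchanged from MC_ParticleAnalysis
  };

  // Hook into the Rivet plugin system
  DECLARE_RIVET_PLUGIN(MC_PHOTONS_SKETCH);

}

The only obligations on the derived class are to call MC_ParticleAnalysis::init() after declaring its projections and to pass a pT-ordered particle collection to _analyze(); everything else (booking, filling and cross-section scaling) is handled by the code listed above.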