CDF_2002_S4796047.cc
Go to the documentation of this file.
// -*- C++ -*-
#include "Rivet/Analysis.hh"
#include "Rivet/Projections/Beam.hh"
#include "Rivet/Projections/ChargedFinalState.hh"
#include "Rivet/Projections/TriggerCDFRun0Run1.hh"

namespace Rivet {


  /// @brief CDF Run I charged multiplicity measurement
  /// @author Hendrik Hoeth
  ///
  /// This analysis measures the charged multiplicity distribution
  /// in minimum bias events at two different center-of-mass energies:
  /// \f$ \sqrt{s} = \f$ 630 and 1800 GeV.
  ///
  /// Particles with c*tau > 10 mm are considered stable, i.e. they
  /// are reconstructed and their decay products removed. Selection
  /// cuts are |eta|<1 and pT>0.4 GeV.
  ///
  /// @par Run conditions
  ///
  /// @arg Two different beam energies: \f$ \sqrt{s} = \f$ 630 & 1800 GeV
  /// @arg Run with generic QCD events.
  /// @arg Set particles with c*tau > 10 mm stable
  class CDF_2002_S4796047 : public Analysis {
  public:

    /// Constructor: zero the trigger-passing weight sum before any
    /// events have been processed.
    CDF_2002_S4796047()
      : Analysis("CDF_2002_S4796047")
    {
      _sumWTrig = 0;
    }


    /// @name Analysis methods
    //@{

    /// Book projections and histograms
    void init() {
      // Emulation of the CDF Run 0/1 minimum-bias trigger decision
      addProjection(TriggerCDFRun0Run1(), "Trigger");
      // Charged tracks within |eta| < 1 and pT > 0.4 GeV (the paper's cuts)
      const ChargedFinalState cfs(-1.0, 1.0, 0.4*GeV);
      addProjection(cfs, "FS");

      // Histos: HepData tables (1,1,1)/(3,1,1) hold the 630 GeV data,
      // (2,1,1)/(4,1,1) the 1800 GeV data. Nothing is booked if the run
      // energy matches neither, so only those two energies are supported.
      if (fuzzyEquals(sqrtS()/GeV, 630)) {
        _hist_multiplicity = bookHisto1D(1, 1, 1);
        _hist_pt_vs_multiplicity = bookProfile1D(3, 1, 1);
      } else if (fuzzyEquals(sqrtS()/GeV, 1800)) {
        _hist_multiplicity = bookHisto1D(2, 1, 1);
        _hist_pt_vs_multiplicity = bookProfile1D(4, 1, 1);
      }
    }


    /// Do the analysis: for each event passing the min-bias trigger, fill
    /// the charged multiplicity distribution and the <pT> vs. multiplicity
    /// profile.
    void analyze(const Event& evt) {
      // Trigger: veto events that fail the emulated min-bias decision
      const bool trigger = applyProjection<TriggerCDFRun0Run1>(evt, "Trigger").minBiasDecision();
      if (!trigger) vetoEvent;
      const double weight = evt.weight();
      // NOTE(review): _sumWTrig is accumulated here but never read in
      // finalize() — the normalisations there are fixed constants. Kept
      // as-is; presumably retained for debugging or historical reasons.
      _sumWTrig += weight;

      // Get beam energy and tracks
      const ChargedFinalState& fs = applyProjection<ChargedFinalState>(evt, "FS");
      const size_t numParticles = fs.particles().size();

      // Fill histos of charged multiplicity distributions
      _hist_multiplicity->fill(numParticles, weight);

      // Fill histos for <pT> vs. charged multiplicity: one profile entry
      // per track, all entered at this event's multiplicity
      foreach (const Particle& p, fs.particles()) {
        const double pT = p.pT();
        _hist_pt_vs_multiplicity->fill(numParticles, pT/GeV, weight);
      }

    }


    void finalize() {
      // This normalisation is NOT a cross-section.
      // In the paper the x-axes (!) of the histograms are
      // scaled such that they can put both energies in the
      // same plot. Of course this affects the area, too.
      // Since we want to plot the actual multiplicity, we
      // scale the x-axes back and have to adjust the areas
      // accordingly. The scale factors are given in the
      // legend of the plot in the paper. Have a look at
      // figure 1 and everything immediately becomes clear.
      // DON'T TRY TO REPAIR THIS, YOU WILL BREAK IT.
      if (fuzzyEquals(sqrtS()/GeV, 630)) {
        normalize(_hist_multiplicity, 3.21167); // fixed norm OK
      } else if (fuzzyEquals(sqrtS()/GeV, 1800)) {
        normalize(_hist_multiplicity, 4.19121); // fixed norm OK
      }
    }

    //@}


  private:

    /// @name Counter
    //@{
    // Sum of event weights passing the trigger (accumulated in analyze();
    // not used by finalize() — see note there)
    double _sumWTrig;
    //@}

    /// @name Histos
    //@{
    // Charged multiplicity distribution (table depends on run energy)
    Histo1DPtr _hist_multiplicity;
    // Profile of mean track pT vs. charged multiplicity
    Profile1DPtr _hist_pt_vs_multiplicity;
    //@}

  };



  // The hook for the plugin system
  DECLARE_RIVET_PLUGIN(CDF_2002_S4796047);

}