LHCB_2013_I1218996.cc
// -*- C++ -*-
#include "Rivet/Analysis.hh"
#include "Rivet/Tools/BinnedHistogram.hh"
#include "Rivet/Projections/FinalState.hh"
#include "Rivet/Projections/UnstableFinalState.hh"

namespace Rivet {


  /// LHCb prompt charm hadron pT and rapidity spectra
  class LHCB_2013_I1218996 : public Analysis {
  public:

    /// @name Constructors etc.
    //@{

    /// Constructor
    LHCB_2013_I1218996()
      : Analysis("LHCB_2013_I1218996")
    {    }

    //@}


    /// @name Analysis methods
    //@{

    /// Book histograms and initialise projections before the run
    void init() {

      /// Initialise and register projections
      addProjection(UnstableFinalState(), "UFS");
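      // NB. the UnstableFinalState projection includes decayed particles,
      // so the promptly produced charm hadrons of interest appear in it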

      /// Book histograms
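      // One pT histogram per 0.5-unit rapidity bin from y = 2.0 to 4.5; the
      // three bookHisto1D arguments are the d, x, y indices of the
      // corresponding HepData reference histogram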
      _h_pdg411_Dplus_pT_y.addHistogram(  2.0, 2.5, bookHisto1D(3, 1, 1));
      _h_pdg411_Dplus_pT_y.addHistogram(  2.5, 3.0, bookHisto1D(3, 1, 2));
      _h_pdg411_Dplus_pT_y.addHistogram(  3.0, 3.5, bookHisto1D(3, 1, 3));
      _h_pdg411_Dplus_pT_y.addHistogram(  3.5, 4.0, bookHisto1D(3, 1, 4));
      _h_pdg411_Dplus_pT_y.addHistogram(  4.0, 4.5, bookHisto1D(3, 1, 5));

      _h_pdg421_Dzero_pT_y.addHistogram(  2.0, 2.5, bookHisto1D(2, 1, 1));
      _h_pdg421_Dzero_pT_y.addHistogram(  2.5, 3.0, bookHisto1D(2, 1, 2));
      _h_pdg421_Dzero_pT_y.addHistogram(  3.0, 3.5, bookHisto1D(2, 1, 3));
      _h_pdg421_Dzero_pT_y.addHistogram(  3.5, 4.0, bookHisto1D(2, 1, 4));
      _h_pdg421_Dzero_pT_y.addHistogram(  4.0, 4.5, bookHisto1D(2, 1, 5));

      _h_pdg431_Dsplus_pT_y.addHistogram(  2.0, 2.5, bookHisto1D(5, 1, 1));
      _h_pdg431_Dsplus_pT_y.addHistogram(  2.5, 3.0, bookHisto1D(5, 1, 2));
      _h_pdg431_Dsplus_pT_y.addHistogram(  3.0, 3.5, bookHisto1D(5, 1, 3));
      _h_pdg431_Dsplus_pT_y.addHistogram(  3.5, 4.0, bookHisto1D(5, 1, 4));
      _h_pdg431_Dsplus_pT_y.addHistogram(  4.0, 4.5, bookHisto1D(5, 1, 5));

      _h_pdg413_Dstarplus_pT_y.addHistogram(  2.0, 2.5, bookHisto1D(4, 1, 1));
      _h_pdg413_Dstarplus_pT_y.addHistogram(  2.5, 3.0, bookHisto1D(4, 1, 2));
      _h_pdg413_Dstarplus_pT_y.addHistogram(  3.0, 3.5, bookHisto1D(4, 1, 3));
      _h_pdg413_Dstarplus_pT_y.addHistogram(  3.5, 4.0, bookHisto1D(4, 1, 4));
      _h_pdg413_Dstarplus_pT_y.addHistogram(  4.0, 4.5, bookHisto1D(4, 1, 5));

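      // The Lambda_c+ spectrum is booked as a single pT histogram, with no rapidity binning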
      _h_pdg4122_Lambdac_pT = bookHisto1D(1, 1, 1);
    }


    /// Perform the per-event analysis
    void analyze(const Event& event) {
      const double weight = event.weight();

      /// @todo Use PrimaryHadrons to avoid double counting and to automatically remove the contributions from unstable-particle decays?
      const UnstableFinalState& ufs = applyProjection<UnstableFinalState>(event, "UFS");
      foreach (const Particle& p, ufs.particles()) {

        // We're only interested in charm hadrons
        if (!p.isHadron() || !p.hasCharm()) continue;

        // Kinematic acceptance
        const double y = p.absrap(); ///< Use |y| to double the analysis efficiency with a "two-sided LHCb"
        const double pT = p.pT();

        // Fiducial acceptance of the measurements
        if (pT > 8.0*GeV || y < 2.0 || y > 4.5) continue;

        // The experimental selection removes non-prompt charm hadrons: ignore those from b decays
        if (p.fromBottom()) continue;

        switch (p.abspid()) {
        case 411:
          _h_pdg411_Dplus_pT_y.fill(y, pT/GeV, weight);
          break;
        case 421:
          _h_pdg421_Dzero_pT_y.fill(y, pT/GeV, weight);
          break;
        case 431:
          _h_pdg431_Dsplus_pT_y.fill(y, pT/GeV, weight);
          break;
        case 413:
          _h_pdg413_Dstarplus_pT_y.fill(y, pT/GeV, weight);
          break;
        case 4122:
          _h_pdg4122_Lambdac_pT->fill(pT/GeV, weight);
          break;
        }
      }

    }


    /// Normalise histograms etc., after the run
    void finalize() {
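      // Normalise to the measured differential cross-sections in microbarn;
      // the factor 0.5 undoes the doubled acceptance from filling with |y|
      // for a "two-sided LHCb" in analyze()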
      const double scale_factor = 0.5 * crossSection()/microbarn / sumOfWeights();
      // Use scaleW to avoid the implicit division by the bin width in the BinnedHistogram::scale method
      foreach (Histo1DPtr h, _h_pdg411_Dplus_pT_y.getHistograms()) h->scaleW(scale_factor);
      foreach (Histo1DPtr h, _h_pdg421_Dzero_pT_y.getHistograms()) h->scaleW(scale_factor);
      foreach (Histo1DPtr h, _h_pdg431_Dsplus_pT_y.getHistograms()) h->scaleW(scale_factor);
      foreach (Histo1DPtr h, _h_pdg413_Dstarplus_pT_y.getHistograms()) h->scaleW(scale_factor);
      _h_pdg4122_Lambdac_pT->scaleW(scale_factor);
    }

    //@}


  private:

    /// @name Histograms
    //@{
    BinnedHistogram<double> _h_pdg411_Dplus_pT_y;
    BinnedHistogram<double> _h_pdg421_Dzero_pT_y;
    BinnedHistogram<double> _h_pdg431_Dsplus_pT_y;
    BinnedHistogram<double> _h_pdg413_Dstarplus_pT_y;
    Histo1DPtr _h_pdg4122_Lambdac_pT;
    //@}


  };


  // The hook for the plugin system
  DECLARE_RIVET_PLUGIN(LHCB_2013_I1218996);

}
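Note: this analysis uses the Rivet 2.x API (addProjection, bookHisto1D, BinnedHistogram, foreach). Like any Rivet plugin, it can be compiled into a shared library with the rivet-buildplugin helper and then run over HepMC events with rivet --analysis=LHCB_2013_I1218996.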