Rivet is hosted by Hepforge, IPPP Durham
LHCB_2013_I1218996.cc
Go to the documentation of this file.
00001 // -*- C++ -*-
00002 #include "Rivet/Analysis.hh"
00003 #include "Rivet/Tools/BinnedHistogram.hh"
00004 #include "Rivet/Projections/FinalState.hh"
00005 #include "Rivet/Projections/UnstableFinalState.hh"
00006 
00007 namespace Rivet {
00008 
00009 
00010   /// LHCb prompt charm hadron pT and rapidity spectra
00011   class LHCB_2013_I1218996 : public Analysis {
00012   public:
00013 
00014     /// @name Constructors etc.
00015     //@{
00016 
00017     /// Constructor
00018     LHCB_2013_I1218996()
00019       : Analysis("LHCB_2013_I1218996")
00020     {    }
00021 
00022     //@}
00023 
00024 
00025     /// @name Analysis methods
00026     //@{
00027 
00028     /// Book histograms and initialise projections before the run
00029     void init() {
00030 
00031       /// Initialise and register projections
00032       addProjection(UnstableFinalState(), "UFS");
00033 
00034       /// Book histograms
00035       _h_pdg411_Dplus_pT_y.addHistogram(  2.0, 2.5, bookHisto1D(3, 1, 1));
00036       _h_pdg411_Dplus_pT_y.addHistogram(  2.5, 3.0, bookHisto1D(3, 1, 2));
00037       _h_pdg411_Dplus_pT_y.addHistogram(  3.0, 3.5, bookHisto1D(3, 1, 3));
00038       _h_pdg411_Dplus_pT_y.addHistogram(  3.5, 4.0, bookHisto1D(3, 1, 4));
00039       _h_pdg411_Dplus_pT_y.addHistogram(  4.0, 4.5, bookHisto1D(3, 1, 5));
00040 
00041       _h_pdg421_Dzero_pT_y.addHistogram(  2.0, 2.5, bookHisto1D(2, 1, 1));
00042       _h_pdg421_Dzero_pT_y.addHistogram(  2.5, 3.0, bookHisto1D(2, 1, 2));
00043       _h_pdg421_Dzero_pT_y.addHistogram(  3.0, 3.5, bookHisto1D(2, 1, 3));
00044       _h_pdg421_Dzero_pT_y.addHistogram(  3.5, 4.0, bookHisto1D(2, 1, 4));
00045       _h_pdg421_Dzero_pT_y.addHistogram(  4.0, 4.5, bookHisto1D(2, 1, 5));
00046 
00047       _h_pdg431_Dsplus_pT_y.addHistogram(  2.0, 2.5, bookHisto1D(5, 1, 1));
00048       _h_pdg431_Dsplus_pT_y.addHistogram(  2.5, 3.0, bookHisto1D(5, 1, 2));
00049       _h_pdg431_Dsplus_pT_y.addHistogram(  3.0, 3.5, bookHisto1D(5, 1, 3));
00050       _h_pdg431_Dsplus_pT_y.addHistogram(  3.5, 4.0, bookHisto1D(5, 1, 4));
00051       _h_pdg431_Dsplus_pT_y.addHistogram(  4.0, 4.5, bookHisto1D(5, 1, 5));
00052 
00053       _h_pdg413_Dstarplus_pT_y.addHistogram(  2.0, 2.5, bookHisto1D(4, 1, 1));
00054       _h_pdg413_Dstarplus_pT_y.addHistogram(  2.5, 3.0, bookHisto1D(4, 1, 2));
00055       _h_pdg413_Dstarplus_pT_y.addHistogram(  3.0, 3.5, bookHisto1D(4, 1, 3));
00056       _h_pdg413_Dstarplus_pT_y.addHistogram(  3.5, 4.0, bookHisto1D(4, 1, 4));
00057       _h_pdg413_Dstarplus_pT_y.addHistogram(  4.0, 4.5, bookHisto1D(4, 1, 5));
00058 
00059       _h_pdg4122_Lambdac_pT = bookHisto1D(1, 1, 1);
00060     }
00061 
00062 
00063     /// Perform the per-event analysis
00064     void analyze(const Event& event) {
00065       const double weight = event.weight();
00066 
00067       /// @todo Use PrimaryHadrons to avoid double counting and automatically remove the contributions from unstable?
00068       const UnstableFinalState &ufs = applyProjection<UnstableFinalState> (event, "UFS");
00069       foreach (const Particle& p, ufs.particles() ) {
00070 
00071         // We're only interested in charm hadrons
00072         if (!p.isHadron() || !p.hasCharm()) continue;
00073         // Kinematic acceptance
00074         const double y = p.absrap(); //< Double analysis efficiency with a "two-sided LHCb"
00075         const double pT = p.pT();
00076 
00077         // Fiducial acceptance of the measurements
00078         if (pT > 8.0*GeV || y < 2.0 || y > 4.5) continue;
00079 
00080         /// Experimental selection removes non-prompt charm hadrons: we ignore those from b decays
00081         if (p.fromBottom()) continue;
00082 
00083         switch (p.abspid()) {
00084         case 411:
00085           _h_pdg411_Dplus_pT_y.fill(y, pT/GeV, weight);
00086           break;
00087         case 421:
00088           _h_pdg421_Dzero_pT_y.fill(y, pT/GeV, weight);
00089           break;
00090         case 431:
00091           _h_pdg431_Dsplus_pT_y.fill(y, pT/GeV, weight);
00092           break;
00093         case 413:
00094           _h_pdg413_Dstarplus_pT_y.fill(y, pT/GeV, weight);
00095           break;
00096         case 4122:
00097           _h_pdg4122_Lambdac_pT->fill(pT/GeV, weight);
00098           break;
00099         }
00100       }
00101     }
00102 
00103 
00104     /// Normalise histograms etc., after the run
00105     void finalize() {
00106       const double scale_factor = crossSection()/microbarn / sumOfWeights();
00107       /// Avoid the implicit division by the bin width in the BinnedHistogram::scale method.
00108       foreach (Histo1DPtr h, _h_pdg411_Dplus_pT_y.getHistograms()) h->scaleW(scale_factor);
00109       foreach (Histo1DPtr h, _h_pdg421_Dzero_pT_y.getHistograms()) h->scaleW(scale_factor);
00110       foreach (Histo1DPtr h, _h_pdg431_Dsplus_pT_y.getHistograms()) h->scaleW(scale_factor);
00111       foreach (Histo1DPtr h, _h_pdg413_Dstarplus_pT_y.getHistograms()) h->scaleW(scale_factor);
00112       _h_pdg4122_Lambdac_pT->scaleW(scale_factor);
00113     }
00114 
00115     //@}
00116 
00117 
00118   private:
00119 
00120     /// @name Histograms
00121     //@{
00122     BinnedHistogram<double> _h_pdg411_Dplus_pT_y;
00123     BinnedHistogram<double> _h_pdg421_Dzero_pT_y;
00124     BinnedHistogram<double> _h_pdg431_Dsplus_pT_y;
00125     BinnedHistogram<double> _h_pdg413_Dstarplus_pT_y;
00126     Histo1DPtr _h_pdg4122_Lambdac_pT;
00127     //@}
00128 
00129 
00130   };
00131 
00132 
00133   // The hook for the plugin system
00134   DECLARE_RIVET_PLUGIN(LHCB_2013_I1218996);
00135 
00136 }