Rivet is hosted by Hepforge, IPPP Durham

Rivet analyses reference

LHCB_2015_I1396331

Charm hadron differential cross-sections in $p_T$ and rapidity at $\sqrt{s} = 13$ TeV
Experiment: LHCB (LHC 13TeV)
Inspire ID: 1396331
Status: VALIDATED
Authors:
  • Dominik Muller
  • Patrick Spradlin
References:
  • JHEP 1603 (2016) 159; JHEP 1609 (2016) 013; JHEP 1705 (2017) 074
  • doi 10.1007/JHEP09(2016)013; 10.1007/JHEP05(2017)074; 10.1007/JHEP03(2016)159
  • arXiv 1510.01707 [hep-ex]
  • LHCB-PAPER-2015-041, CERN-PH-EP-2015-272
Beams: p+ p+
Beam energies: (6500.0, 6500.0) GeV
Run details:
  • Minimum bias QCD events, proton--proton interactions at $\sqrt{s} = 13$ TeV.

Measurements of differential production cross-sections with respect to transverse momentum, $d \sigma(H_c + \mathrm{c.c.}) / d p_T$, for charm hadron species $H_c \in \{ D^0, D^+, D^\ast(2010)^+, D_s^+ \}$ in proton--proton collisions at center-of-mass energy $\sqrt{s}= 13$ TeV. The differential cross-sections are measured in bins of hadron transverse momentum ($p_T$) and rapidity ($y$) with respect to the beam axis in the region $0 < p_T < 15$ GeV/$c$ and $2.0 < y < 4.5$, where $p_T$ and $y$ are measured in the proton--proton CM frame. In this analysis code, it is assumed that the event coordinate system is in the proton--proton CM frame with the $z$-axis corresponding to the proton--proton collision axis (as usual). Contributions of charm hadrons from the decays of $b$-hadrons and other particles with comparably large mean lifetimes have been removed in the measurement. In this analysis code, this is implemented by counting only charm hadrons that do not have an ancestor that contains a $b$ quark.

Source code: LHCB_2015_I1396331.cc
  1// -*- C++ -*-
  2#include "Rivet/Analysis.hh"
  3#include "Rivet/Tools/BinnedHistogram.hh"
  4#include "Rivet/Projections/UnstableParticles.hh"
  5
namespace Rivet {

  /// @brief LHCb prompt charm hadron pT and rapidity spectra at sqrt(s) = 13 TeV
  ///
  /// Differential cross-sections d(sigma)/dpT for D0, D+, Ds+ and D*(2010)+
  /// in five rapidity bins (2.0 < y < 4.5), plus charm-species production
  /// ratios built from exclusive decay modes.  Prompt production only:
  /// charm hadrons with a b-hadron ancestor are excluded in analyze().
  class LHCB_2015_I1396331 : public Analysis {
  public:

    /// Constructor
    RIVET_DEFAULT_ANALYSIS_CTOR(LHCB_2015_I1396331);


    /// @name Analysis methods
    /// @{
 18
 19    /// Book histograms and initialise projections before the run
 20    void init() {
 21
 22      /// Initialise and register projections
 23      Cut selection = (Cuts::abspid == 411 || Cuts::abspid == 421 || Cuts::abspid == 431 || Cuts::abspid == 413) \
 24                       && Cuts::pT < 15.0 && Cuts::absrapIn(2.0, 4.5);
 25      declare(UnstableParticles(selection), "UPDs");
 26
 27      /// Book histograms
 28      {Histo1DPtr tmp; _h_pdg421_Dzero_pT_y.add(2.0, 2.5, book(tmp, 1, 1, 1));}
 29      {Histo1DPtr tmp; _h_pdg421_Dzero_pT_y.add(2.5, 3.0, book(tmp, 1, 1, 2));}
 30      {Histo1DPtr tmp; _h_pdg421_Dzero_pT_y.add(3.0, 3.5, book(tmp, 1, 1, 3));}
 31      {Histo1DPtr tmp; _h_pdg421_Dzero_pT_y.add(3.5, 4.0, book(tmp, 1, 1, 4));}
 32      {Histo1DPtr tmp; _h_pdg421_Dzero_pT_y.add(4.0, 4.5, book(tmp, 1, 1, 5));}
 33
 34      {Histo1DPtr tmp; _h_pdg411_Dplus_pT_y.add(2.0, 2.5, book(tmp, 2, 1, 1));}
 35      {Histo1DPtr tmp; _h_pdg411_Dplus_pT_y.add(2.5, 3.0, book(tmp, 2, 1, 2));}
 36      {Histo1DPtr tmp; _h_pdg411_Dplus_pT_y.add(3.0, 3.5, book(tmp, 2, 1, 3));}
 37      {Histo1DPtr tmp; _h_pdg411_Dplus_pT_y.add(3.5, 4.0, book(tmp, 2, 1, 4));}
 38      {Histo1DPtr tmp; _h_pdg411_Dplus_pT_y.add(4.0, 4.5, book(tmp, 2, 1, 5));}
 39
 40      {Histo1DPtr tmp; _h_pdg431_Dsplus_pT_y.add(2.0, 2.5, book(tmp, 3, 1, 1));}
 41      {Histo1DPtr tmp; _h_pdg431_Dsplus_pT_y.add(2.5, 3.0, book(tmp, 3, 1, 2));}
 42      {Histo1DPtr tmp; _h_pdg431_Dsplus_pT_y.add(3.0, 3.5, book(tmp, 3, 1, 3));}
 43      {Histo1DPtr tmp; _h_pdg431_Dsplus_pT_y.add(3.5, 4.0, book(tmp, 3, 1, 4));}
 44      {Histo1DPtr tmp; _h_pdg431_Dsplus_pT_y.add(4.0, 4.5, book(tmp, 3, 1, 5));}
 45
 46      {Histo1DPtr tmp; _h_pdg413_Dstarplus_pT_y.add(2.0, 2.5, book(tmp, 4, 1, 1));}
 47      {Histo1DPtr tmp; _h_pdg413_Dstarplus_pT_y.add(2.5, 3.0, book(tmp, 4, 1, 2));}
 48      {Histo1DPtr tmp; _h_pdg413_Dstarplus_pT_y.add(3.0, 3.5, book(tmp, 4, 1, 3));}
 49      {Histo1DPtr tmp; _h_pdg413_Dstarplus_pT_y.add(3.5, 4.0, book(tmp, 4, 1, 4));}
 50      {Histo1DPtr tmp; _h_pdg413_Dstarplus_pT_y.add(4.0, 4.5, book(tmp, 4, 1, 5));}
 51
 52      // Temporary histos for ratios
 53      for (int i = 0; i< 5; ++i) {
 54      	{Histo1DPtr tmp; _hbr_Dzero.add(2.0+i*0.5, 2.5+i*0.5, book(tmp, "/TMP/Dzero"+to_str(i+1), refData(9, 1, 2)));}
 55      	{Histo1DPtr tmp; _hbr_Dplus.add(2.0+i*0.5, 2.5+i*0.5, book(tmp, "/TMP/Dplus"+to_str(i+1), refData(9, 1, 2)));}
 56      	{Histo1DPtr tmp; _hbr_Ds.add(2.0+i*0.5, 2.5+i*0.5, book(tmp, "/TMP/Ds"+to_str(i+1), refData(9, 1, 2)));}
 57      	{Histo1DPtr tmp; _hbr_Dstar.add(2.0+i*0.5, 2.5+i*0.5, book(tmp, "/TMP/Dstar"+to_str(i+1), refData(9, 1, 2)));}
 58      }
 59
 60    }
 61
 62
 63    /// Perform the per-event analysis
 64    void analyze(const Event& event) {
 65
 66      /// @todo Use PrimaryHadrons to avoid double counting and automatically remove the contributions from unstable?
 67      const UnstableParticles &ufs = apply<UnstableParticles> (event, "UPDs");
 68      for (const Particle& p : ufs.particles()) {
 69
 70        if (p.fromBottom()) continue;
 71
 72        const PdgId apid = p.abspid();
 73        const double y = p.absrap(); ///< Double analysis efficiency with a "two-sided LHCb"
 74        const double pT = p.pT()/GeV;
 75
 76        // select inclusive decay modes
 77        Particles daus;
 78        switch (apid) {
 79        case 411:
 80          _h_pdg411_Dplus_pT_y.fill(y, pT);
 81          // veto on decay channel [D+ -> K- pi+ pi+]cc
 82          if (p.children().size() != 3) break;
 83          if ( ((p.children(Cuts::pid == -321).size() == 1) && (p.children(Cuts::pid == 211).size() == 2)) ||
 84          		 ((p.children(Cuts::pid == 321).size() == 1) && (p.children(Cuts::pid == -211).size() == 2)) )
 85          	_hbr_Dplus.fill(y, pT); // MSG_INFO("Found [ D+ -> K- pi+ pi+ ]cc..."); };
 86          break;
 87        case 421:
 88          _h_pdg421_Dzero_pT_y.fill(y, pT);
 89          // veto on decay channel [D0 -> K- pi+]cc
 90          if (p.children().size() != 2) break;
 91          if ( ((p.children(Cuts::pid == -321).size() == 1) && (p.children(Cuts::pid == 211).size() == 1)) ||
 92          		 ((p.children(Cuts::pid == 321).size() == 1) && (p.children(Cuts::pid == -211).size() == 1)) )
 93          	_hbr_Dzero.fill(y, pT); // MSG_INFO("Found [ D0 -> K- pi+ ]cc..."); };
 94
 95          break;
 96        case 431:
 97          _h_pdg431_Dsplus_pT_y.fill(y, pT);
 98          //veto on decay channel [Ds+ -> [K+ K-]phi0 pi+]cc
 99          if (p.children().size() != 2) break;
100          daus = p.children(Cuts::pid == 333);
101          if ( (daus.size() == 1) && (p.children(Cuts::abspid == 211).size() == 1) &&
102          		 (daus.front().children(Cuts::abspid ==321).size() == 2) )
103          	_hbr_Ds.fill(y, pT); // MSG_INFO("Found [ Ds+ -> phi0(-> K+ K-) pi+ ]cc..."); };
104          break;
105        case 413:
106          _h_pdg413_Dstarplus_pT_y.fill(y, pT);
107          // veto on decay channel [D*+ -> [K- pi+]D0 pi+]cc
108          if (p.children().size() != 2) break;
109          daus = p.children(Cuts::pid == 421);
110          if ( (daus.size() == 1) && (p.children(Cuts::abspid == 211).size() == 1) &&
111          		( daus.front().children().size() == 2 ) &&
112          		( ( (daus.front().children(Cuts::pid == -321).size() == 1 ) && (daus.front().children(Cuts::pid == 211).size() == 1 )	) ||
113          		  ( (daus.front().children(Cuts::pid == 321).size() == 1 ) && (daus.front().children(Cuts::pid == -211).size() == 1 ) ) ) )
114          	_hbr_Dstar.fill(y, pT); // MSG_INFO("Found [ D*+ -> D0 (-> K- pi+)cc pi+ ]cc..."); };
115          break;
116        default:
117        	break;
118        }
119      }
120
121    }
122
123
124    /// Normalise histograms etc., after the run
125    void finalize() {
126      Histo1DPtr h;
127      Histo1DPtr hden;
128      /// Factor of 0.5 to correct for the abs(rapidity) used above
129      const double scale_factor = 0.5 * crossSection()/microbarn / sumOfWeights();
130      /// Avoid the implicit division by the bin width in the BinnedHistogram::scale method
131      /// since spectra are really single differential (cf. gitlab issue #222).
132      for (Histo1DPtr h : _h_pdg411_Dplus_pT_y.histos()) h->scaleW(scale_factor);
133      for (Histo1DPtr h : _h_pdg431_Dsplus_pT_y.histos()) h->scaleW(scale_factor);
134      for (Histo1DPtr h : _h_pdg413_Dstarplus_pT_y.histos()) h->scaleW(scale_factor);
135      for (Histo1DPtr h : _h_pdg421_Dzero_pT_y.histos()) h->scaleW(scale_factor);
136
137
138      // Do ratios
139      for (size_t i = 0; i < 5; ++i) {
140      	// book final ratio plots
141        book(hr_DplusDzero[i], 9, 1, i+1, true);
142        book(hr_DsDzero[i], 10, 1, i+1, true);
143        book(hr_DstarDzero[i], 11, 1, i+1, true);
144        book(hr_DsDplus[i], 12, 1, i+1, true);
145        book(hr_DstarDplus[i], 13, 1, i+1, true);
146        book(hr_DsDstar[i], 14, 1, i+1, true);
147        // fill ratio plots
148      	ratioScatterBins(_hbr_Dplus.histos()[i], _hbr_Dzero.histos()[i], hr_DplusDzero[i]);
149      	ratioScatterBins(_hbr_Ds.histos()[i], _hbr_Dzero.histos()[i], hr_DsDzero[i]);
150      	ratioScatterBins(_hbr_Dstar.histos()[i], _hbr_Dzero.histos()[i], hr_DstarDzero[i]);
151      	ratioScatterBins(_hbr_Ds.histos()[i], _hbr_Dplus.histos()[i], hr_DsDplus[i]);
152      	ratioScatterBins(_hbr_Dstar.histos()[i], _hbr_Dplus.histos()[i], hr_DstarDplus[i]);
153      	ratioScatterBins(_hbr_Ds.histos()[i], _hbr_Dstar.histos()[i], hr_DsDstar[i]);
154      	// scale 100x as measurement is in %
155      	hr_DplusDzero[i]->scaleY(100.);
156      	hr_DsDzero[i]->scaleY(100.);
157      	hr_DstarDzero[i]->scaleY(100.);
158      	hr_DsDplus[i]->scaleY(100.);
159      	hr_DstarDplus[i]->scaleY(100.);
160      	hr_DsDstar[i]->scaleY(100.);
161      }
162    }
163
164    /// @}
165
166
167  private:
168
169    // rebin histos to scatter and take ratio
170    void ratioScatterBins(Histo1DPtr& hn, Histo1DPtr& hd, Scatter2DPtr &s) {
171      vector<double> sedges;
172      // extract bin edges from Scatter2D
173      for (auto p=s->points().begin(); p != s->points().end(); ++p) {
174        sedges.push_back((*p).xMin());
175        // MSG_INFO("Scatter2D bin: " << (*p).xMin() << " - " << (*p).xMax());
176      }
177      sedges.push_back(s->points().back().xMax());
178      // make deep-copies as rebinning changes bins each time - any smarter alternative ?!
179      Histo1D *hnc, *hdc;
180      hnc = new YODA::Histo1D(hn->bins(), hn->totalDbn(), hn->underflow(), hn->overflow());
181      hdc = new YODA::Histo1D(hd->bins(), hd->totalDbn(), hd->underflow(), hd->overflow());
182      hnc->rebinTo(sedges);
183      hdc->rebinTo(sedges);
184      divide(*hnc, *hdc, s);
185      delete hnc; delete hdc;
186    }
187
188
    /// @name Histograms
    /// @{
    // Per-species pT spectra binned in rapidity, paired with the
    // decay-mode-tagged temporary histos (_hbr_*) from which the
    // production-ratio scatters are built in finalize()
    BinnedHistogram _h_pdg411_Dplus_pT_y, _hbr_Dplus;
    BinnedHistogram _h_pdg421_Dzero_pT_y, _hbr_Dzero;
    BinnedHistogram _h_pdg431_Dsplus_pT_y, _hbr_Ds;
    BinnedHistogram _h_pdg413_Dstarplus_pT_y, _hbr_Dstar;
    // Final production-ratio scatters, one element per rapidity bin
    Scatter2DPtr hr_DplusDzero[5];
    Scatter2DPtr hr_DsDzero[5];
    Scatter2DPtr hr_DstarDzero[5];
    Scatter2DPtr hr_DsDplus[5];
    Scatter2DPtr hr_DstarDplus[5];
    Scatter2DPtr hr_DsDstar[5];
    /// @}
202
203  };
204
205
206  RIVET_DECLARE_PLUGIN(LHCB_2015_I1396331);
207
208}