Rivet is hosted by Hepforge, IPPP Durham

Rivet analyses reference

LHCB_2016_I1490663

Charm hadron differential cross-sections in $p_\perp$ and rapidity at $\sqrt{s} = 5$ TeV
Experiment: LHCB (LHC 5TeV)
Inspire ID: 1490663
Status: VALIDATED
Authors:
  • Dominik Muller
  • Patrick Spradlin
References:
  • JHEP 1706 (2017) 147
  • DOI: 10.1007/JHEP06(2017)147
  • arXiv 1610.02230 [hep-ex]
  • CERN-EP-2016-244, LHCB-PAPER-2016-042
Beams: p+ p+
Beam energies: (2500.0, 2500.0) GeV
Run details:
  • Minimum bias QCD events, proton--proton interactions at $\sqrt{s} = 5$ TeV.

Measurements of differential production cross-sections with respect to transverse momentum, $d \sigma(H_c + \mathrm{c.c.}) / d p_T$, for charm hadron species $H_c \in \{ D^0, D^+, D^\ast(2010)^+, D_s^+ \}$ in proton--proton collisions at center-of-mass energy $\sqrt{s}= 5$ TeV. The differential cross-sections are measured in bins of hadron transverse momentum ($p_T$) and rapidity ($y$) with respect to the beam axis in the region $0 < p_T < 10$ GeV/$c$ and $2.0 < y < 4.5$, where $p_T$ and $y$ are measured in the proton--proton CM frame. In this analysis code, it is assumed that the event coordinate system is in the proton--proton CM frame with the $z$-axis corresponding to the proton--proton collision axis (as usual). Contributions of charm hadrons from the decays of $b$-hadrons and other particles with comparably large mean lifetimes have been removed in the measurement. In this analysis code, this is implemented by counting only charm hadrons that do not have an ancestor that contains a $b$ quark.

Source code: LHCB_2016_I1490663.cc
  1// -*- C++ -*-
  2#include "Rivet/Analysis.hh"
  3#include "Rivet/Tools/BinnedHistogram.hh"
  4#include "Rivet/Projections/UnstableParticles.hh"
  5
  6namespace Rivet {
  7
  8
  9  /// LHCb prompt charm hadron pT and rapidity spectra
 10  class LHCB_2016_I1490663 : public Analysis {
 11  public:
 12
 13    /// Constructor
 14    RIVET_DEFAULT_ANALYSIS_CTOR(LHCB_2016_I1490663);
 15
 16
 17    /// @name Analysis methods
 18    /// @{
 19
 20    /// Book histograms and initialise projections before the run
 21    void init() {
 22
 23      /// Initialise and register projections
 24      declare(UnstableParticles(), "UFS");
 25
 26      /// Book histograms
 27      /// @todo Make this interface nicer!
 28      {Histo1DPtr tmp; _h_pdg411_Dplus_pT_y.add(2.0, 2.5, book(tmp, 1, 1, 1));}
 29      {Histo1DPtr tmp; _h_pdg411_Dplus_pT_y.add(2.5, 3.0, book(tmp, 1, 1, 2));}
 30      {Histo1DPtr tmp; _h_pdg411_Dplus_pT_y.add(3.0, 3.5, book(tmp, 1, 1, 3));}
 31      {Histo1DPtr tmp; _h_pdg411_Dplus_pT_y.add(3.5, 4.0, book(tmp, 1, 1, 4));}
 32      {Histo1DPtr tmp; _h_pdg411_Dplus_pT_y.add(4.0, 4.5, book(tmp, 1, 1, 5));}
 33
 34      {Histo1DPtr tmp; _h_pdg421_Dzero_pT_y.add(2.0, 2.5, book(tmp, 2, 1, 1));}
 35      {Histo1DPtr tmp; _h_pdg421_Dzero_pT_y.add(2.5, 3.0, book(tmp, 2, 1, 2));}
 36      {Histo1DPtr tmp; _h_pdg421_Dzero_pT_y.add(3.0, 3.5, book(tmp, 2, 1, 3));}
 37      {Histo1DPtr tmp; _h_pdg421_Dzero_pT_y.add(3.5, 4.0, book(tmp, 2, 1, 4));}
 38      {Histo1DPtr tmp; _h_pdg421_Dzero_pT_y.add(4.0, 4.5, book(tmp, 2, 1, 5));}
 39
 40      {Histo1DPtr tmp; _h_pdg431_Dsplus_pT_y.add(2.0, 2.5, book(tmp, 3, 1, 1));}
 41      {Histo1DPtr tmp; _h_pdg431_Dsplus_pT_y.add(2.5, 3.0, book(tmp, 3, 1, 2));}
 42      {Histo1DPtr tmp; _h_pdg431_Dsplus_pT_y.add(3.0, 3.5, book(tmp, 3, 1, 3));}
 43      {Histo1DPtr tmp; _h_pdg431_Dsplus_pT_y.add(3.5, 4.0, book(tmp, 3, 1, 4));}
 44      {Histo1DPtr tmp; _h_pdg431_Dsplus_pT_y.add(4.0, 4.5, book(tmp, 3, 1, 5));}
 45
 46      {Histo1DPtr tmp; _h_pdg413_Dstarplus_pT_y.add(2.0, 2.5, book(tmp, 4, 1, 1));}
 47      {Histo1DPtr tmp; _h_pdg413_Dstarplus_pT_y.add(2.5, 3.0, book(tmp, 4, 1, 2));}
 48      {Histo1DPtr tmp; _h_pdg413_Dstarplus_pT_y.add(3.0, 3.5, book(tmp, 4, 1, 3));}
 49      {Histo1DPtr tmp; _h_pdg413_Dstarplus_pT_y.add(3.5, 4.0, book(tmp, 4, 1, 4));}
 50      {Histo1DPtr tmp; _h_pdg413_Dstarplus_pT_y.add(4.0, 4.5, book(tmp, 4, 1, 5));}
 51
 52      for (int i = 0; i< 5; ++i) {
 53      	{Histo1DPtr tmp; _hbr_Dzero.add(2.0+i*0.5, 2.5+i*0.5, book(tmp, "TMP/Dzero_b"+to_str(i+1), refData(9, 1, 2)));}
 54      	{Histo1DPtr tmp; _hbr_Dplus.add(2.0+i*0.5, 2.5+i*0.5, book(tmp, "TMP/Dplus_b"+to_str(i+1), refData(9, 1, 2)));}
 55      	{Histo1DPtr tmp; _hbr_Ds.add(2.0+i*0.5, 2.5+i*0.5, book(tmp, "TMP/Ds_b"+to_str(i+1), refData(9, 1, 2)));}
 56      	{Histo1DPtr tmp; _hbr_Dstar.add(2.0+i*0.5, 2.5+i*0.5, book(tmp, "TMP/Dstar_b"+to_str(i+1), refData(9, 1, 2)));}
 57      }
 58
 59    }
 60
 61
 62    /// Perform the per-event analysis
 63    void analyze(const Event& event) {
 64
 65      /// @todo Use PrimaryHadrons to avoid double counting and automatically remove the contributions from unstable?
 66      const UnstableParticles &ufs = apply<UnstableParticles> (event, "UFS");
 67      for (const Particle& p : ufs.particles() ) {
 68
 69        // We're only interested in charm hadrons
 70        //if (!p.isHadron() || !p.hasCharm()) continue;
 71
 72        PdgId apid = p.abspid();
 73
 74        // do not use Cuts::abspid to avoid supplemental iteration on particles?
 75        if ((apid != 411) && (apid != 421) && (apid != 431) && (apid != 413)) continue;
 76
 77        // Experimental selection removes non-prompt charm hadrons: we ignore those from b decays
 78        if (p.fromBottom()) continue;
 79
 80        // Kinematic acceptance
 81        const double y = p.absrap(); ///< Double analysis efficiency with a "two-sided LHCb"
 82        const double pT = p.pT()/GeV;
 83
 84        // Fiducial acceptance of the measurements
 85        if ((pT > 10.0) || (y < 2.0) || (y > 4.5)) continue;
 86
 87        Particles daus;
 88
 89        switch (apid) {
 90        case 411:
 91          _h_pdg411_Dplus_pT_y.fill(y, pT);
 92          // veto on decay channel [D+ -> K- pi+ pi+]cc
 93          if (p.children().size() != 3) break;
 94          if ( ((p.children(Cuts::pid == -321).size() == 1) && (p.children(Cuts::pid == 211).size() == 2)) ||
 95          		 ((p.children(Cuts::pid == 321).size() == 1) && (p.children(Cuts::pid == -211).size() == 2)) )
 96          	_hbr_Dplus.fill(y, pT); // MSG_INFO("Found [ D+ -> K- pi+ pi+ ]cc..."); };
 97          break;
 98        case 421:
 99          _h_pdg421_Dzero_pT_y.fill(y, pT);
100          // veto on decay channel [D0 -> K- pi+]cc
101          if (p.children().size() != 2) break;
102          if ( ((p.children(Cuts::pid == -321).size() == 1) && (p.children(Cuts::pid == 211).size() == 1)) ||
103          		 ((p.children(Cuts::pid == 321).size() == 1) && (p.children(Cuts::pid == -211).size() == 1)) )
104          	_hbr_Dzero.fill(y, pT); // MSG_INFO("Found [ D0 -> K- pi+ ]cc..."); };
105          break;
106        case 431:
107          _h_pdg431_Dsplus_pT_y.fill(y, pT);
108          //veto on decay channel [Ds+ -> [K+ K-]phi0 pi+]cc
109          if (p.children().size() != 2) break;
110          daus = p.children(Cuts::pid == 333);
111          if ( (daus.size() == 1) && (p.children(Cuts::abspid == 211).size() == 1) &&
112          		 (daus.front().children(Cuts::abspid ==321).size() == 2) )
113          	_hbr_Ds.fill(y, pT); // MSG_INFO("Found [ Ds+ -> phi0(-> K+ K-) pi+ ]cc..."); };
114          break;
115        case 413:
116          _h_pdg413_Dstarplus_pT_y.fill(y, pT);
117          // veto on decay channel [D*+ -> [K- pi+]D0 pi+]cc
118          if (p.children().size() != 2) break;
119          daus = p.children(Cuts::pid == 421);
120          if ( (daus.size() == 1) && (p.children(Cuts::abspid == 211).size() == 1) &&
121          		( daus.front().children().size() == 2 ) &&
122          		( ( (daus.front().children(Cuts::pid == -321).size() == 1 ) && (daus.front().children(Cuts::pid == 211).size() == 1 )	) ||
123          		  ( (daus.front().children(Cuts::pid == 321).size() == 1 ) && (daus.front().children(Cuts::pid == -211).size() == 1 ) ) ) )
124          	_hbr_Dstar.fill(y, pT); // MSG_INFO("Found [ D*+ -> D0 (-> K- pi+)cc pi+ ]cc..."); };
125          break;
126        default:
127        	break;
128        }
129      }
130
131    }
132
133
134    /// Normalise histograms etc., after the run
135    void finalize() {
136
137      /// Factor of 0.5 to correct for the abs(rapidity) used above
138      const double scale_factor = 0.5 * crossSection()/microbarn / sumOfWeights();
139
140      /// Avoid the implicit division by the bin width in the BinnedHistogram::scale method.
141      /// @todo Another thing to make nicer / more flexible in BinnedHisto
142      for (Histo1DPtr h : _h_pdg411_Dplus_pT_y.histos()) h->scaleW(scale_factor);
143      for (Histo1DPtr h : _h_pdg421_Dzero_pT_y.histos()) h->scaleW(scale_factor);
144      for (Histo1DPtr h : _h_pdg431_Dsplus_pT_y.histos()) h->scaleW(scale_factor);
145      for (Histo1DPtr h : _h_pdg413_Dstarplus_pT_y.histos()) h->scaleW(scale_factor);
146
147      // Do ratios
148      for (int i = 0; i < 5; ++i) {
149      	book(hr_DplusDzero[i], 9, 1, i+1, true);
150      	book(hr_DsDzero[i], 10, 1, i+1, true);
151      	book(hr_DstarDzero[i], 11, 1, i+1, true);
152      	book(hr_DsDplus[i], 12, 1, i+1, true);
153      	book(hr_DstarDplus[i], 13, 1, i+1, true);
154      	book(hr_DsDstar[i], 14, 1, i+1, true);
155      	ratioScatterBins(_hbr_Dplus.histos()[i], _hbr_Dzero.histos()[i], hr_DplusDzero[i]);
156      	ratioScatterBins(_hbr_Ds.histos()[i], _hbr_Dzero.histos()[i], hr_DsDzero[i]);
157      	ratioScatterBins(_hbr_Dstar.histos()[i], _hbr_Dzero.histos()[i], hr_DstarDzero[i]);
158      	ratioScatterBins(_hbr_Ds.histos()[i], _hbr_Dplus.histos()[i], hr_DsDplus[i]);
159      	ratioScatterBins(_hbr_Dstar.histos()[i], _hbr_Dplus.histos()[i], hr_DstarDplus[i]);
160      	ratioScatterBins(_hbr_Ds.histos()[i], _hbr_Dstar.histos()[i], hr_DsDstar[i]);
161      	// scale 100x as measurement is in %
162      	hr_DplusDzero[i]->scaleY(100.);
163      	hr_DsDzero[i]->scaleY(100.);
164      	hr_DstarDzero[i]->scaleY(100.);
165      	hr_DsDplus[i]->scaleY(100.);
166      	hr_DstarDplus[i]->scaleY(100.);
167      	hr_DsDstar[i]->scaleY(100.);
168      }
169
170    }
171
172    /// @}
173
174
175  private:
176
177    void ratioScatterBins(Histo1DPtr& hn, Histo1DPtr& hd, Scatter2DPtr &s) {
178    	vector<double> sedges;
179    	// extract bin edges from Scatter2D
180    	for (auto p=s->points().begin(); p != s->points().end(); ++p) {
181    		sedges.push_back((*p).xMin());
182    		// MSG_INFO("Scatter2D bin: " << (*p).xMin() << " - " << (*p).xMax());
183    	};
184    	sedges.push_back(s->points().back().xMax());
185    	// make deep-copies as rebinning changes bins each time - any smarter alternative ?!
186    	Histo1D *hnc, *hdc;
187    	hnc = new YODA::Histo1D(hn->bins(), hn->totalDbn(), hn->underflow(), hn->overflow());
188    	hdc = new YODA::Histo1D(hd->bins(), hd->totalDbn(), hd->underflow(), hd->overflow());
189    	hnc->rebinTo(sedges);
190    	hdc->rebinTo(sedges);
191    	divide(*hnc, *hdc, s);
192    	delete hnc; delete hdc;
193    }
194
195
196    /// @name Histograms
197    /// @{
198    BinnedHistogram _h_pdg411_Dplus_pT_y, _hbr_Dplus;
199    BinnedHistogram _h_pdg421_Dzero_pT_y, _hbr_Dzero;
200    BinnedHistogram _h_pdg431_Dsplus_pT_y, _hbr_Ds;
201    BinnedHistogram _h_pdg413_Dstarplus_pT_y, _hbr_Dstar;
202    Scatter2DPtr hr_DplusDzero[5];
203    Scatter2DPtr hr_DsDzero[5];
204    Scatter2DPtr hr_DstarDzero[5];
205    Scatter2DPtr hr_DsDplus[5];
206    Scatter2DPtr hr_DstarDplus[5];
207    Scatter2DPtr hr_DsDstar[5];
208    /// @}
209
210  };
211
212
213  RIVET_DECLARE_PLUGIN(LHCB_2016_I1490663);
214
215}