Rivet analyses reference

LHCB_2016_I1490663

Charm hadron differential cross-sections in $p_\perp$ and rapidity at $\sqrt{s} = 5$ TeV

Experiment: LHCB (LHC 5 TeV)
Inspire ID: 1490663
Status: VALIDATED
Authors:
Beam energies: (2510.0, 2510.0) GeV
Run details:
Measurements of differential production cross-sections with respect to transverse momentum, $d \sigma(H_c + \mathrm{c.c.}) / d p_T$, for charm hadron species $H_c \in \{ D^0, D^+, D^\ast(2010)^+, D_s^+ \}$ in proton--proton collisions at center-of-mass energy $\sqrt{s} = 5$ TeV. The differential cross-sections are measured in bins of hadron transverse momentum ($p_T$) and rapidity ($y$) with respect to the beam axis in the region $0 < p_T < 10$ GeV/$c$ and $2.0 < y < 4.5$, where $p_T$ and $y$ are measured in the proton--proton CM frame. The analysis code assumes that the event coordinate system is in the proton--proton CM frame, with the $z$-axis along the proton--proton collision axis (as usual). Contributions of charm hadrons from the decays of $b$-hadrons and other particles with comparably large mean lifetimes have been removed in the measurement; the analysis code implements this by counting only charm hadrons that have no ancestor containing a $b$ quark.
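For orientation, the kinematic and prompt-charm selection described above can be distilled into a few lines. The following is a minimal sketch against the standard Rivet Particle API; the helper name acceptPromptCharm is illustrative only and not part of the analysis:

// Minimal sketch of the per-particle selection described above.
// acceptPromptCharm is a hypothetical helper, not part of the analysis.
#include "Rivet/Particle.hh"

bool acceptPromptCharm(const Rivet::Particle& p) {
  const Rivet::PdgId apid = p.abspid();
  // Measured species: D+ (411), D0 (421), Ds+ (431), D*(2010)+ (413), plus c.c.
  if (apid != 411 && apid != 421 && apid != 431 && apid != 413) return false;
  // Prompt charm only: reject hadrons with a b-hadron ancestor
  if (p.fromBottom()) return false;
  // Fiducial region: 0 < pT < 10 GeV and 2.0 < |y| < 4.5
  const double pT = p.pT()/Rivet::GeV;
  const double absy = p.absrap();
  return pT < 10.0 && absy > 2.0 && absy < 4.5;
}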
Source code: LHCB_2016_I1490663.cc

// -*- C++ -*-
#include "Rivet/Analysis.hh"
#include "Rivet/Projections/UnstableParticles.hh"

namespace Rivet {


  /// LHCb prompt charm hadron pT and rapidity spectra
  class LHCB_2016_I1490663 : public Analysis {
  public:

    /// Constructor
    RIVET_DEFAULT_ANALYSIS_CTOR(LHCB_2016_I1490663);


    /// @name Analysis methods
    /// @{

    /// Book histograms and initialise projections before the run
    void init() {

      // Initialise and register projections
      declare(UnstableParticles(), "UFS");

      // Book the pT spectra as histogram groups binned in rapidity: each
      // group bin holds the pT histogram of one y slice, booked from the
      // corresponding reference-data table
      book(_h_pdg411_Dplus_pT_y, {2., 2.5, 3., 3.5, 4., 4.5});
      book(_h_pdg421_Dzero_pT_y, {2., 2.5, 3., 3.5, 4., 4.5});
      book(_h_pdg431_Dsplus_pT_y, {2., 2.5, 3., 3.5, 4., 4.5});
      book(_h_pdg413_Dstarplus_pT_y, {2., 2.5, 3., 3.5, 4., 4.5});
      for (size_t i = 1; i < _h_pdg411_Dplus_pT_y->numBins()+1; ++i) {
        size_t y = _h_pdg411_Dplus_pT_y->bin(i).index();
        book(_h_pdg411_Dplus_pT_y->bin(i), 1, 1, y);
        book(_h_pdg421_Dzero_pT_y->bin(i), 2, 1, y);
        book(_h_pdg431_Dsplus_pT_y->bin(i), 3, 1, y);
        book(_h_pdg413_Dstarplus_pT_y->bin(i), 4, 1, y);
      }

      // Decay-mode-restricted counterparts, booked as temporaries and used
      // only to form the production-ratio estimates in finalize()
      book(_hbr_Dzero, {2., 2.5, 3., 3.5, 4., 4.5});
      book(_hbr_Dplus, {2., 2.5, 3., 3.5, 4., 4.5});
      book(_hbr_Ds, {2., 2.5, 3., 3.5, 4., 4.5});
      book(_hbr_Dstar, {2., 2.5, 3., 3.5, 4., 4.5});
      for (size_t i = 1; i < _hbr_Dzero->numBins()+1; ++i) {
        book(_hbr_Dzero->bin(i), "TMP/Dzero_b"+to_str(i), refData(9, 1, 2));
        book(_hbr_Dplus->bin(i), "TMP/Dplus_b"+to_str(i), refData(9, 1, 2));
        book(_hbr_Ds->bin(i), "TMP/Ds_b"+to_str(i), refData(9, 1, 2));
        book(_hbr_Dstar->bin(i), "TMP/Dstar_b"+to_str(i), refData(9, 1, 2));
      }

    }

    /// Perform the per-event analysis
    void analyze(const Event& event) {

      /// @todo Use PrimaryHadrons to avoid double counting and to remove unstable contributions automatically?
      const UnstableParticles& ufs = apply<UnstableParticles>(event, "UFS");
      for (const Particle& p : ufs.particles()) {

        // We're only interested in charm hadrons
        //if (!p.isHadron() || !p.hasCharm()) continue;

        PdgId apid = p.abspid();

        // Avoid Cuts::abspid here to spare an extra iteration over the particles
        if ((apid != 411) && (apid != 421) && (apid != 431) && (apid != 413)) continue;

        // The experimental selection removes non-prompt charm hadrons, so ignore those from b decays
        if (p.fromBottom()) continue;

        // Kinematic acceptance: use |y| to double the analysis efficiency with a "two-sided LHCb"
        const double y = p.absrap();
        const double pT = p.pT()/GeV;

        // Fiducial acceptance of the measurement
        if ((pT > 10.0) || (y < 2.0) || (y > 4.5)) continue;

        Particles daus;

        switch (apid) {
        case 411:
          _h_pdg411_Dplus_pT_y->fill(y, pT);
          // Select the decay channel [D+ -> K- pi+ pi+]cc for the ratio histograms
          if (p.children().size() != 3) break;
          if ( ((p.children(Cuts::pid == -321).size() == 1) && (p.children(Cuts::pid == 211).size() == 2)) ||
               ((p.children(Cuts::pid == 321).size() == 1) && (p.children(Cuts::pid == -211).size() == 2)) )
            _hbr_Dplus->fill(y, pT);
          break;
        case 421:
          _h_pdg421_Dzero_pT_y->fill(y, pT);
          // Select the decay channel [D0 -> K- pi+]cc
          if (p.children().size() != 2) break;
          if ( ((p.children(Cuts::pid == -321).size() == 1) && (p.children(Cuts::pid == 211).size() == 1)) ||
               ((p.children(Cuts::pid == 321).size() == 1) && (p.children(Cuts::pid == -211).size() == 1)) )
            _hbr_Dzero->fill(y, pT);
          break;
        case 431:
          _h_pdg431_Dsplus_pT_y->fill(y, pT);
          // Select the decay channel [Ds+ -> [K+ K-]phi0 pi+]cc
          if (p.children().size() != 2) break;
          daus = p.children(Cuts::pid == 333);
          if ( (daus.size() == 1) && (p.children(Cuts::abspid == 211).size() == 1) &&
               (daus.front().children(Cuts::abspid == 321).size() == 2) )
            _hbr_Ds->fill(y, pT);
          break;
        case 413:
          _h_pdg413_Dstarplus_pT_y->fill(y, pT);
          // Select the decay channel [D*+ -> [K- pi+]D0 pi+]cc
          if (p.children().size() != 2) break;
          daus = p.children(Cuts::abspid == 421);
          if ( (daus.size() == 1) && (p.children(Cuts::abspid == 211).size() == 1) &&
               ( daus.front().children().size() == 2 ) &&
               ( ( (daus.front().children(Cuts::pid == -321).size() == 1) && (daus.front().children(Cuts::pid == 211).size() == 1) ) ||
                 ( (daus.front().children(Cuts::pid == 321).size() == 1) && (daus.front().children(Cuts::pid == -211).size() == 1) ) ) )
            _hbr_Dstar->fill(y, pT);
          break;
        default:
          break;
        }
      }

    }


    /// Normalise histograms etc., after the run
    void finalize() {

      // Normalise to the differential cross-section in microbarn; the factor
      // of 0.5 corrects for filling with |y| above, which folds the +y and -y
      // hemispheres together
      const double scale_factor = 0.5 * crossSection()/microbarn / sumOfWeights();

      scale(_h_pdg411_Dplus_pT_y, scale_factor);
      scale(_h_pdg421_Dzero_pT_y, scale_factor);
      scale(_h_pdg431_Dsplus_pT_y, scale_factor);
      scale(_h_pdg413_Dstarplus_pT_y, scale_factor);

      // Build the pairwise production-ratio estimates in each rapidity bin
      for (int i = 0; i < 5; ++i) {
        book(hr_DplusDzero[i], 9, 1, i+1);
        book(hr_DsDzero[i], 10, 1, i+1);
        book(hr_DstarDzero[i], 11, 1, i+1);
        book(hr_DsDplus[i], 12, 1, i+1);
        book(hr_DstarDplus[i], 13, 1, i+1);
        book(hr_DsDstar[i], 14, 1, i+1);
        divide(_hbr_Dplus->bin(i+1), _hbr_Dzero->bin(i+1), hr_DplusDzero[i]);
        divide(_hbr_Ds->bin(i+1), _hbr_Dzero->bin(i+1), hr_DsDzero[i]);
        divide(_hbr_Dstar->bin(i+1), _hbr_Dzero->bin(i+1), hr_DstarDzero[i]);
        divide(_hbr_Ds->bin(i+1), _hbr_Dplus->bin(i+1), hr_DsDplus[i]);
        divide(_hbr_Dstar->bin(i+1), _hbr_Dplus->bin(i+1), hr_DstarDplus[i]);
        divide(_hbr_Ds->bin(i+1), _hbr_Dstar->bin(i+1), hr_DsDstar[i]);
        // Scale by 100 since the measured ratios are quoted in percent
        hr_DplusDzero[i]->scale(100.);
        hr_DsDzero[i]->scale(100.);
        hr_DstarDzero[i]->scale(100.);
        hr_DsDplus[i]->scale(100.);
        hr_DstarDplus[i]->scale(100.);
        hr_DsDstar[i]->scale(100.);
      }

    }

    /// @}


  private:

    /// @name Histograms
    /// @{

    Histo1DGroupPtr _h_pdg411_Dplus_pT_y, _hbr_Dplus;
    Histo1DGroupPtr _h_pdg421_Dzero_pT_y, _hbr_Dzero;
    Histo1DGroupPtr _h_pdg431_Dsplus_pT_y, _hbr_Ds;
    Histo1DGroupPtr _h_pdg413_Dstarplus_pT_y, _hbr_Dstar;
    Estimate1DPtr hr_DplusDzero[5];
    Estimate1DPtr hr_DsDzero[5];
    Estimate1DPtr hr_DstarDzero[5];
    Estimate1DPtr hr_DsDplus[5];
    Estimate1DPtr hr_DstarDplus[5];
    Estimate1DPtr hr_DsDstar[5];

    /// @}

  };


  RIVET_DECLARE_PLUGIN(LHCB_2016_I1490663);

}