Rivet analyses reference

LHCB_2015_I1396331

Charm hadron differential cross-sections in $p_T$ and rapidity at $\sqrt{s} = 13$ TeV

Experiment: LHCB (LHC 13TeV)
Inspire ID: 1396331
Status: VALIDATED
Authors:
Beam energies: (6500.0, 6500.0) GeV
Run details:
Measurements of differential production cross-sections with respect to transverse momentum, $d\sigma(H_c + \mathrm{c.c.}) / dp_T$, for charm hadron species $H_c \in \{ D^0, D^+, D^\ast(2010)^+, D_s^+ \}$ in proton--proton collisions at center-of-mass energy $\sqrt{s} = 13$ TeV. The differential cross-sections are measured in bins of hadron transverse momentum ($p_T$) and rapidity ($y$) with respect to the beam axis in the region $0 < p_T < 15$ GeV/$c$ and $2.0 < y < 4.5$, where $p_T$ and $y$ are defined in the proton--proton CM frame. The analysis code assumes the usual convention that the event coordinate system is the proton--proton CM frame, with the $z$-axis along the collision axis.

Contributions of charm hadrons from the decays of $b$-hadrons and other particles with comparably large mean lifetimes have been removed in the measurement. The analysis code implements this by counting only charm hadrons that have no ancestor containing a $b$ quark.
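That prompt requirement maps directly onto an ancestor scan. Below is a minimal illustrative sketch, not the analysis's own code: the helper name isPromptCharm is ours, while Particle::ancestors() and PID::hasBottom() are standard Rivet API. The analysis itself uses the equivalent packaged check Particle::fromBottom(), as seen in the source further down.

// Illustration only (assumed helper, not part of the analysis): reject
// charm hadrons with any b-containing ancestor, as the description above
// specifies; Particle::fromBottom() packages the same test.
#include "Rivet/Particle.hh"
#include "Rivet/Tools/ParticleIdUtils.hh"

bool isPromptCharm(const Rivet::Particle& p) {
  for (const Rivet::Particle& a : p.ancestors()) {
    if (Rivet::PID::hasBottom(a.pid())) return false; // b-hadron feed-down
  }
  return true; // no b-quark-containing ancestor => prompt
}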
Source code: LHCB_2015_I1396331.cc

// -*- C++ -*-
#include "Rivet/Analysis.hh"
#include "Rivet/Projections/UnstableParticles.hh"

namespace Rivet {


  /// LHCb prompt charm hadron pT and rapidity spectra
  class LHCB_2015_I1396331 : public Analysis {
  public:

    /// Constructor
    RIVET_DEFAULT_ANALYSIS_CTOR(LHCB_2015_I1396331);


    /// @name Analysis methods
    /// @{

    /// Book histograms and initialise projections before the run
    void init() {

      /// Initialise and register projections
      Cut selection = (Cuts::abspid == 411 || Cuts::abspid == 421 || Cuts::abspid == 431 || Cuts::abspid == 413)
                      && Cuts::pT < 15.0 && Cuts::absrapIn(2.0, 4.5);
      declare(UnstableParticles(selection), "UPDs");

      /// Book histograms
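      // Each observable is a Histo1DGroup binned in rapidity (five bins,
      // 2.0-4.5 in steps of 0.5); every group bin holds a pT histogram,
      // booked from the reference table entry matching the bin index.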
      book(_h_pdg421_Dzero_pT_y, {2., 2.5, 3., 3.5, 4., 4.5});
      for (auto& b : _h_pdg421_Dzero_pT_y->bins()) {
        book(b, 1, 1, b.index());
      }

      book(_h_pdg411_Dplus_pT_y, {2., 2.5, 3., 3.5, 4., 4.5});
      for (auto& b : _h_pdg411_Dplus_pT_y->bins()) {
        book(b, 2, 1, b.index());
      }

      book(_h_pdg431_Dsplus_pT_y, {2., 2.5, 3., 3.5, 4., 4.5});
      for (auto& b : _h_pdg431_Dsplus_pT_y->bins()) {
        book(b, 3, 1, b.index());
      }

      book(_h_pdg413_Dstarplus_pT_y, {2., 2.5, 3., 3.5, 4., 4.5});
      for (auto& b : _h_pdg413_Dstarplus_pT_y->bins()) {
        book(b, 4, 1, b.index());
      }

      // Temporary histos for ratios
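      // These mirror the spectra above but are filled only for the exclusive
      // decay modes reconstructed by LHCb; finalize() divides them to build
      // the production-ratio estimates. refData(9, 1, 2) supplies the binning.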
      book(_hbr_Dzero, {2., 2.5, 3., 3.5, 4., 4.5});
      book(_hbr_Dplus, {2., 2.5, 3., 3.5, 4., 4.5});
      book(_hbr_Ds, {2., 2.5, 3., 3.5, 4., 4.5});
      book(_hbr_Dstar, {2., 2.5, 3., 3.5, 4., 4.5});
      for (size_t i = 1; i < _hbr_Ds->numBins()+1; ++i) {
        book(_hbr_Dzero->bin(i), "/TMP/Dzero"+to_str(i), refData(9, 1, 2));
        book(_hbr_Dplus->bin(i), "/TMP/Dplus"+to_str(i), refData(9, 1, 2));
        book(_hbr_Ds->bin(i), "/TMP/Ds"+to_str(i), refData(9, 1, 2));
        book(_hbr_Dstar->bin(i), "/TMP/Dstar"+to_str(i), refData(9, 1, 2));
      }

    }


    /// Perform the per-event analysis
    void analyze(const Event& event) {

      /// @todo Use PrimaryHadrons to avoid double counting and automatically remove the contributions from unstable?
      const UnstableParticles& ufs = apply<UnstableParticles>(event, "UPDs");
      for (const Particle& p : ufs.particles()) {

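        // Prompt-charm requirement: drop candidates with a b-hadron ancestor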
        if (p.fromBottom()) continue;

        const PdgId apid = p.abspid();
        const double y = p.absrap(); ///< Use |y| to double the analysis efficiency with a "two-sided LHCb"
        const double pT = p.pT()/GeV;

        // Select the exclusive decay modes used for the ratio measurements
        Particles daus;
        switch (apid) {
        case 411:
          _h_pdg411_Dplus_pT_y->fill(y, pT);
          // select decay channel [D+ -> K- pi+ pi+]cc
          if (p.children().size() != 3) break;
          if ( ((p.children(Cuts::pid == -321).size() == 1) && (p.children(Cuts::pid == 211).size() == 2)) ||
               ((p.children(Cuts::pid == 321).size() == 1) && (p.children(Cuts::pid == -211).size() == 2)) )
            _hbr_Dplus->fill(y, pT); // MSG_INFO("Found [ D+ -> K- pi+ pi+ ]cc...");
          break;
        case 421:
          _h_pdg421_Dzero_pT_y->fill(y, pT);
          // select decay channel [D0 -> K- pi+]cc
          if (p.children().size() != 2) break;
          if ( ((p.children(Cuts::pid == -321).size() == 1) && (p.children(Cuts::pid == 211).size() == 1)) ||
               ((p.children(Cuts::pid == 321).size() == 1) && (p.children(Cuts::pid == -211).size() == 1)) )
            _hbr_Dzero->fill(y, pT); // MSG_INFO("Found [ D0 -> K- pi+ ]cc...");
          break;
        case 431:
          _h_pdg431_Dsplus_pT_y->fill(y, pT);
          // select decay channel [Ds+ -> [K+ K-]phi0 pi+]cc
          if (p.children().size() != 2) break;
          daus = p.children(Cuts::pid == 333);
          if ( (daus.size() == 1) && (p.children(Cuts::abspid == 211).size() == 1) &&
               (daus.front().children(Cuts::abspid == 321).size() == 2) )
            _hbr_Ds->fill(y, pT); // MSG_INFO("Found [ Ds+ -> phi0(-> K+ K-) pi+ ]cc...");
          break;
        case 413:
          _h_pdg413_Dstarplus_pT_y->fill(y, pT);
          // select decay channel [D*+ -> [K- pi+]D0 pi+]cc
          if (p.children().size() != 2) break;
          daus = p.children(Cuts::abspid == 421); // abspid, so the c.c. mode [D*- -> D0bar pi-] is also selected
          if ( (daus.size() == 1) && (p.children(Cuts::abspid == 211).size() == 1) &&
               ( daus.front().children().size() == 2 ) &&
               ( ( (daus.front().children(Cuts::pid == -321).size() == 1) && (daus.front().children(Cuts::pid == 211).size() == 1) ) ||
                 ( (daus.front().children(Cuts::pid == 321).size() == 1) && (daus.front().children(Cuts::pid == -211).size() == 1) ) ) )
            _hbr_Dstar->fill(y, pT); // MSG_INFO("Found [ D*+ -> D0 (-> K- pi+)cc pi+ ]cc...");
          break;
        default:
          break;
        }
      }

    }


    /// Normalise histograms etc., after the run
    void finalize() {
      /// Factor of 0.5 to correct for the abs(rapidity) used above
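      /// crossSection()/sumOfWeights() converts the generated event count
      /// into a cross-section, expressed here in microbarn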
      const double scale_factor = 0.5 * crossSection()/microbarn / sumOfWeights();
      scale(_h_pdg411_Dplus_pT_y, scale_factor);
      scale(_h_pdg431_Dsplus_pT_y, scale_factor);
      scale(_h_pdg413_Dstarplus_pT_y, scale_factor);
      scale(_h_pdg421_Dzero_pT_y, scale_factor);


      // Do ratios
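      // One Estimate1D per rapidity bin and per ratio: tables 9-14 hold the
      // D+/D0, Ds/D0, D*/D0, Ds/D+, D*/D+ and Ds/D* ratios, quoted in %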
      for (size_t i = 0; i < 5; ++i) {
        // book final ratio plots
        book(hr_DplusDzero[i], 9, 1, i+1);
        book(hr_DsDzero[i], 10, 1, i+1);
        book(hr_DstarDzero[i], 11, 1, i+1);
        book(hr_DsDplus[i], 12, 1, i+1);
        book(hr_DstarDplus[i], 13, 1, i+1);
        book(hr_DsDstar[i], 14, 1, i+1);
        // fill ratio plots
        divide(_hbr_Dplus->bin(i+1), _hbr_Dzero->bin(i+1), hr_DplusDzero[i]);
        divide(_hbr_Ds->bin(i+1), _hbr_Dzero->bin(i+1), hr_DsDzero[i]);
        divide(_hbr_Dstar->bin(i+1), _hbr_Dzero->bin(i+1), hr_DstarDzero[i]);
        divide(_hbr_Ds->bin(i+1), _hbr_Dplus->bin(i+1), hr_DsDplus[i]);
        divide(_hbr_Dstar->bin(i+1), _hbr_Dplus->bin(i+1), hr_DstarDplus[i]);
        divide(_hbr_Ds->bin(i+1), _hbr_Dstar->bin(i+1), hr_DsDstar[i]);
        // scale by 100 as the measurement is in %
        hr_DplusDzero[i]->scale(100.);
        hr_DsDzero[i]->scale(100.);
        hr_DstarDzero[i]->scale(100.);
        hr_DsDplus[i]->scale(100.);
        hr_DstarDplus[i]->scale(100.);
        hr_DsDstar[i]->scale(100.);
      }
    }

    /// @}


  private:

    /// @name Histograms
    /// @{
    Histo1DGroupPtr _h_pdg411_Dplus_pT_y, _hbr_Dplus;
    Histo1DGroupPtr _h_pdg421_Dzero_pT_y, _hbr_Dzero;
    Histo1DGroupPtr _h_pdg431_Dsplus_pT_y, _hbr_Ds;
    Histo1DGroupPtr _h_pdg413_Dstarplus_pT_y, _hbr_Dstar;
    Estimate1DPtr hr_DplusDzero[5];
    Estimate1DPtr hr_DsDzero[5];
    Estimate1DPtr hr_DstarDzero[5];
    Estimate1DPtr hr_DsDplus[5];
    Estimate1DPtr hr_DstarDplus[5];
    Estimate1DPtr hr_DsDstar[5];
    /// @}

  };


  RIVET_DECLARE_PLUGIN(LHCB_2015_I1396331);

}