Rivet analyses reference

DELPHI_1998_I473409

Flavour separated spectra for $\pi^\pm$, $K^\pm$, $p,\bar{p}$ production in hadronic $Z^0$ decays

Experiment: DELPHI (LEP)
Inspire ID: 473409
Status: VALIDATED
Authors:
Beam energies: (45.6, 45.6) GeV
Run details:

DELPHI results for the spectra of $\pi^\pm$, $K^\pm$ and $p,\bar{p}$ production in hadronic $Z^0$ decays. The results are separated into light- and bottom-quark initiated events.

Source code: DELPHI_1998_I473409.cc

// -*- C++ -*-
#include "Rivet/Analysis.hh"
#include "Rivet/Projections/Beam.hh"
#include "Rivet/Projections/FinalState.hh"
#include "Rivet/Projections/ChargedFinalState.hh"

#define I_KNOW_THE_INITIAL_QUARKS_PROJECTION_IS_DODGY_BUT_NEED_TO_USE_IT
#include "Rivet/Projections/InitialQuarks.hh"

namespace Rivet {

  /// @brief Flavour-separated pi, K, p spectra
  class DELPHI_1998_I473409 : public Analysis {
  public:

    /// Constructor
    RIVET_DEFAULT_ANALYSIS_CTOR(DELPHI_1998_I473409);


    /// @name Analysis methods
    /// @{

    /// Book histograms and initialise projections before the run
    void init() {

      // Initialise and register projections
      declare(Beam(), "Beams");
      declare(ChargedFinalState(), "FS");
      declare(InitialQuarks(), "IQF");
      // Book histograms
      book(_h_all_pi, "TMP/h_all_pi",refData( 4,1,1));
      book(_h_all_K , "TMP/h_all_K ",refData( 5,1,1));
      book(_h_all_p , "TMP/h_all_p ",refData( 6,1,1));
      book(_h_all_Kp, "TMP/h_all_Kp",refData( 7,1,1));
      book(_d_all , "TMP/d_all ",refData( 4,1,1));

      book(_h_bot_pi, "TMP/h_bot_pi",refData( 8,1,1));
      book(_h_bot_K , "TMP/h_bot_K ",refData( 9,1,1));
      book(_h_bot_p , "TMP/h_bot_p ",refData(10,1,1));
      book(_h_bot_Kp, "TMP/h_bot_Kp",refData(11,1,1));
      book(_d_bot , "TMP/d_bot ",refData( 8,1,1));

      book(_h_lgt_pi, "TMP/h_lgt_pi",refData(12,1,1));
      book(_h_lgt_K , "TMP/h_lgt_K ",refData(13,1,1));
      book(_h_lgt_p , "TMP/h_lgt_p ",refData(14,1,1));
      book(_h_lgt_Kp, "TMP/h_lgt_Kp",refData(15,1,1));
      book(_d_lgt , "TMP/d_lgt ",refData(12,1,1));

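      // Momentum (p) and scaled-momentum (x_p) spectra, booked directly
      // against the corresponding reference data for all, bottom and light events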
      book(_h_all_ch_p, 16,1,1);
      book(_h_all_ch_x, 17,1,1);
      book(_h_all_pi_p, 18,1,1);
      book(_h_all_pi_x, 19,1,1);
      book(_h_all_K_p , 20,1,1);
      book(_h_all_k_x , 21,1,1);
      book(_h_all_p_p , 22,1,1);
      book(_h_all_p_x , 23,1,1);

      book(_h_bot_ch_p, 24,1,1);
      book(_h_bot_ch_x, 25,1,1);
      book(_h_bot_pi_p, 26,1,1);
      book(_h_bot_pi_x, 27,1,1);
      book(_h_bot_K_p , 28,1,1);
      book(_h_bot_k_x , 29,1,1);
      book(_h_bot_p_p , 30,1,1);
      book(_h_bot_p_x , 31,1,1);

      book(_h_lgt_ch_p, 32,1,1);
      book(_h_lgt_ch_x, 33,1,1);
      book(_h_lgt_pi_p, 34,1,1);
      book(_h_lgt_pi_x, 35,1,1);
      book(_h_lgt_K_p , 36,1,1);
      book(_h_lgt_k_x , 37,1,1);
      book(_h_lgt_p_p , 38,1,1);
      book(_h_lgt_p_x , 39,1,1);

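      // Multiplicity tables: first index is the flavour class (0: all, 1: bottom, 2: light),
      // second index is the species (0: charged, 1: pi+-, 2: K+-, 3: p/pbar, 4: K+p),
      // each binned in the single c.m.-energy label Ecm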
      for (unsigned int ix=0; ix<3; ++ix) {
        for (unsigned int iy=0; iy<5; ++iy) {
          book(_mult[ix][iy], ix+1, 1, iy+1);
        }
      }
      book(_wLgt,"TMP/wLgt");
      book(_wBot,"TMP/wBot");
      book(_wAll,"TMP/wAll");
    }


    /// Perform the per-event analysis
    void analyze(const Event& event) {

      // First, veto on leptonic events by requiring at least 2 charged FS particles
      const FinalState& fs = apply<ChargedFinalState>(event, "FS");
      const size_t numParticles = fs.particles().size();

      // Even if we only generate hadronic events, we still need a cut on numCharged >= 2.
      if (numParticles < 2) {
        MSG_DEBUG("Failed leptonic event cut");
        vetoEvent;
      }
      MSG_DEBUG("Passed leptonic event cut");

      int flavour = 0;
      const InitialQuarks& iqf = apply<InitialQuarks>(event, "IQF");

      // If we only have two quarks (qqbar), just take the flavour.
      // If we have more than two quarks, look for the most energetic q-qbar pair.
      if (iqf.particles().size() == 2) {
        flavour = iqf.particles().front().abspid();
      }
      else {
        map<int, double> quarkmap;
        for (const Particle& p : iqf.particles()) {
          if (quarkmap[p.pid()] < p.E()) {
            quarkmap[p.pid()] = p.E();
          }
        }
        double maxenergy = 0.;
        for (int i = 1; i <= 5; ++i) {
          if (quarkmap[i]+quarkmap[-i] > maxenergy) {
            flavour = i;
            maxenergy = quarkmap[i]+quarkmap[-i];
          }
        }
      }
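      // Classify the event: flavour <= 3 is treated as light (u,d,s) and
      // flavour == 5 as bottom; charm events only enter the all-flavour sample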

      // Get event weight for histo filling
      _wAll->fill();
      if(flavour<=3) _wLgt->fill();
      else if(flavour==5) _wBot->fill();

      // Get beams and average beam momentum
      const ParticlePair& beams = apply<Beam>(event, "Beams").beams();
      const double meanBeamMom = ( beams.first.p3().mod() +
                                   beams.second.p3().mod() ) / 2.0;
      MSG_DEBUG("Avg beam momentum = " << meanBeamMom);
      // Loop over the charged particles
      for (const Particle& p : fs.particles()) {
        double modp = p.p3().mod();      // particle momentum
        double xp = modp/meanBeamMom;    // scaled momentum x_p = p / p_beam
        int id = abs(p.pid());
        _d_all->fill(modp);
        _mult[0][0]->fill(Ecm);
        _h_all_ch_p->fill(modp);
        _h_all_ch_x->fill(xp );
        if(flavour<=3) {
          _d_lgt->fill(modp);
          _mult[2][0]->fill(Ecm);
          _h_lgt_ch_p->fill(modp);
          _h_lgt_ch_x->fill(xp );
        }
        else if(flavour==5) {
          _d_bot ->fill(modp);
          _mult[1][0]->fill(Ecm);
          _h_bot_ch_p->fill(modp);
          _h_bot_ch_x->fill(xp );
        }
        if(id==211) {
          _h_all_pi ->fill(modp);
          _mult[0][1]->fill(Ecm);
          _h_all_pi_p->fill(modp);
          _h_all_pi_x->fill(xp );
          if(flavour<=3) {
            _h_lgt_pi ->fill(modp);
            _mult[2][1]->fill(Ecm);
            _h_lgt_pi_p->fill(modp);
            _h_lgt_pi_x->fill(xp );
          }
          else if(flavour==5) {
            _h_bot_pi ->fill(modp);
            _mult[1][1]->fill(Ecm);
            _h_bot_pi_p->fill(modp);
            _h_bot_pi_x->fill(xp );
          }
        }
        else if(id==321) {
          _h_all_K ->fill(modp);
          _h_all_Kp->fill(modp);
          _mult[0][2]->fill(Ecm);
          _mult[0][4]->fill(Ecm);
          _h_all_K_p ->fill(modp);
          _h_all_k_x ->fill(xp );
          if(flavour<=3) {
            _h_lgt_K->fill(modp);
            _h_lgt_Kp->fill(modp);
            _mult[2][2]->fill(Ecm);
            _mult[2][4]->fill(Ecm);
            _h_lgt_K_p ->fill(modp);
            _h_lgt_k_x ->fill(xp );
          }
          else if(flavour==5) {
            _h_bot_K ->fill(modp);
            _h_bot_Kp->fill(modp);
            _mult[1][2]->fill(Ecm);
            _mult[1][4]->fill(Ecm);
            _h_bot_K_p ->fill(modp);
            _h_bot_k_x ->fill(xp );
          }
        }
        else if(id==2212) {
          _h_all_p ->fill(modp);
          _h_all_Kp->fill(modp);
          _mult[0][3]->fill(Ecm);
          _mult[0][4]->fill(Ecm);
          _h_all_p_p ->fill(modp);
          _h_all_p_x ->fill(xp );
          if(flavour<=3) {
            _h_lgt_p ->fill(modp);
            _h_lgt_Kp->fill(modp);
            _mult[2][3]->fill(Ecm);
            _mult[2][4]->fill(Ecm);
            _h_lgt_p_p ->fill(modp);
            _h_lgt_p_x ->fill(xp );
          }
          else if(flavour==5) {
            _h_bot_p ->fill(modp);
            _h_bot_Kp->fill(modp);
            _mult[1][3]->fill(Ecm);
            _mult[1][4]->fill(Ecm);
            _h_bot_p_p ->fill(modp);
            _h_bot_p_x ->fill(xp );
          }
        }
      }
    }


    /// Normalise histograms etc., after the run
    void finalize() {

      // Particle fractions in percent of all charged particles,
      // obtained by dividing each species' spectrum by the charged spectrum
      scale(_h_all_pi,100.);
      scale(_h_all_K ,100.);
      scale(_h_all_p ,100.);
      scale(_h_all_Kp,100.);
      Estimate1DPtr temp;
      book(temp,4,1,1);
      divide(_h_all_pi, _d_all, temp);
      book(temp,5,1,1);
      divide(_h_all_K , _d_all, temp);
      book(temp,6,1,1);
      divide(_h_all_p , _d_all, temp);
      book(temp,7,1,1);
      divide(_h_all_Kp, _d_all, temp);

      scale(_h_bot_pi,100.);
      scale(_h_bot_K ,100.);
      scale(_h_bot_p ,100.);
      scale(_h_bot_Kp,100.);
      book(temp, 8,1,1);
      divide(_h_bot_pi, _d_bot, temp);
      book(temp, 9,1,1);
      divide(_h_bot_K , _d_bot, temp);
      book(temp,10,1,1);
      divide(_h_bot_p , _d_bot, temp);
      book(temp,11,1,1);
      divide(_h_bot_Kp, _d_bot, temp);

      scale(_h_lgt_pi,100.);
      scale(_h_lgt_K ,100.);
      scale(_h_lgt_p ,100.);
      scale(_h_lgt_Kp,100.);
      book(temp,12,1,1);
      divide(_h_lgt_pi, _d_lgt, temp);
      book(temp,13,1,1);
      divide(_h_lgt_K , _d_lgt, temp);
      book(temp,14,1,1);
      divide(_h_lgt_p , _d_lgt, temp);
      book(temp,15,1,1);
      divide(_h_lgt_Kp, _d_lgt, temp);

      // Normalise the p and x_p spectra to the number of events in each flavour sample
      scale(_h_all_ch_p, 1./ *_wAll);
      scale(_h_all_ch_x, 1./ *_wAll);
      scale(_h_all_pi_p, 1./ *_wAll);
      scale(_h_all_pi_x, 1./ *_wAll);
      scale(_h_all_K_p , 1./ *_wAll);
      scale(_h_all_k_x , 1./ *_wAll);
      scale(_h_all_p_p , 1./ *_wAll);
      scale(_h_all_p_x , 1./ *_wAll);

      scale(_h_bot_ch_p, 1./ *_wBot);
      scale(_h_bot_ch_x, 1./ *_wBot);
      scale(_h_bot_pi_p, 1./ *_wBot);
      scale(_h_bot_pi_x, 1./ *_wBot);
      scale(_h_bot_K_p , 1./ *_wBot);
      scale(_h_bot_k_x , 1./ *_wBot);
      scale(_h_bot_p_p , 1./ *_wBot);
      scale(_h_bot_p_x , 1./ *_wBot);

      scale(_h_lgt_ch_p, 1./ *_wLgt);
      scale(_h_lgt_ch_x, 1./ *_wLgt);
      scale(_h_lgt_pi_p, 1./ *_wLgt);
      scale(_h_lgt_pi_x, 1./ *_wLgt);
      scale(_h_lgt_K_p , 1./ *_wLgt);
      scale(_h_lgt_k_x , 1./ *_wLgt);
      scale(_h_lgt_p_p , 1./ *_wLgt);
      scale(_h_lgt_p_x , 1./ *_wLgt);

      // Mean multiplicities: divide by the number of events in each flavour class
      vector<CounterPtr> scales = {_wAll, _wBot, _wLgt};
      for (unsigned int ix=0; ix<3; ++ix) {
        if (scales[ix]->effNumEntries()<=0.) continue;
        scale(_mult[ix], 1./ *scales[ix]);
      }
    }

    /// @}


    /// @name Histograms
    /// @{
    Histo1DPtr _h_all_pi, _h_all_K, _h_all_p, _h_all_Kp, _d_all;
    Histo1DPtr _h_bot_pi, _h_bot_K, _h_bot_p, _h_bot_Kp, _d_bot;
    Histo1DPtr _h_lgt_pi, _h_lgt_K, _h_lgt_p, _h_lgt_Kp, _d_lgt;
    Histo1DPtr _h_all_ch_p, _h_all_ch_x, _h_all_pi_p, _h_all_pi_x;
    Histo1DPtr _h_all_K_p , _h_all_k_x , _h_all_p_p , _h_all_p_x ;
    Histo1DPtr _h_bot_ch_p, _h_bot_ch_x, _h_bot_pi_p, _h_bot_pi_x;
    Histo1DPtr _h_bot_K_p , _h_bot_k_x , _h_bot_p_p , _h_bot_p_x ;
    Histo1DPtr _h_lgt_ch_p, _h_lgt_ch_x, _h_lgt_pi_p, _h_lgt_pi_x;
    Histo1DPtr _h_lgt_K_p , _h_lgt_k_x , _h_lgt_p_p , _h_lgt_p_x ;
    BinnedHistoPtr<string> _mult[3][5];
    const string Ecm = "91.2";

    CounterPtr _wLgt, _wBot, _wAll;
    /// @}

  };


  RIVET_DECLARE_PLUGIN(DELPHI_1998_I473409);


}