Rivet analyses reference

H1_1997_I445116

Evolution of e p fragmentation and multiplicity distributions in the Breit frame

Experiment: H1 (HERA)
Inspire ID: 445116
Status: VALIDATED
Authors:
Beam energies: (27.5, 820.0); (820.0, 27.5) GeV
Run details:

Low x deep-inelastic ep scattering data, taken in 1994 at the H1 detector at HERA, are analysed in the Breit frame of reference. The evolution of the peak and width of the current hemisphere fragmentation function is presented as a function of Q.

Source code: H1_1997_I445116.cc

// -*- C++ -*-
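// Breit-frame observables used below: each charged particle in the current
// hemisphere is characterised by its scaled momentum x_p = 2|p|/Q and by
// xi = ln(1/x_p), with Q^2 the virtuality of the exchanged boson.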
2#include "Rivet/Analysis.hh"
3#include "Rivet/Projections/FinalState.hh"
4#include "Rivet/Projections/DISKinematics.hh"
5#include "Rivet/Projections/DISLepton.hh"
6#include "Rivet/Projections/ChargedFinalState.hh"
7#include "Rivet/Tools/BinnedHistogram.hh"
8
9namespace Rivet {
10
11
12 /// @brief Evolution of e p fragmentation and multiplicity distributions in the Breit frame (H1)
13 class H1_1997_I445116 : public Analysis {
14 public:
15
16 /// Constructor
17 RIVET_DEFAULT_ANALYSIS_CTOR(H1_1997_I445116);
18
19
20 /// @name Analysis methods
21 /// @{
22 const vector<double> QEdges {3.17, 3.915, 4.72, 6.13, 7.635, 8.85, 10.81, 13.415, 16.365, 21.745, 33.835, 50.745};
23 const vector<double> AvgEdges {3.13, 3.875, 4.675, 6.065, 7.55, 8.76, 9.785, 14.675, 16.25, 21.45, 33.06, 49.26};
24 const vector<double> xp_range{0.02, 0.05, 0.10, 0.20, 0.3, 0.4, 0.5, 0.7};
25 const size_t iPmax = 7;
26
27 /// Book histograms and initialise projections before the run
28 void init() {
29
30 declare(DISKinematics(), "Kinematics");
31 const DISLepton dl;
32 declare(ChargedFinalState(dl.remainingFinalState()), "CFS");
33
34 book(_Nevt_after_cuts_Qlow, "TMP/Nevt_after_cuts_Qlow");
35 book(_Nevt_after_cuts_QHigh, "TMP/Nevt_after_cuts_QHigh");
36
37 book(_h["xp"], 1, 1, 1);
38 book(_h["xpQgt"], 1, 1, 2);
39 book(_h["xi"], 2, 1, 1);
40 book(_h["xiQgt"], 2, 1, 2);
41
42 Histo1DPtr dummy;
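      // Temporary x_p spectra in intervals of Q, with matching event counters;
      // finalize() turns these into the scatter points that show the Q evolution
      // of the current-hemisphere fragmentation function.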
      for (size_t iQ = 0; iQ < 11; ++iQ) {
        _h_Q2_xp.add(QEdges[iQ], QEdges[iQ+1], book(dummy, "TMP/xpQ"+to_string(iQ), xp_range));
        book(_Nevt_after_cuts_Q[iQ], "TMP/Nevt_after_cuts_Q"+to_string(iQ));
      }

      for (size_t iP = 0; iP < iPmax; ++iP) {
        book(_s["Qxp"+to_string(iP)], 3+iP, 1, 1);
      }

      book(_s["Avg1"], 10, 1, 1);
      book(_s["Avg2"], 11, 1, 1);

      book(_h["QT1"], "TMP/QT1", refData(10,1,1));
      book(_h["QT2"], "TMP/QT2", refData(11,1,1));

      book(_h["AvgT1"], "TMP/AvgT1", refData(10,1,1));
      book(_h["AvgT2"], "TMP/AvgT2", refData(11,1,1));

      for (size_t iE = 0; iE < 8; ++iE) {
        book(_h["E"+to_string(iE)], 12+iE, 1, 1);
        book(_Nevt_after_cuts_E[iE], "TMP/Nevt_after_cuts_E"+to_string(iE));
      }

      for (size_t iN = 0; iN < 6; ++iN) {
        book(_h["N"+to_string(iN)], 20+iN, 1, 1);
        book(_Nevt_after_cuts_N[iN], "TMP/Nevt_after_cuts_N"+to_string(iN));
      }

      book(_h["MeanTest1"], "TMP/Meantest1", 20, 5., 6.14);
      book(_h["MeanTest2"], "TMP/Meantest2", 50, 19., 8000.);
    }


    /// Perform the per-event analysis
    void analyze(const Event& event) {

      const ChargedFinalState& cfs = apply<ChargedFinalState>(event, "CFS");
      Particles particles = cfs.particles();
      const size_t numParticles = particles.size();

      const DISKinematics& dk = apply<DISKinematics>(event, "Kinematics");
      double Q2 = dk.Q2()/GeV2;
      double y = dk.y();
      double x = dk.x();
      double W2 = dk.W2();
      double Q = sqrt(Q2);

      if (y < 0.05 || y > 0.6) vetoEvent;
      if (W2 < 4400) vetoEvent;

      if (numParticles < 2) {
        MSG_DEBUG("Failed leptonic event cut");
        vetoEvent;
      }

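      // Count accepted events in each Q, Q^2 and (Q^2, x) interval; these
      // counters normalise the corresponding distributions in finalize().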
      for (int iQ = 0; iQ < 11; ++iQ) {
        if (inRange(sqrt(Q2), QEdges[iQ], QEdges[iQ+1])) {
          _Nevt_after_cuts_Q[iQ]->fill();
        }
      }

      if (Q > 5 && Q < 6.14)   _Nevt_after_cuts_E[0]->fill();
      if (Q > 19 && Q < 8000)  _Nevt_after_cuts_E[1]->fill();
      if (Q2 > 12 && Q2 < 15)  _Nevt_after_cuts_E[2]->fill();
      if (Q2 > 15 && Q2 < 25)  _Nevt_after_cuts_E[3]->fill();
      if (Q2 > 20 && Q2 < 40)  _Nevt_after_cuts_E[4]->fill();
      if (Q2 > 40 && Q2 < 60)  _Nevt_after_cuts_E[5]->fill();
      if (Q2 > 60 && Q2 < 80)  _Nevt_after_cuts_E[6]->fill();
      if (Q2 > 80 && Q2 < 100) _Nevt_after_cuts_E[7]->fill();

      if (Q2 > 12 && Q2 < 30 && x > 6e-4 && x < 2e-3)   _Nevt_after_cuts_N[0]->fill();
      if (Q2 > 12 && Q2 < 30 && x > 2e-3 && x < 1e-2)   _Nevt_after_cuts_N[1]->fill();
      if (Q2 > 30 && Q2 < 80 && x > 6e-4 && x < 2e-3)   _Nevt_after_cuts_N[2]->fill();
      if (Q2 > 30 && Q2 < 80 && x > 2e-3 && x < 1e-2)   _Nevt_after_cuts_N[3]->fill();
      if (Q2 > 100 && Q2 < 500 && x > 2e-3 && x < 1e-2) _Nevt_after_cuts_N[4]->fill();
      if (Q2 > 100 && Q2 < 500 && x > 1e-2 && x < 2e-1) _Nevt_after_cuts_N[5]->fill();

      if (Q2 > 12 && Q2 < 100) {
        _Nevt_after_cuts_Qlow->fill();
      }

      if (Q2 > 100 && Q2 < 8000) {
        _Nevt_after_cuts_QHigh->fill();
      }

      double multi = 0;
      double multiperevent1 = 0.0;
      double multiperevent2 = 0.0;
      double multiperevent3 = 0.0;
      double multiperevent4 = 0.0;
      double multiperevent5 = 0.0;
      double multiperevent6 = 0.0;


      const LorentzTransform Breitboost = dk.boostBreit();

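      // Analyse charged particles boosted into the Breit frame, keeping only
      // those with p_z <= 0 there, i.e. the current hemisphere in this convention.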
      for (size_t ip1 = 0; ip1 < particles.size(); ++ip1) {
        const Particle& p = particles[ip1];

        const FourMomentum BreMom = Breitboost.transform(p.momentum());

        if (BreMom.pz() > 0.) continue;
        double pcal = std::sqrt(BreMom.px2() + BreMom.py2() + BreMom.pz2());
        double xp = 2*pcal/sqrt(Q2);

        double E = std::sqrt((.27*.27) + (pcal*pcal));
        double dp = 4*M_PI*pcal*pcal;
        double dE = pcal;
        double factor = dE/dp;

        if (Q2 > 12 && Q2 < 100) {
          _h["xp"]->fill(xp);
          _h["xi"]->fill(log(1/xp));
        }

        if (Q2 > 100 && Q2 < 8000) {
          _h["xpQgt"]->fill(xp);
          _h["xiQgt"]->fill(log(1/xp));
        }

        if (Q > 5 && Q < 6.14)   _h["E0"]->fill(E, factor);
        if (Q > 19 && Q < 8000)  _h["E1"]->fill(E, factor);
        if (Q2 > 12 && Q2 < 15)  _h["E2"]->fill(E, factor);
        if (Q2 > 15 && Q2 < 25)  _h["E3"]->fill(E, factor);
        if (Q2 > 20 && Q2 < 40)  _h["E4"]->fill(E, factor);
        if (Q2 > 40 && Q2 < 60)  _h["E5"]->fill(E, factor);
        if (Q2 > 60 && Q2 < 80)  _h["E6"]->fill(E, factor);
        if (Q2 > 80 && Q2 < 100) _h["E7"]->fill(E, factor);

        _h_Q2_xp.fill(sqrt(Q2), xp);

        multi = multi + 1;

        if (Q2 > 12 && Q2 < 30 && x > 6e-4 && x < 2e-3)   multiperevent1 += 1;
        if (Q2 > 12 && Q2 < 30 && x > 2e-3 && x < 1e-2)   multiperevent2 += 1;
        if (Q2 > 30 && Q2 < 80 && x > 6e-4 && x < 2e-3)   multiperevent3 += 1;
        if (Q2 > 30 && Q2 < 80 && x > 2e-3 && x < 1e-2)   multiperevent4 += 1;
        if (Q2 > 100 && Q2 < 500 && x > 2e-3 && x < 1e-2) multiperevent5 += 1;
        if (Q2 > 100 && Q2 < 500 && x > 1e-2 && x < 2e-1) multiperevent6 += 1;

      }

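      // Per-event charged multiplicities: fill the (Q^2, x)-binned multiplicity
      // distributions and the running sums used for the mean multiplicity vs Q.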
      if (Q2 > 12 && Q2 < 30 && x > 6e-4 && x < 2e-3)   _h["N0"]->fill(multiperevent1);
      if (Q2 > 12 && Q2 < 30 && x > 2e-3 && x < 1e-2)   _h["N1"]->fill(multiperevent2);
      if (Q2 > 30 && Q2 < 80 && x > 6e-4 && x < 2e-3)   _h["N2"]->fill(multiperevent3);
      if (Q2 > 30 && Q2 < 80 && x > 2e-3 && x < 1e-2)   _h["N3"]->fill(multiperevent4);
      if (Q2 > 100 && Q2 < 500 && x > 2e-3 && x < 1e-2) _h["N4"]->fill(multiperevent5);
      if (Q2 > 100 && Q2 < 500 && x > 1e-2 && x < 2e-1) _h["N5"]->fill(multiperevent6);

      if (Q2 > 12) {
        _h["AvgT2"]->fill(sqrt(Q2), multi);
        _h["QT2"]->fill(sqrt(Q2));
      }

      _h["AvgT1"]->fill(sqrt(Q2), multi);
      _h["QT1"]->fill(sqrt(Q2));

      _h["MeanTest1"]->fill(sqrt(Q2));
      _h["MeanTest2"]->fill(sqrt(Q2));

    }


    /// Normalise histograms etc., after the run
    void finalize() {
      MSG_DEBUG("Nevt Qlow " << dbl(*_Nevt_after_cuts_Qlow));
      scale(_h["xp"], 1.0 / *_Nevt_after_cuts_Qlow);
      scale(_h["xi"], 1.0 / *_Nevt_after_cuts_Qlow);
      scale(_h["xpQgt"], 1.0 / *_Nevt_after_cuts_QHigh);
      scale(_h["xiQgt"], 1.0 / *_Nevt_after_cuts_QHigh);

      for (int iE = 0; iE < 8; ++iE) {
        scale(_h["E"+to_string(iE)], 1.0 / *_Nevt_after_cuts_E[iE]);
      }

      // Only six multiplicity histograms and counters are booked, so loop to 6.
      for (int iN = 0; iN < 6; ++iN) {
        scale(_h["N"+to_string(iN)], 1.0 / *_Nevt_after_cuts_N[iN]);
      }

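      // Mean charged multiplicity as a function of Q: divide the multiplicity-weighted
      // sum by the event-count histogram in each Q bin.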
      divide(_h["AvgT1"], _h["QT1"], _s["Avg1"]);
      divide(_h["AvgT2"], _h["QT2"], _s["Avg2"]);

      int iQ = 0;
      double mean;
      for (Histo1DPtr histo : _h_Q2_xp.histos()) {
        double Qerr = (QEdges[iQ+1] - QEdges[iQ])/2.;
        double Qmid = (QEdges[iQ+1] + QEdges[iQ])/2.;
        double Nev = dbl(*_Nevt_after_cuts_Q[iQ]);
        if (Nev != 0) scale(histo, 1./Nev);

        for (size_t iP = 0; iP < iPmax; ++iP) {
          mean = histo->bin(iP).height();
          double mean_err = mean/100;
          _s["Qxp"+to_string(iP)]->addPoint(Qmid, mean, Qerr, mean_err);
        }
        ++iQ;
      }

      if (_h["MeanTest1"]->numEntries(false) > 0 && _h["MeanTest1"]->effNumEntries(false) > 0) {
        const double x1 = _h["MeanTest1"]->xMean(false);
        MSG_DEBUG("Mean of low Q = " << x1);
      }
      if (_h["MeanTest2"]->numEntries(false) > 0 && _h["MeanTest2"]->effNumEntries(false) > 0) {
        const double x2 = _h["MeanTest2"]->xMean(false);
        MSG_DEBUG("Mean of high Q = " << x2);
      }
    }

    /// @}


  private:

    /// @name Histograms
    /// @{

    CounterPtr _Nevt_after_cuts_Qlow, _Nevt_after_cuts_QHigh, _Nevt_after_cuts_E[8], _Nevt_after_cuts_N[6];
    CounterPtr _Nevt_after_cuts_Q[12];
    map<string, CounterPtr> _Nevt;
    map<string, Histo1DPtr> _h;
    map<string, Scatter2DPtr> _s;
    map<string, Profile1DPtr> _p;
    map<string, CounterPtr> _c;
    Scatter2DPtr _h_pt_06_ratio;

    BinnedHistogram _h_Q2_xp, _h_Avg, _h_xipeak, _h_xiwidth;

    /// @}

  };


  RIVET_DECLARE_PLUGIN(H1_1997_I445116);

}