
Rivet analyses reference

H1_1996_I422230

Charged particle multiplicities in deep inelastic scattering at HERA
Experiment: H1 (HERA)
Inspire ID: 422230
Status: VALIDATED
Authors:
  • Ariadna Leon
  • Hannes Jung
References:
Beams: e+ p+, p+ e+
Beam energies: (27.5, 820.0); (820.0, 27.5) GeV
Run details:
  • validation with 3000001 events generated with RAPGAP33, 26.7 x 820 GeV, Q2 > 2, 0.01 < y < 0.95, IPRO=12, pt2cut=9, mixing with the O(alpha_s) process, Nf_QCDC=4, NFLA=5

Using the H1 detector at HERA, charged particle multiplicity distributions in deep inelastic ep scattering have been measured over a large kinematical region. The evolution with W and Q2 of the multiplicity distribution and of the multiplicity moments in pseudorapidity domains of varying size is studied in the current fragmentation region of the hadronic centre-of-mass frame. Data on the multiplicity as well as on the various moments of the distribution are given in a variety of pseudorapidity regions. The moments are the generalised dispersions (D), the factorial moments (R), the Mueller moments (K) and the C factors (C).
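
For orientation, the moments computed in the source code below follow the standard definitions in terms of the charged multiplicity n in a given W bin and pseudorapidity domain. This is a sketch of the conventions used in the _histo_to_moments helper, not a quotation from the paper:

  \[
    D_q = \langle (n - \langle n \rangle)^q \rangle^{1/q}, \qquad
    C_q = \frac{\langle n^q \rangle}{\langle n \rangle^q}, \qquad
    R_2 = \frac{\langle n(n-1) \rangle}{\langle n \rangle^2}, \qquad
    R_3 = \frac{\langle n(n-1)(n-2) \rangle}{\langle n \rangle^3}, \qquad
    K_3 = R_3 - 3 R_2 + 2 .
  \]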

Source code: H1_1996_I422230.cc
// -*- C++ -*-
#include "Rivet/Analysis.hh"
#include "Rivet/Projections/FinalState.hh"
#include "Rivet/Projections/FastJets.hh"
#include "Rivet/Projections/ChargedFinalState.hh"
#include "Rivet/Projections/DISKinematics.hh"

namespace Rivet {


  /// @brief Charged particle multiplicities in deep inelastic scattering at HERA (H1)
  class H1_1996_I422230 : public Analysis {
  public:

    /// Constructor
    RIVET_DEFAULT_ANALYSIS_CTOR(H1_1996_I422230);


    /// @name Analysis methods
    ///@{

    /// Book histograms and initialise projections before the run
    void init() {

      // Initialise and register projections
      const DISLepton disl;
      declare(disl, "Lepton");
      declare(DISKinematics(), "Kinematics");

      // The basic final-state projection: all final-state particles
      const FinalState fs;
      declare(fs, "FS");
      // Charged final-state particles, excluding the scattered lepton
      const ChargedFinalState cfs(disl.remainingFinalState());
      declare(cfs, "CFS");

      // Counters for the number of events passing the cuts in each W bin
      for (size_t ix = 0; ix < 4; ++ix) {
        book(_Nevt_after_cuts[ix], "TMP/Nevt_after_cuts"+ to_string(ix));
      }

      const vector<double> WEdges{80., 115., 150., 185., 220.};
      book(_g["mult1"], WEdges);
      book(_g["mult2"], WEdges);
      book(_g["mult3"], WEdges);
      book(_g["mult4"], WEdges);
      book(_g["mult_all"], WEdges);
      book(_g["mult10_all"], WEdges);
      book(_g["mult11_all"], WEdges);
      book(_g["mult12_all"], WEdges);
      for (size_t iW=0; iW < _g["mult1"]->numBins(); ++iW) {
        book(_g["mult1"]->bin(iW+1), iW+1, 1, 1);
        book(_g["mult2"]->bin(iW+1), iW+1, 1, 2);
        book(_g["mult3"]->bin(iW+1), iW+1, 1, 3);
        book(_g["mult4"]->bin(iW+1), iW+1, 1, 4);
        const auto& ref = refData<YODA::BinnedEstimate<int>>(4,1,4);
        book(_g["mult_all"]->bin(iW+1),   "TMP/dummy" + to_string(iW), ref);
        book(_g["mult10_all"]->bin(iW+1), "TMP/dummy1"+ to_string(iW), ref);
        book(_g["mult11_all"]->bin(iW+1), "TMP/dummy2"+ to_string(iW), ref);
        book(_g["mult12_all"]->bin(iW+1), "TMP/dummy3"+ to_string(iW), ref);
      }

      // Histograms for the statistical moments and the mean
      book(_e["mean0"],5,1,1);
      book(_e["D2_0"],5,1,2);
      book(_e["D3_0"],5,1,3);
      book(_e["D4_0"],5,1,4);
      book(_e["C2_0"],5,1,5);
      book(_e["C3_0"],5,1,6);
      book(_e["C4_0"],5,1,7);
      book(_e["R2_0"],5,1,8);
      book(_e["R3_0"],5,1,9);

      book(_e["mean12"],6,1,1);
      book(_e["D2_12"],6,1,2);
      book(_e["D3_12"],6,1,3);
      book(_e["D4_12"],6,1,4);
      book(_e["C2_12"],6,1,5);
      book(_e["C3_12"],6,1,6);
      book(_e["C4_12"],6,1,7);
      book(_e["R2_12"],6,1,8);
      book(_e["R3_12"],6,1,9);
      book(_e["K3_12"],6,1,10);

      book(_e["mean13"],7,1,1);
      book(_e["D2_13"],7,1,2);
      book(_e["D3_13"],7,1,3);
      book(_e["D4_13"],7,1,4);
      book(_e["C2_13"],7,1,5);
      book(_e["C3_13"],7,1,6);
      book(_e["C4_13"],7,1,7);
      book(_e["R2_13"],7,1,8);
      book(_e["R3_13"],7,1,9);
      book(_e["K3_13"],7,1,10);

      book(_e["mean14"],8,1,1);
      book(_e["D2_14"],8,1,2);
      book(_e["D3_14"],8,1,3);
      book(_e["D4_14"],8,1,4);
      book(_e["C2_14"],8,1,5);
      book(_e["C3_14"],8,1,6);
      book(_e["C4_14"],8,1,7);
      book(_e["R2_14"],8,1,8);
      book(_e["R3_14"],8,1,9);

      book(_e["mean15"],9,1,1);
      book(_e["D2_15"],9,1,2);
      book(_e["D3_15"],9,1,3);
      book(_e["D4_15"],9,1,4);
      book(_e["C2_15"],9,1,5);
      book(_e["C3_15"],9,1,6);
      book(_e["C4_15"],9,1,7);
      book(_e["R2_15"],9,1,8);
      book(_e["R3_15"],9,1,9);

      book(_e["mean23"],10,1,1);
      book(_e["D2_23"],10,1,2);
      book(_e["D3_23"],10,1,3);
      book(_e["D4_23"],10,1,4);
      book(_e["C2_23"],10,1,5);
      book(_e["C3_23"],10,1,6);
      book(_e["C4_23"],10,1,7);
      book(_e["R2_23"],10,1,8);
      book(_e["R3_23"],10,1,9);
      book(_e["K3_23"],10,1,10);

      book(_e["mean34"],11,1,1);
      book(_e["D2_34"],11,1,2);
      book(_e["D3_34"],11,1,3);
      book(_e["D4_34"],11,1,4);
      book(_e["C2_34"],11,1,5);
      book(_e["C3_34"],11,1,6);
      book(_e["C4_34"],11,1,7);
      book(_e["R2_34"],11,1,8);
      book(_e["R3_34"],11,1,9);
      book(_e["K3_34"],11,1,10);

      book(_e["mean45"],12,1,1);
      book(_e["D2_45"],12,1,2);
      book(_e["D3_45"],12,1,3);
      book(_e["D4_45"],12,1,4);
      book(_e["C2_45"],12,1,5);
      book(_e["C3_45"],12,1,6);
      book(_e["C4_45"],12,1,7);
      book(_e["R2_45"],12,1,8);
      book(_e["R3_45"],12,1,9);
      book(_e["K3_45"],12,1,10);
    }


    /// Perform the per-event analysis
    void analyze(const Event& event) {

      // Final state of all particles, and the charged-particle final state
      const FinalState& fs = apply<FinalState>(event, "FS");
      const size_t numParticles = fs.particles().size();
      const ChargedFinalState& cfs = apply<ChargedFinalState>(event, "CFS");
      const Particles& particles = cfs.particles();

      // A collision with fewer than two final-state particles cannot be
      // analysed sensibly, so veto such events and move on to the next one
      if (numParticles < 2) {
        MSG_DEBUG("Failed multiplicity cut");
        vetoEvent;
      }

      // Apply the DIS kinematics projection to the event
      const DISKinematics& dk = apply<DISKinematics>(event, "Kinematics");

      // The DIS kinematics are per-event quantities that characterise the
      // event as a whole, so they are evaluated outside the particle loop
      const double Q2 = dk.Q2();
      const double W2 = dk.W2();
      const double W = std::sqrt(W2);

      // Kinematic phase-space cuts: 80 < W < 220 GeV and 10 < Q2 < 1000 GeV2
      if (!(Q2 > 10. && Q2 < 1000. && W > 80. && W < 220.)) vetoEvent;

      // Energy flow in the forward region, polar angle between 4.4 and 15 degrees
      double Efwd = 0.;
      for (const Particle& p : particles) {
        const double theta = p.theta()/degree;
        if (inRange(theta, 4.4, 15.)) {
          Efwd += p.E();
        }
      }

      const bool cut_fwd = Efwd > 0.5 && dk.beamLepton().E() > 12.;
      if (!cut_fwd) vetoEvent;

      // Count the events passing the cuts in the corresponding W bin
      const size_t idx = _g["mult1"]->binAt(W).index();
      if (0 < idx && idx < 5) {
        _Nevt_after_cuts[idx-1]->fill();
      }


      // Boost to the hadronic centre-of-mass (HCM) frame
      const LorentzTransform hcmboost = dk.boostHCM();

      // Charged-particle multiplicities in the various pseudorapidity domains
      int kall = 0;
      int k1 = 0;
      int k2 = 0;
      int k3 = 0;
      int k4 = 0;
      int k10 = 0;
      int k11 = 0;
      int k12 = 0;
      for (const Particle& p : particles) {
        const FourMomentum hcmMom = hcmboost.transform(p.momentum());
        const double etahcm_char = hcmMom.eta();
        if (etahcm_char > 0.)  ++kall;
        if (etahcm_char > 1. && etahcm_char < 2.)  ++k1;
        if (etahcm_char > 1. && etahcm_char < 3.)  ++k2;
        if (etahcm_char > 1. && etahcm_char < 4.)  ++k3;
        if (etahcm_char > 1. && etahcm_char < 5.)  ++k4;
        if (etahcm_char > 2. && etahcm_char < 3.)  ++k10;
        if (etahcm_char > 3. && etahcm_char < 4.)  ++k11;
        if (etahcm_char > 4. && etahcm_char < 5.)  ++k12;
      }

      _g["mult_all"]->fill(W, kall);
      _g["mult10_all"]->fill(W, k10);
      _g["mult11_all"]->fill(W, k11);
      _g["mult12_all"]->fill(W, k12);
      _g["mult1"]->fill(W, k1);
      _g["mult2"]->fill(W, k2);
      _g["mult3"]->fill(W, k3);
      _g["mult4"]->fill(W, k4);

    }

    /// Normalise histograms etc., after the run
    void finalize() {

      // Each pseudorapidity domain is identified by the key of its
      // multiplicity histogram group and the suffix of its moment estimates
      const map<string,string> regions = {
        {"mult_all", "0"},    {"mult1", "12"},      {"mult2", "13"},
        {"mult3", "14"},      {"mult4", "15"},      {"mult10_all", "23"},
        {"mult11_all", "34"}, {"mult12_all", "45"}
      };

      for (const auto& region : regions) {
        const string& key = region.first;
        const string& suffix = region.second;
        size_t iW = 0;
        for (auto& histo : _g[key]->bins()) {
          // Scale to probabilities in percent using the number of events
          // passing the cuts in this W bin. The eta > 0 group ("mult_all")
          // is only used for moments, for which the normalisation cancels.
          if (key != "mult_all") {
            scale(histo, 100.0/ *_Nevt_after_cuts[iW]);
          }

          // NB: the uncertainties set below are placeholders derived from
          // the moments themselves and still need to be corrected
          double mean = 0., dispersion = 0., cq = 0., R2 = 0., R3 = 0., K3 = 0.;
          _histo_to_moments(histo, 2, mean, dispersion, cq, R2, R3, K3);
          _e["mean"+suffix]->bin(iW+1).set(mean, dispersion/2);
          _e["D2_"+suffix]->bin(iW+1).set(dispersion, dispersion/mean);
          _e["C2_"+suffix]->bin(iW+1).set(cq, cq/mean);
          _e["R2_"+suffix]->bin(iW+1).set(R2, R2/mean);
          _e["R3_"+suffix]->bin(iW+1).set(R3, R3/mean);
          // The Mueller moment K3 is only given for some of the domains
          if (_e.count("K3_"+suffix)) {
            _e["K3_"+suffix]->bin(iW+1).set(K3, K3/mean);
          }

          _histo_to_moments(histo, 3, mean, dispersion, cq, R2, R3, K3);
          _e["D3_"+suffix]->bin(iW+1).set(dispersion, dispersion/mean);
          _e["C3_"+suffix]->bin(iW+1).set(cq, cq/mean);

          _histo_to_moments(histo, 4, mean, dispersion, cq, R2, R3, K3);
          _e["D4_"+suffix]->bin(iW+1).set(dispersion, dispersion/mean);
          _e["C4_"+suffix]->bin(iW+1).set(cq, cq/mean);

          ++iW;
        }
      }
    }


    /// Compute the mean, the generalised dispersion D_q, the C_q factor and
    /// the factorial moments R_2, R_3 (and from them the Mueller moment K_3)
    /// of a multiplicity histogram
    inline void _histo_to_moments(const BinnedHistoPtr<int>& histo_input,
                                  double iq, double& mean, double& dispersion, double& cq,
                                  double& R2, double& R3, double& K3) {

      double mysumWX = 0.;
      double mysumW = 0.;

      if (histo_input->effNumEntries() == 0 || histo_input->sumW() == 0) {
        MSG_WARNING("Requested mean of a distribution with no net fill weights");
      }
      else {
        // First pass: weighted mean multiplicity <n>
        for (auto& b : histo_input->bins()) {
          mysumWX += b.sumW()*b.xEdge();
          mysumW  += b.sumW();
        }
        mean = mysumWX/mysumW;

        // Second pass: central and raw moments of order q, and factorial moments
        double var = 0., c = 0., r2 = 0., r3 = 0.;
        for (auto& b : histo_input->bins()) {
          const double xval = b.xEdge();
          const double weight = b.sumW();
          var += weight * pow(xval - mean, iq);
          c   += weight * pow(xval, iq);
          r2  += weight * xval*(xval - 1);
          r3  += weight * xval*(xval - 1)*(xval - 2);
        }
        var = var/mysumW;
        dispersion = pow(var, 1./iq);         // D_q = <(n - <n>)^q>^(1/q)
        cq = c/(mysumW*pow(mean, iq));        // C_q = <n^q>/<n>^q
        R2 = r2/(mysumW*pow(mean, 2));        // R_2 = <n(n-1)>/<n>^2
        R3 = r3/(mysumW*pow(mean, 3));        // R_3 = <n(n-1)(n-2)>/<n>^3
        K3 = R3 - 3*R2 + 2;                   // K_3 = R_3 - 3 R_2 + 2
      }
    }

    ///@}


  private:

    /// @name Histograms
    ///@{

    map<string,HistoGroupPtr<double,int>> _g;  // multiplicity distributions, grouped in W bins
    map<string,Estimate1DPtr> _e;              // mean and moment estimates per pseudorapidity domain
    CounterPtr _Nevt_after_cuts[4];            // event counts after cuts, per W bin

    ///@}

  };


  RIVET_DECLARE_PLUGIN(H1_1996_I422230);

}
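
As a minimal standalone illustration of the moment definitions used in _histo_to_moments, the following sketch computes the same quantities from a made-up toy multiplicity spectrum. It is plain C++ with no Rivet dependence and is not part of the analysis above; the spectrum values are purely illustrative.

// Toy illustration of the multiplicity moments (hypothetical input spectrum).
#include <cmath>
#include <cstdio>
#include <vector>
#include <utility>

int main() {
  // Toy multiplicity spectrum: pairs of (multiplicity n, weight/probability)
  const std::vector<std::pair<int,double>> spectrum = {
    {0, 0.05}, {1, 0.15}, {2, 0.25}, {3, 0.25}, {4, 0.15}, {5, 0.10}, {6, 0.05}
  };

  // Weighted mean <n>
  double sumW = 0., sumWX = 0.;
  for (const auto& b : spectrum) { sumW += b.second; sumWX += b.first*b.second; }
  const double mean = sumWX/sumW;

  // Moments of order q = 2, as in _histo_to_moments with iq = 2
  const double q = 2.;
  double var = 0., c = 0., r2 = 0., r3 = 0.;
  for (const auto& b : spectrum) {
    const double n = b.first, w = b.second;
    var += w*std::pow(n - mean, q);       // central moment of order q
    c   += w*std::pow(n, q);              // raw moment of order q
    r2  += w*n*(n - 1);                   // <n(n-1)>
    r3  += w*n*(n - 1)*(n - 2);           // <n(n-1)(n-2)>
  }
  var /= sumW;
  const double D2 = std::pow(var, 1./q);          // generalised dispersion D_2
  const double C2 = c/(sumW*std::pow(mean, q));   // C_2 = <n^2>/<n>^2
  const double R2 = r2/(sumW*mean*mean);          // R_2
  const double R3 = r3/(sumW*mean*mean*mean);     // R_3
  const double K3 = R3 - 3*R2 + 2;                // Mueller moment K_3

  std::printf("<n> = %.3f  D2 = %.3f  C2 = %.3f  R2 = %.3f  R3 = %.3f  K3 = %.3f\n",
              mean, D2, C2, R2, R3, K3);
  return 0;
}

Compiled with any C++11 compiler, this prints the mean, D2, C2, R2, R3 and K3 for the toy spectrum; the analysis performs the same computation per W bin on the booked multiplicity histograms.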