Rivet is hosted by Hepforge, IPPP Durham

Rivet analyses reference

OPAL_2000_I513476

Event Shapes at 172, 183 and 189 GeV
Experiment: OPAL (LEP)
Inspire ID: 513476
Status: VALIDATED
Authors:
  • Peter Richardson
References:
  • Eur.Phys.J. C16 (2000) 185-210
Beams: e- e+
Beam energies: (86.0, 86.0); (91.5, 91.5); (94.5, 94.5) GeV
Run details:
  • e+e- to hadrons.

Event shapes at 172, 183 and 189 GeV.

Source code: OPAL_2000_I513476.cc
  1// -*- C++ -*-
  2#include "Rivet/Analysis.hh"
  3#include "Rivet/Projections/Beam.hh"
  4#include "Rivet/Projections/FinalState.hh"
  5#include "Rivet/Projections/ChargedFinalState.hh"
  6#include "Rivet/Projections/Sphericity.hh"
  7#include "Rivet/Projections/Thrust.hh"
  8#include "Rivet/Projections/FastJets.hh"
  9#include "Rivet/Projections/ParisiTensor.hh"
 10#include "Rivet/Projections/Hemispheres.hh"
 11
 12namespace Rivet {
 13
 14
 15  /// @brief  event shapes at 172, 183, 189
 16  class OPAL_2000_I513476 : public Analysis {
 17  public:
 18
 19    /// Constructor
 20    RIVET_DEFAULT_ANALYSIS_CTOR(OPAL_2000_I513476);
 21
 22
 23    /// @name Analysis methods
 24    /// @{
 25
 26    /// Book histograms and initialise projections before the run
 27    void init() {
 28
 29      // Initialise and register projections
 30      // Projections
 31      const FinalState fs;
 32      declare(Beam(), "Beams");
 33      const ChargedFinalState cfs;
 34      declare(cfs, "CFS");
 35      declare(FastJets(fs, JetAlg::DURHAM, 0.7), "DurhamJets");
 36      declare(Sphericity(fs), "Sphericity");
 37      declare(ParisiTensor(fs), "Parisi");
 38      const Thrust thrust(fs);
 39      declare(thrust, "Thrust");
 40      declare(Hemispheres(thrust), "Hemispheres");
 41
 42      // Book histograms
 43      size_t ih = 1;
 44      for (double eVal : allowedEnergies()) {
 45
 46        const string en = toString(int(eVal/MeV));
 47        if (isCompatibleWithSqrtS(eVal))  _sqs = en;
 48
 49        book(_h[en+"thrust"],      1,1,ih);
 50        book(_h[en+"major"],       2,1,ih);
 51        book(_h[en+"minor"],       3,1,ih);
 52        book(_h[en+"aplanarity"],  4,1,ih);
 53        book(_h[en+"oblateness"],  5,1,ih);
 54        book(_h[en+"C"],           6,1,ih);
 55        book(_h[en+"rhoH"],        7,1,ih);
 56        book(_h[en+"sphericity"],  8,1,ih);
 57        book(_h[en+"totalB"],      9,1,ih);
 58        book(_h[en+"wideB"],      10,1,ih);
 59        book(_h[en+"y23"],        11,1,ih);
 60        book(_mult[en],           13,1,ih);
 61        book(_h[en+"pTin"],       15,1,ih);
 62        book(_h[en+"pTout"],      16,1,ih);
 63        book(_h[en+"y"],          17,1,ih);
 64        book(_h[en+"x"],          18,1,ih);
 65        book(_h[en+"xi"],         19,1,ih);
 66        ++ih;
 67      }
 68      if (_sqs == "" && !merging()) {
 69        throw BeamError("Invalid beam energy for " + name() + "\n");
 70      }
 71    }
 72
 73
 74    /// Perform the per-event analysis
 75    void analyze(const Event& event) {
 76      // Even if we only generate hadronic events, we still need a cut on numCharged >= 2.
 77      const FinalState& cfs = apply<FinalState>(event, "CFS");
 78      if (cfs.size() < 2) vetoEvent;
 79
 80      // Get beams and average beam momentum
 81      const ParticlePair& beams = apply<Beam>(event, "Beams").beams();
 82      const double meanBeamMom = 0.5*(beams.first.p3().mod() + beams.second.p3().mod());
 83
 84      // Thrust related
 85      const Thrust& thrust = apply<Thrust>(event, "Thrust");
 86      _h[_sqs+"thrust"]    ->fill(thrust.thrust()     );
 87      _h[_sqs+"major"]     ->fill(thrust.thrustMajor());
 88      _h[_sqs+"minor"]     ->fill(thrust.thrustMinor());
 89      _h[_sqs+"oblateness"]->fill(thrust.oblateness() );
 90
 91      // Sphericity related
 92      const Sphericity& sphericity = apply<Sphericity>(event, "Sphericity");
 93      _h[_sqs+"sphericity"]->fill(sphericity.sphericity());
 94      _h[_sqs+"aplanarity"]->fill(sphericity.aplanarity());
 95
 96      // C parameter
 97      const ParisiTensor& parisi = apply<ParisiTensor>(event, "Parisi");
 98      _h[_sqs+"C"]->fill(parisi.C());
 99
100      // Hemispheres
101      const Hemispheres& hemi = apply<Hemispheres>(event, "Hemispheres");
102
103      _h[_sqs+"rhoH"]  ->fill(hemi.scaledMhigh());
104      _h[_sqs+"wideB"] ->fill(hemi.Bmax());
105      _h[_sqs+"totalB"]->fill(hemi.Bsum());
106
107      // Jets
108      const FastJets& durjet = apply<FastJets>(event, "DurhamJets");
109      const double y23 = durjet.clusterSeq()->exclusive_ymerge_max(2);
110      _h[_sqs+"y23"]->fill(y23);
111
112      // charged particles
113      _mult[_sqs]->fill(cfs.particles().size());
114      for (const Particle& p : cfs.particles()) {
115        const Vector3 mom3  = p.p3();
116        const double energy = p.E();
117        const double pTinT  = dot(mom3, thrust.thrustMajorAxis());
118        const double pToutT = dot(mom3, thrust.thrustMinorAxis());
119        _h[_sqs+"pTin"] ->fill(fabs(pTinT/GeV) );
120        _h[_sqs+"pTout"]->fill(fabs(pToutT/GeV));
121        const double momT = dot(thrust.thrustAxis(), mom3);
122        const double rapidityT = 0.5 * std::log((energy + momT) / (energy - momT));
123        _h[_sqs+"y"]->fill(fabs(rapidityT));
124        const double mom = mom3.mod();
125        const double scaledMom = mom/meanBeamMom;
126        const double logInvScaledMom = -std::log(scaledMom);
127        _h[_sqs+"xi"]->fill(logInvScaledMom);
128        _h[_sqs+"x"] ->fill(scaledMom);
129      }
130    }
131
132
133    /// Normalise histograms etc., after the run
134    void finalize() {
135      // mean multiplicity
136      BinnedEstimatePtr<int> m_ch;
137      book(m_ch,14,1,1);
138      // mean ptIn
139      BinnedEstimatePtr<int> m_pTin;
140      book(m_pTin,20,1,1);
141      // mean ptOut
142      BinnedEstimatePtr<int> m_pTout;
143      book(m_pTout,20,1,2);
144      // mean y
145      BinnedEstimatePtr<int> m_y;
146      book(m_y,20,1,3);
147      // mean x
148      BinnedEstimatePtr<int> m_x;
149      book(m_x,20,1,4);
150
151      // scale histos + fill averages
152      for (double eVal : allowedEnergies()) {
153        const string en = toString(int(eVal/MeV));
154
155        const double sumw = _mult[en]->sumW();
156        if (sumw == 0)  continue;
157        scale(_mult[en], 100./sumw);
158
159        for (auto& item : _h) {
160          if (item.first.substr(0,6) != en)  continue;
161          scale(item.second, 1./sumw);
162        }
163
164        for (size_t ih=1; ih <= m_ch->numBins(); ++ih) {
165          if (m_ch->bin(ih).xEdge() != int(eVal))  continue;
166
167          const double nch     = _mult[en]->xMean();
168          const double nch_err = _mult[en]->xStdErr();
169          m_ch->bin(ih).set(nch, nch_err);
170
171          double pTin     = _h[en+"pTin"]->xMean();
172          double pTin_err = _h[en+"pTin"]->xStdErr();
173          m_pTin->bin(ih).set(pTin, pTin_err);
174
175          double pTout     = _h[en+"pTout"]->xMean();
176          double pTout_err = _h[en+"pTout"]->xStdErr();
177          m_pTout->bin(ih).set(pTout, pTout_err);
178
179          double y     = _h[en+"y"]->xMean();
180          double y_err = _h[en+"y"]->xStdErr();
181          m_y->bin(ih).set(y, y_err);
182
183          double x     = _h[en+"x"]->xMean();
184          double x_err = _h[en+"x"]->xStdErr();
185          m_x->bin(ih).set(x, x_err);
186        }
187      }
188    }
189
190    /// @}
191
192
193    /// @name Histograms
194    /// @{
195    map<string,Histo1DPtr> _h;
196
197    map<string,BinnedHistoPtr<int>> _mult;
198
199    string _sqs = "";
200    /// @}
201
202
203  };
204
205
206  RIVET_DECLARE_PLUGIN(OPAL_2000_I513476);
207
208
209}