Rivet is hosted by Hepforge, IPPP Durham

Rivet analyses reference

UA5_1988_S1867512

Charged particle correlations in UA5 $p\bar{p}$ NSD events at $\sqrt{s} = 200$, 546 and 900\;GeV
Experiment: UA5 (CERN SPS)
Inspire ID: 263399
Status: VALIDATED
Authors:
  • Holger Schulz
References:
  • Z.Phys.C37:191-213,1988
Beams: p- p+
Beam energies: (100.0, 100.0); (273.0, 273.0); (450.0, 450.0) GeV
Run details:
  • ppbar events. Non-single diffractive events need to be switched on. The trigger implementation is the same as in UA5_1989_S1926373. Important: Only the correlation strengths with symmetric eta bins should be used for tuning.

Data on two-particle pseudorapidity and multiplicity correlations of charged particles for non-single-diffractive $p\bar{p}$ collisions at c.m. energies of 200, 546 and 900 GeV. Pseudorapidity correlations are interpreted in terms of a cluster model, which has been motivated by this and other experiments; the data require on average about two charged particles per cluster. The decay width of the clusters in pseudorapidity is approximately independent of multiplicity and of c.m. energy. The investigations of correlations in terms of pseudorapidity gaps confirm the picture of cluster production. The strength of forward--backward multiplicity correlations increases linearly with $\ln s$ and depends strongly on position and size of the pseudorapidity gap separating the forward and backward intervals. All our correlation studies can be understood in terms of a cluster model in which clusters contain on average about two charged particles, i.e. are of similar magnitude to earlier estimates from the ISR.

Source code: UA5_1988_S1867512.cc
  1
  2
  3
  4
  5
  6
  7
  8
  9
 10
 11
 12
 13
 14
 15
 16
 17
 18
 19
 20
 21
 22
 23
 24
 25
 26
 27
 28
 29
 30
 31
 32
 33
 34
 35
 36
 37
 38
 39
 40
 41
 42
 43
 44
 45
 46
 47
 48
 49
 50
 51
 52
 53
 54
 55
 56
 57
 58
 59
 60
 61
 62
 63
 64
 65
 66
 67
 68
 69
 70
 71
 72
 73
 74
 75
 76
 77
 78
 79
 80
 81
 82
 83
 84
 85
 86
 87
 88
 89
 90
 91
 92
 93
 94
 95
 96
 97
 98
 99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
// -*- C++ -*-
#include "Rivet/Analysis.hh"
#include "Rivet/Projections/ChargedFinalState.hh"
#include "Rivet/Projections/Beam.hh"
#include "Rivet/Projections/TriggerUA5.hh"

namespace Rivet {


  namespace {
    /// @brief Build one scatter point for a forward--backward correlation plot
    ///
    /// The y-value is the correlation strength of the two multiplicity samples
    /// @a nf and @a nb; its error is the propagated correlation uncertainty
    /// scaled by 1/sqrt(sum of passed event weights).
    Point2D correlation_helper(double x, double xerr, const vector<int> & nf, const vector<int> & nb, double sumWPassed) {
      const double strength = correlation(nf, nb);
      const double strengthErr = correlation_err(nf, nb) / sqrt(sumWPassed);
      return Point2D(x, strength, xerr, strengthErr);
    }
  }


  /// @brief UA5 charged particle correlations at 200, 546 and 900 GeV
  class UA5_1988_S1867512 : public Analysis {
  public:

    UA5_1988_S1867512()
      : Analysis("UA5_1988_S1867512"), _sumWPassed(0)
    {    }


    /// @name Analysis methods
    //@{

    void init() {
      // Projections
      declare(TriggerUA5(), "Trigger");

      // Symmetric eta interval
      declare(ChargedFinalState(-0.5, 0.5), "CFS05");

      // Asymmetric intervals first
      // Forward eta intervals
      declare(ChargedFinalState(0.0, 1.0), "CFS10F");
      declare(ChargedFinalState(0.5, 1.5), "CFS15F");
      declare(ChargedFinalState(1.0, 2.0), "CFS20F");
      declare(ChargedFinalState(1.5, 2.5), "CFS25F");
      declare(ChargedFinalState(2.0, 3.0), "CFS30F");
      declare(ChargedFinalState(2.5, 3.5), "CFS35F");
      declare(ChargedFinalState(3.0, 4.0), "CFS40F");

      // Backward eta intervals
      declare(ChargedFinalState(-1.0,  0.0), "CFS10B");
      declare(ChargedFinalState(-1.5, -0.5), "CFS15B");
      declare(ChargedFinalState(-2.0, -1.0), "CFS20B");
      declare(ChargedFinalState(-2.5, -1.5), "CFS25B");
      declare(ChargedFinalState(-3.0, -2.0), "CFS30B");
      declare(ChargedFinalState(-3.5, -2.5), "CFS35B");
      declare(ChargedFinalState(-4.0, -3.0), "CFS40B");

      // Histogram booking, we have sqrt(s) = 200, 546 and 900 GeV
      // TODO use Scatter2D to be able to output errors
      if (fuzzyEquals(sqrtS()/GeV, 200.0, 1E-4)) {
        _hist_correl = bookScatter2D(2, 1, 1);
        _hist_correl_asym = bookScatter2D(3, 1, 1);
      } else if (fuzzyEquals(sqrtS()/GeV, 546.0, 1E-4)) {
        _hist_correl = bookScatter2D(2, 1, 2);
        _hist_correl_asym = bookScatter2D(3, 1, 2);
      } else if (fuzzyEquals(sqrtS()/GeV, 900.0, 1E-4)) {
        _hist_correl = bookScatter2D(2, 1, 3);
        _hist_correl_asym = bookScatter2D(3, 1, 3);
      }
    }


    void analyze(const Event& event) {
      // Trigger
      const bool trigger = apply<TriggerUA5>(event, "Trigger").nsdDecision();
      if (!trigger) vetoEvent;
      _sumWPassed += event.weight();

      // Count forward/backward particles
      n_10f.push_back(apply<ChargedFinalState>(event, "CFS10F").size());
      n_15f.push_back(apply<ChargedFinalState>(event, "CFS15F").size());
      n_20f.push_back(apply<ChargedFinalState>(event, "CFS20F").size());
      n_25f.push_back(apply<ChargedFinalState>(event, "CFS25F").size());
      n_30f.push_back(apply<ChargedFinalState>(event, "CFS30F").size());
      n_35f.push_back(apply<ChargedFinalState>(event, "CFS35F").size());
      n_40f.push_back(apply<ChargedFinalState>(event, "CFS40F").size());
      //
      n_10b.push_back(apply<ChargedFinalState>(event, "CFS10B").size());
      n_15b.push_back(apply<ChargedFinalState>(event, "CFS15B").size());
      n_20b.push_back(apply<ChargedFinalState>(event, "CFS20B").size());
      n_25b.push_back(apply<ChargedFinalState>(event, "CFS25B").size());
      n_30b.push_back(apply<ChargedFinalState>(event, "CFS30B").size());
      n_35b.push_back(apply<ChargedFinalState>(event, "CFS35B").size());
      n_40b.push_back(apply<ChargedFinalState>(event, "CFS40B").size());
      //
      n_05 .push_back(apply<ChargedFinalState>(event, "CFS05").size());
    }


    void finalize() {
      // The correlation strength is defined in formulas
      // 4.1 and 4.2

      // Fill histos, gap width histo comes first
      //      * Set the errors as Delta b / sqrt(sumWPassed) with
      //      Delta b being the absolute uncertainty of b according to
      //      Gaussian error-propagation (linear limit) and assuming
      //      Poissonian uncertainties for the number of particles in
      //      the eta-intervals
      //

      // Define vectors to be able to fill Scatter2Ds
      vector<Point2D> points;
      // Fill the y-value vector
      points.push_back(correlation_helper(0, 0.5, n_10f, n_10b, _sumWPassed));
      points.push_back(correlation_helper(1, 0.5, n_15f, n_15b, _sumWPassed));
      points.push_back(correlation_helper(2, 0.5, n_20f, n_20b, _sumWPassed));
      points.push_back(correlation_helper(3, 0.5, n_25f, n_25b, _sumWPassed));
      points.push_back(correlation_helper(4, 0.5, n_30f, n_30b, _sumWPassed));
      points.push_back(correlation_helper(5, 0.5, n_35f, n_35b, _sumWPassed));
      points.push_back(correlation_helper(6, 0.5, n_40f, n_40b, _sumWPassed));

      // Fill the DPS
      _hist_correl->addPoints(points);

      // Fill gap-center histo (Fig 15)
      //
      // The first bin contains the c_str strengths of
      // the gap size histo that has ane eta gap of two
      //
      // Now do the other histo -- clear already defined vectors first
      points.clear();

      points.push_back(correlation_helper(0,   0.25, n_20f, n_20b, _sumWPassed));
      points.push_back(correlation_helper(0.5, 0.25, n_25f, n_15b, _sumWPassed));
      points.push_back(correlation_helper(1,   0.25, n_30f, n_10b, _sumWPassed));
      points.push_back(correlation_helper(1.5, 0.25, n_35f, n_05 , _sumWPassed));
      points.push_back(correlation_helper(2,   0.25, n_40f, n_10f, _sumWPassed));

      // Fill in correlation strength for assymetric intervals,
      // see Tab. 5
      // Fill the DPS
      _hist_correl_asym->addPoints(points);
    }

    //@}


  private:

    /// @name Counters
    //@{
    double _sumWPassed;
    //@}


    /// @name Vectors for storing the number of particles in the different eta intervals per event.
    /// @todo Is there a better way?
    //@{
    std::vector<int> n_10f;
    std::vector<int> n_15f;
    std::vector<int> n_20f;
    std::vector<int> n_25f;
    std::vector<int> n_30f;
    std::vector<int> n_35f;
    std::vector<int> n_40f;
    //
    std::vector<int> n_10b;
    std::vector<int> n_15b;
    std::vector<int> n_20b;
    std::vector<int> n_25b;
    std::vector<int> n_30b;
    std::vector<int> n_35b;
    std::vector<int> n_40b;
    //
    std::vector<int> n_05;
    //@}


    /// @name Histograms
    //@{
    // Symmetric eta intervals
    Scatter2DPtr _hist_correl;
    // For asymmetric eta intervals
    Scatter2DPtr _hist_correl_asym;
    //@}

  };



  // The hook for the plugin system
  DECLARE_RIVET_PLUGIN(UA5_1988_S1867512);

}