
Rivet analyses reference

ALICE_2012_I1127497

Centrality dependence of charged particle production at large transverse momentum in Pb-Pb collisions at $\sqrt{s_{\mathrm{NN}}} = 2.76$ TeV
Experiment: ALICE (LHC)
Inspire ID: 1127497
Status: VALIDATED
Authors:
  • Przemyslaw Karczmarczyk
  • Jan Fiete Grosse-Oetringhaus
  • Jochen Klein
References:
Beams: p+ p+, 1000822080 1000822080
Beam energies: (1380.0, 1380.0); (287040.0, 287040.0) GeV
    No run details listed

The inclusive transverse momentum ($p_T$) distributions of primary charged particles are measured in the pseudo-rapidity range $|\eta| < 0.8$ as a function of event centrality in Pb-Pb collisions at $\sqrt{s_{\mathrm{NN}}} = 2.76$ TeV with ALICE at the LHC. The data are presented in the range $0.15 < p_T < 50$ GeV/$c$ for nine centrality intervals from 70-80% to 0-5%. The Pb-Pb spectra are presented in terms of the nuclear modification factor $R_{AA}$, using a pp reference spectrum measured at the same collision energy. We observe that the suppression of high-$p_T$ particles strongly depends on event centrality. In central collisions (0-5%) the yield is most suppressed, with $R_{AA} \approx 0.13$ at $p_T = 6$-$7$ GeV/$c$. Above $p_T = 7$ GeV/$c$ there is a significant rise in the nuclear modification factor, which reaches $R_{AA} \approx 0.4$ for $p_T > 30$ GeV/$c$. In peripheral collisions (70-80%) the suppression is weaker, with $R_{AA} \approx 0.7$, almost independently of $p_T$. The measured nuclear modification factors are compared to other measurements and model calculations.
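
For orientation (this spells out the standard definition rather than quoting the record): the nuclear modification factor compares the per-event charged-particle yield in Pb-Pb collisions to the binary-collision-scaled yield in pp,
$R_{AA}(p_T) = \frac{(1/N_{\mathrm{evt}}^{\mathrm{AA}})\,\mathrm{d}^2N_{\mathrm{ch}}^{\mathrm{AA}}/\mathrm{d}p_T\mathrm{d}\eta}{\langle N_{\mathrm{coll}}\rangle\,(1/N_{\mathrm{evt}}^{\mathrm{pp}})\,\mathrm{d}^2N_{\mathrm{ch}}^{\mathrm{pp}}/\mathrm{d}p_T\mathrm{d}\eta},$
where $\langle N_{\mathrm{coll}}\rangle$ is the average number of binary nucleon-nucleon collisions in the given centrality class; $R_{AA} = 1$ corresponds to the absence of nuclear effects.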

Source code: ALICE_2012_I1127497.cc
// -*- C++ -*-
#include "Rivet/Analysis.hh"
#include "Rivet/Projections/Beam.hh"
#include "Rivet/Projections/ChargedFinalState.hh"
#include "Rivet/Tools/Cuts.hh"
#include "Rivet/Projections/SingleValueProjection.hh"
#include "Rivet/Analyses/AliceCommon.hh"
#include "Rivet/Projections/HepMCHeavyIon.hh"

namespace Rivet {

  /// @brief ALICE PbPb at 2.76 TeV R_AA analysis.
  class ALICE_2012_I1127497 : public Analysis {

  public:

    /// Constructor
    RIVET_DEFAULT_ANALYSIS_CTOR(ALICE_2012_I1127497);

    /// @name Analysis methods
    /// @{

    /// Book histograms and initialise projections before the run
    void init() {

      // Access the HepMC heavy ion info
      declare(HepMCHeavyIon(), "HepMC");

      // Declare centrality projection
      declareCentrality(ALICE::V0MMultiplicity(),
        "ALICE_2015_PBPBCentrality", "V0M", "V0M");

      // Charged, primary particles with |eta| < 0.5 and pT > 150 MeV
      declare(ALICE::PrimaryParticles(Cuts::abseta < 0.5 &&
        Cuts::pT > 150*MeV && Cuts::abscharge > 0), "APRIM");

      // Loop over all histograms
      for (size_t ihist = 0; ihist < NHISTOS; ++ihist) {

        // Initialize PbPb objects
        book(_histNch[PBPB][ihist], ihist+1, 1, 1);

        std::string nameCounterPbPb = "counter.pbpb." + std::to_string(ihist);
        book(_counterSOW[PBPB][ihist], nameCounterPbPb); // Sum-of-weights counter for PbPb

        std::string nameCounterNcoll = "counter.ncoll." + std::to_string(ihist);
        book(_counterNcoll[ihist], nameCounterNcoll); // Ncoll counter for PbPb

        // Initialize pp objects. In principle, only one pp histogram would be
        // needed, since centrality makes no difference for pp. However, in some
        // cases the binnings differ between histograms, so this is an
        // easy-to-implement way to account for that.
        std::string namePP = mkAxisCode(ihist+1, 1, 1) + "-pp";

        // The binning is taken from the reference data
        book(_histNch[PP][ihist], namePP, refData(ihist+1, 1, 1));

        std::string nameCounterpp = "counter.pp." + std::to_string(ihist);
        book(_counterSOW[PP][ihist], nameCounterpp); // Sum-of-weights counter for pp

        // Book ratios, to be used in finalize
        book(_histRAA[ihist], ihist+16, 1, 1);
      }

      // Centrality regions, keeping the boundaries of each region.
      // Note that some regions overlap with others.
      _centrRegions.clear();
      _centrRegions = {{0., 5.},   {5., 10.},  {10., 20.},
                       {20., 30.}, {30., 40.}, {40., 50.},
                       {50., 60.}, {60., 70.}, {70., 80.},
                       {0., 10.},  {0., 20.},  {20., 40.},
                       {40., 60.}, {40., 80.}, {60., 80.}};
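      // The index of each region above matches the histogram index booked in
      // the loop: the first nine entries are the narrow centrality classes
      // quoted in the paper abstract (0-5% up to 70-80%), and the remaining
      // six are broader combinations for which reference data are also booked.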
 73
 74      // Find out the beam type, also specified from option.
 75      string beamOpt = getOption<string>("beam","NONE");
 76      if (beamOpt != "NONE") {
 77        MSG_WARNING("You are using a specified beam type, instead of using what"
 78	"is provided by the generator. "
 79	"Only do this if you are completely sure what you are doing.");
 80	if (beamOpt=="PP") isHI = false;
 81	else if (beamOpt=="HI") isHI = true;
 82	else {
 83	  MSG_ERROR("Beam error (option)!");
 84	  return;
 85      	}
 86      }
 87      else {
 88        const ParticlePair& beam = beams();
 89        if (beam.first.pid() == PID::PROTON && beam.second.pid() == PID::PROTON) isHI = false;
 90	else if (beam.first.pid() == PID::LEAD && beam.second.pid() == PID::LEAD)
 91	  isHI = true;
 92	else {
 93	  MSG_ERROR("Beam error (found)!");
 94	  return;
 95	}
 96      }
 97    }

    /// Perform the per-event analysis
    void analyze(const Event& event) {

      // Charged, primary particles with at least pT = 150 MeV
      // in eta range of |eta| < 0.5
      Particles chargedParticles =
        apply<ALICE::PrimaryParticles>(event, "APRIM").particlesByPt();

      // Check type of event.
      if ( isHI ) {

        const HepMCHeavyIon& hi = apply<HepMCHeavyIon>(event, "HepMC");
        if (!hi.ok()) {
          MSG_WARNING("HepMC heavy-ion container needed for this analysis, but not "
                      "found for this event. Skipping.");
          vetoEvent;
        }
        // Prepare centrality projection and value
        const CentralityProjection& centrProj =
          apply<CentralityProjection>(event, "V0M");
        double centr = centrProj();
        // Veto events with too large centralities, since those are not used
        // in the analysis at all
        if ((centr < 0.) || (centr > 80.)) vetoEvent;

        // Fill PbPb histograms and add weights based on centrality value
        for (size_t ihist = 0; ihist < NHISTOS; ++ihist) {
          if (inRange(centr, _centrRegions[ihist].first, _centrRegions[ihist].second)) {
            _counterSOW[PBPB][ihist]->fill();
            _counterNcoll[ihist]->fill(hi.Ncoll());
            for (const Particle& p : chargedParticles) {
              double pT = p.pT()/GeV;
              if (pT < 50.) {
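                // The 1/pT factor of the spectrum is applied already at fill
                // time, evaluated at the bin centre so that every entry in a
                // given bin carries the same weight; the 1/(2*pi*N_evt)
                // normalisation is applied later in finalize().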
                const double pTAtBinCenter = _histNch[PBPB][ihist]->binAt(pT).xMid();
                _histNch[PBPB][ihist]->fill(pT, 1/pTAtBinCenter);
              }
            }
          }
        }

      }
      else {

        // Fill all pp histograms and add weights
        for (size_t ihist = 0; ihist < NHISTOS; ++ihist) {
          _counterSOW[PP][ihist]->fill();
          for (const Particle& p : chargedParticles) {
            double pT = p.pT()/GeV;
            if (pT < 50.) {
              const double pTAtBinCenter = _histNch[PP][ihist]->binAt(pT).xMid();
              _histNch[PP][ihist]->fill(pT, 1/pTAtBinCenter);
            }
          }
        }

      }

    }

    /// Normalise histograms etc., after the run
    void finalize() {

      // Scale the histograms by their individual sums of event weights.
      for (size_t itype = 0; itype < EVENT_TYPES; ++itype) {
        for (size_t ihist = 0; ihist < NHISTOS; ++ihist) {
          if (_counterSOW[itype][ihist]->sumW() > 0.) {
            scale(_histNch[itype][ihist],
              (1. / _counterSOW[itype][ihist]->sumW() / 2. / M_PI));
          }
        }
      }

      // Postprocessing of the histograms
      for (size_t ihist = 0; ihist < NHISTOS; ++ihist) {
        // If there are entries in the histograms for both beam types
        if (_histNch[PP][ihist]->numEntries() > 0 && _histNch[PBPB][ihist]->numEntries() > 0) {
          // Initialize and fill R_AA histograms
          divide(_histNch[PBPB][ihist], _histNch[PP][ihist], _histRAA[ihist]);
          // Scale by Ncoll. Unfortunately some generators do not provide the
          // Ncoll value (e.g. JEWEL), so the following scaling is only done
          // if there are entries in the counters
          double ncoll = _counterNcoll[ihist]->sumW();
          double sow = _counterSOW[PBPB][ihist]->sumW();
          if (ncoll > 1e-6 && sow > 1e-6)
            _histRAA[ihist]->scale(1. / (ncoll / sow));

        }
      }

    }

    /// @}

  private:

    bool isHI;
    static const int NHISTOS = 15;
    static const int EVENT_TYPES = 2;
    static const int PP = 0;
    static const int PBPB = 1;

    /// @name Histograms
    /// @{
    Histo1DPtr _histNch[EVENT_TYPES][NHISTOS];
    CounterPtr _counterSOW[EVENT_TYPES][NHISTOS];
    CounterPtr _counterNcoll[NHISTOS];
    Estimate1DPtr _histRAA[NHISTOS];
    /// @}

    std::vector<std::pair<double, double>> _centrRegions;

  };

  RIVET_DECLARE_PLUGIN(ALICE_2012_I1127497);

}
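
For reference, the $R_{AA}$ construction in finalize() above amounts to the following bookkeeping (a sketch of what the code does, not text from the record): the two counters booked per centrality class give the event-weighted average number of binary collisions,
$\langle N_{\mathrm{coll}}\rangle = \frac{\sum_i w_i\, N_{\mathrm{coll},i}}{\sum_i w_i},$
and the PbPb/pp ratio produced by divide() is scaled by $1/\langle N_{\mathrm{coll}}\rangle$ to give $R_{AA}$. Since both samples are needed, the pp reference and the Pb-Pb events are typically generated in separate runs, with the beam option (beam=PP or beam=HI) available to force the event-type decision if the generator's beam information is unreliable; the resulting outputs are then combined (e.g. with rivet-merge) before the ratio is formed.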