Rivet is hosted by Hepforge, IPPP Durham

Rivet analyses reference

ALICE_2012_I1127497

Centrality dependence of charged particle production at large transverse momentum in Pb-Pb collisions at $\sqrt{s_{\rm{NN}}} = 2.76$ TeV
Experiment: ALICE (LHC)
Inspire ID: 1127497
Status: VALIDATED
Authors:
  • Przemyslaw Karczmarczyk
  • Jan Fiete Grosse-Oetringhaus
  • Jochen Klein
References:

Beams: p+ p+, 1000822080 1000822080
Beam energies: (1380.0, 1380.0); (287040.0, 287040.0) GeV
    No run details listed

The inclusive transverse momentum ($p_T$) distributions of primary charged particles are measured in the pseudo-rapidity range $|\eta| < 0.8$ as a function of event centrality in Pb--Pb collisions at $\sqrt{s_{\rm NN}} = 2.76$ TeV with ALICE at the LHC. The data are presented in the $p_T$ range $0.15<p_T<50$ GeV/c for nine centrality intervals from 70-80% to 0-5%. The Pb-Pb spectra are presented in terms of the nuclear modification factor $R_{AA}$ using a pp reference spectrum measured at the same collision energy. We observe that the suppression of high-$p_T$ particles strongly depends on event centrality. In central collisions (0-5%) the yield is most suppressed with $R_{AA} \approx 0.13$ at $p_T = 6$-$7$ GeV/c. Above $p_T = 7$ GeV/c, there is a significant rise in the nuclear modification factor, which reaches $R_{AA} \approx 0.4$ for $p_T > 30$ GeV/c. In peripheral collisions (70-80%), the suppression is weaker with $R_{AA} \approx 0.7$ almost independently of $p_T$. The measured nuclear modification factors are compared to other measurements and model calculations.

Source code: ALICE_2012_I1127497.cc
  1// -*- C++ -*-
  2#include "Rivet/Analysis.hh"
  3#include "Rivet/Projections/Beam.hh"
  4#include "Rivet/Projections/ChargedFinalState.hh"
  5#include "Rivet/Tools/Cuts.hh"
  6#include "Rivet/Projections/SingleValueProjection.hh"
  7#include "Rivet/Tools/AliceCommon.hh"
  8#include "Rivet/Projections/AliceCommon.hh"
  9#include "Rivet/Projections/HepMCHeavyIon.hh"
 10
 11namespace Rivet {
 12
 13  /// @brief ALICE PbPb at 2.76 TeV R_AA analysis.
 14  class ALICE_2012_I1127497 : public Analysis {
 15
 16  public:
 17
 18    /// Constructor
 19    RIVET_DEFAULT_ANALYSIS_CTOR(ALICE_2012_I1127497);
 20
 21    /// @name Analysis methods
 22    //@{
 23
 24    /// Book histograms and initialise projections before the run
 25    void init() {
 26
 27      // Access the HepMC heavy ion info
 28      declare(HepMCHeavyIon(), "HepMC");
 29
 30      // Declare centrality projection
 31      declareCentrality(ALICE::V0MMultiplicity(),
 32        "ALICE_2015_PBPBCentrality", "V0M", "V0M");
 33
 34      // Charged, primary particles with |eta| < 0.5 and pT > 150 MeV
 35      declare(ALICE::PrimaryParticles(Cuts::abseta < 0.5 &&
 36        Cuts::pT > 150*MeV && Cuts::abscharge > 0), "APRIM");
 37
 38      // Loop over all histograms
 39      for (size_t ihist = 0; ihist < NHISTOS; ++ihist) {
 40
 41        // Initialize PbPb objects
 42        book(_histNch[PBPB][ihist], ihist+1, 1, 1);
 43
 44        std::string nameCounterPbPb = "counter.pbpb." + std::to_string(ihist);
 45        book(_counterSOW[PBPB][ihist], nameCounterPbPb); // Sum of weights counter for PbPb
 46
 47        std::string nameCounterNcoll = "counter.ncoll." + std::to_string(ihist);
 48        book(_counterNcoll[ihist], nameCounterNcoll); // Ncoll counter for PbPb
 49
 50        // Initialize pp objects. In principle, only one pp histogram would be
 51        // needed since centrality does not make any difference here. However,
 52        // in some cases in this analysis the binning differ from each other,
 53        // so this is easy-to-implement way to account for that.
 54        std::string namePP = mkAxisCode(ihist+1,1,1) + "-pp";
 55        
 56        // The binning is taken from the reference data
 57        book(_histNch[PP][ihist], namePP, refData(ihist+1, 1, 1));
 58
 59        std::string nameCounterpp = "counter.pp." + std::to_string(ihist);
 60        book(_counterSOW[PP][ihist], nameCounterpp); // Sum of weights counter for pp
 61
 62        // Book ratios, to be used in finalize
 63        book(_histRAA[ihist], ihist+16, 1, 1);
 64      }
 65
 66      // Centrality regions keeping boundaries for a certain region.
 67      // Note, that some regions overlap with other regions.
 68      _centrRegions.clear();
 69      _centrRegions = {{0., 5.},   {5., 10.},  {10., 20.},
 70                       {20., 30.}, {30., 40.}, {40., 50.},
 71                       {50., 60.}, {60., 70.}, {70., 80.},
 72                       {0., 10.},  {0., 20.},  {20., 40.},
 73                       {40., 60.}, {40., 80.}, {60., 80.}};
 74
 75      // Find out the beam type, also specified from option.
 76      string beamOpt = getOption<string>("beam","NONE");
 77      if (beamOpt != "NONE") {
 78        MSG_WARNING("You are using a specified beam type, instead of using what"
 79	"is provided by the generator. "
 80	"Only do this if you are completely sure what you are doing.");
 81	if (beamOpt=="PP") isHI = false;
 82	else if (beamOpt=="HI") isHI = true;
 83	else {
 84	  MSG_ERROR("Beam error (option)!");
 85	  return;
 86      	}
 87      }
 88      else {
 89        const ParticlePair& beam = beams();
 90        if (beam.first.pid() == PID::PROTON && beam.second.pid() == PID::PROTON) isHI = false;
 91	else if (beam.first.pid() == PID::LEAD && beam.second.pid() == PID::LEAD)
 92	  isHI = true;
 93	else {
 94	  MSG_ERROR("Beam error (found)!");
 95	  return;
 96	}
 97      }
 98    }
 99
100    /// Perform the per-event analysis
101    void analyze(const Event& event) {
102
103      // Charged, primary particles with at least pT = 150 MeV
104      // in eta range of |eta| < 0.5
105      Particles chargedParticles =
106        apply<ALICE::PrimaryParticles>(event,"APRIM").particlesByPt();
107
108      // Check type of event.
109      if ( isHI ) {
110
111        const HepMCHeavyIon & hi = apply<HepMCHeavyIon>(event, "HepMC");
112        if (!hi.ok()) {
113	  MSG_WARNING("HEPMC Heavy ion container needed for this analysis, but not "
114	    "found for this event. Skipping.");
115	  vetoEvent;
116	}
117        // Prepare centrality projection and value
118        const CentralityProjection& centrProj =
119          apply<CentralityProjection>(event, "V0M");
120        double centr = centrProj();
121        // Veto event for too large centralities since those are not used
122        // in the analysis at all
123        if ((centr < 0.) || (centr > 80.)) vetoEvent;
124
125        // Fill PbPb histograms and add weights based on centrality value
126        for (size_t ihist = 0; ihist < NHISTOS; ++ihist) {
127          if (inRange(centr, _centrRegions[ihist].first, _centrRegions[ihist].second)) {
128            _counterSOW[PBPB][ihist]->fill();
129            _counterNcoll[ihist]->fill(hi.Ncoll());
130            for (const Particle& p : chargedParticles) {
131              double pT = p.pT()/GeV;
132              if (pT < 50.) {
133                const double pTAtBinCenter = _histNch[PBPB][ihist]->binAt(pT).xMid();
134                _histNch[PBPB][ihist]->fill(pT, 1/pTAtBinCenter);
135              }
136            }
137          }
138        }
139
140      }
141      else {
142
143        // Fill all pp histograms and add weights
144        for (size_t ihist = 0; ihist < NHISTOS; ++ihist) {
145          _counterSOW[PP][ihist]->fill();
146          for (const Particle& p : chargedParticles) {
147            double pT = p.pT()/GeV;
148            if (pT < 50.) {
149              const double pTAtBinCenter = _histNch[PP][ihist]->binAt(pT).xMid();
150              _histNch[PP][ihist]->fill(pT, 1/pTAtBinCenter);
151            }
152          }
153        }
154
155      }
156
157    }
158
159
160    /// Normalise histograms etc., after the run
161    void finalize() {
162
163      // Right scaling of the histograms with their individual weights.
164      for (size_t itype = 0; itype < EVENT_TYPES; ++itype ) {
165        for (size_t ihist = 0; ihist < NHISTOS; ++ihist) {
166          if (_counterSOW[itype][ihist]->sumW() > 0.) {
167            scale(_histNch[itype][ihist],
168              (1. / _counterSOW[itype][ihist]->sumW() / 2. / M_PI));
169          }
170        }
171      }
172
173      // Postprocessing of the histograms
174      for (size_t ihist = 0; ihist < NHISTOS; ++ihist) {
175        // If there are entires in histograms for both beam types
176        if (_histNch[PP][ihist]->numEntries() > 0 && _histNch[PBPB][ihist]->numEntries() > 0) {
177          // Initialize and fill R_AA histograms
178          divide(_histNch[PBPB][ihist], _histNch[PP][ihist], _histRAA[ihist]);
179          // Scale by Ncoll. Unfortunately some generators does not provide
180          // Ncoll value (eg. JEWEL), so the following scaling will be done
181          // only if there are entries in the counters
182          double ncoll = _counterNcoll[ihist]->sumW();
183          double sow = _counterSOW[PBPB][ihist]->sumW();
184          if (ncoll > 1e-6 && sow > 1e-6)
185            _histRAA[ihist]->scaleY(1. / (ncoll / sow));
186
187        }
188      }
189
190    }
191
192    //@}
193
194  private:
195
196    bool isHI;
197    static const int NHISTOS = 15;
198    static const int EVENT_TYPES = 2;
199    static const int PP = 0;
200    static const int PBPB = 1;
201
202    /// @name Histograms
203    //@{
204    Histo1DPtr _histNch[EVENT_TYPES][NHISTOS];
205    CounterPtr _counterSOW[EVENT_TYPES][NHISTOS];
206    CounterPtr _counterNcoll[NHISTOS];
207    Scatter2DPtr _histRAA[NHISTOS];
208    //@}
209
210    std::vector<std::pair<double, double>> _centrRegions;
211
212  };
213
214  // The hook for the plugin system
215  RIVET_DECLARE_PLUGIN(ALICE_2012_I1127497);
216
217
218}