Rivet analyses reference

LHCF_2016_I1385877

Longitudinal and transverse momentum distributions for neutral pions in the forward-rapidity region

Experiment: LHCF (LHC)
Inspire ID: 1385877
Status: VALIDATED
Authors:
Beam energies: (3500.0, 3500.0); (1380.0, 1380.0); (4000.0, 328000.0) GeV
Run details:

The differential cross sections for inclusive neutral pions as a function of transverse and longitudinal momentum in the very forward-rapidity region have been measured at the LHC with the LHC forward (LHCf) detector in proton-proton collisions at $\sqrt{s}=2.76$ and $7\,$TeV and in proton-lead collisions at a nucleon-nucleon center-of-mass energy of $\sqrt{s_{\rm NN}}=5.02\,$TeV. The differential cross sections in proton-proton collisions are compatible with the hypotheses of limiting fragmentation and Feynman scaling. Comparing proton-proton with proton-lead collisions, we find a sizeable suppression of neutral-pion production in the differential cross sections after subtraction of ultraperipheral proton-lead collisions, corresponding to a nuclear modification factor of about $0.1$-$0.3$. These measurements provide a benchmark for the hadronic-interaction Monte Carlo codes used in the simulation of cosmic-ray air showers.

The beam energy must be specified as the analysis option "ENERGY" when rivet-merging samples.
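For orientation, the nuclear modification factor quoted above compares neutral-pion production in proton-lead collisions with the expectation from an incoherent superposition of nucleon-nucleon collisions. A commonly used, schematic definition is

$R_{p\mathrm{Pb}}(p_{\rm T}, y) \simeq \frac{1}{\langle N_{\rm coll}\rangle} \, \frac{{\rm d}^2 N_{p\mathrm{Pb}} / {\rm d}p_{\rm T}\,{\rm d}y}{{\rm d}^2 N_{pp} / {\rm d}p_{\rm T}\,{\rm d}y},$

where $\langle N_{\rm coll}\rangle$ is the average number of binary nucleon-nucleon collisions; the exact definition used by LHCf, including the subtraction of the ultraperipheral contribution, is given in the paper. Values of $0.1$-$0.3$ therefore indicate a strong suppression relative to that naive expectation.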
Source code: LHCF_2016_I1385877.cc

// -*- C++ -*-
#include "Rivet/Analysis.hh"
#include "Rivet/Projections/Beam.hh"
#include "Rivet/Tools/BinnedHistogram.hh"
#include "Rivet/Projections/UnstableParticles.hh"

namespace Rivet {


  /// @brief Longitudinal and transverse momenta of neutral pions in the forward-rapidity region
  class LHCF_2016_I1385877 : public Analysis {
  public:

    /// Constructor
    RIVET_DEFAULT_ANALYSIS_CTOR(LHCF_2016_I1385877);


    // Some models produce pions with a very small but non-zero pT.
    // To avoid unphysical behavior in the first bin, a cutoff is applied.
    // If you are sure the model does not have this problem you can set pt_cutoff to 0.
    const double pt_cutoff = 0.01;
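    // pt_cutoff is in GeV: the particle selection in analyze() uses Cuts::pT > pt_cutoff*GeV,
    // and finalize() rescales the affected low-pT bins to account for the cut-away range.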


    /// @name Analysis methods
    /// @{
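    // Summary of the booked objects (HepData d-indices): d01/d22 are profiles of the mean pT
    // versus rapidity and rapidity loss; d02-d11 are pT spectra in 0.2-wide rapidity bins from
    // 8.8 to 10.8 (only d02-d06 at 2.76 TeV); d12-d16 are pz spectra in 0.2-wide pT bins from
    // 0 to 1 GeV (only d12-d13 at 2.76 TeV); d21/d23 are the rapidity and rapidity-loss
    // distributions, booked for p-p only. The y-index selects the dataset: y=1 for pp at
    // 2.76 TeV, y=2 for pp at 7 TeV, y=3 for p-Pb at 5.02 TeV (nucleon-nucleon).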
    void bookHistosPP() {
      if (isCompatibleWithSqrtS(7000., 1e-3)) {
        {Histo1DPtr tmp; _h_pi0_rap_pT.add( 8.8,  9.0, book(tmp,  2, 1, 2));}
        {Histo1DPtr tmp; _h_pi0_rap_pT.add( 9.0,  9.2, book(tmp,  3, 1, 2));}
        {Histo1DPtr tmp; _h_pi0_rap_pT.add( 9.2,  9.4, book(tmp,  4, 1, 2));}
        {Histo1DPtr tmp; _h_pi0_rap_pT.add( 9.4,  9.6, book(tmp,  5, 1, 2));}
        {Histo1DPtr tmp; _h_pi0_rap_pT.add( 9.6,  9.8, book(tmp,  6, 1, 2));}
        {Histo1DPtr tmp; _h_pi0_rap_pT.add( 9.8, 10.0, book(tmp,  7, 1, 2));}
        {Histo1DPtr tmp; _h_pi0_rap_pT.add(10.0, 10.2, book(tmp,  8, 1, 2));}
        {Histo1DPtr tmp; _h_pi0_rap_pT.add(10.2, 10.4, book(tmp,  9, 1, 2));}
        {Histo1DPtr tmp; _h_pi0_rap_pT.add(10.4, 10.6, book(tmp, 10, 1, 2));}
        {Histo1DPtr tmp; _h_pi0_rap_pT.add(10.6, 10.8, book(tmp, 11, 1, 2));}

        {Histo1DPtr tmp; _h_pi0_pT_pZ.add(0.0, 0.2, book(tmp, 12, 1, 2));}
        {Histo1DPtr tmp; _h_pi0_pT_pZ.add(0.2, 0.4, book(tmp, 13, 1, 2));}
        {Histo1DPtr tmp; _h_pi0_pT_pZ.add(0.4, 0.6, book(tmp, 14, 1, 2));}
        {Histo1DPtr tmp; _h_pi0_pT_pZ.add(0.6, 0.8, book(tmp, 15, 1, 2));}
        {Histo1DPtr tmp; _h_pi0_pT_pZ.add(0.8, 1.0, book(tmp, 16, 1, 2));}

        book(_p_pi0_rap_apT, 1, 1, 2);
        book(_h_pi0_rap, 21, 1, 2);
        book(_p_pi0_raploss_apT, 22, 1, 2);
        book(_h_pi0_raploss, 23, 1, 2);
      }
      else if (isCompatibleWithSqrtS(2760., 1e-3)) {
        book(_p_pi0_rap_apT, 1, 1, 1);

        {Histo1DPtr tmp; _h_pi0_rap_pT.add(8.8, 9.0, book(tmp, 2, 1, 1));}
        {Histo1DPtr tmp; _h_pi0_rap_pT.add(9.0, 9.2, book(tmp, 3, 1, 1));}
        {Histo1DPtr tmp; _h_pi0_rap_pT.add(9.2, 9.4, book(tmp, 4, 1, 1));}
        {Histo1DPtr tmp; _h_pi0_rap_pT.add(9.4, 9.6, book(tmp, 5, 1, 1));}
        {Histo1DPtr tmp; _h_pi0_rap_pT.add(9.6, 9.8, book(tmp, 6, 1, 1));}

        {Histo1DPtr tmp; _h_pi0_pT_pZ.add(0.0, 0.2, book(tmp, 12, 1, 1));}
        {Histo1DPtr tmp; _h_pi0_pT_pZ.add(0.2, 0.4, book(tmp, 13, 1, 1));}

        book(_h_pi0_rap, 21, 1, 1);

        book(_p_pi0_raploss_apT, 22, 1, 1);
        book(_h_pi0_raploss, 23, 1, 1);
      }
      else {
        MSG_WARNING("p-p collisions : energy out of range!");
      }
    }

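    // Note on the beam-energy check below: the comparison uses the *total* proton-lead
    // sqrt(s), not sqrt(s_NN). For a 4 TeV proton beam on a 208Pb beam of 82 x 4 TeV = 328 TeV,
    // sqrt(s) = sqrt(208) * sqrt(s_NN) ~ sqrt(208) * 5.02 TeV in the massless-beam
    // approximation, which is the value used here.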
    void bookHistosPPb() {
      if (isCompatibleWithSqrtS(sqrt(208.)*5020., 1e-3)) {

        {Histo1DPtr tmp; _h_pi0_rap_pT.add( 8.8,  9.0, book(tmp,  2, 1, 3));}
        {Histo1DPtr tmp; _h_pi0_rap_pT.add( 9.0,  9.2, book(tmp,  3, 1, 3));}
        {Histo1DPtr tmp; _h_pi0_rap_pT.add( 9.2,  9.4, book(tmp,  4, 1, 3));}
        {Histo1DPtr tmp; _h_pi0_rap_pT.add( 9.4,  9.6, book(tmp,  5, 1, 3));}
        {Histo1DPtr tmp; _h_pi0_rap_pT.add( 9.6,  9.8, book(tmp,  6, 1, 3));}
        {Histo1DPtr tmp; _h_pi0_rap_pT.add( 9.8, 10.0, book(tmp,  7, 1, 3));}
        {Histo1DPtr tmp; _h_pi0_rap_pT.add(10.0, 10.2, book(tmp,  8, 1, 3));}
        {Histo1DPtr tmp; _h_pi0_rap_pT.add(10.2, 10.4, book(tmp,  9, 1, 3));}
        {Histo1DPtr tmp; _h_pi0_rap_pT.add(10.4, 10.6, book(tmp, 10, 1, 3));}
        {Histo1DPtr tmp; _h_pi0_rap_pT.add(10.6, 10.8, book(tmp, 11, 1, 3));}

        {Histo1DPtr tmp; _h_pi0_pT_pZ.add(0.0, 0.2, book(tmp, 12, 1, 3));}
        {Histo1DPtr tmp; _h_pi0_pT_pZ.add(0.2, 0.4, book(tmp, 13, 1, 3));}
        {Histo1DPtr tmp; _h_pi0_pT_pZ.add(0.4, 0.6, book(tmp, 14, 1, 3));}
        {Histo1DPtr tmp; _h_pi0_pT_pZ.add(0.6, 0.8, book(tmp, 15, 1, 3));}
        {Histo1DPtr tmp; _h_pi0_pT_pZ.add(0.8, 1.0, book(tmp, 16, 1, 3));}

        book(_p_pi0_rap_apT, 1, 1, 3);
        book(_p_pi0_raploss_apT, 22, 1, 3);
      }
      else {
        MSG_WARNING("p-Pb collisions : energy out of range!");
      }
    }


    /// Book histograms and initialise projections before the run
    void init() {

      // Initialise and register projections
      declare(UnstableParticles(), "UFS");
      declare(Beam(), "Beam");

      // Calculate beam rapidity
      const Particle bm1 = beams().first;
      const Particle bm2 = beams().second;
      MSG_DEBUG("Beam 1 : momentum=" << bm1.pz() << " PID=" << bm1.pid() << " rapidity=" << bm1.rap());
      MSG_DEBUG("Beam 2 : momentum=" << bm2.pz() << " PID=" << bm2.pid() << " rapidity=" << bm2.rap());
      MSG_DEBUG("CM energy: " << sqrtS());
      _beam_rap = bm1.rap();

      // Book histos for p-p or p-Pb mode
      if (bm1.pid() == PID::PROTON && bm2.pid() == PID::PROTON) {
        _isPP = true;
        bookHistosPP();
      } else if (bm1.pid() == PID::PROTON && bm2.pid() == PID::LEAD) {
        _isPP = false;
        bookHistosPPb();
      } else MSG_WARNING("Beam PDG ID out of range --- should be pp or p-Pb");

    }
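    // The beam rapidity cached in init() is used in analyze() to compute the "rapidity loss"
    // y_loss = y_beam - y; spectra expressed in this variable allow the limiting-fragmentation
    // comparison between the different beam energies reported in the paper.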


    /// Perform the per-event analysis
    void analyze(const Event& event) {

      // Select neutral pions
      const UnstableParticles& ufs = applyProjection<UnstableParticles>(event, "UFS");
      const Particles pions = ufs.particles(Cuts::pz > 0 && Cuts::abspid == PID::PI0 && Cuts::pT > pt_cutoff*GeV);
      for (const Particle& p : pions) {
        const double pT = p.pT()/GeV;
        const double rap = p.rap();
        const double raploss = _beam_rap - p.rap();
        _p_pi0_rap_apT->fill(rap, p.pT()/MeV);
        _p_pi0_raploss_apT->fill(raploss, p.pT()/MeV);
        _h_pi0_rap_pT.fill(rap, pT, 1.0/pT);
        _h_pi0_pT_pZ.fill(pT, p.pz()/GeV, p.E()/GeV/pT);
        if (_isPP) {
          _h_pi0_rap->fill(rap);
          _h_pi0_raploss->fill(raploss);
        }
      }

    }
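    // A sketch of why these fill weights are used (assuming azimuthal symmetry): the
    // invariant cross section can be written as
    //   E d^3sigma/dp^3 = 1/(2*pi*pT) * d^2sigma/(dpT dy) = E/(2*pi*pT) * d^2sigma/(dpT dpz),
    // since dy = dpz/E at fixed pT. Filling the rapidity-binned pT spectra with weight 1/pT
    // and the pT-binned pz spectra with weight E/pT, combined with the division by
    // 2*pi*sumOfWeights() and the relevant bin widths in finalize(), therefore gives
    // distributions proportional to the invariant cross section. The extra rescaling of the
    // bins containing pt_cutoff in finalize() compensates for their reduced effective width.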


    /// Normalise histograms etc., after the run
    void finalize() {

      const double inv_scale_factor = 1. / sumOfWeights() / (2.*PI);
      const double pt_bin_width = 0.2;
      for (Histo1DPtr h : _h_pi0_pT_pZ.histos()) {
        if (h->path() == "/LHCF_2016_I1385877/d12-x01-y01" ||
            h->path() == "/LHCF_2016_I1385877/d12-x01-y02" ||
            h->path() == "/LHCF_2016_I1385877/d12-x01-y03") {
          h->scaleW( inv_scale_factor / (pt_bin_width - pt_cutoff) );
        } else {
          h->scaleW( inv_scale_factor / pt_bin_width );
        }
      }

      const double scale_factor = 1. / sumOfWeights() / (2.*PI);
      const double rap_bin_width = 0.2;
      for (Histo1DPtr h : _h_pi0_rap_pT.histos()) {
        const int cutoff_bin = h->binIndexAt(pt_cutoff);
        if (cutoff_bin >= 0) {
          const double cutoff_wdt = h->bin(cutoff_bin).xMax() - h->bin(cutoff_bin).xMin();
          h->bin(cutoff_bin).scaleW( cutoff_wdt / (cutoff_wdt - pt_cutoff) );
        }
        h->scaleW( scale_factor / rap_bin_width );
      }

      if (_isPP) {
        scale( _h_pi0_rap,     1/sumOfWeights() );
        scale( _h_pi0_raploss, 1/sumOfWeights() );
      }

    }

    /// @}



    /// Flag for handling extra histograms in p-p runs
    bool _isPP;
    // Store the beam rapidity for rap-loss calculation (could just re-access this in analyze())
    double _beam_rap;

    /// @name Histograms
    /// @{
    BinnedHistogram _h_pi0_pT_pZ;
    BinnedHistogram _h_pi0_rap_pT;
    Profile1DPtr _p_pi0_rap_apT;
    Histo1DPtr _h_pi0_rap;
    Profile1DPtr _p_pi0_raploss_apT;
    Histo1DPtr _h_pi0_raploss;
    /// @}

  };



  RIVET_DECLARE_PLUGIN(LHCF_2016_I1385877);

}