Rivet is hosted by Hepforge, IPPP Durham

Rivet analyses reference

LHCF_2015_I1351909

Measurement of very forward neutron energy spectra for 7 TeV proton-proton collisions at the Large Hadron Collider
Experiment: LHCF (LHC)
Inspire ID: 1351909
Status: VALIDATED
Authors:
  • Eugenio Berti
  • LHCf collaboration
References:
  • Phys. Lett. B 750 (2015) 360-366
Beams: p+ p+
Beam energies: (3500.0, 3500.0) GeV
Run details:
  • Differential production cross section of neutrons in p-p collisions in the very forward region, expressed as a function of energy. Note that, since the LHCf detector is located 140 m away from the IP, two additional effects must be taken into account: particle decays during transport through the beam pipe, and trajectory bending due to the dipole magnet. Because of them, the final energy spectra include about $0$–$6\%$ of other hadrons, mainly $\Lambda^{0}$ and $K^{0}$. These effects are considered in the Rivet code making use of some approximations that are able to reproduce the model distributions shown in the paper within 15% in most cases, depending on the model, the pseudorapidity region and the energy bin.

The Large Hadron Collider forward (LHCf) experiment is designed to use the LHC to verify the hadronic-interaction models used in cosmic-ray physics. Forward baryon production is one of the crucial points to understand the development of cosmic-ray showers. We report the neutron-energy spectra for LHC $\sqrt{s}$ = 7 TeV proton--proton collisions with the pseudo-rapidity $\eta$ ranging from 8.81 to 8.99, from 8.99 to 9.22, and from 10.76 to infinity. The measured energy spectra obtained from the two independent calorimeters of Arm1 and Arm2 show the same characteristic feature before unfolding the difference in the detector responses. We unfolded the measured spectra by using the multidimensional unfolding method based on Bayesian theory, and the unfolded spectra were compared with current hadronic-interaction models. The QGSJET II-03 model predicts a high neutron production rate at the highest pseudo-rapidity range similar to our results and the DPMJET 3.04 model describes our results well at the lower pseudo-rapidity ranges. However no model perfectly explains the experimental results in the whole pseudo-rapidity range. The experimental data indicate the most abundant neutron production rate relative to the photon production, which does not agree with predictions of the models.

Source code: LHCF_2015_I1351909.cc
  1
  2
  3
  4
  5
  6
  7
  8
  9
 10
 11
 12
 13
 14
 15
 16
 17
 18
 19
 20
 21
 22
 23
 24
 25
 26
 27
 28
 29
 30
 31
 32
 33
 34
 35
 36
 37
 38
 39
 40
 41
 42
 43
 44
 45
 46
 47
 48
 49
 50
 51
 52
 53
 54
 55
 56
 57
 58
 59
 60
 61
 62
 63
 64
 65
 66
 67
 68
 69
 70
 71
 72
 73
 74
 75
 76
 77
 78
 79
 80
 81
 82
 83
 84
 85
 86
 87
 88
 89
 90
 91
 92
 93
 94
 95
 96
 97
 98
 99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
// -*- C++ -*-
#include "Rivet/Analysis.hh"
#include "Rivet/Projections/FinalState.hh"

namespace Rivet {

/// @brief Add a short analysis description here
class LHCF_2015_I1351909 : public Analysis {
public:

	/// Constructor
	DEFAULT_RIVET_ANALYSIS_CTOR(LHCF_2015_I1351909);

	static constexpr bool lhcf_like = true;
	static constexpr int ndecay = 1;
	static constexpr int nbeam = 2;
	static constexpr double D1_begin = 82000.; //mm 60000.; //mm
	static constexpr double D1_end = 82000; //mm 90000.; //mm
	static constexpr double IPtoLHCf = 141050.; //mm

	/// @name Analysis methods

	bool isParticleFromCollision(Particle p, vector<Particle> parents) {
		bool beam[nbeam]={false};

		if(parents.size()==nbeam) {
			for ( int ipar=0; ipar < nbeam; ++ipar )
				beam[ipar] = parents[ipar].genParticle()->is_beam();
			if(beam[0] && beam[1])
				return true;
		}

		return false;
	}

	bool isParticleFromDecay(Particle p, vector<Particle> parents) {
		if(parents.size()==ndecay)
			return true;
		else
			return false;
	}

	bool isDeviated(Particle p, Particle parent) { //Select/Remove particles decayed between IP and LHCf
		GenVertex* pv = p.genParticle()->production_vertex();
		assert(pv != NULL);

		const double decay_vertex = pv->position().z()/mm;

		const double parent_charge = PID::charge(parent.pid());
		const double descendant_charge = PID::charge(p.pid());

		if(parent_charge == 0) { //Particles produced by neutral parent decay
			if(descendant_charge == 0) {
				return false;
			} else {
				if(decay_vertex >= D1_end)
					return false;
				else
					return true; //Remove charged descendants produced from decay before end of D1
			}
		} else { //Particles produced by charged parent decay
			if(decay_vertex <= D1_begin) {
				if(descendant_charge == 0)
					return false;
				else
					return true; //Remove charged descendants produced from decay before end of D1
			} else {
				return true; //Remove particles produced by charged parent decay after begin of D1
			}
		}

		return false;
	}

	bool isSameParticle(Particle p1, Particle p2) {
		if(p1.pid() == p2.pid() &&
				mom(p1).t() == mom(p2).t() &&
				mom(p1).x() == mom(p2).x() &&
				mom(p1).y() == mom(p2).y() &&
				mom(p1).z() == mom(p2).z())
			return true;
		else
			return false;
	}

	bool isAlreadyProcessed(Particle p, vector<Particle> list) {
		for(unsigned int ipar=0; ipar<list.size(); ++ipar)
			if(isSameParticle(p, list[ipar]))
				return true;

		return false;
	}

	/// This method return a fake pseudorapidity to check id decayed particle is in LHCf acceptance
	double RecomputeEta(Particle p) {
		GenVertex* pv = p.genParticle()->production_vertex();

		const double x0 = pv->position().x()/mm;
		const double y0 = pv->position().y()/mm;
		const double z0 = pv->position().z()/mm;

		const double px = p.px()/MeV;
		const double py = p.py()/MeV;
		const double pz = abs(p.pz()/MeV);

		const double dist_to_lhcf = IPtoLHCf - z0;
		const double x1 = x0 + (dist_to_lhcf * px/pz);
		const double y1 = y0 + (dist_to_lhcf * py/pz);

		const double r = sqrt(pow(x1, 2.)+pow(y1, 2.));
		const double theta = atan(abs(r / IPtoLHCf));
		const double pseudorapidity = - log (tan (theta/2.) );

		return pseudorapidity;
	}

	/// Book histograms and initialise projections before the run
	void init() {

		// Initialise and register projections
		//      declare(FinalState("FS");
		addProjection(FinalState(), "FS");

		// Book histograms
		_h_n_en_eta1 = bookHisto1D(1, 1, 1);
		_h_n_en_eta2 = bookHisto1D(1, 1, 2);
		_h_n_en_eta3 = bookHisto1D(1, 1, 3);

	}

	/// Perform the per-event analysis
	void analyze(const Event& event) {

		const double weight = event.weight();

		const FinalState &fs = applyProjection<FinalState> (event, "FS");
		Particles fs_particles = fs.particles();

		vector<Particle> processed_parents;
		processed_parents.clear();

		for (Particle& p: fs_particles ) {

			if(p.pz()/GeV<0.) continue;

			double eta = 0.;
			double en = 0.;

			if(lhcf_like) {
				//======================================================================
				//========== LHCf-like analysis ========================================
				//======================================================================

				vector<Particle> parents = p.parents();

				if(isParticleFromCollision(p, parents)) { //Particles directly produced in collisions
					if(!PID::isHadron(p.pid())) continue; //Remove non-hadron particles
					if(PID::charge(p.pid()) != 0) continue; //Remove charged particles

					eta = p.eta();
					en = p.E()/GeV;
				} else if(isParticleFromDecay(p, parents)) { //Particles produced from decay
					GenVertex* pv = p.genParticle()->production_vertex();
					assert(pv != NULL);

					const double decay_vertex = pv->position().z()/mm;
					Particle parent = parents[0];

					if(decay_vertex < IPtoLHCf) { //If decay happens before LHCf we consider descendants
						if(!PID::isHadron(p.pid())) continue; //Remove non-hadron descendants
						if(isDeviated(p, parent)) continue; //Remove descendants deviated by D1

						eta = RecomputeEta(p);
						en = p.E()/GeV;
					} else {//If decay happens after LHCf we consider parents
						vector<Particle> ancestors;
						ancestors.clear();

						int ngeneration=0;
						bool isValid=true;
						bool isEnded=false;
						while(!isEnded) //Loop over all generations in the decay
						{
							vector<Particle> temp_part;
							temp_part.clear();
							if(ngeneration==0) {
								parent = parents[0];
								temp_part = parent.parents();
							}
							else {
								parent = ancestors[0];
								temp_part = parent.parents();
							}
							ancestors.clear();
							ancestors = temp_part;

							Particle ancestor = ancestors[0];

							if(isParticleFromCollision(parent, ancestors)) { //if we found first particles produced in collisions we consider them
								isEnded=true;

								if(!PID::isHadron(parent.pid())) isValid=false; //Remove non-hadron ancestors/parents
								if(PID::charge(parent.pid()) != 0) isValid=false; //Remove charged ancestors/parents
								if(isAlreadyProcessed(parent, processed_parents))
									isValid=false; //Remove already processed ancestors/parents when looping other descendants
								else
									processed_parents.push_back(parent); //Fill ancestors/parents in the list

								eta = parent.eta();
								en = parent.E()/GeV;
							} else if (isParticleFromDecay(parent, ancestors)) { //if we found first particles produced entering LHCf we consider them
								GenVertex* pv_prev = parent.genParticle()->production_vertex();
								assert(pv_prev != NULL);

								const double previous_decay_vertex = pv_prev->position().z()/mm;

								if(previous_decay_vertex < IPtoLHCf) {
									isEnded=true;

									if(!PID::isHadron(parent.pid())) isValid=false; //Remove non-hadron ancestors/parents
									if(isDeviated(parent, ancestor)) isValid=false; //Remove ancestors/parents deviated by D1
									if(isAlreadyProcessed(parent, processed_parents))
										isValid=false; //Remove already processed ancestors/parents when looping other descendants
									else
										processed_parents.push_back(parent); //Fill ancestors/parents in the list

									eta = RecomputeEta(parent);
									en = parent.E()/GeV;
								}
							} else { //This condition should never happen
								cout << "Looping over particles generation ended without match : Exit..." << endl;
								exit(EXIT_FAILURE);
							}

							++ngeneration;
						}

						if(!isValid) continue;
					}
				} else { //This condition should never happen
					cout << "Particle seems not to be produced in collision or decay : Exit..." << endl;
					exit(EXIT_FAILURE);
				}

			} else {
				//======================================================================
				//========== Only neutrons at IP =======================================
				//======================================================================

				vector<Particle> parents = p.parents();

				//if(isParticleFromCollision(p, parents)) { //Particles directly produced in collisions
					if(p.pid() != 2112 ) continue;

					eta = p.eta();
					en = p.E()/GeV;
				//}
			}

			// Fill histograms
			if( eta > 10.76 ){
				_h_n_en_eta1->fill( en , weight );

			}else if(eta > 8.99 && eta < 9.22){
				_h_n_en_eta2->fill( en , weight );

			}else if(eta > 8.81 && eta < 8.99){
				_h_n_en_eta3->fill( en , weight );

			}
		}
	}


	/// Normalise histograms etc., after the run
	void finalize() {

		scale(_h_n_en_eta1, crossSection()/millibarn/sumOfWeights()); // norm to cross section
		scale(_h_n_en_eta2, crossSection()/millibarn/sumOfWeights()); // norm to cross section
		scale(_h_n_en_eta3, crossSection()/millibarn/sumOfWeights()); // norm to cross section

	}

	//@}


private:


	/// @name Histograms
	//@{
	Histo1DPtr _h_n_en_eta1;
	Histo1DPtr _h_n_en_eta2;
	Histo1DPtr _h_n_en_eta3;
	//@}


};


// The hook for the plugin system
DECLARE_RIVET_PLUGIN(LHCF_2015_I1351909);


}