Merge lp:~christopher-hunt08/maus/maus_analysis_devel into lp:maus/merge

Proposed by Christopher Hunt
Status: Merged
Merged at revision: 740
Proposed branch: lp:~christopher-hunt08/maus/maus_analysis_devel
Merge into: lp:maus/merge
Diff against target: 917 lines (+750/-20)
7 files modified
bin/scifi/tracker_resolution_plots.py (+45/-0)
src/common_py/analysis/__init__.py (+1/-0)
src/common_py/analysis/analysis_track.py (+141/-0)
src/common_py/analysis/hit_types.py (+217/-5)
src/common_py/analysis/inspectors.py (+91/-8)
src/common_py/analysis/systematic_error_estimation.py (+247/-0)
src/common_py/framework/single_thread.py (+8/-7)
To merge this branch: bzr merge lp:~christopher-hunt08/maus/maus_analysis_devel
Reviewer: MAUS Maintainers
Status: Pending
Review via email: mp+345284@code.launchpad.net

Description of the change

Some tweaks and additions to the analysis code in src/common_py, plus a bug fix in single_thread.py, where the data object wasn't being correctly deleted at the end of the pipeline.
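As a rough illustration of the analysis additions, the new chi_squared_estimator class in src/common_py/analysis/systematic_error_estimation.py could be driven along the following lines. This is a minimal sketch with hypothetical measurement values, assuming a working MAUS environment and the interface shown in the preview diff below:

    # Minimal sketch: feed (value, standard error) pairs into the estimator,
    # then extract the systematic error and its asymmetric 68% bounds.
    # The numerical values here are hypothetical.
    import analysis.systematic_error_estimation

    estimator = \
        analysis.systematic_error_estimation.chi_squared_estimator("example_syst")

    for value, error in [(6.02, 0.05), (5.94, 0.04), (6.11, 0.06), (6.05, 0.05)]:
        estimator.add_value(value, error)

    systematic = estimator.calculate_systematic_error()
    lower, upper = estimator.calculate_bounds(systematic)
    print "Systematic error estimate:", systematic
    print "68% bounds:", lower, upper

This is the same pattern the new EmittanceSystematicErrorInspector in inspectors.py uses internally, adding one (emittance, standard error) pair per particle ensemble.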

Revision history for this message
Paolo Franchini (p-franchini) wrote :

Thanks Chris,

I will do it later today,

cheers,

Paolo

On 09/05/18 10:32, Christopher Hunt wrote:
> Christopher Hunt has proposed merging lp:~christopher-hunt08/maus/maus_analysis_devel into lp:maus/merge.
>
> Requested reviews:
> MAUS Maintainers (maus-maintainers)
>
> For more details, see:
> https://code.launchpad.net/~christopher-hunt08/maus/maus_analysis_devel/+merge/345284
>
> Some tweaks and additions to the analysis code in src/common_py, plus a bug fix in single_thread.py, where the data object wasn't being correctly deleted at the end of the pipeline.
>

Preview Diff

1=== modified file 'bin/scifi/tracker_resolution_plots.py'
2--- bin/scifi/tracker_resolution_plots.py 2017-12-12 10:11:06 +0000
3+++ bin/scifi/tracker_resolution_plots.py 2018-05-09 09:31:35 +0000
4@@ -318,6 +318,11 @@
5 tracker+'_p_residual_p', "p Residuals in p", \
6 PZ_BIN, PZ_MIN, PZ_MAX, 500, -50.0, 50.0 )
7
8+ tracker_dict['dpp_residual_p'] = ROOT.TH2F( \
9+ tracker+'_dpp_residual_p', "dp/p Residuals in p", \
10+ PZ_BIN, PZ_MIN, PZ_MAX, 500, -1.0, 1.0 )
11+
12+
13
14 tracker_dict['x_residual_pt'] = ROOT.TH2F( \
15 tracker+'_x_residual_pt', "X Residuals in p_{t}", \
16@@ -1016,6 +1021,7 @@
17 tracker_plots['pt_residual_p'].Fill( P_mc, Pt_res )
18 tracker_plots['pz_residual_p'].Fill( P_mc, res_mom[2] )
19 tracker_plots['p_residual_p'].Fill( P_mc, P_res )
20+ tracker_plots['dpp_residual_p'].Fill( P_mc, P_res/P_mc )
21
22
23 tracker_plots['x_residual_pz'].Fill( Pz_mc, res_pos[0] )
24@@ -1230,6 +1236,45 @@
25 plot_dict[tracker][component+plot_axis+'_bias'] = bias_graph
26
27
28+ for tracker in [ "upstream", "downstream" ] :
29+ for a_plot in [ "dpp_residual_p" ] :
30+ plot = plot_dict[tracker][a_plot]
31+
32+ rms_error = array.array( 'd' )
33+ bin_size = array.array( 'd' )
34+ bins = array.array( 'd' )
35+ rms = array.array( 'd' )
36+ mean = array.array( 'd' )
37+ mean_error = array.array( 'd' )
38+
39+ width = plot.GetXaxis().GetBinWidth(1)
40+ for i in range( 0, plot.GetXaxis().GetNbins() ) :
41+ projection = plot.ProjectionY( a_plot+'_pro_'+str(i), i, (i+1) )
42+
43+ plot_mean = plot.GetXaxis().GetBinCenter( i ) + width
44+ pro_mean, pro_mean_err, pro_std, pro_std_err = \
45+ analysis.tools.fit_gaussian(projection)
46+
47+ bin_size.append( width*0.5 )
48+ bins.append( plot_mean )
49+ rms.append( pro_std )
50+ rms_error.append( pro_std_err )
51+ mean.append( pro_mean )
52+ mean_error.append( pro_mean_err )
53+
54+ if len(bins) != 0 :
55+ resolution_graph = ROOT.TGraphErrors( len(bins), \
56+ bins, rms, bin_size, rms_error )
57+ bias_graph = ROOT.TGraphErrors( len(bins), \
58+ bins, mean, bin_size, mean_error )
59+ else :
60+ resolution_graph = None
61+ bias_graph = None
62+
63+ plot_dict[tracker][a_plot+'_resolution'] = resolution_graph
64+ plot_dict[tracker][a_plot+'_bias'] = bias_graph
65+
66+
67
68 for tracker in [ "upstream", "downstream" ] :
69 # for component in [ "pt_", "pz_", ] :
70
71=== modified file 'src/common_py/analysis/__init__.py'
72--- src/common_py/analysis/__init__.py 2017-02-17 12:38:12 +0000
73+++ src/common_py/analysis/__init__.py 2018-05-09 09:31:35 +0000
74@@ -31,4 +31,5 @@
75 import analysis.hit_types
76 import analysis.inspectors
77 import analysis.scifitools # pylint: disable = E0401
78+import analysis.systematic_error_estimation
79
80
81=== added file 'src/common_py/analysis/analysis_track.py'
82--- src/common_py/analysis/analysis_track.py 1970-01-01 00:00:00 +0000
83+++ src/common_py/analysis/analysis_track.py 2018-05-09 09:31:35 +0000
84@@ -0,0 +1,141 @@
85+#!/usr/bin/env python
86+# This file is part of MAUS: http://micewww.pp.rl.ac.uk/projects/maus
87+#
88+# MAUS is free software: you can redistribute it and/or modify
89+# it under the terms of the GNU General Public License as published by
90+# the Free Software Foundation, either version 3 of the License, or
91+# (at your option) any later version.
92+#
93+# MAUS is distributed in the hope that it will be useful,
94+# but WITHOUT ANY WARRANTY; without even the implied warranty of
95+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
96+# GNU General Public License for more details.
97+#
98+# You should have received a copy of the GNU General Public License
99+# along with MAUS. If not, see <http://www.gnu.org/licenses/>.
100+#
101+
102+"""
103+ Defines the AnalysisTrack class.
104+
105+ A generic track class that can be used to simplify the analysis of track data.
106+"""
107+
108+# pylint: disable = W0311, R0902, R0904, R0913, C0103, W0102
109+
110+class AnalysisTrack() :
111+ """
112+ A simple, unified class that holds the essential information copied
113+ from MAUS data types during various analyses.
114+
115+ Some syntax was borrowed from XBoa for simplicity and because I like the
116+ interface!
117+ """
118+ def __init__( self, trackpoints={}, chisq=0.0, ndf=0, p_value=1.0, status=0 ) :
119+ """
120+ Initialise object.
121+ """
122+ self.__trackpoints = trackpoints
123+ self.__chisq = chisq
124+ self.__ndf = ndf
125+ self.__p_value = p_value
126+ self.__status = status
127+
128+
129+ def __getitem__(self, index) :
130+ """
131+ Return a trackpoint from the track at index ``index''
132+ """
133+ return self.__trackpoints[index]
134+
135+
136+ def __setitem__(self, index, item) :
137+ """
138+ Set the value of the trackpoint in the track at index ``index''
139+ """
140+ self.__trackpoints[index] = item
141+
142+
143+ def __len__(self) :
144+ """
146+ Return the number of trackpoints in the track.
146+ """
147+ return len(self.__trackpoints)
148+
149+
150+ def get_trackpoint( self, index ) :
151+ """
152+ Return a trackpoint from the track at index ``index''
153+ """
154+ return self.__trackpoints[index]
155+
156+
157+ def set_trackpoint( self, index, item ) :
158+ """
159+ Set the value of the trackpoint in the track at index ``index''
160+ """
161+ self.__trackpoints[index] = item
162+
163+
164+ def get_p_value( self ) :
165+ """
166+ Return the track p-value
167+ """
168+ return self.__p_value
169+
170+
171+ def set_p_value( self, pvalue ) :
172+ """
173+ Set the track p-value
174+ """
175+ self.__p_value = pvalue
176+
177+
178+ def get_chisq( self ) :
179+ """
180+ Return the track chi-squared
181+ """
182+ return self.__chisq
183+
184+
185+ def set_chisq( self, chisq ) :
186+ """
187+ Set the track chi-squared
188+ """
189+ self.__chisq = chisq
190+
191+
192+ def get_ndf( self ) :
193+ """
194+ Get the track No. Degrees of Freedom
195+ """
196+ return self.__ndf
197+
198+
199+ def set_ndf( self, ndf ) :
200+ """
201+ Set the track No. Degrees of Freedom
202+ """
203+ self.__ndf = ndf
204+
205+
206+ def get_chisq_ndf(self) :
207+ """
208+ Get the track Chisquared / No. Degrees of Freedom
209+ """
210+ return self.__chisq / self.__ndf
211+
212+
213+ def set_status(self, status) :
214+ """
215+ Set a user-defined status flag
216+ """
217+ self.__status = status
218+
219+
220+ def get_status(self) :
221+ """
222+ Get the status flag
223+ """
224+ return self.__status
225+
226
227=== modified file 'src/common_py/analysis/hit_types.py'
228--- src/common_py/analysis/hit_types.py 2017-08-15 16:19:42 +0000
229+++ src/common_py/analysis/hit_types.py 2018-05-09 09:31:35 +0000
230@@ -23,7 +23,7 @@
231 Note that this class is used by the add_hit meth
232 """
233
234-# pylint: disable = W0311, R0902, R0904, R0913, C0103
235+# pylint: disable = W0311, R0902, R0904, R0913, C0103, R0914, R0915
236
237 import math
238 import analysis.tools as tools
239@@ -40,14 +40,15 @@
240 px = 0.0, py = 0.0, pz = 0.0, station = 0, \
241 time = 0.0, mass = 105.6583715, p_value = 1.0, pid = 13, \
242 weight = 1.0, \
243- scifi_track_point = None, virtual_track_point = None ) :
244+ scifi_track_point = None, virtual_track_point = None, \
245+ global_track_point = None ) :
246 """
247 Initialise the object. this can be done in three ways:
248 1. Specify all components by hand
249 2. Build from a sci-fr trackpoint
250 3. Build from a virtual trackpoint
251 """
252- if scifi_track_point is None and virtual_track_point is None :
253+ if scifi_track_point is None and virtual_track_point is None and global_track_point is None :
254 self.__x = x
255 self.__y = y
256 self.__z = z
257@@ -62,7 +63,7 @@
258 self.__weight = weight
259 # self.__reference = reference
260
261- elif scifi_track_point is not None and virtual_track_point is None :
262+ elif scifi_track_point is not None and virtual_track_point is None and global_track_point is None :
263 self.__x = scifi_track_point.pos().x()
264 self.__y = scifi_track_point.pos().y()
265 self.__z = scifi_track_point.pos().z()
266@@ -82,7 +83,7 @@
267 # else :
268 # self.__reference = reference
269
270- elif scifi_track_point is None and virtual_track_point is not None :
271+ elif scifi_track_point is None and virtual_track_point is not None and global_track_point is None :
272 self.__x = virtual_track_point.GetPosition().x()
273 self.__y = virtual_track_point.GetPosition().y()
274 self.__z = virtual_track_point.GetPosition().z()
275@@ -101,6 +102,20 @@
276 # else :
277 # self.__reference = reference
278
279+ elif scifi_track_point is None and virtual_track_point is None and global_track_point is not None :
280+ self.__x = global_track_point.get_position().X()
281+ self.__y = global_track_point.get_position().Y()
282+ self.__z = global_track_point.get_position().Z()
283+ self.__px = global_track_point.get_momentum().X()
284+ self.__py = global_track_point.get_momentum().Y()
285+ self.__pz = global_track_point.get_momentum().Z()
286+ self.__time = global_track_point.get_position().T()
287+ self.__mass = mass
288+ self.__p_value = p_value
289+ self.__pid = pid
290+ self.__station = station
291+ self.__weight = weight
292+
293 else :
294 print "WTF!"
295 raise ValueError( 'Please supply precisely one of "virtual_track_point"'+\
296@@ -388,3 +403,200 @@
297
298 for key, val in __hit_get_variables.iteritems() :
299 __hit_get_keys.append( key )
300+
301+
302+################################################################################
303+
304+
305+class AnalysisSpacePoint() :
306+ """
307+ A simple, unified class that holds the essential information copied
308+ from MAUS data types during various analyses.
309+
310+ Some syntax was borrowed from XBoa for simplicity and because I like the
311+ interface!
312+ """
313+ def __init__( self, x = 0.0, y = 0.0, z = 0.0, station = 0, \
314+ time = 0.0, weight = 1.0, tof = None, scifi = None ) :
315+ """
316+ Initialise the object. This can be done in three ways:
317+ 1. Specify all components by hand
318+ 2. Build from a SciFi spacepoint
319+ 3. Build from a TOF spacepoint
320+ """
321+ if tof is None and scifi is None :
322+ self.__x = x
323+ self.__y = y
324+ self.__z = z
325+ self.__time = time
326+ self.__station = station
327+ self.__weight = weight
328+
329+ elif scifi is not None and tof is None :
330+ self.__x = scifi.pos().x()
331+ self.__y = scifi.pos().y()
332+ self.__z = scifi.pos().z()
333+ self.__time = time
334+ self.__station = tools.calculate_plane_id(scifi.tracker(), \
335+ scifi.station(), 1)
336+ self.__weight = weight
337+
338+ elif scifi is None and tof is not None :
339+ self.__x = tof.GetGlobalPosX()
340+ self.__y = tof.GetGlobalPosY()
341+ self.__z = tof.GetGlobalPosZ()
342+ self.__time = tof.GetTime()
343+ self.__station = tof.GetStation()
344+ self.__weight = weight
345+
346+ else :
347+ print "WTF!"
348+ raise ValueError( 'Please supply precisely one of "tof"'+\
349+ ' or "scifi", or specify all values explicitly.' )
350+
351+
352+ def __str__( self ) :
353+ """
354+ Return a string of the parameters.
355+ Mostly useful for debugging.
356+ """
357+ return '(' + str( self.__x ) + ',' +\
358+ str( self.__y ) + ',' +\
359+ str( self.__z ) + '):[' +\
360+ str( self.__time ) +\
361+ str( self.__station ) + ']'
362+
363+
364+ def __repr__( self ) :
365+ """
366+ Return a string of the parameters.
367+ Mostly useful for debugging.
368+ """
369+ return '(' + str( self.__x ) + ',' +\
370+ str( self.__y ) + ',' +\
371+ str( self.__z ) + '):[' +\
372+ str( self.__time ) +\
373+ str( self.__station ) + ']'
374+
375+
376+
377+ def set_x( self, val ) :
378+ """
379+ Set the x position
380+ """
381+ self.__x = val
382+
383+ def set_y( self, val ) :
384+ """
385+ Set the y position
386+ """
387+ self.__y = val
388+
389+ def set_z( self, val ) :
390+ """
391+ Set the z position
392+ """
393+ self.__z = val
394+
395+ def set_time( self, val ) :
396+ """
397+ Set the time
398+ """
399+ self.__time = val
400+
401+ def set_station( self, station ) :
402+ """
403+ Set the station ID
404+ """
405+ self.__station = station
406+
407+ def set_weight( self, w ) :
408+ """
409+ Set the statistical weight of the hit
410+ """
411+ self.__weight = w
412+
413+
414+ def get_x( self ) :
415+ """
416+ Get the x position
417+ """
418+ return self.__x
419+
420+ def get_y( self ) :
421+ """
422+ Get the y position
423+ """
424+ return self.__y
425+
426+ def get_z( self ) :
427+ """
428+ Get the z position
429+ """
430+ return self.__z
431+
432+ def get_time( self ) :
433+ """
434+ Get the time
435+ """
436+ return self.__time
437+
438+ def get_r( self ) :
439+ """
440+ Get the position radius
441+ """
442+ return math.sqrt( self.__x**2 + self.__y**2 )
443+
444+ def get_phi( self ) :
445+ """
446+ Get the Phi angle (Position)
447+ """
448+ return math.atan2( self.__y, self.__x )
449+
450+ def get_station( self ) :
451+ """
452+ Get the station ID
453+ """
454+ return self.__station
455+
456+
457+ def get_weight( self ) :
458+ """
459+ Return the statistical weight of the hit
460+ """
461+ return self.__weight
462+
463+
464+ def get( self, string ) :
465+ """
466+ Return the value of the variable described in the string
467+ """
468+ if not string in AnalysisSpacePoint.__hit_get_keys :
469+ return None
470+ else :
471+ return AnalysisSpacePoint.__hit_get_variables[ string ]( self )
472+
473+
474+ def get_as_vector( self ) :
475+ """
476+ Returns the position 4-vector corresponding to the spacepoint:
477+ [t, x, y, z]
478+ """
479+ return [self.__time, self.__x, self.__y, self.__z]
480+
481+
482+ def get_variable_list() :
483+ """
484+ Return the list of variables that the user can request
485+ """
486+ return AnalysisSpacePoint.__hit_get_keys
487+ get_variable_list = staticmethod( get_variable_list )
488+
489+
490+
491+ __hit_get_keys = []
492+ __hit_get_variables = { 'x':get_x, 'y':get_y, 'z':get_z,
493+ 't':get_time, 'r':get_r, 'station':get_station }
494+
495+ for key, val in __hit_get_variables.iteritems() :
496+ __hit_get_keys.append( key )
497
498=== modified file 'src/common_py/analysis/inspectors.py'
499--- src/common_py/analysis/inspectors.py 2017-08-15 16:19:42 +0000
500+++ src/common_py/analysis/inspectors.py 2018-05-09 09:31:35 +0000
501@@ -82,13 +82,13 @@
502
503 self.emittance_plot = ROOT.TH1F(\
504 'inspected_emittance_{0}'.format(self.plane), 'Emittance', \
505- 1000, 0.0, 20.0)
506+ 5000, 0.0, 20.0)
507 self.emittance_x_plot = ROOT.TH1F(\
508 'inspected_emittance_x_{0}'.format(self.plane), 'X Emittance', \
509- 1000, 0.0, 20.0)
510+ 5000, 0.0, 20.0)
511 self.emittance_y_plot = ROOT.TH1F(\
512 'inspected_emittance_y_{0}'.format(self.plane), 'Y Emittance', \
513- 1000, 0.0, 20.0)
514+ 5000, 0.0, 20.0)
515 self.alpha_plot = ROOT.TH1F(\
516 'inspected_alpha_{0}'.format(self.plane), 'Alpha Function', \
517 2000, -5.0, 5.0)
518@@ -109,11 +109,11 @@
519 3000, 0.0, 3000.0)
520 self.total_momentum_plot = ROOT.TH1F(\
521 'inspected_total_momentum_{0}'.format(self.plane), 'Momentum', \
522- 1000, 0.0, 500.0)
523+ 2000, 0.0, 500.0)
524
525 self.amplitude_plot = ROOT.TH1F(\
526 'single_particle_amplitudes_{0}'.format(self.plane), 'Amplitude', \
527- 200, 0.0, 100.0)
528+ 500, 0.0, 100.0)
529 self.amplitude_momentum_plot = ROOT.TH2F(\
530 'inspected_A_p_phasespace_{0}'.format(self.plane), 'A-p-Phasespace' , \
531 200, 0.0, 100.0, 260, 130.0, 260.0 )
532@@ -170,15 +170,15 @@
533 if self.ensemble_size > 1 :
534 self.ensemble_covariance.add_hit(hit)
535
536- if self.covariance.length() > self.ensemble_size :
537+ if self.ensemble_covariance.length() > self.ensemble_size :
538 self.fill_plots()
539
540
541 def fill_plots(self) :
542- if ( self.covariance.length() < (self.ensemble_size / 2) ) or self.ensemble_size < 2 :
543+ if ( self.ensemble_covariance.length() < (self.ensemble_size / 2) ) or self.ensemble_size < 2 :
544 return
545
546- self.emittance_plot.Fill(self.covariance.get_emittance(\
547+ self.emittance_plot.Fill(self.ensemble_covariance.get_emittance(\
548 ['x','px','y','py']))
549 self.emittance_x_plot.Fill(self.ensemble_covariance.get_emittance(['x','px']))
550 self.emittance_y_plot.Fill(self.ensemble_covariance.get_emittance(['y','py']))
551@@ -446,3 +446,86 @@
552
553 return ins_data
554
555+
556+####################################################################################################
557+
558+
559+class EmittanceSystematicErrorInspector() :
560+
561+ def __init__(self, plane_id, ensemble_size) :
562+ if ensemble_size < 2 :
563+ raise ValueError("Ensemble size is too small.")
564+
565+ self.plane = plane_id
566+ self.ensemble_size = ensemble_size
567+ self.covariance = analysis.covariances.CovarianceMatrix()
568+ self.true_covariance = analysis.covariances.CovarianceMatrix()
569+ self.systematic_error_calculator = analysis.systematic_error_estimation.chi_squared_estimator()
570+ self.true_emittances = []
571+
572+
573+ def add_hit(self, hit, true_hit) :
574+ self.covariance.add_hit(hit)
575+ self.true_covariance.add_hit(true_hit)
576+
577+ if self.covariance.length() > self.ensemble_size :
578+ self.fill_calculator()
579+
580+
581+ def fill_calculator(self) :
582+ if ( self.covariance.length() < (self.ensemble_size / 2) ) :
583+ return
584+
585+ emittance = self.covariance.get_emittance(['x','px','y','py'])
586+ true_emittance = self.true_covariance.get_emittance(['x','px','y','py'])
587+
588+ self.true_emittances.append( ( true_emittance, true_emittance / math.sqrt( 2.0*(self.true_covariance.length() - 1) ) ) )
589+ self.systematic_error_calculator.add_value( emittance, emittance / math.sqrt( 2.0*(self.covariance.length() - 1) ) )
590+
591+ self.covariance.clear()
592+ self.true_covariance.clear()
593+
594+
595+ def get_current_ensemble_size(self) :
596+ return self.covariance.length()
597+
598+
599+ def get_systematic_error(self) :
600+ num_measurements = len(self.systematic_error_calculator)
601+
602+ if num_measurements > 0 :
603+ numer = 0.0
604+ denom = 0.0
605+ for meas, weight in self.true_emittances :
606+ numer += meas / (weight**2)
607+ denom += 1.0 / (weight**2)
608+ true_emittance = numer / denom
609+
610+ bias = self.systematic_error_calculator.calculate_mean() - true_emittance
611+ error = math.sqrt(self.systematic_error_calculator.calculate_variance()/num_measurements)
612+ bias_lower = bias - error
613+ bias_upper = bias + error
614+
615+ result = self.systematic_error_calculator.calculate_systematic_error()
616+ lower, upper = self.systematic_error_calculator.calculate_bounds( result )
617+ else :
618+ bias = 0.0
619+ error = 0.0
620+ bias_lower = 0.0
621+ bias_upper = 0.0
622+
623+ result = 0.0
624+ lower = 0.0
625+ upper = 0.0
626+
627+ return bias, bias_lower, bias_upper, result, lower, upper
628+
629+
630+ def get_data_dictionary(self) :
631+ return {}
632+
633+
634+ def get_plot_dictionary(self) :
635+ return self.systematic_error_calculator.get_plots()
636+
637+
638
639=== added file 'src/common_py/analysis/systematic_error_estimation.py'
640--- src/common_py/analysis/systematic_error_estimation.py 1970-01-01 00:00:00 +0000
641+++ src/common_py/analysis/systematic_error_estimation.py 2018-05-09 09:31:35 +0000
642@@ -0,0 +1,247 @@
643+#!/usr/bin/env python
644+
645+# This file is part of MAUS: http://micewww.pp.rl.ac.uk/projects/maus
646+#
647+# MAUS is free software: you can redistribute it and/or modify
648+# it under the terms of the GNU General Public License as published by
649+# the Free Software Foundation, either version 3 of the License, or
650+# (at your option) any later version.
651+#
652+# MAUS is distributed in the hope that it will be useful,
653+# but WITHOUT ANY WARRANTY; without even the implied warranty of
654+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
655+# GNU General Public License for more details.
656+#
657+# You should have received a copy of the GNU General Public License
658+# along with MAUS. If not, see <http://www.gnu.org/licenses/>.
659+#
660+
661+"""
662+ Some classes for the calculation of systematic errors.
663+
664+ Inspiration is from:
665+ L Lyons 1992 J. Phys. A: Math. Gen. 25 1967
666+"""
667+
668+# pylint: disable = W0311, E1101, W0102, R0902, C0103, R0904
669+
670+import math
671+import ROOT
672+
673+import scipy
674+ import scipy.optimize
675+ import scipy.stats
676+
677+class chi_squared_estimator(object) :
678+ """
679+ Performs a chi-squared estimation of the systematic error based on pairs of values and errors.
680+ """
681+
682+ def __init__(self, unique_name="chisq_error_estimation") :
683+ """
684+ Initialise empty object.
685+ Option to set the convergence criteria for the chisq-minimisation routine
686+ """
687+ self.__unique_name = unique_name
688+ self.__measurements = []
689+ self.__errors = []
690+ self.__systematic_error = 0.0
691+ self.__mean = 0.0
692+
693+
694+ def __len__(self) :
695+ """
696+ Return the number of measurements
697+ """
698+ return len(self.__measurements)
699+
700+
701+ def add_value(self, measurement, standard_error) :
702+ """
703+ Add a measurement and its standard error to the object
704+ """
705+ self.__measurements.append(measurement)
706+ self.__errors.append(standard_error)
707+
708+
709+ def _calculate_chisquared(self) :
710+ """
711+ Private function: calculate the chisquared of the measurements
712+ """
713+ total = 0.0
714+ s_sq = self.__systematic_error**2
715+
716+ for meas, error in zip(self.__measurements, self.__errors) :
717+ total += (meas - self.__mean)**2 / (error*error + s_sq)
718+
719+ return total
720+
721+
722+ def _calculate_mean(self) :
723+ """
724+ Private function: calculate the weighted mean of the measurements
725+ """
726+ numerator = 0.0
727+ denominator = 0.0
728+# s_sq = self.__systematic_error**2
729+
730+ for meas, error in zip(self.__measurements, self.__errors) :
731+ numerator += meas / (error*error)
732+ denominator += 1.0 / (error*error)
733+
734+ return numerator / denominator
735+
736+
737+ def calculate_mean(self) :
738+ """
739+ Calculate the weighted mean of the measurements assuming no systematic error.
740+ """
741+ self.__systematic_error = 0.0
742+ return self._calculate_mean()
743+
744+
745+ def calculate_variance(self) :
746+ """
747+ Calculate the variance of the measurements assuming no systematic error.
748+ """
749+ mean = self.calculate_mean()
750+ summation = 0.0
751+ for meas in self.__measurements :
752+ summation += (meas - mean)**2
753+
754+ return summation / len(self.__measurements)
755+
756+
757+ def _constant_error_systematic(self, get_mean, NDOF, stat_error) :
758+ """
759+ Private function: Calculate the systematic error assuming every measurement has the
760+ same statistical error, and the mean is already assumed.
761+
762+ This permits a simpler equation to solve.
763+ """
764+ self.__mean = get_mean()
765+ total = 0.0
766+
767+ for meas in self.__measurements :
768+ total += (meas - self.__mean)**2
769+
770+ diff = ( total / NDOF ) - stat_error**2
771+
772+ if diff <= 0.0 :
773+ return 0.0
774+ else :
775+ return math.sqrt(diff)
776+
777+
778+ def calculate_bounds(self, systematic_estimate, mean=None) :
779+ """
780+ Estimate an asymmetric error bar based on the chisquared distribution.
781+
782+ Changes the estimate of the systematic error until the delta chi-squared reaches the
783+ equivalent of a 1-sigma deviation (central 68%).
784+ """
785+ lower_bar = -1.0e300 # Ridonkulous starting values
786+ upper_bar = 1.0e300
787+
788+ get_mean = None
789+
790+ if mean is None : # Estimate mean from the measurements
791+ get_mean = self._calculate_mean
792+ NDOF = len(self.__measurements) - 1
793+
794+ else : # If the mean is specified we gain a degree of freedom.
795+ get_mean = lambda : mean
796+ NDOF = len(self.__measurements)
797+
798+
799+ bounds = [(0.0, None)]
800+
801+ limit = scipy.stats.chi2.ppf(0.16, NDOF)
802+ upper_bar = scipy.optimize.minimize(self._minimize_me, [systematic_estimate], args=(limit, get_mean), method="TNC", bounds=bounds)
803+
804+ limit = scipy.stats.chi2.ppf(0.84, NDOF)
805+ lower_bar = scipy.optimize.minimize(self._minimize_me, [systematic_estimate], args=(limit, get_mean), method="TNC", bounds=bounds)
806+
807+ return lower_bar.x[0], upper_bar.x[0]
808+
809+
810+ def calculate_systematic_error(self, mean=None, error_assumption=None) :
811+ """
812+ The main function - returns the systematic error given the data and measurements stored in
813+ the class.
814+
815+ Specifying the mean stops the mean from being calculated on the fly from the data; this
816+ slightly changes the behaviour - the mean is no longer a function of the systematic error and
817+ the number of degrees of freedom is different.
818+
819+ Specifying the error_assumption assumes that every measurement has the same (specified)
820+ error. This allows for a faster calculation of the systematic error.
821+
822+ Specifying both allows the systematic error to be calculated trivially.
823+ """
824+ get_mean = None
825+ NDOF = 0
826+ self.__systematic_error = 0.0
827+
828+ if mean is None : # Estimate mean from the measurements
829+ get_mean = self._calculate_mean
830+ NDOF = len(self.__measurements) - 1
831+
832+ else : # If the mean is specified we gain a degree of freedom.
833+ get_mean = lambda : mean
834+ NDOF = len(self.__measurements)
835+
836+
837+ if error_assumption is not None : # Assume all statistical errors are the
838+ # same. Shortcut!
839+ return self._constant_error_systematic(get_mean, NDOF, error_assumption)
840+
841+
842+ bounds = [(0.0, None)]
843+ estimate = 0.0
844+ result = scipy.optimize.minimize(self._minimize_me, [estimate], args=(NDOF, get_mean), method="TNC", bounds=bounds)
845+
846+ self.__systematic_error = result.x[0]
847+
848+ return self.__systematic_error
849+
850+
851+ def _minimize_me(self, parameters, ndof, get_mean) :
852+ """
853+ Minimize function for SciPy.Optimize.
854+
855+ Calculates the squared residual of the Chi-squared value and number of degrees of freedom.
856+ """
857+ self.__systematic_error = parameters[0]
858+ self.__mean = get_mean()
859+ chisq = self._calculate_chisquared()
860+ return (chisq - ndof)**2
861+
862+
863+ def get_plots(self) :
864+ """
865+ Return a dictionary containing histograms of the data that was stored.
866+ """
867+ if len(self.__measurements) == 0 :
868+ return {}
869+
870+ min_val = min(self.__measurements)
871+ max_val = max(self.__measurements)
872+ total_weight = 0.0
873+
874+ for error in self.__errors :
875+ total_weight += 1.0 / (error**2)
876+
877+ hist = ROOT.TH1F(self.__unique_name, "", 100, min_val, max_val)
878+ weighted_hist = ROOT.TH1F(self.__unique_name+'-weighted', "", 100, min_val, max_val)
879+
880+ for value, error in zip( self.__measurements, self.__errors ) :
881+ hist.Fill(value)
882+ weighted_hist.Fill(value, (1.0/(error**2))/total_weight)
883+
884+ plots = {}
885+ plots['measurements'] = hist
886+ plots['weighted_measurements'] = weighted_hist
887+
888+ return plots
889+
890
891=== modified file 'src/common_py/framework/single_thread.py'
892--- src/common_py/framework/single_thread.py 2018-02-14 14:17:17 +0000
893+++ src/common_py/framework/single_thread.py 2018-05-09 09:31:35 +0000
894@@ -201,15 +201,16 @@
895 # done with tranform-merge, now write it out
896 self.outputer.save(redevent)
897 try:
898- del redevent
899- except:
900- pass
901- # if we converted to a different representation, delete the old one
902- try:
903 maus_cpp.converter.del_data_repr(event)
904+ except TypeError: # pylint: disable = W0702
905+ print "Failed to delete MAUS::Data object."
906+ try:
907 maus_cpp.converter.del_data_repr(redevent)
908- except: # pylint: disable = W0702
909- pass
910+ except TypeError: # pylint: disable = W0702
911+ try: # If we returned a different datatype, try to delete it anyway.
912+ del redevent
913+ except:
914+ print "Failed to delete Reduced Data object."
915
916 def start_of_run(self, new_run_number):
917 """
