4 changes: 2 additions & 2 deletions python/uw/data/dataman.py
@@ -11,7 +11,7 @@
import collections
import glob
import warnings
from cPickle import dump,load
from pickle import dump,load

import numpy as np
from astropy.io import fits as pyfits
@@ -536,7 +536,7 @@ def _make_ltcube(self):
print ('Constructing all-sky livetime cube')
else:
print('Constructing livetime cube about RA,Dec = ({0:0.3f},{1:0.3f}) with a radius of {2:0.3f} deg.'.format(roi_dir.ra(),roi_dir.dec(),exp_radius))
for i in xrange(1+self.use_weighted_livetime):
for i in range(1+self.use_weighted_livetime):
#print('on iteration {0}'.format(i))
sys.stdout.flush()
lt = skymaps.LivetimeCube(
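The dataman.py hunk above swaps the Python 2-only `cPickle` module for the standard `pickle` module. If the module still needed to import under Python 2 as well, the usual pattern is a guarded import; this is only an illustrative sketch, not part of this patch:

```python
# Illustrative only: dual-version import guard (the patch itself targets Python 3 directly).
try:
    from cPickle import dump, load   # Python 2: C-accelerated pickler
except ImportError:
    from pickle import dump, load    # Python 3: the C accelerator is used automatically
```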
8 changes: 4 additions & 4 deletions python/uw/data/dssman.py
@@ -195,7 +195,7 @@ def __init__(self,fits_name,header_key='EVENTS'):
indeces = sorted(list(set([int(k[-1]) for k in keys])))
kdeque,vdeque = deque(),deque()
counter = 0 # index of list of DSS indeces
for i in xrange(len(keys)):
for i in range(len(keys)):
if int(keys[i][-1])!=indeces[counter]:
self.append(DSSFactory(kdeque,vdeque))
kdeque.clear(); vdeque.clear()
@@ -228,7 +228,7 @@ def delete(self,index):
""" Delete a DSS entry and re-index the remaining ones."""
ret = self.pop(index)
if index < len(self)-1:
for i in xrange(index,len(self)):
for i in range(index,len(self)):
self[i]['index'] = i+1
return ret

@@ -259,7 +259,7 @@ def roi_info(self,tol=0.01,delete_duplicates=False):
optionally delete them."""
roi_info = None
offset = 0
for i in xrange(len(self)):
for i in range(len(self)):
i += offset
d = self[i]
r = d.roi_info()
@@ -306,7 +306,7 @@ def process_pixeldata(pd):
ptlvars = ['zenithcut','thetacut','emin','emax']
indices = [1,1,0,1]

for i in xrange(len(ptlvars)):
for i in range(len(ptlvars)):
# check the appropriate simple cut
ptl_var = pd.__dict__[ptlvars[i]]
dss = dsse.get_simple_entry(colnames[i])
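The dssman.py loops above follow the same pattern as the rest of the patch: Python 2's `xrange` becomes `range`, which in Python 3 is a lazy sequence object rather than a materialized list, so these index loops keep their constant memory footprint. A small illustration of the equivalence, with made-up values rather than anything from this module:

```python
n = 10**8
indices = range(n)                # Python 3: lazy, O(1) memory, supports len() and indexing
print(len(indices), indices[5])   # 100000000 5
print(list(range(4)))             # [0, 1, 2, 3] -- materialize only when a real list is needed
```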
2 changes: 1 addition & 1 deletion python/uw/irfs/psfman.py
@@ -121,7 +121,7 @@ def _calculate_weights(self,event_type, skydir=None):

dummy = skydir or SkyDir()
weights = np.zeros([len(elo),len(clo)])
for i in xrange(len(elo)): # iterator over energies
for i in range(len(elo)): # iterator over energies
em = (elo[i]*ehi[i])**0.5
for k,(c0,c1) in enumerate(zip(clo,chi)):# iterator over cos(theta) on-axis to edge
if c0 < 0.35: continue # exclude bins below cos(theta)=0.4
8 changes: 4 additions & 4 deletions python/uw/like/Models.py
@@ -666,7 +666,7 @@ def __str__(self,absolute=False, indent=''):
l=[]
if (not self.background and np.any(lo_p[0:-2]!=0)) or \
(self.background and np.any(lo_p!=0)): #if statistical errors are present
for i in xrange(len(pnames)):
for i in range(len(pnames)):
t_n = '%-10s' % pnames[i]
if i < self.npar:
# if free is empty (shouldn't happen normally) treat as all False
@@ -685,7 +685,7 @@ def __str__(self,absolute=False, indent=''):
l+=[t_n+': %.3g + %.3g - %.3g (avg = %.3g) %s'%(p[i],hi_p[i],lo_p[i],(hi_p[i]*lo_p[i])**0.5,frozen)]
return indent+ ('\n'+indent).join(l)
else: #if no errors are present
for i in xrange(len(pnames)):
for i in range(len(pnames)):
t_n = '%-10s' % pnames[i]
if i < self.npar:
frozen = '' if self.free[i] else '(FROZEN)'
@@ -904,7 +904,7 @@ def __flux_derivs__(self,*args):
errs = np.asarray([delta] * len(self._p) )
hi,lo = self.copy(),self.copy()
derivs = []
for i in xrange(len(self._p)):
for i in range(len(self._p)):
hi.setp(i,hi._p[i] + errs[i],internal=True)
lo.setp(i,lo._p[i] - errs[i],internal=True)
derivs += [(hi.i_flux(*args) - lo.i_flux(*args))/(2*errs[i])]
@@ -2006,7 +2006,7 @@ def __call__(self,e):
f = lambda x: kv(5./3,x)
if hasattr(e,'__len__'):
rvals = np.empty(len(e),dtype=float)
for i in xrange(len(rvals)):
for i in range(len(rvals)):
rvals[i] = n0*quad(f,e[i]/cutoff,np.inf)[0]
else:
rvals = n0*quad(f,float(e)/cutoff,np.inf)[0]
8 changes: 4 additions & 4 deletions python/uw/like/counts_plotter.py
@@ -14,7 +14,7 @@

def counts(r,integral=False):

groupings = [deque() for x in xrange(len(r.bin_centers))]
groupings = [deque() for x in range(len(r.bin_centers))]

#group slw by energy
for i,ei in enumerate(r.bin_centers):
@@ -30,7 +30,7 @@ def counts(r,integral=False):
src = np.asarray([ np.asarray([band.phase_factor*band.ps_counts*band.overlaps for band in g]).sum(axis=0) for g in groupings])

if integral:
for i in xrange(len(iso)):
for i in range(len(iso)):
#iso[i] = iso[i:].sum()
#gal[i] = gal[i:].sum()
dif[i] = dif[i:].sum(axis=0)
@@ -61,14 +61,14 @@ def get_counts(roi, merge_non_free=True, merge_all=False, integral=False):
free_mask = np.asarray([np.any(m.free) for m in roi.psm.models])
new_src = np.zeros([len(en),free_mask.sum()+1])
counter = 0
for i in xrange(len(free_mask)):
for i in range(len(free_mask)):
if free_mask[i] :
new_src[:,counter] = src[:,i]
counter += 1
else:
new_src[:,-1] += src[:,i]
src = new_src
ps_names = [ps_names[i] for i in xrange(len(ps_names)) if free_mask[i] ]
ps_names = [ps_names[i] for i in range(len(ps_names)) if free_mask[i] ]
ps_names += ['Other Point Sources' ]
models = zip(bg_names+ps_names, np.hstack((dif,src)).T)
return dict(energies=en, observed=obs, models=models, total=tot)
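A related Python 2/3 difference touches the `get_counts` hunk above: `models = zip(...)` returns a list under Python 2 but a one-shot iterator under Python 3, which matters if the returned value is indexed or iterated more than once. A minimal illustration with made-up values, not the module's actual data:

```python
names = ['galactic', 'isotropic']
counts = [[1.0, 2.0], [3.0, 4.0]]

models = list(zip(names, counts))   # materialize so the result can be reused or indexed
print(models[0])                    # ('galactic', [1.0, 2.0])
```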
4 changes: 2 additions & 2 deletions python/uw/like/likelihood_fit.py
@@ -112,7 +112,7 @@ def find_logl_change(self,initial_value,delta_logl):
hi = initial_value
ll_0 = self.function(hi)
if ll_0-self.function(lo)>delta_logl:
for i in xrange(20):
for i in range(20):
avg = .5*(hi+lo)
ll = self.function(avg)
if ll_0-ll<delta_logl: hi = avg
@@ -124,7 +124,7 @@ def find_logl_change(self,initial_value,delta_logl):
lo = initial_value
hi = initial_value*10
while ll_0-self.function(hi)<delta_logl: hi+=1
for i in xrange(20):
for i in range(20):
avg = .5*(lo+hi)
ll = self.function(avg)
if ll_0-ll<delta_logl: lo = avg
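`find_logl_change` in the hunks above brackets the parameter value at which the log-likelihood drops by `delta_logl` and then bisects for a fixed 20 iterations. A generic sketch of that bisection step, with hypothetical argument names and assuming the change grows monotonically from `x_near` toward `x_far`:

```python
def bisect_for_delta(f, ll_0, delta_logl, x_near, x_far, n_iter=20):
    """Return x where ll_0 - f(x) is approximately delta_logl."""
    for _ in range(n_iter):
        mid = 0.5 * (x_near + x_far)
        if ll_0 - f(mid) < delta_logl:
            x_near = mid   # change still too small: move toward the far bound
        else:
            x_far = mid    # overshoot: tighten the bracket
    return 0.5 * (x_near + x_far)
```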
2 changes: 1 addition & 1 deletion python/uw/like/mapplots.py
@@ -686,7 +686,7 @@ def ppf(prob,mean):
return n.ppf(prob)
d = poisson(mean)
prev = 0
for i in xrange(1,200):
for i in range(1,200):
new = d.cdf(i)
if new >= prob: break
prev = new
2 changes: 1 addition & 1 deletion python/uw/like/pointspec2.py
@@ -86,7 +86,7 @@ def __init__(self, data_specification, **kwargs):
"""
if not isinstance(data_specification,dataman.DataSpec):
if os.path.exists(data_specification):
from cPickle import load
from pickle import load
try:
data_specification = load(file(data_specification))
except UnpicklingError:
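The pointspec2.py hunk switches the import to `pickle` but keeps the surrounding call, which passes a `file()` object to `load()`. As an illustration only (a hypothetical helper, not part of the patch), a Python 3 load of a pickled DataSpec would open the file in binary mode, since the `file()` builtin no longer exists and pickle streams are bytes:

```python
from pickle import load, UnpicklingError

def load_data_spec(path):
    """Hypothetical helper: read a pickled DataSpec under Python 3."""
    try:
        with open(path, 'rb') as handle:   # binary mode; file() is Python 2 only
            return load(handle)
    except UnpicklingError:
        return None   # caller decides how to fall back, as the original code does
```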
2 changes: 1 addition & 1 deletion python/uw/like/pointspec_helpers.py
@@ -25,7 +25,7 @@ def __init__(self,skydir,name,model=None,free_parameters=True,leave_parameters=F
self.model = PowerLaw() if model is None else model
#if not free_parameters:
if not leave_parameters:
for i in xrange(len(self.model.free)): self.model.free[i] = free_parameters
for i in range(len(self.model.free)): self.model.free[i] = free_parameters
self.duplicate = False
def __str__(self):
return '\n'.join(['\n',
7 changes: 3 additions & 4 deletions python/uw/like/pypsf.py
@@ -11,7 +11,6 @@
import numpy as np
from os.path import join
import os
from cPickle import load
from skymaps import ExposureWeighter,SkyDir,PySkyFunction,Hep3Vector,\
WeightedSkyDirList,PythonUtilities,PythonPsf
from scipy.integrate import quad,simps
@@ -98,7 +97,7 @@ def __calc_weights__(self,livetimefile='',skydir=None):
elo,ehi,clo,chi = self.e_los,self.e_his,self.c_los[::-1],self.c_his[::-1]

weights = np.zeros([2,len(elo),len(clo)])
for i in xrange(len(elo)): # iterator over energies
for i in range(len(elo)): # iterator over energies
em = (elo[i]*ehi[i])**0.5
for j in [0,1]: # iterate over conversion types
for k,(c0,c1) in enumerate(zip(clo,chi)):# iterator over cos(theta) on-axis to edge
@@ -169,7 +168,7 @@ def _norm_pars(self):
ens = (self.e_los*self.e_his)**0.5
for ct in [0,1]: # iterate through conversion type
scale_func = self.scale_func[ct]
for i in xrange(self.tables.shape[2]): # iterate through energy
for i in range(self.tables.shape[2]): # iterate through energy
sf = scale_func(ens[i])
# vector operations in incidence angle
nc,nt,gc,gt,sc,st = self.tables[ct,:,i,:]
@@ -310,7 +309,7 @@ def __init__(self,psf,band,**kwargs):

# calculate the actual PSF integral over sub-bands
fopt = np.zeros_like(dom)
for i in xrange(len(b.sp_points)):
for i in range(len(b.sp_points)):
n = psf(b.sp_points[i],b.ct,dom,density=True)*b.sp_vector[i]*b.sp_points[i]**-index
fopt += n
n = (b.sp_vector*f(b.sp_points)).sum()
8 changes: 4 additions & 4 deletions python/uw/like/roi_analysis.py
@@ -326,10 +326,10 @@ def check_gradient(self,tol=1e-3,get_approx_gradient=False):
p0 = self.get_parameters().copy()
grad = np.empty_like(p0)
max_iter = 40
for i in xrange(len(p0)):
for i in range(len(p0)):
delta = 1e-2
prev_grad = np.inf
for j in xrange(max_iter):
for j in range(max_iter):
pwork = p0.copy()
pwork[i] += delta
lhi = self.logLikelihood(pwork)
@@ -444,7 +444,7 @@ def fit(self,method='simplex', tolerance = 0.01, save_values = True,
from uw.utilities.minuit import Minuit
temp_params = self.parameters()
npars = self.parameters().shape[0]
param_names = ['p%i'%i for i in xrange(npars)]
param_names = ['p%i'%i for i in range(npars)]

if use_gradient:
gradient = self.gradient
@@ -476,7 +476,7 @@ def fit(self,method='simplex', tolerance = 0.01, save_values = True,
ll_0 = self.logLikelihood(self.parameters())
if use_gradient:
f0 = fmin_bfgs(self.logLikelihood,self.parameters(),self.gradient,full_output=1,maxiter=500,gtol=gtol,disp=0)
for i in xrange(10):
for i in range(10):
f = self._save_bfgs = fmin_bfgs(self.logLikelihood,self.parameters(),self.gradient,full_output=1,maxiter=500,gtol=gtol,disp=0)
if abs(f0[1] - f[1]) < tolerance: break # note absolute tolerance
if not self.quiet:
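The `check_gradient` hunk above probes the log-likelihood one parameter at a time to form a finite-difference gradient. A plain central-difference version of that idea, as a generic sketch with a fixed step rather than the adaptive delta used in roi_analysis.py:

```python
import numpy as np

def numeric_gradient(f, p0, delta=1e-4):
    """Central-difference estimate of the gradient of f at p0."""
    p0 = np.asarray(p0, dtype=float)
    grad = np.empty_like(p0)
    for i in range(len(p0)):
        hi, lo = p0.copy(), p0.copy()
        hi[i] += delta                       # perturb one parameter at a time
        lo[i] -= delta
        grad[i] = (f(hi) - f(lo)) / (2.0 * delta)
    return grad
```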
4 changes: 2 additions & 2 deletions python/uw/like/roi_diffuse.py
@@ -250,7 +250,7 @@ def sub_energy_binning(band,nsimps):

def initialize_counts(self,bands,roi_dir=None):
rd = self.roi_dir if roi_dir is None else roi_dir
self.bands = [SmallBand() for i in xrange(len(bands))]
self.bands = [SmallBand() for i in range(len(bands))]

for iband,(myband,band) in enumerate(zip(self.bands,bands)):
if not self.quiet:
@@ -342,7 +342,7 @@ def gradient(self,bands,model_index):
for myband,band in zip(self.bands,bands):
pts = sm.gradient(myband.bg_points)
cp = 0
for j in xrange(npar):
for j in range(npar):
if not sm.free[j]: continue
apterm = band.phase_factor*(myband.ap_evals * pts[j,:]).sum()
if band.has_pixels:
2 changes: 1 addition & 1 deletion python/uw/like/roi_extended.py
@@ -126,7 +126,7 @@ def set_state(self,band):

def initialize_counts(self,bands,roi_dir=None):
rd = self.roi_dir if roi_dir is None else roi_dir
self.bands = [SmallBand() for i in xrange(len(bands))]
self.bands = [SmallBand() for i in range(len(bands))]

es = self.extended_source
sm = es.model
2 changes: 1 addition & 1 deletion python/uw/like/roi_image.py
@@ -210,7 +210,7 @@ def process_filedata(roi,selected_bands):

data = get_fields(ft1files,['RA','DEC','TIME','ENERGY','CONVERSION_TYPE'],cuts)
# convert into skydirs
skydirs = [ SkyDir(float(data['RA'][i]),float(data['DEC'][i])) for i in xrange(len(data['RA']))]
skydirs = [ SkyDir(float(data['RA'][i]),float(data['DEC'][i])) for i in range(len(data['RA']))]

# apply the same gti cut used to read in the initial WSDL.
gti=roi.sa.pixeldata.gti
2 changes: 1 addition & 1 deletion python/uw/like/roi_localize.py
@@ -111,7 +111,7 @@ def localize(self):
print (('\t'+7*'%10.4f')% (diff,diff, l.par[0],l.par[1],l.par[3],l.par[4], l.par[6]))

old_sigma=1.0
for i in xrange(self.max_iteration):
for i in range(self.max_iteration):
try:

l.fit(update=True)
2 changes: 1 addition & 1 deletion python/uw/like/roi_modify.py
@@ -118,7 +118,7 @@ def modify_model(roi,which,model,free=None,keep_old_flux=True):
free=np.asarray([free]*len(model.get_all_parameters()))

assert(len(free)==len(model.get_all_parameters()))
for i in xrange(len(free)):
for i in range(len(free)):
model.freeze(i,freeze=not free[i])

roi.__update_state__()
8 changes: 4 additions & 4 deletions python/uw/like/roi_save.py
@@ -6,7 +6,7 @@
author: Joshua Lande
"""
import os
import cPickle
import pickle
import collections
import numpy as N

@@ -75,7 +75,7 @@ def save(roi,filename):

d['LATEXTDIR']=os.environ['LATEXTDIR'] if os.environ.has_key('LATEXTDIR') else None

cPickle.dump(d,open(path.expand(filename),'w'))
pickle.dump(d,open(path.expand(filename),'w'))

def load(filename,**kwargs):
""" Factory method to return a ROIAnalysis object
@@ -84,7 +84,7 @@ def load(filename,**kwargs):
Any additional kwargs is used to modify DataSpecification, SpectralAnalysis,
and ROIAnalysis objects."""
if isinstance(filename, basestring):
d=cPickle.load(open(path.expand(filename),'r'))
d=pickle.load(open(path.expand(filename),'r'))
elif isinstance(filename, dict):
d=filename
else:
@@ -100,7 +100,7 @@ def load(filename,**kwargs):
hasattr(ds.spatial_model,'skyfun') \
and ds.spatial_model.skyfun is None
for ds in d['diffuse_sources']]):
d=cPickle.load(open(path.expand(filename),'r'))
d=pickle.load(open(path.expand(filename),'r'))

from . pointspec import DataSpecification,SpectralAnalysis
from . roi_analysis import ROIAnalysis
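roi_save.py now writes and reads its state dictionary with the standard `pickle` module. Under Python 3, pickle works on bytes, so the file handles have to be opened in binary mode; a minimal round-trip sketch with illustrative helpers, not the module's actual save/load functions:

```python
import pickle

def save_state(state, filename):
    with open(filename, 'wb') as handle:    # binary mode is required for Python 3 pickling
        pickle.dump(state, handle)

def load_state(filename):
    with open(filename, 'rb') as handle:
        return pickle.load(handle)
```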