source: trunk/python/scantable.py@ 1592

Last change on this file since 1592 was 1590, checked in by Malte Marquarding, 16 years ago

Tidy up/factored out printing part of stats to _row_callback

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 69.8 KB
RevLine 
[876]1from asap._asap import Scantable
[226]2from asap import rcParams
[1589]3from asap import print_log, print_log_dec
[1118]4from asap import asaplog
[946]5from asap import selector
[1153]6from asap import linecatalog
[1295]7from asap import _n_bools, mask_not, mask_and, mask_or
[102]8
[876]9class scantable(Scantable):
[102]10 """
11 The ASAP container for scans
12 """
    @print_log_dec
    def __init__(self, filename, average=None, unit=None, parallactify=None):
        """
        Create a scantable from a saved one or make a reference.
        Parameters:
            filename:     the name of an asap table on disk
                          or
                          the name of a rpfits/sdfits/ms file
                          (integrations within scans are auto averaged
                          and the whole file is read)
                          or
                          [advanced] a reference to an existing
                          scantable
            average:      average all integrations within a scan on read.
                          The default (True) is taken from .asaprc.
            unit:         brightness unit; must be consistent with K or Jy.
                          Over-rides the default selected by the reader
                          (input rpfits/sdfits/ms) or replaces the value
                          in existing scantables
            parallactify: Indicate that the data had been parallactified.
                          Default is taken from the .asaprc file.
        """
        # resolve unset arguments from the user's .asaprc configuration
        if average is None:
            average = rcParams['scantable.autoaverage']
        if parallactify is None:
            parallactify = rcParams['scantable.parallactify']
        varlist = vars()  # snapshot of the arguments for the history record
        from asap._asap import stmath
        self._math = stmath()
        if isinstance(filename, Scantable):
            # reference/copy-construct from an existing scantable object
            Scantable.__init__(self, filename)
        else:
            if isinstance(filename, str):# or \
#                (isinstance(filename, list) or isinstance(filename, tuple)) \
#                   and isinstance(filename[-1], str):
                import os.path
                filename = os.path.expandvars(filename)
                filename = os.path.expanduser(filename)
                if not os.path.exists(filename):
                    s = "File '%s' not found." % (filename)
                    # in verbose mode report the problem instead of raising
                    if rcParams['verbose']:
                        asaplog.push(s)
                        #print asaplog.pop().strip()
                        return
                    raise IOError(s)
                if os.path.isdir(filename) \
                    and not os.path.exists(filename+'/table.f1'):
                    # crude check if asap table
                    if os.path.exists(filename+'/table.info'):
                        # open the existing asap table, honouring the
                        # memory/disk storage preference
                        ondisk = rcParams['scantable.storage'] == 'disk'
                        Scantable.__init__(self, filename, ondisk)
                        if unit is not None:
                            self.set_fluxunit(unit)
                        self.set_freqframe(rcParams['scantable.freqframe'])
                    else:
                        msg = "The given file '%s'is not a valid " \
                              "asap table." % (filename)
                        if rcParams['verbose']:
                            print msg
                            return
                        else:
                            raise IOError(msg)
                else:
                    # a foreign format (rpfits/sdfits/ms) - read via filler
                    self._fill([filename], unit, average)
            elif (isinstance(filename, list) or isinstance(filename, tuple)) \
                  and isinstance(filename[-1], str):
                # a list of foreign format files
                self._fill(filename, unit, average)
        self.parallactify(parallactify)
        self._add_history("scantable", varlist)
[102]82
    @print_log_dec
    def save(self, name=None, format=None, overwrite=False):
        """
        Store the scantable on disk. This can be an asap (aips++) Table,
        SDFITS or MS2 format.
        Parameters:
            name:        the name of the outputfile. For format "ASCII"
                         this is the root file name (data in 'name'.txt
                         and header in 'name'_header.txt)
            format:      an optional file format. Default is ASAP.
                         Allowed are - 'ASAP' (save as ASAP [aips++] Table),
                                       'SDFITS' (save as SDFITS file)
                                       'ASCII' (saves as ascii text file)
                                       'MS2' (saves as an aips++
                                              MeasurementSet V2)
                                       'FITS' (save as image FITS - not
                                               readable by class)
                                       'CLASS' (save as FITS readable by CLASS)
            overwrite:   If the file should be overwritten if it exists.
                         The default False is to return with warning
                         without writing the output. USE WITH CARE.
        Example:
            scan.save('myscan.asap')
            scan.save('myscan.sdfits', 'SDFITS')
        """
        from os import path
        # fall back to the format configured in .asaprc
        if format is None: format = rcParams['scantable.save']
        suffix = '.'+format.lower()
        if name is None or name == "":
            # no name given - derive a default from the chosen format
            name = 'scantable'+suffix
            msg = "No filename given. Using default name %s..." % name
            asaplog.push(msg)
        name = path.expandvars(name)
        if path.isfile(name) or path.isdir(name):
            if not overwrite:
                msg = "File %s exists." % name
                # verbose mode warns instead of raising
                if rcParams['verbose']:
                    print msg
                    return
                else:
                    raise IOError(msg)
        format2 = format.upper()
        if format2 == 'ASAP':
            # native format - handled by the C++ layer directly
            self._save(name)
        else:
            # all other formats go through the generic writer
            from asap._asap import stwriter as stw
            writer = stw(format2)
            writer.write(self, name)
        return
132
[102]133 def copy(self):
134 """
135 Return a copy of this scantable.
[1348]136 Note:
137 This makes a full (deep) copy. scan2 = scan1 makes a reference.
[102]138 Parameters:
[113]139 none
[102]140 Example:
141 copiedscan = scan.copy()
142 """
[876]143 sd = scantable(Scantable._copy(self))
[113]144 return sd
145
    def drop_scan(self, scanid=None):
        """
        Return a new scantable where the specified scan number(s) has(have)
        been dropped.
        Parameters:
            scanid:    a (list of) scan number(s)
        """
        from asap import _is_sequence_or_number as _is_valid
        from asap import _to_list
        from asap import unique
        if not _is_valid(scanid):
            # verbose mode reports the problem instead of raising
            if rcParams['verbose']:
                print "Please specify a scanno to drop from the scantable"
                return
            else:
                raise RuntimeError("No scan given")
        try:
            scanid = _to_list(scanid)
            # build the list of scans to KEEP by removing the given ones
            allscans = unique([ self.getscan(i) for i in range(self.nrow())])
            for sid in scanid: allscans.remove(sid)
            if len(allscans) == 0:
                raise ValueError("Can't remove all scans")
        except ValueError:
            if rcParams['verbose']:
                print "Couldn't find any match."
                return
            else: raise
        try:
            # select the remaining scans on top of the current selection,
            # copy them out, then restore the previous selection
            bsel = self.get_selection()
            sel = selector()
            sel.set_scans(allscans)
            self.set_selection(bsel+sel)
            scopy = self._copy()
            self.set_selection(bsel)
            return scantable(scopy)
        except RuntimeError:
            if rcParams['verbose']:
                print "Couldn't find any match."
            else:
                raise
[1093]186
187
[102]188 def get_scan(self, scanid=None):
189 """
190 Return a specific scan (by scanno) or collection of scans (by
191 source name) in a new scantable.
[1348]192 Note:
193 See scantable.drop_scan() for the inverse operation.
[102]194 Parameters:
[513]195 scanid: a (list of) scanno or a source name, unix-style
196 patterns are accepted for source name matching, e.g.
197 '*_R' gets all 'ref scans
[102]198 Example:
[513]199 # get all scans containing the source '323p459'
200 newscan = scan.get_scan('323p459')
201 # get all 'off' scans
202 refscans = scan.get_scan('*_R')
203 # get a susbset of scans by scanno (as listed in scan.summary())
[1118]204 newscan = scan.get_scan([0, 2, 7, 10])
[102]205 """
206 if scanid is None:
[718]207 if rcParams['verbose']:
[1118]208 print "Please specify a scan no or name to " \
209 "retrieve from the scantable"
[718]210 return
211 else:
212 raise RuntimeError("No scan given")
213
[102]214 try:
[946]215 bsel = self.get_selection()
216 sel = selector()
[102]217 if type(scanid) is str:
[946]218 sel.set_name(scanid)
219 self.set_selection(bsel+sel)
[876]220 scopy = self._copy()
[946]221 self.set_selection(bsel)
[876]222 return scantable(scopy)
[102]223 elif type(scanid) is int:
[946]224 sel.set_scans([scanid])
225 self.set_selection(bsel+sel)
[876]226 scopy = self._copy()
[946]227 self.set_selection(bsel)
[876]228 return scantable(scopy)
[381]229 elif type(scanid) is list:
[946]230 sel.set_scans(scanid)
231 self.set_selection(sel)
[876]232 scopy = self._copy()
[946]233 self.set_selection(bsel)
[876]234 return scantable(scopy)
[381]235 else:
[718]236 msg = "Illegal scanid type, use 'int' or 'list' if ints."
237 if rcParams['verbose']:
238 print msg
239 else:
240 raise TypeError(msg)
[102]241 except RuntimeError:
[718]242 if rcParams['verbose']: print "Couldn't find any match."
243 else: raise
[102]244
245 def __str__(self):
[1118]246 return Scantable._summary(self, True)
[102]247
[976]248 def summary(self, filename=None):
[102]249 """
250 Print a summary of the contents of this scantable.
251 Parameters:
252 filename: the name of a file to write the putput to
253 Default - no file output
[381]254 verbose: print extra info such as the frequency table
255 The default (False) is taken from .asaprc
[102]256 """
[976]257 info = Scantable._summary(self, True)
258 #if verbose is None: verbose = rcParams['scantable.verbosesummary']
[102]259 if filename is not None:
[256]260 if filename is "":
261 filename = 'scantable_summary.txt'
[415]262 from os.path import expandvars, isdir
[411]263 filename = expandvars(filename)
[415]264 if not isdir(filename):
[413]265 data = open(filename, 'w')
266 data.write(info)
267 data.close()
268 else:
[718]269 msg = "Illegal file name '%s'." % (filename)
270 if rcParams['verbose']:
271 print msg
272 else:
273 raise IOError(msg)
274 if rcParams['verbose']:
[794]275 try:
276 from IPython.genutils import page as pager
277 except ImportError:
278 from pydoc import pager
279 pager(info)
[718]280 else:
281 return info
[710]282
[1512]283 def get_spectrum(self, rowno):
[1471]284 """Return the spectrum for the current row in the scantable as a list.
285 Parameters:
[1573]286 rowno: the row number to retrieve the spectrum from
[1471]287 """
288 return self._getspectrum(rowno)
[946]289
[1471]290 def get_mask(self, rowno):
291 """Return the mask for the current row in the scantable as a list.
292 Parameters:
[1573]293 rowno: the row number to retrieve the mask from
[1471]294 """
295 return self._getmask(rowno)
296
297 def set_spectrum(self, spec, rowno):
298 """Return the spectrum for the current row in the scantable as a list.
299 Parameters:
300 spec: the spectrum
[1573]301 rowno: the row number to set the spectrum for
[1471]302 """
303 assert(len(spec) == self.nchan())
304 return self._setspectrum(spec, rowno)
305
[946]306 def get_selection(self):
307 """
[1005]308 Get the selection object currently set on this scantable.
309 Parameters:
310 none
311 Example:
312 sel = scan.get_selection()
313 sel.set_ifs(0) # select IF 0
314 scan.set_selection(sel) # apply modified selection
[946]315 """
316 return selector(self._getselection())
317
[1576]318 def set_selection(self, selection=None, **kw):
[946]319 """
[1005]320 Select a subset of the data. All following operations on this scantable
321 are only applied to thi selection.
322 Parameters:
323 selection: a selector object (default unset the selection)
324 Examples:
325 sel = selector() # create a selection object
[1118]326 self.set_scans([0, 3]) # select SCANNO 0 and 3
[1005]327 scan.set_selection(sel) # set the selection
328 scan.summary() # will only print summary of scanno 0 an 3
329 scan.set_selection() # unset the selection
[946]330 """
[1576]331 if selection is None:
332 # reset
333 if len(kw) == 0:
334 selection = selector()
335 else:
336 # try keywords
337 for k in kw:
338 if k not in selector.fields:
339 raise KeyError("Invalid selection key '%s', valid keys are %s" % (k, selector.fields))
340 selection = selector(**kw)
[946]341 self._setselection(selection)
342
[876]343 def stats(self, stat='stddev', mask=None):
[102]344 """
[135]345 Determine the specified statistic of the current beam/if/pol
[102]346 Takes a 'mask' as an optional parameter to specify which
347 channels should be excluded.
348 Parameters:
[135]349 stat: 'min', 'max', 'sumsq', 'sum', 'mean'
350 'var', 'stddev', 'avdev', 'rms', 'median'
351 mask: an optional mask specifying where the statistic
[102]352 should be determined.
353 Example:
[113]354 scan.set_unit('channel')
[1118]355 msk = scan.create_mask([100, 200], [500, 600])
[135]356 scan.stats(stat='mean', mask=m)
[102]357 """
358 if mask == None:
[876]359 mask = []
360 if not self._check_ifs():
[1118]361 raise ValueError("Cannot apply mask as the IFs have different "
362 "number of channels. Please use setselection() "
363 "to select individual IFs")
[256]364
[876]365 statvals = self._math._stats(self, mask, stat)
[1590]366 def cb(i):
367 return statvals[i]
[256]368
[1590]369 return self._row_callback(cb, stat)
[102]370
[1118]371 def stddev(self, mask=None):
[135]372 """
373 Determine the standard deviation of the current beam/if/pol
374 Takes a 'mask' as an optional parameter to specify which
375 channels should be excluded.
376 Parameters:
377 mask: an optional mask specifying where the standard
378 deviation should be determined.
379
380 Example:
381 scan.set_unit('channel')
[1118]382 msk = scan.create_mask([100, 200], [500, 600])
[135]383 scan.stddev(mask=m)
384 """
[1118]385 return self.stats(stat='stddev', mask=mask);
[135]386
[1003]387
[1259]388 def get_column_names(self):
[1003]389 """
390 Return a list of column names, which can be used for selection.
391 """
[1259]392 return list(Scantable.get_column_names(self))
[1003]393
[876]394 def get_tsys(self):
[113]395 """
396 Return the System temperatures.
397 Returns:
[876]398 a list of Tsys values for the current selection
[113]399 """
[256]400
[876]401 return self._row_callback(self._gettsys, "Tsys")
[256]402
    def _row_callback(self, callback, label):
        # Walk every row, collect callback(row) into a vector and build a
        # human-readable report; print the report when in verbose mode.
        # Parameters:
        #     callback:    function taking a row number and returning the
        #                  value to report for that row
        #     label:       heading printed above the report
        # Returns:
        #     the list of per-row callback values
        out = ""
        outvec = []
        sep = '-'*50
        for i in range(self.nrow()):
            tm = self._gettime(i)
            src = self._getsourcename(i)
            out += 'Scan[%d] (%s) ' % (self.getscan(i), src)
            out += 'Time[%s]:\n' % (tm)
            # only mention beam/IF/pol when there is more than one
            if self.nbeam(-1) > 1:
                out += ' Beam[%d] ' % (self.getbeam(i))
            if self.nif(-1) > 1: out += ' IF[%d] ' % (self.getif(i))
            if self.npol(-1) > 1: out += ' Pol[%d] ' % (self.getpol(i))
            outvec.append(callback(i))
            out += '= %3.3f\n' % (outvec[i])
            out += sep+'\n'
        if rcParams['verbose']:
            print sep
            print " %s" % (label)
            print sep
            print out
        return outvec
[256]425
[1070]426 def _get_column(self, callback, row=-1):
427 """
428 """
429 if row == -1:
430 return [callback(i) for i in range(self.nrow())]
431 else:
432 if 0 <= row < self.nrow():
433 return callback(row)
[256]434
[1070]435
[1348]436 def get_time(self, row=-1, asdatetime=False):
[113]437 """
438 Get a list of time stamps for the observations.
[1348]439 Return a datetime object for each integration time stamp in the scantable.
[113]440 Parameters:
[1348]441 row: row no of integration. Default -1 return all rows
442 asdatetime: return values as datetime objects rather than strings
[113]443 Example:
444 none
445 """
[1175]446 from time import strptime
447 from datetime import datetime
[1392]448 times = self._get_column(self._gettime, row)
[1348]449 if not asdatetime:
[1392]450 return times
[1175]451 format = "%Y/%m/%d/%H:%M:%S"
452 if isinstance(times, list):
453 return [datetime(*strptime(i, format)[:6]) for i in times]
454 else:
455 return datetime(*strptime(times, format)[:6])
[102]456
[1348]457
458 def get_inttime(self, row=-1):
459 """
460 Get a list of integration times for the observations.
461 Return a time in seconds for each integration in the scantable.
462 Parameters:
463 row: row no of integration. Default -1 return all rows.
464 Example:
465 none
466 """
[1573]467 return self._get_column(self._getinttime, row)
[1348]468
[1573]469
[714]470 def get_sourcename(self, row=-1):
471 """
[794]472 Get a list source names for the observations.
[714]473 Return a string for each integration in the scantable.
474 Parameters:
[1348]475 row: row no of integration. Default -1 return all rows.
[714]476 Example:
477 none
478 """
[1070]479 return self._get_column(self._getsourcename, row)
[714]480
[794]481 def get_elevation(self, row=-1):
482 """
483 Get a list of elevations for the observations.
484 Return a float for each integration in the scantable.
485 Parameters:
[1348]486 row: row no of integration. Default -1 return all rows.
[794]487 Example:
488 none
489 """
[1070]490 return self._get_column(self._getelevation, row)
[794]491
492 def get_azimuth(self, row=-1):
493 """
494 Get a list of azimuths for the observations.
495 Return a float for each integration in the scantable.
496 Parameters:
[1348]497 row: row no of integration. Default -1 return all rows.
[794]498 Example:
499 none
500 """
[1070]501 return self._get_column(self._getazimuth, row)
[794]502
503 def get_parangle(self, row=-1):
504 """
505 Get a list of parallactic angles for the observations.
506 Return a float for each integration in the scantable.
507 Parameters:
[1348]508 row: row no of integration. Default -1 return all rows.
[794]509 Example:
510 none
511 """
[1070]512 return self._get_column(self._getparangle, row)
[794]513
[1070]514 def get_direction(self, row=-1):
515 """
516 Get a list of Positions on the sky (direction) for the observations.
517 Return a float for each integration in the scantable.
518 Parameters:
519 row: row no of integration. Default -1 return all rows
520 Example:
521 none
522 """
523 return self._get_column(self._getdirection, row)
524
    def get_directionval(self, row=-1):
        """
        Get a list of Positions on the sky (direction) for the observations.
        Return a float for each integration in the scantable.
        Parameters:
            row:    row no of integration. Default -1 return all rows
        Example:
            none
        """
        # NOTE(review): unlike get_direction this calls _getdirectionvec,
        # presumably returning numerical direction values rather than
        # formatted directions - confirm against the C++ layer.
        return self._get_column(self._getdirectionvec, row)
535
536
[102]537 def set_unit(self, unit='channel'):
538 """
539 Set the unit for all following operations on this scantable
540 Parameters:
541 unit: optional unit, default is 'channel'
[1118]542 one of '*Hz', 'km/s', 'channel', ''
[102]543 """
[484]544 varlist = vars()
[1118]545 if unit in ['', 'pixel', 'channel']:
[113]546 unit = ''
547 inf = list(self._getcoordinfo())
548 inf[0] = unit
549 self._setcoordinfo(inf)
[1118]550 self._add_history("set_unit", varlist)
[113]551
[1589]552 @print_log_dec
[484]553 def set_instrument(self, instr):
[358]554 """
[1348]555 Set the instrument for subsequent processing.
[358]556 Parameters:
[710]557 instr: Select from 'ATPKSMB', 'ATPKSHOH', 'ATMOPRA',
[407]558 'DSS-43' (Tid), 'CEDUNA', and 'HOBART'
[358]559 """
560 self._setInstrument(instr)
[1118]561 self._add_history("set_instument", vars())
[358]562
[1589]563 @print_log_dec
[1190]564 def set_feedtype(self, feedtype):
565 """
566 Overwrite the feed type, which might not be set correctly.
567 Parameters:
568 feedtype: 'linear' or 'circular'
569 """
570 self._setfeedtype(feedtype)
571 self._add_history("set_feedtype", vars())
572
[1589]573 @print_log_dec
[276]574 def set_doppler(self, doppler='RADIO'):
575 """
576 Set the doppler for all following operations on this scantable.
577 Parameters:
578 doppler: One of 'RADIO', 'OPTICAL', 'Z', 'BETA', 'GAMMA'
579 """
[484]580 varlist = vars()
[276]581 inf = list(self._getcoordinfo())
582 inf[2] = doppler
583 self._setcoordinfo(inf)
[1118]584 self._add_history("set_doppler", vars())
[710]585
[1589]586 @print_log_dec
[226]587 def set_freqframe(self, frame=None):
[113]588 """
589 Set the frame type of the Spectral Axis.
590 Parameters:
[591]591 frame: an optional frame type, default 'LSRK'. Valid frames are:
[1118]592 'REST', 'TOPO', 'LSRD', 'LSRK', 'BARY',
593 'GEO', 'GALACTO', 'LGROUP', 'CMB'
[113]594 Examples:
595 scan.set_freqframe('BARY')
596 """
[484]597 if frame is None: frame = rcParams['scantable.freqframe']
598 varlist = vars()
[1118]599 valid = ['REST', 'TOPO', 'LSRD', 'LSRK', 'BARY', \
600 'GEO', 'GALACTO', 'LGROUP', 'CMB']
[591]601
[989]602 if frame in valid:
[113]603 inf = list(self._getcoordinfo())
604 inf[1] = frame
605 self._setcoordinfo(inf)
[1118]606 self._add_history("set_freqframe", varlist)
[102]607 else:
[1118]608 msg = "Please specify a valid freq type. Valid types are:\n", valid
[718]609 if rcParams['verbose']:
610 print msg
611 else:
612 raise TypeError(msg)
[710]613
    def set_dirframe(self, frame=""):
        """
        Set the frame type of the Direction on the sky.
        Parameters:
            frame:    an optional frame type, default ''. Valid frames are:
                      'J2000', 'B1950', 'GALACTIC'
        Examples:
            scan.set_dirframe('GALACTIC')
        """
        varlist = vars()
        try:
            Scantable.set_dirframe(self, frame)
        except RuntimeError, msg:
            # verbose mode reports the problem instead of raising
            if rcParams['verbose']:
                print msg
            else:
                raise
        self._add_history("set_dirframe", varlist)
[989]632
[113]633 def get_unit(self):
634 """
635 Get the default unit set in this scantable
636 Returns:
637 A unit string
638 """
639 inf = self._getcoordinfo()
640 unit = inf[0]
641 if unit == '': unit = 'channel'
642 return unit
[102]643
[158]644 def get_abcissa(self, rowno=0):
[102]645 """
[158]646 Get the abcissa in the current coordinate setup for the currently
[113]647 selected Beam/IF/Pol
648 Parameters:
[226]649 rowno: an optional row number in the scantable. Default is the
650 first row, i.e. rowno=0
[113]651 Returns:
[1348]652 The abcissa values and the format string (as a dictionary)
[113]653 """
[256]654 abc = self._getabcissa(rowno)
[710]655 lbl = self._getabcissalabel(rowno)
[158]656 return abc, lbl
[113]657
    def flag(self, mask=None):
        """
        Flag the selected data using an optional channel mask.
        Parameters:
            mask:    an optional channel mask, created with create_mask.
                     Default (no mask) is all channels.
        """
        varlist = vars()
        if mask is None:
            mask = []
        try:
            self._flag(mask)
        except RuntimeError, msg:
            # verbose mode reports the problem instead of raising
            if rcParams['verbose']:
                print msg
                return
            else: raise
        self._add_history("flag", varlist)
676
    @print_log_dec
    def lag_flag(self, start, end, unit="MHz", insitu=None):
        """
        Flag the data in 'lag' space by providing a frequency to remove.
        Flagged data in the scantable gets interpolated over the region.
        No taper is applied.
        Parameters:
            start:    the start frequency (really a period within the
                      bandwidth) or period to remove
            end:      the end frequency or period to remove
            unit:     the frequency unit (default "MHz") or "" for
                      explicit lag channels
            insitu:   if False a new scantable is returned,
                      otherwise the result replaces this scantable.
                      The default is taken from .asaprc.
        Notes:
            It is recommended to flag edges of the band or strong
            signals beforehand.
        """
        if insitu is None: insitu = rcParams['insitu']
        self._math._setinsitu(insitu)
        varlist = vars()
        # scale factors converting the given unit to Hz
        base = { "GHz": 1000000000., "MHz": 1000000., "kHz": 1000., "Hz": 1.}
        if not (unit == "" or base.has_key(unit)):
            raise ValueError("%s is not a valid unit." % unit)
        try:
            if unit == "":
                # values are direct lag channels
                s = scantable(self._math._lag_flag(self, start, end, "lags"))
            else:
                # convert to Hz before handing over to the C++ layer
                s = scantable(self._math._lag_flag(self, start*base[unit],
                                                   end*base[unit], "frequency"))
        except RuntimeError, msg:
            # verbose mode reports the problem instead of raising
            if rcParams['verbose']:
                print msg
                return
            else: raise
        s._add_history("lag_flag", varlist)
        if insitu:
            self._assign(s)
        else:
            return s
[1001]715
    @print_log_dec
    def create_mask(self, *args, **kwargs):
        """
        Compute and return a mask based on [min, max] windows.
        The specified windows are to be INCLUDED, when the mask is
        applied.
        Parameters:
            [min, max], [min2, max2], ...
                Pairs of start/end points (inclusive) specifying the regions
                to be masked
            invert:     optional argument. If specified as True,
                        return an inverted mask, i.e. the regions
                        specified are EXCLUDED
            row:        create the mask using the specified row for
                        unit conversions, default is row=0
                        only necessary if frequency varies over rows.
        Example:
            scan.set_unit('channel')
            a)
            msk = scan.create_mask([400, 500], [800, 900])
            # masks everything outside 400 and 500
            # and 800 and 900 in the unit 'channel'

            b)
            msk = scan.create_mask([400, 500], [800, 900], invert=True)
            # masks the regions between 400 and 500
            # and 800 and 900 in the unit 'channel'
            c)
            mask only channel 400
            msk = scan.create_mask([400])
        """
        row = kwargs.get("row", 0)
        data = self._getabcissa(row)
        u = self._getcoordinfo()[0]
        if rcParams['verbose']:
            if u == "": u = "channel"
            msg = "The current mask window unit is %s" % u
            i = self._check_ifs()
            if not i:
                msg += "\nThis mask is only valid for IF=%d" % (self.getif(i))
            asaplog.push(msg)
        n = self.nchan()
        # start from an all-False mask and switch on the selected windows
        msk = _n_bools(n, False)
        # test if args is a 'list' or a 'normal *args - UGLY!!!

        ws = (isinstance(args[-1][-1], int) or isinstance(args[-1][-1], float)) \
             and args or args[0]
        for window in ws:
            if len(window) == 1:
                # a single value selects just that one channel
                window = [window[0], window[0]]
            if len(window) == 0 or len(window) > 2:
                raise ValueError("A window needs to be defined as [start(, end)]")
            if window[0] > window[1]:
                # normalise to ascending order
                # NOTE(review): this swap mutates the caller's window list
                tmp = window[0]
                window[0] = window[1]
                window[1] = tmp
            for i in range(n):
                if data[i] >= window[0] and data[i] <= window[1]:
                    msk[i] = True
        if kwargs.has_key('invert'):
            if kwargs.get('invert'):
                msk = mask_not(msk)
        return msk
[710]779
[256]780 def get_restfreqs(self):
781 """
782 Get the restfrequency(s) stored in this scantable.
783 The return value(s) are always of unit 'Hz'
784 Parameters:
785 none
786 Returns:
787 a list of doubles
788 """
789 return list(self._getrestfreqs())
[102]790
[402]791
    def set_restfreqs(self, freqs=None, unit='Hz'):
        """
        Set or replace the restfrequency specified and
        If the 'freqs' argument holds a scalar,
        then that rest frequency will be applied to all the selected
        data. If the 'freqs' argument holds
        a vector, then it MUST be of equal or smaller length than
        the number of IFs (and the available restfrequencies will be
        replaced by this vector). In this case, *all* data have
        the restfrequency set per IF according
        to the corresponding value you give in the 'freqs' vector.
        E.g. 'freqs=[1e9, 2e9]' would mean IF 0 gets restfreq 1e9 and
        IF 1 gets restfreq 2e9.
        You can also specify the frequencies via a linecatalog.

        Parameters:
            freqs:   list of rest frequency values or string idenitfiers
            unit:    unit for rest frequency (default 'Hz')

        Example:
            # set the given restfrequency for the whole table
            scan.set_restfreqs(freqs=1.4e9)
            # If thee number of IFs in the data is >= 2 IF0 gets the first
            # value IF1 the second...
            scan.set_restfreqs(freqs=[1.4e9, 1.67e9])
            #set the given restfrequency for the whole table (by name)
            scan.set_restfreqs(freqs="OH1667")

        Note:
            To do more sophisticate Restfrequency setting, e.g. on a
            source and IF basis, use scantable.set_selection() before using
            this function.
            # provide your scantable is call scan
            selection = selector()
            selection.set_name("ORION*")
            selection.set_ifs([1])
            scan.set_selection(selection)
            scan.set_restfreqs(freqs=86.6e9)

        """
        varlist = vars()
        from asap import linecatalog
        # simple value
        if isinstance(freqs, int) or isinstance(freqs, float):
            self._setrestfreqs(freqs, "",unit)
        # list of values
        elif isinstance(freqs, list) or isinstance(freqs, tuple):
            # list values are scalars
            if isinstance(freqs[-1], int) or isinstance(freqs[-1], float):
                # apply the i-th frequency to the i-th IF; save and
                # restore the current selection around the loop
                sel = selector()
                savesel = self._getselection()
                iflist = self.getifnos()
                for i in xrange(len(freqs)):
                    sel.set_ifs(iflist[i])
                    self._setselection(sel)
                    self._setrestfreqs(freqs[i], "",unit)
                self._setselection(savesel)
            # list values are dicts, {'value': frequency, 'name': line name}
            elif isinstance(freqs[-1], dict):
                sel = selector()
                savesel = self._getselection()
                iflist = self.getifnos()
                for i in xrange(len(freqs)):
                    sel.set_ifs(iflist[i])
                    self._setselection(sel)
                    self._setrestfreqs(freqs[i]["value"],
                                       freqs[i]["name"], "MHz")
                self._setselection(savesel)
        # freqs are to be taken from a linecatalog
        elif isinstance(freqs, linecatalog):
            sel = selector()
            savesel = self._getselection()
            iflist = self.getifnos()
            for i in xrange(freqs.nrow()):
                sel.set_ifs(iflist[i])
                self._setselection(sel)
                self._setrestfreqs(freqs.get_frequency(i),
                                   freqs.get_name(i), "MHz")
                # ensure that we are not iterating past nIF
                if i == self.nif()-1: break
            self._setselection(savesel)
        else:
            return
        self._add_history("set_restfreqs", varlist)
876
    def shift_refpix(self, delta):
        """
        Shift the reference pixel of the Spectra Coordinate by an
        integer amount.
        Parameters:
            delta:    the amount to shift by
        Note:
            Be careful using this with broadband data.
        """
        # delegate straight to the C++ implementation
        Scantable.shift(self, delta)
[931]887
[1259]888 def history(self, filename=None):
889 """
890 Print the history. Optionally to a file.
[1348]891 Parameters:
892 filename: The name of the file to save the history to.
[1259]893 """
[484]894 hist = list(self._gethistory())
[794]895 out = "-"*80
[484]896 for h in hist:
[489]897 if h.startswith("---"):
[794]898 out += "\n"+h
[489]899 else:
900 items = h.split("##")
901 date = items[0]
902 func = items[1]
903 items = items[2:]
[794]904 out += "\n"+date+"\n"
905 out += "Function: %s\n Parameters:" % (func)
[489]906 for i in items:
907 s = i.split("=")
[1118]908 out += "\n %s = %s" % (s[0], s[1])
[794]909 out += "\n"+"-"*80
[1259]910 if filename is not None:
911 if filename is "":
912 filename = 'scantable_history.txt'
913 import os
914 filename = os.path.expandvars(os.path.expanduser(filename))
915 if not os.path.isdir(filename):
916 data = open(filename, 'w')
917 data.write(out)
918 data.close()
919 else:
920 msg = "Illegal file name '%s'." % (filename)
921 if rcParams['verbose']:
922 print msg
923 else:
924 raise IOError(msg)
925 if rcParams['verbose']:
926 try:
927 from IPython.genutils import page as pager
928 except ImportError:
929 from pydoc import pager
930 pager(out)
931 else:
932 return out
[484]933 return
[513]934 #
935 # Maths business
936 #
    @print_log_dec
    def average_time(self, mask=None, scanav=False, weight='tint', align=False):
        """
        Return the (time) weighted average of a scan.
        Note:
            in channels only - align if necessary
        Parameters:
            mask:     an optional mask (only used for 'var' and 'tsys'
                      weighting)
            scanav:   True averages each scan separately
                      False (default) averages all scans together,
            weight:   Weighting scheme.
                      'none' (mean no weight)
                      'var' (1/var(spec) weighted)
                      'tsys' (1/Tsys**2 weighted)
                      'tint' (integration time weighted)
                      'tintsys' (Tint/Tsys**2)
                      'median' ( median averaging)
                      The default is 'tint'
            align:    align the spectra in velocity before averaging. It takes
                      the time of the first spectrum as reference time.
        Example:
            # time average the scantable without using a mask
            newscan = scan.average_time()
        """
        varlist = vars()
        if weight is None: weight = 'TINT'
        if mask is None: mask = ()
        # translate the boolean flag into the C++ averaging mode string
        if scanav: scanav = "SCAN"
        else: scanav = "NONE"
        scan = (self, )
        try:
            if align:
                # align in velocity first; operate on an aligned copy
                scan = (self.freq_align(insitu=False), )
            s = None
            # median averaging uses a dedicated code path
            if weight.upper() == 'MEDIAN':
                s = scantable(self._math._averagechannel(scan[0], 'MEDIAN',
                                                         scanav))
            else:
                s = scantable(self._math._average(scan, mask, weight.upper(),
                                                  scanav))
        except RuntimeError, msg:
            # verbose mode reports the problem instead of raising
            if rcParams['verbose']:
                print msg
                return
            else: raise
        s._add_history("average_time", varlist)
        return s
[710]985
[1589]986 @print_log_dec
[876]987 def convert_flux(self, jyperk=None, eta=None, d=None, insitu=None):
[513]988 """
989 Return a scan where all spectra are converted to either
990 Jansky or Kelvin depending upon the flux units of the scan table.
991 By default the function tries to look the values up internally.
992 If it can't find them (or if you want to over-ride), you must
993 specify EITHER jyperk OR eta (and D which it will try to look up
994 also if you don't set it). jyperk takes precedence if you set both.
995 Parameters:
996 jyperk: the Jy / K conversion factor
997 eta: the aperture efficiency
998 d: the geomtric diameter (metres)
999 insitu: if False a new scantable is returned.
1000 Otherwise, the scaling is done in-situ
1001 The default is taken from .asaprc (False)
1002 """
1003 if insitu is None: insitu = rcParams['insitu']
[876]1004 self._math._setinsitu(insitu)
[513]1005 varlist = vars()
1006 if jyperk is None: jyperk = -1.0
1007 if d is None: d = -1.0
1008 if eta is None: eta = -1.0
[876]1009 s = scantable(self._math._convertflux(self, d, eta, jyperk))
1010 s._add_history("convert_flux", varlist)
1011 if insitu: self._assign(s)
1012 else: return s
[513]1013
[1589]1014 @print_log_dec
[876]1015 def gain_el(self, poly=None, filename="", method="linear", insitu=None):
[513]1016 """
1017 Return a scan after applying a gain-elevation correction.
1018 The correction can be made via either a polynomial or a
1019 table-based interpolation (and extrapolation if necessary).
1020 You specify polynomial coefficients, an ascii table or neither.
1021 If you specify neither, then a polynomial correction will be made
1022 with built in coefficients known for certain telescopes (an error
1023 will occur if the instrument is not known).
1024 The data and Tsys are *divided* by the scaling factors.
1025 Parameters:
1026 poly: Polynomial coefficients (default None) to compute a
1027 gain-elevation correction as a function of
1028 elevation (in degrees).
1029 filename: The name of an ascii file holding correction factors.
1030 The first row of the ascii file must give the column
1031 names and these MUST include columns
1032 "ELEVATION" (degrees) and "FACTOR" (multiply data
1033 by this) somewhere.
1034 The second row must give the data type of the
1035 column. Use 'R' for Real and 'I' for Integer.
1036 An example file would be
1037 (actual factors are arbitrary) :
1038
1039 TIME ELEVATION FACTOR
1040 R R R
1041 0.1 0 0.8
1042 0.2 20 0.85
1043 0.3 40 0.9
1044 0.4 60 0.85
1045 0.5 80 0.8
1046 0.6 90 0.75
1047 method: Interpolation method when correcting from a table.
1048 Values are "nearest", "linear" (default), "cubic"
1049 and "spline"
1050 insitu: if False a new scantable is returned.
1051 Otherwise, the scaling is done in-situ
1052 The default is taken from .asaprc (False)
1053 """
1054
1055 if insitu is None: insitu = rcParams['insitu']
[876]1056 self._math._setinsitu(insitu)
[513]1057 varlist = vars()
1058 if poly is None:
[1118]1059 poly = ()
[513]1060 from os.path import expandvars
1061 filename = expandvars(filename)
[876]1062 s = scantable(self._math._gainel(self, poly, filename, method))
1063 s._add_history("gain_el", varlist)
1064 if insitu: self._assign(s)
1065 else: return s
[710]1066
[1589]1067 @print_log_dec
[931]1068 def freq_align(self, reftime=None, method='cubic', insitu=None):
[513]1069 """
1070 Return a scan where all rows have been aligned in frequency/velocity.
1071 The alignment frequency frame (e.g. LSRK) is that set by function
1072 set_freqframe.
1073 Parameters:
1074 reftime: reference time to align at. By default, the time of
1075 the first row of data is used.
1076 method: Interpolation method for regridding the spectra.
1077 Choose from "nearest", "linear", "cubic" (default)
1078 and "spline"
1079 insitu: if False a new scantable is returned.
1080 Otherwise, the scaling is done in-situ
1081 The default is taken from .asaprc (False)
1082 """
[931]1083 if insitu is None: insitu = rcParams["insitu"]
[876]1084 self._math._setinsitu(insitu)
[513]1085 varlist = vars()
[931]1086 if reftime is None: reftime = ""
1087 s = scantable(self._math._freq_align(self, reftime, method))
[876]1088 s._add_history("freq_align", varlist)
1089 if insitu: self._assign(s)
1090 else: return s
[513]1091
[1589]1092 @print_log_dec
[876]1093 def opacity(self, tau, insitu=None):
[513]1094 """
1095 Apply an opacity correction. The data
1096 and Tsys are multiplied by the correction factor.
1097 Parameters:
1098 tau: Opacity from which the correction factor is
1099 exp(tau*ZD)
1100 where ZD is the zenith-distance
1101 insitu: if False a new scantable is returned.
1102 Otherwise, the scaling is done in-situ
1103 The default is taken from .asaprc (False)
1104 """
1105 if insitu is None: insitu = rcParams['insitu']
[876]1106 self._math._setinsitu(insitu)
[513]1107 varlist = vars()
[876]1108 s = scantable(self._math._opacity(self, tau))
1109 s._add_history("opacity", varlist)
1110 if insitu: self._assign(s)
1111 else: return s
[513]1112
[1589]1113 @print_log_dec
[513]1114 def bin(self, width=5, insitu=None):
1115 """
1116 Return a scan where all spectra have been binned up.
[1348]1117 Parameters:
[513]1118 width: The bin width (default=5) in pixels
1119 insitu: if False a new scantable is returned.
1120 Otherwise, the scaling is done in-situ
1121 The default is taken from .asaprc (False)
1122 """
1123 if insitu is None: insitu = rcParams['insitu']
[876]1124 self._math._setinsitu(insitu)
[513]1125 varlist = vars()
[876]1126 s = scantable(self._math._bin(self, width))
[1118]1127 s._add_history("bin", varlist)
[1589]1128 if insitu:
1129 self._assign(s)
1130 else:
1131 return s
[513]1132
[1589]1133 @print_log_dec
[513]1134 def resample(self, width=5, method='cubic', insitu=None):
1135 """
[1348]1136 Return a scan where all spectra have been binned up.
[1573]1137
[1348]1138 Parameters:
[513]1139 width: The bin width (default=5) in pixels
1140 method: Interpolation method when correcting from a table.
1141 Values are "nearest", "linear", "cubic" (default)
1142 and "spline"
1143 insitu: if False a new scantable is returned.
1144 Otherwise, the scaling is done in-situ
1145 The default is taken from .asaprc (False)
1146 """
1147 if insitu is None: insitu = rcParams['insitu']
[876]1148 self._math._setinsitu(insitu)
[513]1149 varlist = vars()
[876]1150 s = scantable(self._math._resample(self, method, width))
[1118]1151 s._add_history("resample", varlist)
[876]1152 if insitu: self._assign(s)
1153 else: return s
[513]1154
[1589]1155 @print_log_dec
[946]1156 def average_pol(self, mask=None, weight='none'):
1157 """
1158 Average the Polarisations together.
1159 Parameters:
1160 mask: An optional mask defining the region, where the
1161 averaging will be applied. The output will have all
1162 specified points masked.
1163 weight: Weighting scheme. 'none' (default), 'var' (1/var(spec)
1164 weighted), or 'tsys' (1/Tsys**2 weighted)
1165 """
1166 varlist = vars()
1167 if mask is None:
1168 mask = ()
[1010]1169 s = scantable(self._math._averagepol(self, mask, weight.upper()))
[1118]1170 s._add_history("average_pol", varlist)
[992]1171 return s
[513]1172
[1589]1173 @print_log_dec
[1145]1174 def average_beam(self, mask=None, weight='none'):
1175 """
1176 Average the Beams together.
1177 Parameters:
1178 mask: An optional mask defining the region, where the
1179 averaging will be applied. The output will have all
1180 specified points masked.
1181 weight: Weighting scheme. 'none' (default), 'var' (1/var(spec)
1182 weighted), or 'tsys' (1/Tsys**2 weighted)
1183 """
1184 varlist = vars()
1185 if mask is None:
1186 mask = ()
1187 s = scantable(self._math._averagebeams(self, mask, weight.upper()))
1188 s._add_history("average_beam", varlist)
1189 return s
1190
    def parallactify(self, pflag):
        """
        Mark the data as having been parallactified (or not).
        Parameters:
            pflag:  flag indicating whether the data had been
                    parallactified (cf. the constructor's 'parallactify'
                    argument).
        """
        varlist = vars()
        self._parallactify(pflag)
        self._add_history("parallactify", varlist)
1195
    @print_log_dec
    def convert_pol(self, poltype=None):
        """
        Convert the data to a different polarisation type.
        Note that you will need cross-polarisation terms for most conversions.
        Parameters:
            poltype:    The new polarisation type. Valid types are:
                        "linear", "circular", "stokes" and "linpol"
        """
        varlist = vars()
        try:
            s = scantable(self._math._convertpol(self, poltype))
        except RuntimeError, msg:
            # in verbose mode the error is reported to the user instead of
            # being raised to the caller
            if rcParams['verbose']:
                print msg
                return
            else:
                raise
        s._add_history("convert_pol", varlist)
        return s
1216
[1589]1217 @print_log_dec
[1574]1218 def smooth(self, kernel="hanning", width=5.0, order=2, insitu=None):
[513]1219 """
1220 Smooth the spectrum by the specified kernel (conserving flux).
1221 Parameters:
1222 kernel: The type of smoothing kernel. Select from
[1574]1223 'hanning' (default), 'gaussian', 'boxcar', 'rmedian'
1224 or 'poly'
[513]1225 width: The width of the kernel in pixels. For hanning this is
1226 ignored otherwise it defauls to 5 pixels.
1227 For 'gaussian' it is the Full Width Half
1228 Maximum. For 'boxcar' it is the full width.
[1574]1229 For 'rmedian' and 'poly' it is the half width.
1230 order: Optional parameter for 'poly' kernel (default is 2), to
1231 specify the order of the polnomial. Ignored by all other
1232 kernels.
[513]1233 insitu: if False a new scantable is returned.
1234 Otherwise, the scaling is done in-situ
1235 The default is taken from .asaprc (False)
1236 Example:
1237 none
1238 """
1239 if insitu is None: insitu = rcParams['insitu']
[876]1240 self._math._setinsitu(insitu)
[513]1241 varlist = vars()
[1574]1242 s = scantable(self._math._smooth(self, kernel.lower(), width, order))
[876]1243 s._add_history("smooth", varlist)
1244 if insitu: self._assign(s)
1245 else: return s
[513]1246
    @print_log_dec
    def poly_baseline(self, mask=None, order=0, plot=False, uselin=False, insitu=None):
        """
        Return a scan which has been baselined (all rows) by a polynomial.
        Parameters:
            mask:       an optional mask
            order:      the order of the polynomial (default is 0)
            plot:       plot the fit and the residual. In this each
                        indivual fit has to be approved, by typing 'y'
                        or 'n'
            uselin:     use linear polynomial fit
            insitu:     if False a new scantable is returned.
                        Otherwise, the scaling is done in-situ
                        The default is taken from .asaprc (False)
        Example:
            # return a scan baselined by a third order polynomial,
            # not using a mask
            bscan = scan.poly_baseline(order=3)
        """
        if insitu is None: insitu = rcParams['insitu']
        varlist = vars()
        if mask is None:
            # default to fitting the full spectrum
            mask = [True for i in xrange(self.nchan(-1))]
        from asap.asapfitter import fitter
        try:
            f = fitter()
            f.set_scan(self, mask)
            if uselin:
                f.set_function(lpoly=order)
            else:
                f.set_function(poly=order)
            s = f.auto_fit(insitu, plot=plot)
            s._add_history("poly_baseline", varlist)
            if insitu: self._assign(s)
            else: return s
        except RuntimeError:
            msg = "The fit failed, possibly because it didn't converge."
            # in verbose mode the failure is reported instead of raised
            if rcParams['verbose']:
                print msg
                return
            else:
                raise RuntimeError(msg)
[513]1290
[1118]1291 def auto_poly_baseline(self, mask=[], edge=(0, 0), order=0,
[1280]1292 threshold=3, chan_avg_limit=1, plot=False,
1293 insitu=None):
[880]1294 """
1295 Return a scan which has been baselined (all rows) by a polynomial.
1296 Spectral lines are detected first using linefinder and masked out
1297 to avoid them affecting the baseline solution.
1298
1299 Parameters:
1300 mask: an optional mask retreived from scantable
1301 edge: an optional number of channel to drop at
1302 the edge of spectrum. If only one value is
1303 specified, the same number will be dropped from
1304 both sides of the spectrum. Default is to keep
[907]1305 all channels. Nested tuples represent individual
[976]1306 edge selection for different IFs (a number of spectral
1307 channels can be different)
[880]1308 order: the order of the polynomial (default is 0)
1309 threshold: the threshold used by line finder. It is better to
1310 keep it large as only strong lines affect the
1311 baseline solution.
[1280]1312 chan_avg_limit:
1313 a maximum number of consequtive spectral channels to
1314 average during the search of weak and broad lines.
1315 The default is no averaging (and no search for weak
1316 lines). If such lines can affect the fitted baseline
1317 (e.g. a high order polynomial is fitted), increase this
1318 parameter (usually values up to 8 are reasonable). Most
1319 users of this method should find the default value
1320 sufficient.
[1061]1321 plot: plot the fit and the residual. In this each
1322 indivual fit has to be approved, by typing 'y'
1323 or 'n'
[880]1324 insitu: if False a new scantable is returned.
1325 Otherwise, the scaling is done in-situ
1326 The default is taken from .asaprc (False)
1327
1328 Example:
1329 scan2=scan.auto_poly_baseline(order=7)
1330 """
1331 if insitu is None: insitu = rcParams['insitu']
1332 varlist = vars()
1333 from asap.asapfitter import fitter
1334 from asap.asaplinefind import linefinder
1335 from asap import _is_sequence_or_number as _is_valid
1336
[976]1337 # check whether edge is set up for each IF individually
[1118]1338 individualedge = False;
1339 if len(edge) > 1:
1340 if isinstance(edge[0], list) or isinstance(edge[0], tuple):
1341 individualedge = True;
[907]1342
[1118]1343 if not _is_valid(edge, int) and not individualedge:
[909]1344 raise ValueError, "Parameter 'edge' has to be an integer or a \
[907]1345 pair of integers specified as a tuple. Nested tuples are allowed \
1346 to make individual selection for different IFs."
[919]1347
[1118]1348 curedge = (0, 0)
1349 if individualedge:
1350 for edgepar in edge:
1351 if not _is_valid(edgepar, int):
1352 raise ValueError, "Each element of the 'edge' tuple has \
1353 to be a pair of integers or an integer."
[907]1354 else:
[1118]1355 curedge = edge;
[880]1356
1357 # setup fitter
1358 f = fitter()
1359 f.set_function(poly=order)
1360
1361 # setup line finder
[1118]1362 fl = linefinder()
[1268]1363 fl.set_options(threshold=threshold,avg_limit=chan_avg_limit)
[880]1364
1365 if not insitu:
[1118]1366 workscan = self.copy()
[880]1367 else:
[1118]1368 workscan = self
[880]1369
[907]1370 fl.set_scan(workscan)
1371
[1118]1372 rows = range(workscan.nrow())
[880]1373 asaplog.push("Processing:")
1374 for r in rows:
[1118]1375 msg = " Scan[%d] Beam[%d] IF[%d] Pol[%d] Cycle[%d]" % \
1376 (workscan.getscan(r), workscan.getbeam(r), workscan.getif(r), \
1377 workscan.getpol(r), workscan.getcycle(r))
[880]1378 asaplog.push(msg, False)
[907]1379
[976]1380 # figure out edge parameter
[1118]1381 if individualedge:
1382 if len(edge) >= workscan.getif(r):
1383 raise RuntimeError, "Number of edge elements appear to " \
1384 "be less than the number of IFs"
1385 curedge = edge[workscan.getif(r)]
[919]1386
[976]1387 # setup line finder
[1118]1388 fl.find_lines(r, mask, curedge)
[1536]1389 f.set_data(workscan._getabcissa(r), workscan._getspectrum(r),
1390 mask_and(workscan._getmask(r), fl.get_mask()))
[880]1391 f.fit()
1392 x = f.get_parameters()
[1061]1393 if plot:
1394 f.plot(residual=True)
1395 x = raw_input("Accept fit ( [y]/n ): ")
1396 if x.upper() == 'N':
1397 continue
[880]1398 workscan._setspectrum(f.fitter.getresidual(), r)
[1061]1399 if plot:
1400 f._p.unmap()
1401 f._p = None
1402 workscan._add_history("auto_poly_baseline", varlist)
[880]1403 if insitu:
1404 self._assign(workscan)
1405 else:
1406 return workscan
1407
    @print_log_dec
    def rotate_linpolphase(self, angle):
        """
        Rotate the phase of the complex polarization O=Q+iU correlation.
        This is always done in situ in the raw data. So if you call this
        function more than once then each call rotates the phase further.
        Parameters:
            angle:   The angle (degrees) to rotate (add) by.
        Examples:
            scan.rotate_linpolphase(2.3)
        """
        varlist = vars()
        # delegates to the C++ maths layer; modifies the data in place
        self._math._rotate_linpolphase(self, angle)
        self._add_history("rotate_linpolphase", varlist)
        return
[710]1423
    @print_log_dec
    def rotate_xyphase(self, angle):
        """
        Rotate the phase of the XY correlation. This is always done in situ
        in the data. So if you call this function more than once
        then each call rotates the phase further.
        Parameters:
            angle:   The angle (degrees) to rotate (add) by.
        Examples:
            scan.rotate_xyphase(2.3)
        """
        varlist = vars()
        # delegates to the C++ maths layer; modifies the data in place
        self._math._rotate_xyphase(self, angle)
        self._add_history("rotate_xyphase", varlist)
        return
1439
    @print_log_dec
    def swap_linears(self):
        """
        Swap the linear polarisations XX and YY, or better the first two
        polarisations as this also works for ciculars.
        """
        varlist = vars()
        # in-place operation on the underlying data
        self._math._swap_linears(self)
        self._add_history("swap_linears", varlist)
        return
1450
    @print_log_dec
    def invert_phase(self):
        """
        Invert the phase of the complex polarisation
        """
        varlist = vars()
        # in-place operation on the underlying data
        self._math._invert_phase(self)
        self._add_history("invert_phase", varlist)
        return
1460
[1589]1461 @print_log_dec
[876]1462 def add(self, offset, insitu=None):
[513]1463 """
1464 Return a scan where all spectra have the offset added
1465 Parameters:
1466 offset: the offset
1467 insitu: if False a new scantable is returned.
1468 Otherwise, the scaling is done in-situ
1469 The default is taken from .asaprc (False)
1470 """
1471 if insitu is None: insitu = rcParams['insitu']
[876]1472 self._math._setinsitu(insitu)
[513]1473 varlist = vars()
[876]1474 s = scantable(self._math._unaryop(self, offset, "ADD", False))
[1118]1475 s._add_history("add", varlist)
[876]1476 if insitu:
1477 self._assign(s)
1478 else:
[513]1479 return s
1480
[1589]1481 @print_log_dec
[1308]1482 def scale(self, factor, tsys=True, insitu=None):
[513]1483 """
1484 Return a scan where all spectra are scaled by the give 'factor'
1485 Parameters:
1486 factor: the scaling factor
1487 insitu: if False a new scantable is returned.
1488 Otherwise, the scaling is done in-situ
1489 The default is taken from .asaprc (False)
1490 tsys: if True (default) then apply the operation to Tsys
1491 as well as the data
1492 """
1493 if insitu is None: insitu = rcParams['insitu']
[876]1494 self._math._setinsitu(insitu)
[513]1495 varlist = vars()
[876]1496 s = scantable(self._math._unaryop(self, factor, "MUL", tsys))
[1118]1497 s._add_history("scale", varlist)
[876]1498 if insitu:
1499 self._assign(s)
1500 else:
[513]1501 return s
1502
[1504]1503 def set_sourcetype(self, match, matchtype="pattern",
1504 sourcetype="reference"):
[1502]1505 """
1506 Set the type of the source to be an source or reference scan
1507 using the provided pattern:
1508 Parameters:
[1504]1509 match: a Unix style pattern, regular expression or selector
1510 matchtype: 'pattern' (default) UNIX style pattern or
1511 'regex' regular expression
[1502]1512 sourcetype: the type of the source to use (source/reference)
1513 """
1514 varlist = vars()
1515 basesel = self.get_selection()
1516 stype = -1
1517 if sourcetype.lower().startswith("r"):
1518 stype = 1
1519 elif sourcetype.lower().startswith("s"):
1520 stype = 0
[1504]1521 else:
[1502]1522 raise ValueError("Illegal sourcetype use s(ource) or r(eference)")
[1504]1523 if matchtype.lower().startswith("p"):
1524 matchtype = "pattern"
1525 elif matchtype.lower().startswith("r"):
1526 matchtype = "regex"
1527 else:
1528 raise ValueError("Illegal matchtype, use p(attern) or r(egex)")
[1502]1529 sel = selector()
1530 if isinstance(match, selector):
1531 sel = match
1532 else:
[1504]1533 sel.set_query("SRCNAME == %s('%s')" % (matchtype, match))
[1502]1534 self.set_selection(basesel+sel)
1535 self._setsourcetype(stype)
1536 self.set_selection(basesel)
[1573]1537 self._add_history("set_sourcetype", varlist)
[1502]1538
    @print_log_dec
    def auto_quotient(self, preserve=True, mode='paired'):
        """
        This function allows to build quotients automatically.
        It assumes the observation to have the same numer of
        "ons" and "offs"
        Parameters:
            preserve:       you can preserve (default) the continuum or
                            remove it. The equations used are
                            preserve: Output = Toff * (on/off) - Toff
                            remove:   Output = Toff * (on/off) - Ton
            mode:           the on/off detection mode
                            'paired' (default)
                            identifies 'off' scans by the
                            trailing '_R' (Mopra/Parkes) or
                            '_e'/'_w' (Tid) and matches
                            on/off pairs from the observing pattern
                            'time'
                            finds the closest off in time

        """
        modes = ["time", "paired"]
        if not mode in modes:
            msg = "please provide valid mode. Valid modes are %s" % (modes)
            raise ValueError(msg)
        varlist = vars()
        s = None
        if mode.lower() == "paired":
            # remember the user's selection so it can be restored afterwards
            basesel = self.get_selection()
            sel = selector()+basesel
            # SRCTYPE==1 selects the reference ('off') rows ...
            sel.set_query("SRCTYPE==1")
            self.set_selection(sel)
            offs = self.copy()
            # ... and SRCTYPE==0 the signal ('on') rows
            sel.set_query("SRCTYPE==0")
            self.set_selection(sel)
            ons = self.copy()
            s = scantable(self._math._quotient(ons, offs, preserve))
            self.set_selection(basesel)
        elif mode.lower() == "time":
            # the C++ layer pairs each 'on' with the closest 'off' in time
            s = scantable(self._math._auto_quotient(self, mode, preserve))
        s._add_history("auto_quotient", varlist)
        return s
[710]1581
[1589]1582 @print_log_dec
[1145]1583 def mx_quotient(self, mask = None, weight='median', preserve=True):
[1141]1584 """
[1143]1585 Form a quotient using "off" beams when observing in "MX" mode.
1586 Parameters:
[1145]1587 mask: an optional mask to be used when weight == 'stddev'
[1143]1588 weight: How to average the off beams. Default is 'median'.
[1145]1589 preserve: you can preserve (default) the continuum or
1590 remove it. The equations used are
1591 preserve: Output = Toff * (on/off) - Toff
1592 remove: Output = Toff * (on/off) - Ton
[1217]1593 """
[1143]1594 if mask is None: mask = ()
[1141]1595 varlist = vars()
1596 on = scantable(self._math._mx_extract(self, 'on'))
[1143]1597 preoff = scantable(self._math._mx_extract(self, 'off'))
1598 off = preoff.average_time(mask=mask, weight=weight, scanav=False)
[1217]1599 from asapmath import quotient
[1145]1600 q = quotient(on, off, preserve)
[1143]1601 q._add_history("mx_quotient", varlist)
[1217]1602 return q
[513]1603
[1589]1604 @print_log_dec
[718]1605 def freq_switch(self, insitu=None):
1606 """
1607 Apply frequency switching to the data.
1608 Parameters:
1609 insitu: if False a new scantable is returned.
1610 Otherwise, the swictching is done in-situ
1611 The default is taken from .asaprc (False)
1612 Example:
1613 none
1614 """
1615 if insitu is None: insitu = rcParams['insitu']
[876]1616 self._math._setinsitu(insitu)
[718]1617 varlist = vars()
[876]1618 s = scantable(self._math._freqswitch(self))
[1118]1619 s._add_history("freq_switch", varlist)
[876]1620 if insitu: self._assign(s)
1621 else: return s
[718]1622
    @print_log_dec
    def recalc_azel(self):
        """
        Recalculate the azimuth and elevation for each position.
        Parameters:
            none
        Example:
        """
        varlist = vars()
        # in-place update of the stored pointing information
        self._recalcazel()
        self._add_history("recalc_azel", varlist)
        return
1635
[1589]1636 @print_log_dec
[513]1637 def __add__(self, other):
1638 varlist = vars()
1639 s = None
1640 if isinstance(other, scantable):
[1573]1641 s = scantable(self._math._binaryop(self, other, "ADD"))
[513]1642 elif isinstance(other, float):
[876]1643 s = scantable(self._math._unaryop(self, other, "ADD", False))
[513]1644 else:
[718]1645 raise TypeError("Other input is not a scantable or float value")
[513]1646 s._add_history("operator +", varlist)
1647 return s
1648
[1589]1649 @print_log_dec
[513]1650 def __sub__(self, other):
1651 """
1652 implicit on all axes and on Tsys
1653 """
1654 varlist = vars()
1655 s = None
1656 if isinstance(other, scantable):
[1588]1657 s = scantable(self._math._binaryop(self, other, "SUB"))
[513]1658 elif isinstance(other, float):
[876]1659 s = scantable(self._math._unaryop(self, other, "SUB", False))
[513]1660 else:
[718]1661 raise TypeError("Other input is not a scantable or float value")
[513]1662 s._add_history("operator -", varlist)
1663 return s
[710]1664
[1589]1665 @print_log_dec
[513]1666 def __mul__(self, other):
1667 """
1668 implicit on all axes and on Tsys
1669 """
1670 varlist = vars()
1671 s = None
1672 if isinstance(other, scantable):
[1588]1673 s = scantable(self._math._binaryop(self, other, "MUL"))
[513]1674 elif isinstance(other, float):
[876]1675 s = scantable(self._math._unaryop(self, other, "MUL", False))
[513]1676 else:
[718]1677 raise TypeError("Other input is not a scantable or float value")
[513]1678 s._add_history("operator *", varlist)
1679 return s
1680
[710]1681
[1589]1682 @print_log_dec
[513]1683 def __div__(self, other):
1684 """
1685 implicit on all axes and on Tsys
1686 """
1687 varlist = vars()
1688 s = None
1689 if isinstance(other, scantable):
[1589]1690 s = scantable(self._math._binaryop(self, other, "DIV"))
[513]1691 elif isinstance(other, float):
1692 if other == 0.0:
[718]1693 raise ZeroDivisionError("Dividing by zero is not recommended")
[876]1694 s = scantable(self._math._unaryop(self, other, "DIV", False))
[513]1695 else:
[718]1696 raise TypeError("Other input is not a scantable or float value")
[513]1697 s._add_history("operator /", varlist)
1698 return s
1699
[530]1700 def get_fit(self, row=0):
1701 """
1702 Print or return the stored fits for a row in the scantable
1703 Parameters:
1704 row: the row which the fit has been applied to.
1705 """
1706 if row > self.nrow():
1707 return
[976]1708 from asap.asapfit import asapfit
[530]1709 fit = asapfit(self._getfit(row))
[718]1710 if rcParams['verbose']:
[530]1711 print fit
1712 return
1713 else:
1714 return fit.as_dict()
1715
[1483]1716 def flag_nans(self):
1717 """
1718 Utility function to flag NaN values in the scantable.
1719 """
1720 import numpy
1721 basesel = self.get_selection()
1722 for i in range(self.nrow()):
[1589]1723 sel = self.get_row_selector(i)
1724 self.set_selection(basesel+sel)
[1483]1725 nans = numpy.isnan(self._getspectrum(0))
1726 if numpy.any(nans):
1727 bnans = [ bool(v) for v in nans]
1728 self.flag(bnans)
1729 self.set_selection(basesel)
1730
    def get_row_selector(self, rowno):
        """
        Return a selector matching the single row 'rowno' by its
        beam, IF, polarisation, scan and cycle numbers.
        """
        return selector(beams=self.getbeam(rowno),
                        ifs=self.getif(rowno),
                        pols=self.getpol(rowno),
                        scans=self.getscan(rowno),
                        cycles=self.getcycle(rowno))
[1573]1737
    def _add_history(self, funcname, parameters):
        # Record a function call (name plus stringified parameters) in the
        # scantable history, timestamped and '##'-delimited.
        if not rcParams['scantable.history']:
            return
        # create date
        sep = "##"
        from datetime import datetime
        dstr = datetime.now().strftime('%Y/%m/%d %H:%M:%S')
        hist = dstr+sep
        hist += funcname+sep#cdate+sep
        # 'self' is always present in vars() captures but carries no info
        if parameters.has_key('self'): del parameters['self']
        for k, v in parameters.iteritems():
            if type(v) is dict:
                # flatten one level of nested keyword dictionaries
                for k2, v2 in v.iteritems():
                    hist += k2
                    hist += "="
                    if isinstance(v2, scantable):
                        hist += 'scantable'
                    elif k2 == 'mask':
                        # masks are compressed into [start, end] segments
                        if isinstance(v2, list) or isinstance(v2, tuple):
                            hist += str(self._zip_mask(v2))
                        else:
                            hist += str(v2)
                    else:
                        hist += str(v2)
            else:
                hist += k
                hist += "="
                if isinstance(v, scantable):
                    hist += 'scantable'
                elif k == 'mask':
                    # masks are compressed into [start, end] segments
                    if isinstance(v, list) or isinstance(v, tuple):
                        hist += str(self._zip_mask(v))
                    else:
                        hist += str(v)
                else:
                    hist += str(v)
            hist += sep
        hist = hist[:-2] # remove trailing '##'
        self._addhistory(hist)
1777
[710]1778
[484]1779 def _zip_mask(self, mask):
1780 mask = list(mask)
1781 i = 0
1782 segments = []
1783 while mask[i:].count(1):
1784 i += mask[i:].index(1)
1785 if mask[i:].count(0):
1786 j = i + mask[i:].index(0)
1787 else:
[710]1788 j = len(mask)
[1118]1789 segments.append([i, j])
[710]1790 i = j
[484]1791 return segments
[714]1792
[626]1793 def _get_ordinate_label(self):
1794 fu = "("+self.get_fluxunit()+")"
1795 import re
1796 lbl = "Intensity"
[1118]1797 if re.match(".K.", fu):
[626]1798 lbl = "Brightness Temperature "+ fu
[1118]1799 elif re.match(".Jy.", fu):
[626]1800 lbl = "Flux density "+ fu
1801 return lbl
[710]1802
[876]1803 def _check_ifs(self):
1804 nchans = [self.nchan(i) for i in range(self.nif(-1))]
[889]1805 nchans = filter(lambda t: t > 0, nchans)
[876]1806 return (sum(nchans)/len(nchans) == nchans[0])
[976]1807
    def _fill(self, names, unit, average):
        # Populate this scantable by importing one or more files
        # (rpfits/sdfits/ms) through the stfiller, optionally
        # scan-averaging the integrations and merging everything
        # into a single table.
        import os
        from asap._asap import stfiller
        first = True
        fullnames = []
        for name in names:
            # expand environment variables and '~' before checking existence
            name = os.path.expandvars(name)
            name = os.path.expanduser(name)
            if not os.path.exists(name):
                msg = "File '%s' does not exists" % (name)
                if rcParams['verbose']:
                    asaplog.push(msg)
                    print asaplog.pop().strip()
                    return
                raise IOError(msg)
            fullnames.append(name)
        if average:
            asaplog.push('Auto averaging integrations')
        # storage type flag: 1 = disk based, 0 = memory based
        stype = int(rcParams['scantable.storage'].lower() == 'disk')
        for name in fullnames:
            tbl = Scantable(stype)
            r = stfiller(tbl)
            rx = rcParams['scantable.reference']
            r._setreferenceexpr(rx)
            msg = "Importing %s..." % (name)
            asaplog.push(msg, False)
            print_log()
            r._open(name, -1, -1)
            r._read()
            if average:
                tbl = self._math._average((tbl, ), (), 'NONE', 'SCAN')
            if not first:
                # append the new table to what has been read so far
                tbl = self._math._merge([self, tbl])
            Scantable.__init__(self, tbl)
            r._close()
            del r, tbl
            first = False
        if unit is not None:
            self.set_fluxunit(unit)
        self.set_freqframe(rcParams['scantable.freqframe'])
1848
[1402]1849 def __getitem__(self, key):
1850 if key < 0:
1851 key += self.nrow()
1852 if key >= self.nrow():
1853 raise IndexError("Row index out of range.")
1854 return self._getspectrum(key)
1855
1856 def __setitem__(self, key, value):
1857 if key < 0:
1858 key += self.nrow()
1859 if key >= self.nrow():
1860 raise IndexError("Row index out of range.")
1861 if not hasattr(value, "__len__") or \
1862 len(value) > self.nchan(self.getif(key)):
1863 raise ValueError("Spectrum length doesn't match.")
1864 return self._setspectrum(value, key)
1865
    def __len__(self):
        # a scantable's length is its number of rows (spectra)
        return self.nrow()
1868
    def __iter__(self):
        # yield each row's spectrum in storage order
        for i in range(len(self)):
            yield self[i]
Note: See TracBrowser for help on using the repository browser.