\documentclass[11pt]{article}
\usepackage{a4}
\usepackage{calc}
\usepackage{ifthen}
\usepackage{smartref}
\usepackage{longtable}

\def\complete{no}

% Adjust the page size
\addtolength{\oddsidemargin}{-0.4in}
\addtolength{\evensidemargin}{+0.4in}
\addtolength{\textwidth}{+0.8in}

\setlength{\parindent}{0mm}
\setlength{\parskip}{1ex}


\title{ASAP - ATNF Spectral Analysis Package\\
Software Requirements - Development Cycle 2 }
\author{Chris Phillips \& Malte Marquarding}

\newcounter{requirement}
\newcounter{subrequirement}

\addtoreflist{requirement}
\newcommand{\reqref}[1]{R\ref{#1}-\requirementref{#1}}

\newcommand{\makenote}[1]{{\bf \tt \em#1}}

\newcommand{\anitem}[2]{\smallskip \parbox[t]{2cm}{#1}%
\parbox[t]{\textwidth-2cm}{#2}}

\newcommand{\showreqcounter}{
R\arabic{section}.\arabic{subsection}-\arabic{requirement}
}

\newcommand{\showsubreqcounter}{
R\arabic{section}.\arabic{subsection}-\arabic{requirement}.\arabic{subrequirement}
}

\newcommand{\status}[2]{
\ifthenelse{\equal{#1}{Started}}{Started \hspace*{1cm} {\em Priority #2}}
{\ifthenelse{\equal{#1}{Not started}}{Not Started\hspace*{1cm} {\em Priority #2}}
{\ifthenelse{\equal{#1}{Done1}}{Completed}
{ \ifthenelse{\equal{#1}{Duplicate}}{Duplicate?}
{#1}
}}}
}

% Also Deferred, Obsolete

% Requirement command
% Summary
% Status
% Priority
% Time estimate
% Percent complete
% Completion date
% Text

%\newarray\Requirements

%\newcounter{numreq}

\newcommand{\requirement}[7]{
\setcounter{subrequirement}{0}
\stepcounter{requirement}

\ifthenelse{\equal{\complete}{yes}}
{\dorequirement{#1}{#2}{#3}{#4}{#5}{#6}{#7}}
{\ifthenelse{\equal{#2}{Done1}
\or \equal{#2}{Deferred}
\or \equal{#2}{Removed}
\or \equal{#2}{Obsolete}}{}
{\dorequirement{#1}{#2}{#3}{#4}{#5}{#6}{#7}}}
}

\newcommand{\dorequirement}[7]{
\hspace*{2mm}\begin{minipage}{\textwidth-2mm}
\setlength{\parindent}{-2mm}
{\bf \showreqcounter\ \bf #1} \\
#7 \\
\hspace*{1cm} \status{#2}{#3}
\end{minipage}

\typeout{REQUIREMENT: \showreqcounter & #1 & #2 & #3 & #4 & #5 & #6 :ENDREQ}
}

\newcommand{\subrequirement}[7]{
\stepcounter{subrequirement}

\ifthenelse{\equal{\complete}{yes}}
{\dosubrequirement{#1}{#2}{#3}{#4}{#5}{#6}{#7}}
{\ifthenelse{\equal{#2}{Done1}
\or \equal{#2}{Deferred}
\or \equal{#2}{Obsolete}
\or \equal{#2}{Removed}}{}
{\dosubrequirement{#1}{#2}{#3}{#4}{#5}{#6}{#7}}}
}

\newcommand{\dosubrequirement}[7]{
\hspace*{2mm}\begin{minipage}{\textwidth-2mm}
\setlength{\parindent}{-2mm}
{\bf \showsubreqcounter\ \bf #1} \\
#7 \\
\hspace*{1cm} \status{#2}{#3}
\end{minipage}

\typeout{REQUIREMENT: \showsubreqcounter & #1 & #2 & #3 & #4 & #5 & #6 :ENDREQ}
}


\newcommand{\extendedrequirement}[7]{
\setcounter{subrequirement}{0}

\hspace*{2mm}\begin{minipage}{\textwidth-2mm}
\setlength{\parindent}{-2mm}
\stepcounter{requirement}
{\bf \showreqcounter\ #1}
#7
\hspace*{1cm} \status{#2}{#3}
\end{minipage}

\typeout{REQUIREMENT: \showreqcounter & #1 & #2 & #3 & #4 & #5 & #6 :ENDREQ}
}


\newcommand{\reqeqn}[1]{\\\hspace*{1cm} $#1$}

\let\oldsection\section
\renewcommand{\section}[1]{\setcounter{requirement}{0}\oldsection{#1}}

\let\oldsubsection\subsection
\renewcommand{\subsection}[1]{\setcounter{requirement}{0}\oldsubsection{#1}}

\begin{document}

\maketitle


%\tableofcontents
%\newpage

\section{Introduction}

ASAP has been written to replace the venerable single-dish software
{\tt spc} for processing single-dish spectral line data from all
ATNF observatories. Version 1.0 of ASAP was released in March
2005. This document reflects an update of the initial requirements
document. Some new requirements have been added and the requirement
priorities have been reassessed for the next development cycle.

\section{Scope}

ASAP should be able to process all spectral line single-dish
observations from ATNF telescopes (Parkes, Mopra \& Tidbinbilla). This
includes reading the data produced by the telescope, calibration and
reduction of the data and basic analysis of the data such as fitting
line profiles etc.

It has been assumed that the following processing is outside the scope
of ASAP.
\begin{itemize}
\item Raster or ``on-the-fly'' mapping (this is handled by
``livedata'' and gridzilla).
\item Very complex or specific data processing. (A route into
Class\footnote{Part of the GILDAS software package, produced by
Institut de Radio Astronomie Millim\'etrique http://www.iram.fr}
should be available for advanced processing).
%%TODO%% give example
\item Continuum data.
\item Pulsar timing observations.
\end{itemize}

\section{Priorities}

Requirements have been given a priority value of 1 to 3, with ``1''
the highest. The remaining requirements will be implemented mainly in
order of priority. Priority 3 and some priority 2 requirements will
probably not be implemented within the second development cycle.

\section{User Interface}

The user interface (UI) is the most important part of a single dish
processing package, but probably the most difficult to get right. Long
term, the UI for this software will consist of three parts.
\begin{itemize}
\item A graphical user interface (GUI).
\item An interactive command line interface (CLI).
\item A scriptable interface for batch processing.
\end{itemize}

The CLI and scriptable interface are essentially the same.

The software does not {\em need} to be able to run solely from a
``vt100'' style terminal. It can be assumed that the user is running
the software from within a windowed (i.e. X11) environment. This will
mean it will not necessarily be possible to run the software remotely
over a slow network connection (e.g. internationally or from home).
Where possible, operations on the data should be possible from all
three aspects of the user interface.

The user interface needs to be implemented so that the user can easily
and transparently work on spectra either one at a time or by
processing multiple spectra in parallel. This means there must be an
easy way to select specific or multiple spectra to display or process.

At this stage the development of a GUI has been deferred until the
basic package has stabilised and most features have been
implemented. At that stage a decision will be made on how best to
implement a GUI. On a shorter timescale, special-purpose GUIs (such as
a simple Wizard for processing standard Mopra data) may be produced on
an as-needed basis.

\subsection{Graphical User Interface}

At this stage a general ASAP GUI has been deferred to a later date.

\smallskip

\requirement{Simple interface}{Deferred}{}{}{}{}{It should be simple,
intuitive and uncluttered. Specifically, use of many windows
simultaneously should be discouraged, as should hiding functionality
behind layers of dialog boxes.}

\requirement{Integrated plotter}{Deferred}{}{}{}{}{The plotting window
should be a major component of the GUI control, not a separate
isolated window.}

\requirement{Minimal controls}{Deferred}{}{}{}{}{The interface should use
minimal ``always visible'' controls, with use of pull down menus and
maybe a toolbar for frequently used functions.}

\requirement{Keyboard shortcuts}{Deferred}{}{}{}{}{Keyboard shortcuts should
be available.}

\requirement{GUI user preferences}{Deferred}{}{}{}{}{Most user preferences
(i.e. keywords in the CLI) should be presented in a popup, tabbed,
dialog box.}

\requirement{GUI line fitting}{Deferred}{}{}{}{}{When performing line
profile fitting, a spreadsheet type window should be viewable which
shows the current parameter values (amplitude, velocity etc) for each
line fitted, and allow the user to change these parameters or set the
current value as fixed. This GUI should stay synchronised with any CLI
changes to these values.}

\requirement{Mopra online GUI}{Not started}{1}{}{}{}
{A simple GUI to do very basic online reduction of Mopra data should
be available. This should include averaging data and forming quotient
spectra.}

\subsection{Command Line Interface}

The command line interface is the main user interface to ASAP. It is
implemented in ipython using an object-oriented command approach.

\requirement{Virtual CLI}{Obsolete}{}{}{}{}{While the GUI should be the main
interface for new users and for basic manipulation, some tasks can be
more efficiently performed using a CLI. A virtual CLI could be
integrated as part of the GUI.}

\requirement{CLI keyword/argument}{Obsolete}{}{}{}{}{The CLI should have a
keyword/argument form and never prompt the user for specific values
(the user should be able to change values which are retained until
they want to change them again).}

\requirement{CLI case insensitive}{Obsolete}{}{}{}{}{The CLI should be case
insensitive and accept minimum matching and short forms of
keywords.}

\requirement{CLI available routines}{Done1}{}{}{}{}{The user must be able to
quickly and easily see from the command line the available routines
and the keywords which affect them, so they can see which parameters may
need changing.}

\subsection{Scripting}

\requirement{Scripting}{Done1}{1}{}{}{}{It must be possible to run the
software in a scripting mode. This would be to process large amounts
of data in a routine manner and also to automatically reproduce
specific plots etc (so the scripting must have full control of the
plotter). Preferably the scripting ``language'' and the CLI would be
the same.}

\requirement{Standard scripts}{Not started}{1}{}{}{} {ASAP should come
with sample scripts which users can use for standard observing modes
(e.g. standard Mopra observing, Parkes polarimetry etc).}

%\requirement{Scripts from History}{Duplicate}{}{}{}{}{It would be worthwhile
%having a method to auto-generate scripts (for reduction or plotting)
%from current spectra history, or some similar method.}

\section{Plotter}

The plotter should be fully interactive and be an integral part of the
GUI and software interface.

\requirement{High quality plots}{Done1}{}{}{}{}{It must be able to
produce plots of publishable quality.}

\subrequirement{Histogram plots}{Not started}{1}{0.5d}{}{} {As well as line
plots, there needs to be an option to plot spectra in ``Histogram''
mode.}


The user must be able to specify:

\subrequirement{Line Thickness}{Started}{1}{0.1d}{}{}{}

\subrequirement{Character size}{Not started}{1}{0.1d}{}{}{}

\subrequirement{Colours}{Started}{1}{0.75d}{}{}{}

\subrequirement{Line styles}{Not started}{1}{0.5d}{}{}{}

\subrequirement{Position of axis ticks}{Done1}{2}{}{}{}{}

\subrequirement{Hard Copies}{Done1}{1}{}{}{}{Producing hard copies
in postscript and .png format. Other formats may be added on an
as-needed basis.}

\subrequirement{Non-interactive hard copies}{Not started}{1}{2d}{}{}
{It must be possible to produce hard copies without an interactive
(i.e. X11) plotter starting.}

\subrequirement{Scriptable plotting}{Done1}{1}{}{}{} {All aspects of
the plotter (zooming etc) must be settable from the command line for
scripting.}

\requirement{Arbitrary plots}{Not started}{3}{}{}{}
{It must be possible to flexibly select the data to plot (e.g. Tsys vs
time etc as well as plots such as amplitude vs channel number or
velocity). Preferably any of the header values for a selection of
scans could be plotted on a scatter plot (e.g. Tsys vs elevation).}

\requirement{Overlay spectra}{Done1}{}{}{}{}{It must be possible to overlay
multiple spectra on a single plot using different colours and/or
different line styles. (Including multiple stokes data and multiple
IFs).[[CHECK]]}

\requirement{Plot individual spectra}{Done1}{}{}{}{}{It must be possible to
plot the individual integrations (either in a stacked
fashion or using a new subplot per integration).}

\subrequirement{Auto-average integrations for plotting}{Not started}{2}{}{}{}
{It should be possible to optionally auto-average integrations of a
scan for plotting (for data that has not already been scan averaged).}

\requirement{Plotter multi-panelling}{Done1}{1}{}{}{}
{It must be possible to multi-panel spectra in an n$\times$m size
grid. It must be possible to easily change the number of plots per
page, i.e. define the ``n'' and ``m'' values.}

\subrequirement{Step between plots}{Not started}{2}{}{}{}
{If more spectra than can fit on the plot matrix are to be plotted,
then it must be possible to step back and forth between the viewable
spectra (i.e. ``multi-page'' plots). This includes stepping through a
single plot one page at a time.}

\requirement{Multi-panel: change \# panels}{Not started}{2}{}{}{}
{When using multi-panelling, the plotter should automatically update
the plot when the plot matrix dimensions (``n'' and ``m'') are changed.}

\requirement{Plotter interactive zooming}{Done1}{}{}{}{}{It must be possible
to interactively zoom the plot (channel range selection and
amplitude of the spectra etc.). This includes both GUI control of
the zooming as well as command line control of either the zoom
factor or directly specifying the zoom bounds.}

\requirement{Zoomed subplot}{Not started}{2}{}{}{}
{On a single plot, it should be possible to plot the full spectrum and
a zoomed copy of the data (using a different line style) to see weak
features. The user must be able to specify the zoom factor.}

\requirement{Offset plots}{Not started}{2}{}{}{}{Optionally when stacking
multiple spectral plots in one subwindow, a (user definable) offset
in the ``y'' direction should be added to each subsequent
spectrum.}

\requirement{Plotter auto-update}{Not started}{3}{}{}{}
{The plotter should automatically update to reflect user processing,
either from the CLI or GUI. The user should have the option to turn
this feature off if they so wish.}

\requirement{Waterfall plot}{Not started}{3}{}{}{}
{It should be possible to plot individual integrations (possibly from
multiple scans) in a ``waterfall'' plot. This is an image based
display, where spectral channel is along the x-axis of the plot, time
(or integration number) along the y-axis and greyscale or colour
represents the amplitude of the spectra. Interactive zooming and panning of
this image should be supported.}

\requirement{Waterfall editing}{Not started}{3}{}{}{}
{When plotting ``waterfall'' plots, it should be possible to
interactively select regions or points and mark them as invalid
(i.e. to remove RFI affected data). The plotter should also show the
time/velocity of the pixel beneath the cursor.}

\requirement{Export waterfall to FITS}{Not started}{3}{}{}{}
{It should be possible to export the ``waterfall'' plot images as a
FITS file, for user-specific analysis.}

\requirement{Plot line catalog overlays}{Not started}{1}{3d}{}{} {Line marker overlays,
read from a catalogue, should be optionally available. This would
include the full Lovas catalogue, the JPL line catalogue and radio
recombination lines. The lines would be Doppler corrected to a
specified velocity. The user must be able to plot just a sub-section
of the lines in any specific catalogue (to avoid clutter).}

\subrequirement{Plot user catalog overlays}{Not started}{2}{}{}{}
{A simple user-definable catalogue should be available for plot overlays.}

\requirement{Plot fitted functions}{Done1}{}{}{}{}
{Optionally plot fitted functions (e.g. line profiles or baseline
fit). If multiple components (e.g. Gaussian) have been fit, it should
be possible to show the individual functions or the sum of the
components.}

\requirement{Plot residual data}{Started}{1}{0.5d}{}{}
{It should be possible to plot the residual data with or without
subtraction of fit functions. This includes plotting the spectra with
or without baseline removal and the residual after subtracting
Gaussian fits. The default should be to plot the data with baseline
subtracted but profile fits not subtracted.}

\requirement{Plot header data}{Not started}{2}{}{}{} {Basic header data
(source name, molecule, observation time, Tsys, elevation, parallactic
angle etc) should be optionally shown, either on the plot or next to
it. This may either consist of a set of values, or only one or two
values the user specifically wants to see (source name and molecule,
for example).}

\subrequirement{User define header plot positions}{Not started}{3}{}{}{}
{The user should be able to define where on the plot the header info
would appear.}

\requirement{Realtime cursor position}{Done1}{}{}{}{}
{Optionally, relevant data such as the current mouse position should
be displayed (maybe with a mode to display an extended cross,
horizontal or vertical line at the current cursor position).}

\requirement{Plot annotations}{Not started}{2}{}{}{}{The user should be able
to define simple annotations. This would include text overlay and
probably simple graphics (lines, arrows etc).}

The user must be able to use the plotter window to interactively set
initial values and ranges used for fitting functions etc. The use of
keyboard ``shortcuts'' or other similar ``power user'' features should
be available to save the time of experienced users.

{\em The following requirements are deferred as they require a version
of Matplotlib not yet available.}

The plotter should be used to set the following values:

\requirement{Interactive channel selection}{Deferred}{1}{}{}{}{Range of
spectral channels needed for specific tasks (i.e. the channel mask)
(See requirement \reqref{ref:chansel}).}

\requirement{Interactive line fitting}{Deferred}{1}{}{}{}{Initial
Gaussian parameters (velocity, width, amplitude) for profile
fitting.}

\requirement{Plotter change fit values}{Deferred}{1}{}{}{}
{Change the parameter values of existing line profile
fits, or channel ranges used for baseline fits.}

\section{Functionality}

\subsection{Import/export}

The software needs a set of import/export functions to deal with a
variety of data formats and to be able to exchange data with other
popular packages. These functions should be flexible enough to allow
the user to perform analysis functions in a different package and
re-import the data (or vice versa). The import function must be
modular enough to easily add new file formats when the need arises.
To properly import data, extra information may have to be read from
secondary calibration files (such as GTP, Gated Total Power, for 3~mm
wavelength data taken with Mopra). The import functions should be
flexible enough to gracefully handle data files with missing headers
etc. They should also be able to make telescope and date specific
corrections to the data (for ATNF observatories).

The software must be able to read (import) the following file formats.

\requirement{Read rpfits}{Done1}{}{}{}{}{The rpfits file format produced by
all current ATNF correlators.}

\requirement{Read sdfits}{Done1}{}{}{}{}{SDFITS (currently written by {\tt SPC}).}

\requirement{Read simple FITS}{Not started}{2}{}{}{}{Simple ``image'' FITS
(used by CLASS).}

\requirement{Read historic formats}{Not started}{3}{}{}{}
{Historic ATNF single dish formats (Spectra, SPC, SLAP). Possibly a
set of routines to translate these formats to SDFITS would suffice.}

\requirement{Read PSRFITS}{Deferred}{}{}{}{}{PSRFITS for pulsar spectroscopy.}

\requirement{Read online data}{Not started}{1}{0.1d}{}{}
{For online analysis, the software should be able to read an rpfits
file which is still open for writing by the telescope
backend processor.}

\requirement{Handle Doppler data}{Done1}{1}{}{}{}{Data which has been
observed in either a fixed frequency or Doppler tracked fashion needs
to be handled.}

The software should be able to export the data in the following formats.

\requirement{Write SDFITS}{Done1}{}{}{}{}{Single Dish FITS.}

\requirement{Write simple FITS}{Done1}{}{}{}{}
{Simple ``image'' FITS (as used by CLASS). It must be possible to
export multiple spectra simultaneously, using default file name and
postfix.}

\requirement{}{Removed}{}{}{}{}{In a format which can be imported by other popular
packages such as Class.}

\requirement{Write ASCII}{Done1}{}{}{}{}
{Simple ASCII format, suitable for use with programs such as Perl,
Python, SuperMongo etc.}

\requirement{Header writing}{Started}{1}{0d}{}{}
{The exported data should retain as much header data as possible. It
should also be possible to request specific data be written in the
desired form (B1950 coordinates, optical velocity definition etc).}

\requirement{Import corrections}{Done1}{}{}{}{}
{The import function should apply relevant corrections (especially
those which are time dependent) to specific telescopes. See
$\S$\ref{sec:issues} for a list of currently known issues.}

\requirement{Append output files}{Not started}{1}{3d}{}{} {It must be possible
to append spectra to existing output files, specifically sdfits and
asap output files.}

\subsection{Sky subtraction}
\label{sec:skysubtraction}
To remove the effects of the passband filter shape and atmospheric
fluctuations across the band, sky subtraction must be performed on the
data. The software must be able to do sky subtraction using both
position switching (quotient spectra) and frequency switching
techniques.

\requirement{Quotient Spectra}{Done1}{}{}{}{}
{\label{ref:skysub} Position switched sky subtraction should be
implemented using the algorithm \medskip\reqeqn{T_{ref} \times
\frac{S}{R} - T_{sig}} -- removes continuum\bigskip \reqeqn{T_{ref}
\times \frac{S}{R} - T_{ref}} -- preserves continuum\medskip}
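As an illustrative sketch only (plain Python/NumPy rather than ASAP
syntax; the function name and array layout are assumed for
illustration), the two quotient forms above amount to:

{\small
\begin{verbatim}
# Illustrative sketch only, not ASAP code.
import numpy as np

def quotient(sig, ref, tsys_ref, tsys_sig=None, preserve_continuum=False):
    """Position-switched quotient of a signal and reference spectrum.

    sig, ref : 1-D arrays holding the on-source and reference spectra
    tsys_ref : system temperature of the reference scan (T_ref)
    tsys_sig : system temperature of the signal scan (T_sig)
    """
    if preserve_continuum:
        # T_ref * S/R - T_ref : continuum is preserved
        return tsys_ref * sig / ref - tsys_ref
    # T_ref * S/R - T_sig : continuum is removed
    return tsys_ref * sig / ref - tsys_sig
\end{verbatim}
}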

\requirement{Arbitrary reference}{Not started}{2}{}{}{}
{The user should be able to specify an arbitrarily complex
reference/source order (which repeats), which can then be used to
perform multiple sky subtractions in parallel.}

\requirement{Frequency switching}{Not started}{2}{}{}{}
{Frequency switched sky subtraction should be supported. (Ref. Liszt,
1997, A\&AS, 124, 183).}

%\requirement{For wideband multibit sampled data it may be desirable or
%even required to assume Tsys has a frequency dependency. Appropriate
%sky subtraction algorithms will need to be investigated.}{3}

\requirement{Pulsar off pulse quotient}{Deferred}{3}{}{}{}
{For pulsar binned data, the (user specified) off pulse bins can be
used as the reference spectra. Due to potentially rapid amplitude
fluctuations, sky subtractions may need to be done on a
per-integration basis.}

Multibeam systems can observe in a nodding fashion (called MX mode at
Parkes), where the telescope position is nodded between scans so that
the source is observed in turn by two beams and a reference spectrum
for one beam is obtained while the other is observing the target source.

\requirement{Multibeam MX mode}{Not started}{2}{}{}{}
{For multibeam systems, it must be possible to perform sky subtraction
with the source and reference in an alternate pair of beams.}

\subsection{Baseline removal}

Baseline removal is needed to correct for imperfections in sky
subtraction. Depending on the stability of the system, the residual
spectral baseline errors can be small or quite large. Baseline removal
is usually done by fitting a function to the (user specified) line
free channels.

\requirement{Baseline removal}{Done1}{}{}{}{}
{The software must be able to do baseline removal by fitting an $n$th
order polynomial to the line free channels using a least squares
method.}
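Purely as an illustrative sketch (generic NumPy, not ASAP syntax; the
function name and the line-free mask layout are assumptions), such a
least-squares polynomial baseline fit could look like:

{\small
\begin{verbatim}
# Illustrative sketch only, not ASAP code.
import numpy as np

def subtract_baseline(spectrum, line_free, order=2):
    """Fit an order-n polynomial to the line-free channels and subtract it.

    spectrum  : 1-D array of spectral amplitudes
    line_free : boolean mask (or index array) of line-free channels
    order     : polynomial order n
    """
    chans = np.arange(len(spectrum))
    coeffs = np.polyfit(chans[line_free], spectrum[line_free], order)
    baseline = np.polyval(coeffs, chans)
    return spectrum - baseline, coeffs
\end{verbatim}
}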

\requirement{Standing wave ripples}{Not started}{3}{}{}{}
{Removal of standing wave ripples should be done by fitting a sine
function to the line free channels.}

\requirement{Robust fitting}{Not started}{3}{}{}{}
{``Robust'' fitting functions should be available, which are more
tolerant to RFI.}

\requirement{Auto-baseline}{Done1}{}{}{}{}
{Automatic techniques for baselining should be investigated.}

\subsection{Line Profile Fitting}

The user will want to fit multicomponent line profiles to the data in
a simple manner and be able to manipulate the exact fitting
parameters.

\requirement{Gaussian fitting}{Done1}{}{}{}{}
{The software must be able to do multi-component Gaussian fitting of
the spectra. The initial amplitude, width and velocity of each
component should be able to be set by the user and specific values to
be fit should be easily set.}

\requirement{Chi squared}{Done1}{}{}{}{}
{The reduced chi-squared (or similar statistic) of the fit should be given
to the user, so that they can easily see if adding extra components
gives a statistically significant improvement to the fit.}

%\requirement{The fit parameters should be stored with the data so that
%the user can work on multiple data sets simultaneously and experiment
%with different fitting values. These values should be saved to disk
%along with the data.}{1}

\requirement{Fit multipol data}{Done1}{}{}{}{}
{For multiple polarisation data, the individual Stokes parameters or
polarisation products should be fit independently.}

\requirement{Export fit parameters}{Not started}{1}{1d}{}{}
{There should be an easy way of exporting the fit parameters from
multiple spectra, e.g. as an ASCII table.}

\requirement{Constrained fitting}{Not started}{2}{}{}{}
{It should also be possible to do constrained fitting of multiple
hyperfine components, e.g. the NH$_3$ hyperfine components. (The
constraints may be either the frequency separation of the individual
components or the amplitude ratio etc.)}

\subrequirement{Specific constrained fitting}{Not started}{1}{4d}{}{}
{Before arbitrary constrained fitting is available, support for
specific cases, such as the NH$_3$ hyperfine transition, should be
added on a case-by-case basis.}

\requirement{Edit fits parameters}{Done1}{}{}{}{}
{It must be possible to alter the line profile fit parameter values by
hand at any stage.}

\requirement{Fix fit parameters}{Done1}{}{}{}{}
{It must be possible to ``fix'' particular values of the line profile
parameters, so that only a subset of lines or (say) the width of a line
is fit.}

\requirement{Arbitrary line fitting}{Done1}{}{}{}{}
{The software should allow hooks for line profile shapes other than
Gaussian to be added in the future, possibly user specified.}

\requirement{Save fit parameters}{Done1}{}{}{}{}
{The fitting parameters for functions which have been fit to the data
(e.g. for baseline removal or Gaussian fits) should be retained as an
integral part of the data and stored permanently on disk.}

\requirement{Undo subtracted fits}{Not started}{3}{}{}{}
{It should be possible to ``undo'' functions which have been
subtracted from the data (e.g. baseline polynomials).}

\requirement{Gaussian line area}{Not started}{1}{0.2d}{}{}
{Optionally the area under a fitted Gaussian should be calculated for
the user.}

%\makenote{Should it be possible to attach multiple sets of fits to the
%data (similar to CL tables in classic AIPS), so the user can
%experiment with different ways of fitting the data?}

%\makenote{Should calculations of rotational temperatures etc be
%handled when fitting hyperfine components, or should the user be doing
%this themselves?}

\subsection{Calibration}

The software should handle all basic system temperature (Tsys) and
gain calibration as well as opacity corrections where relevant. The
Tsys value should be contained in the rpfits files. The
T$_{\mbox{sys}}$ factor will be applied as part of
the sky subtraction ($\S$\ref{sec:skysubtraction}). The units of Tsys
recorded in the data may be either in Jy or Kelvin, which will affect
how the data is calibrated. The rpfits file does {\em not} distinguish
if the flux units are Kelvin or Janskys.

\requirement{Gain-elevation}{Done1}{}{}{}{}
{Gain elevation corrections should be implemented using an elevation
dependent polynomial. The polynomial coefficients will be telescope
and frequency dependent. They will also have a (long term) time
dependence.}
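As a sketch only (generic NumPy; the coefficients shown are invented
for illustration and are not real telescope values), an
elevation-dependent gain correction could be applied as:

{\small
\begin{verbatim}
# Illustrative sketch only; coefficients are hypothetical.
import numpy as np

GAIN_COEFFS = [0.72, 9.5e-3, -8.0e-5]   # g(el) = c0 + c1*el + c2*el**2

def gain_correction(spectrum, elevation_deg, coeffs=GAIN_COEFFS):
    """Divide a spectrum by the relative gain at this elevation."""
    g = np.polyval(coeffs[::-1], elevation_deg)  # polyval wants highest order first
    return spectrum / g
\end{verbatim}
}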

\requirement{User gain polynomial}{Done1}{}{}{}{}
{The user may wish to supply their own gain polynomial.}

\requirement{K-Jy conversion}{Done1}{}{}{}{}
{When required by the user, the spectral units must be converted from
Kelvin to Jansky. At higher (3mm) frequencies this conversion is often
not applied. The conversion factor is\medskip \reqeqn{\mbox{Flux (Jy)}
= \frac{T \times 2 k_b \times 10^{26}}{\eta A}},\medskip\\where $k_b$
is Boltzmann's constant, $A$ is the illuminated area of the telescope
and $\eta$ is the efficiency of the telescope (frequency, telescope
and time dependent).}
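An illustrative sketch of this conversion (plain Python; the function
name and the example dish and efficiency values are hypothetical):

{\small
\begin{verbatim}
# Illustrative sketch only, not ASAP code.
import math

K_B = 1.380649e-23          # Boltzmann's constant [J/K]

def kelvin_to_jansky(T, dish_diameter_m, efficiency):
    """Flux (Jy) = 2 k_b T 1e26 / (eta A), with A the illuminated area."""
    A = math.pi * (dish_diameter_m / 2.0) ** 2
    return 2.0 * K_B * T * 1e26 / (efficiency * A)

# e.g. kelvin_to_jansky(1.0, 64.0, 0.7) gives roughly 1.2 Jy per Kelvin
\end{verbatim}
}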

\requirement{Scale Tsys}{Done1}{}{}{}{}
{In some cases the recorded Tsys values will be wrong. There needs to
be a mechanism to scale the Tsys value and the spectrum if the Tsys
value has already been applied (i.e. a simple and consistent rescaling
factor).}

\requirement{Opacity}{Done1}{}{}{}{}
{The data may need to be corrected for opacity effects, particularly
at frequencies of 20~GHz and higher. The opacity factor to apply is
given by\medskip\reqeqn{C_o = e^{\tau/\cos(z)}}\medskip\\ where $\tau$
is the opacity and $z$ is the zenith angle (90$-$El). These corrections
will generally be derived from periodic ``skydip'' measurements. These
values will not be contained in the rpfits files, so there should be a
simple way of the software obtaining them and interpolating in time
(the user should not {\em have} to type them in, but may want
to). Reading in an ASCII file which contains the skydip data along
with a time-stamp would be one possibility.}
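As an illustrative sketch (generic NumPy; the function names and the
skydip file layout are assumptions, not a defined interface), the
correction and the time interpolation of skydip opacities could look
like:

{\small
\begin{verbatim}
# Illustrative sketch only, not ASAP code.
import numpy as np

def opacity_factor(tau, elevation_deg):
    """C_o = exp(tau / cos(z)) with z = 90 - El (degrees)."""
    z = np.radians(90.0 - elevation_deg)
    return np.exp(tau / np.cos(z))

def tau_at(mjd, skydip_mjd, skydip_tau):
    """Zenith opacity at time mjd, interpolated from time-stamped skydips."""
    return np.interp(mjd, skydip_mjd, skydip_tau)
\end{verbatim}
}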

\requirement{Tsys variation with freq}{Not started}{3}{}{}{}
{For wideband, multibit observations, the software should have the
option to handle Tsys which varies across the band. The exact
implementation will have to be decided once experience is gained with
the new Mopra digital filterbank. This will affect the sky subtraction
algorithms (requirement \reqref{ref:skysub}).}

%\makenote{Is the dependence of gain on frequency weak enough for one
%set of coefficients for each receiver, or is a full frequency dependent
%set of values needed?}

%\makenote{Should it be possible to read ``correct'' Tsys values from
%an external ascii file?}

\subsection{Editing \& RFI robustness}

In a data set with many observations, individual spectra may be
corrupted or the data may be affected by RFI and ``birdies''. The user
needs to be able to easily flag individual spectra or channels. This
may affect other routines such as sky-subtraction, as this will
disrupt the reference/source sequence.

\requirement{Spectra flagging}{Started}{1}{1d}{}{}
{The user must be able to set an entire spectrum or part thereof
(individual polarisation, IF etc) as being invalid.}

\requirement{Channel flagging}{Started}{1}{0.5d}{}{}
{The user must be able to indicate that an individual spectral point or
range of spectral points is invalid. This should be applied to an
individual spectrum, or set of spectra.}

\subrequirement{Flagged channel plotting}{Done1}{1}{}{}{} {When plotting data
with flagged spectral channels, the plotter should leave a gap (blank)
in the plotted data.}

\subrequirement{Flagged channel interpolation}{Not started}{2}{}{}{}
{When plotting or processing data (e.g. quotient spectra), the user
should be able to request that the values for flagged data are
obtained by interpolation from good data either side of the flagged
points.}

\requirement{Plot average flux vs time}{Not started}{3}{}{}{}
{The user should be able to plot the average spectral flux across the
band, or part of the band, as a function of time and interactively
select sections of data which should be marked as invalid (similar to
IBLED in classic AIPS).}

\requirement{Robust Fitting}{Duplicate}{2}{}{}{}
{Where relevant, fitting routines etc should have the option of
selecting RFI tolerant (``robust'') algorithms. This will require
investigating alternate fitting routines other than the least-squares
approach.}

\requirement{Birdie finder}{Not started}{2}{}{}{}
{A routine to automatically find birdies or RFI corrupted data and
indicate the data as invalid would be useful.}

\requirement{Handle flagged data}{Done1}{}{}{}{}
{Other routines must be able to cope with portions of data which are
marked as invalid.}

\subsection{Spectra mathematics and manipulation}

A flexible suite of mathematical operations on the spectra should be
possible. This should include options such as adding, subtracting,
averaging and scaling the data. For common operations such as
averaging and smoothing, it must be simple for the user to invoke the
function (i.e. not to have to start up a complex spectral
calculator). Where it makes sense, it should be possible to manipulate
multiple spectra simultaneously.

The spectral manipulations which should be available are:

\requirement{Add spectra}{Done1}{}{}{}{}{Add or subtract multiple spectra.}

\requirement{Average spectra}{Done1}{1}{}{}{}
{Averaging multiple spectra, with optional weighting based on Tsys,
integration time or rms.}
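For illustration only (generic NumPy; the function name is
hypothetical), such a weighted average over a stack of spectra reduces
to:

{\small
\begin{verbatim}
# Illustrative sketch only, not ASAP code.
import numpy as np

def weighted_average(spectra, weights):
    """Weighted average of a stack of spectra (one spectrum per row).

    Typical weight choices: 1/Tsys**2, integration time, or 1/rms**2.
    """
    spectra = np.asarray(spectra, dtype=float)
    w = np.asarray(weights, dtype=float)
    return (w[:, None] * spectra).sum(axis=0) / w.sum()
\end{verbatim}
}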

\subrequirement{Average spectra with velocity shift}{Not started}{1}{0.2d}{}{}
{If the velocity of the spectra to be averaged is different, the data
should be aligned in velocity. The user should be able to turn this
feature on or off.}

\requirement{Robust averaging}{Not started}{2}{}{}{}
{Various robust averaging possibilities (e.g. median averaging,
clipped means etc) should be possible.}

\requirement{Data re-binning}{Done1}{}{}{}{}
{Re-sampling or re-binning of the data to a lower (or higher) spectral
resolution (i.e. change the number of spectral points). The
re-sampling factor may not necessarily be an integer.}

\requirement{Velocity shift}{Done1}{}{}{}{}
{It must be possible to shift the data in ``frequency/velocity''. This
should include channel, frequency and velocity shifts of an arbitrary
amount.}

\requirement{Spectra smoothing}{Done1}{}{}{}{}
{Spectral smoothing of the data. Hanning, Tukey, boxcar and Gaussian
smoothing of variable widths should be possible.}
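As a small sketch (generic NumPy; the function name is hypothetical),
the classic three-channel Hanning smooth is simply a convolution with
weights 0.25, 0.5, 0.25:

{\small
\begin{verbatim}
# Illustrative sketch only, not ASAP code.
import numpy as np

def hanning_smooth(spectrum):
    """Three-channel Hanning smooth (weights 0.25, 0.5, 0.25)."""
    kernel = np.array([0.25, 0.5, 0.25])
    return np.convolve(spectrum, kernel, mode="same")
\end{verbatim}
}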

\requirement{Spectra scaling}{Done1}{}{}{}{}{Scaling of the spectra.}

\requirement{Spectra statistics}{Done1}{}{}{}{}
{Calculate basic statistical values (maximum, minimum, rms, mean) on a
range of spectral points. The range may not be contiguous. The
calculated rms value should be retained with the spectra so it can be
optionally used for weighted averaging of spectra.}

\requirement{Line flux}{Not started}{2}{}{}{}
{It must be possible to calculate the flux integral over a range of
channels. The units should be Jy.km/s (or Kelvin.km/s). The channel
range for the calculation should be specifiable via the GUI or CLI.}

\requirement{Line width}{Not started}{2}{}{}{}
{It must be possible to calculate the numerical ``width'' of a line
(full width at half maximum type measurement). This should be
calculated by specifying a channel range and finding the maximum value
in this range and then finding the interpolated crossing points of the
data at a user defined fraction of the maximum (default 50\%). The
profile width and velocity mid-point should then be computed. If the
profile shape is complex (e.g. double arch) with multiple crossing
points of the fraction value, the minimum and maximum width values
should be calculated. There should be the option of using a user
specified ``maximum value''.}
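A minimal sketch of this width measurement (generic NumPy; the
function name and return convention are assumptions), computing the
outermost interpolated crossings and hence the maximum width and
velocity mid-point:

{\small
\begin{verbatim}
# Illustrative sketch only, not ASAP code.
import numpy as np

def line_width(velocity, amplitude, fraction=0.5, peak=None):
    """Width and mid-point from interpolated crossings of fraction*peak.

    velocity, amplitude : arrays over the user-selected channel range
    peak                : optional user-specified ``maximum value''
    Uses the outermost crossings (maximum width); the innermost
    crossings around the peak would give the minimum width.
    """
    if peak is None:
        peak = amplitude.max()
    level = fraction * peak
    above = amplitude >= level
    crossings = []
    for i in np.where(above[:-1] != above[1:])[0]:
        a0, a1 = amplitude[i], amplitude[i + 1]
        v0, v1 = velocity[i], velocity[i + 1]
        crossings.append(v0 + (level - a0) * (v1 - v0) / (a1 - a0))
    width = max(crossings) - min(crossings)
    centre = 0.5 * (max(crossings) + min(crossings))
    return width, centre
\end{verbatim}
}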

\requirement{Change rest frequency}{Done1}{}{}{}{}
{The user must be able to easily change the rest-frequency to which
the velocity is referenced.}

\requirement{FFT filtering}{Not started}{3}{}{}{}
{FFT filtering for high- and lowpass filtering and tapering.}

\requirement{FFT to/from autocorrelation function}{Not started}{3}{}{}{}
{It should be possible to FFT the data between the power spectrum and
the autocorrelation function.}

\requirement{Cross correlation}{Not started}{3}{}{}{}
{The user may wish to compute the cross correlation function of two
spectra. The result should be a standard ``spectrum'', which can be
displayed and analysed using other functions (max, rms etc).}

\requirement{Spectral calculator}{Started}{1}{?}{}{}
{Complex experiment specific processing can often be done using a
series of the simple basic functions. A spectral calculator option
should be added to the CLI to perform a series of manipulations on a
set of spectra.}

The user may want to perform specific analysis on the data using the
functionality above, but wish to do the manipulation between two
polarisations or IFs. Allowing the functions to also, optionally,
specify specific polarisations or IFs would be an implementation and
interface nightmare. The simplest solution is to allow the data to be
``split'' into separate spectra.

\requirement{Slice data}{Not started}{1}{10d}{}{} {It must be possible to
take multi IF, multibeam or polarisation data and split out a
slice of individual spectral portions to form self contained
spectra.}

\requirement{Slice spectral channels}{Not started}{1}{0d}{}{} {It must be
possible to select a range of spectral channels to form self contained
spectra. The channel selection may be different for different IFs.}

\requirement{Merge scantables}{Not started}{1}{5d}{}{}
{It must be possible to append rows from one scantable onto another.}

\subsection{Polarimetry}

The software must fully support polarimetric analysis. This includes
calibration and basic conversions. Observations may be made with
linear or circular feeds and the backend may or may not compute the
cross polarisation products. As such the software must cope with a
variety of conversions. The software should be able to calculate
Stokes parameters with or without solving for leakage terms.

%\makenote{It is debatable whether stokes I is the sum or average or
%two dual polarisation measurements.}

\requirement{Support polarimetry}{Started}{1}{?}{}{}
{All functions on the data (calibration, sky subtraction, spectral
mathematics) must support arbitrary, multiple polarisation (linear,
circular \& Stokes, and single, dual \& cross polarisations).}

\requirement{Calculate stokes I}{Done1}{}{}{}{}
{It must be possible to calculate Stokes I from single or dual
polarisation observations.}

\requirement{Average mixed pol data}{Not started}{2}{}{}{}
{Average a mixture of dual polarisation and single polarisation data
and form average Stokes I (e.g. for a long observation of a source, in
which one polarisation is missing for some time).}

\requirement{Calculate stokes}{Done1}{}{}{}{}
{Full Stokes parameters should be obtained from dual pol (linear or
circular) observations where the cross polarisation products have been
calculated.}

%\requirement{If the observations used linear polarisations and the
%cross polarisations were not computed, the source needs to be
%observed with the feeds set at least 3 different parallactic angles
%(note that if dual linear feeds are available, 2 orthogonal
%parallactic angles are obtained at once). The Stokes parameters can be
%solved using a least squares fit to the equation:
%\reqeqn{Iu/2 + Ip * cos^2 (PA + p)},\\
%where PA is the linear feed position angle, p is the polarisation
%angle, Iu and Ip and the unpolarised and linearly polarised
%intensity. {\em Stolen from SPC. I need to write this in more useful
%language. Is this technique likely to be used anymore?.}}{}{}{}{3}

\requirement{Compute stokes V without crosspol}{Not started}{2}{}{}{}
{If dual circular polarisation measurements are taken, without
computing the cross products, the software should still be able to
compute Stokes I and V.}

\requirement{Polarisation leakages}{Not started}{3}{}{}{}
{The software should be able to calculate leakage terms from a
calibrator source and correct the data either before or after
conversion to Stokes. (ref. Johnston, 2002, PASA, 19, 277)}

\requirement{Calibrate position angle}{Not started}{3}{}{}{}
{The software should be able to determine absolute position angle from
a calibrator source and correct the data either before or after
conversion to Stokes.}

\requirement{Zeeman splitting}{Not started}{3}{}{}{}
{Zeeman splitting factors should be derived from (previous) profile
fitting and the left and right circular polarisations. The velocity
shift varies linearly with the magnetic field, but the scaling factor
depends on the molecule and transition. Scaling factors for common
transitions should be known by the software, and the user should be able
to enter factors for less common transitions. Correctly identifying Zeeman
pairs is crucial in getting the correct result. The software should
attempt to make an initial guess of pairs (based on component velocity
and width) but make the user confirm and override the pairing if
required.}

967 | \subsection{Data Selection}
|
---|
968 | While the software is running the user will usually have loaded
|
---|
969 | multiple (possibly many) spectra each of which may have multiple IFs,
|
---|
970 | data from multiple beams and multiple polarisations. The user will
|
---|
971 | want to be able to quickly flip from considering one spectra to
|
---|
972 | another and, where relevant, want to perform parallel processing on
|
---|
973 | multiple spectra at once (e.g. baselining a sequence of on/off
|
---|
974 | observations of the same source which will later be averaged
|
---|
975 | together).
|
---|
976 |
|
---|
977 | \requirement{Spectra selection}{Started}{1}{0d}{}{}
|
---|
978 | {The software needs an easy-to-use mechanism to select either
|
---|
979 | individual or multiple spectra for viewing, parallel processing
|
---|
980 | etc.}
|
---|
981 |
|
---|
982 | \requirement{Beam/IF selection}{Started}{1}{0d}{}{}
|
---|
983 | {An easy-to-use mechanism to select individual IFs, beams or
|
---|
984 | polarisations is needed.}
|
---|
985 |
|
---|
986 | \requirement{Interactive channel selection}{Started}{1}{}{}{}
|
---|
987 | {\label{ref:chansel} The range of spectral points to use for baseline
|
---|
988 | removal, statistical calculations, RFI editing, analysis etc must be
|
---|
989 | easily set by the user from both the CLI and GUI. From the CLI there
|
---|
990 | must be the option of setting the range using a variety of units
|
---|
991 | (channel number, velocity, frequency). The selection range will
|
---|
992 | probably not be a contiguous set of channels, but many sets of
|
---|
993 | disjoint channel ranges. For some tasks (such as baseline subtraction
|
---|
994 | and statistical values), the channel range should be retained and be
|
---|
995 | available as a plot overlay.}
|
---|
996 |
|
---|
997 | \requirement{Auto-identify reference spectra}{Not started}{1}{5d+}{}{}
|
---|
998 | {When performing sky subtraction on many spectra simultaneously, the
|
---|
999 | software should have a mechanism for identifying ``on'' and ``off''
|
---|
1000 | spectra and automatically selecting the signal and quotient
|
---|
1001 | spectra. The algorithm needs to cope with on/off/on/off sequences as
|
---|
1002 | well as off/on/on/off. If an individual quotient spectra has been
|
---|
1003 | marked as invalid, an alternative should be found. User specified
|
---|
1004 | preference such as ``closest in time'' to ``first reference before
|
---|
1005 | source'' should be accommodated.}
|
---|
1006 |
|
---|
1007 | \requirement{Select source via header values}{Started}{1}{}{}{}
|
---|
1008 | {The software should be able to select sets of sources based on simple
|
---|
1009 | regular expression type filtering (wild cards) on a range of header
|
---|
1010 | values. }
|
---|
1011 |
|
---|
1012 | \subrequirement{Select on source name}{Done1}{1}{}{}{}
|
---|
1013 | {The use should be able to select dana on source name, e.g G309$*$ or
|
---|
1014 | G309$*$w}
|
---|
1015 |
|
---|
1016 | \subrequirement{Select on molecule}{Done1}{2}{}{}{}
|
---|
1017 | {The use should be able to select data on molecule name, e.g. NH3$*$.}
|
---|
1018 |
|
---|
1019 | \subsection{Plugins}
|
---|
1020 |
|
---|
1021 | \requirement{Plugins}{Started}{1}{?}{}{}
|
---|
1022 | {The package should support ``plugins'', user definable
|
---|
1023 | functions for specific processing. The plugin code must have full
|
---|
1024 | access (read/write) to the spectra data and headers.}
|
---|
1025 |
|
---|
1026 | \requirement{Plugins can reduce dimensions}{Not started}{2}{}{}{}
|
---|
1027 | {Plugins need to be able to create ``derived'' spectra with reduced
|
---|
1028 | dimensions (i.e. less beams, IFs, polarisations or spectral
|
---|
1029 | channels)}
|
---|
1030 |
|
---|
1031 | \requirement{Simulated data}{Not stated}{3}{}{}{}
|
---|
1032 | {The user should be able to create new spectra which the software
|
---|
1033 | treats the same as the original data. This includes full specification
|
---|
1034 | of the header items.}
|
---|
1035 |
|
---|
1036 | \subsection{Pipelining}
|
---|
1037 |
|
---|
1038 | \requirement{Pipelining}{Done1}{}{}{}{}
|
---|
1039 | {Some sort of pipelining mode is required. This would involve doing a
|
---|
1040 | quotient spectra, applying appropriate calibration and possibly
|
---|
1041 | fitting a Gaussian to any lines present.}
|
---|
1042 |
|
---|
1043 | \subsection{Methanol Multibeam Survey}
|
---|
1044 |
|
---|
The software may need to support reduction of data from the methanol
multibeam project. If so, the pipelining will need to be flexible and
powerful enough to support this.

\subsection{Miscellaneous functionality}

\requirement{Position fitting}{Not started}{2}{}{}{}
{The software should be able to take a simple ``grid'' of observations
(normally a set of observations in a cross pattern on the sky) and,
for a subset of channels, fit the position of the emission. The fitted
positions should be either plotted on the screen or exported in a
simple ASCII form.}

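One plausible approach (an assumption here, not a settled design) is to
fit a Gaussian of beamwidth $\theta_{b}$ to the amplitudes along each
arm of the cross:
\begin{displaymath}
T(\theta) = T_{0}\,
  \exp\!\left[-4\ln 2\,\frac{(\theta-\theta_{0})^{2}}{\theta_{b}^{2}}\right],
\end{displaymath}
where $\theta_{0}$ is the fitted offset along that scan direction; the
offsets from the two orthogonal arms then give the fitted position.
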
\requirement{Kinematic distance}{Not started}{3}{}{}{}
{The kinematic distance of a source should be calculated using basic
Galactic rotation models. Multiple Galactic rotation models must be
supported, along with a mechanism for easily adding more.}

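A minimal sketch of the underlying relation, assuming the simplest case
of a flat rotation curve ($\Theta(R)=\Theta_{0}$): the observed LSR
velocity at Galactic longitude $l$ gives the Galactocentric radius
\begin{displaymath}
V_{\rm LSR} = \Theta_{0}\sin l \left(\frac{R_{0}}{R} - 1\right)
\quad\Rightarrow\quad
R = \frac{R_{0}\,\Theta_{0}\sin l}{V_{\rm LSR} + \Theta_{0}\sin l},
\end{displaymath}
and the near/far kinematic distances follow from
$d = R_{0}\cos l \pm \sqrt{R^{2} - R_{0}^{2}\sin^{2} l}$. Other rotation
models would replace $\Theta(R)$ accordingly.
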
\requirement{Plot sigma errors on spectra}{Not started}{3}{}{}{}
{For 1420 MHz observations of HI, the rms (Tsys) values vary
significantly across the band. The software should be able to compute
the rms as a function of frequency across the spectrum from the
off-pulse data and then be able to plot n-sigma error bars on the
spectra.}

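As a sketch of what such a per-channel error estimate might be based on
(an assumption, not a settled algorithm), the radiometer equation gives
the expected rms in each channel,
\begin{displaymath}
\sigma(\nu) \approx \frac{T_{\rm sys}(\nu)}{\sqrt{\Delta\nu\,\tau}},
\end{displaymath}
where $\Delta\nu$ is the channel bandwidth and $\tau$ the integration
time; $n$-sigma error bars would then be $\pm n\,\sigma(\nu)$ per channel.
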
\requirement{Simple Mapping}{Not started}{3}{}{}{}
{It should be possible to take a selection of calibrated spectra which
are then passed to the ``Gridzilla'' program to produce an image
cube. Analysis of this cube would be done using external programs
(e.g. Miriad, aips++).}

\section{Help}

\requirement{Built in help}{Done1}{}{}{}{}
{There should be built-in and web-based documentation, which can be
easily kept up-to-date.}

\requirement{Cookbook}{Done1}{}{}{}{}
{A short and simple end-to-end cookbook for basic data analysis should
be available.}

\requirement{Programmers Documentation}{Not started}{2}{}{}{}
{There should be documentation aimed at astronomers wishing to write
their own scripts, detailing the methods needed and how to get low
level access to the data.}

\section{Data and meta-data}

\requirement{Handle multi dimensional data}{Done1}{}{}{}{}
{The software must be capable of handling multi-IF (potentially dozens
of IFs) and multi-beam data with arbitrary polarisation (e.g. single
pol, dual pol, full Stokes etc).}

\requirement{Handle pulsar data}{Deferred}{}{}{}{}
{The software should handle pulsar binned data for pulsar absorption
experiments.}

\subsection{History}

\requirement{History}{Done1}{}{}{}{}
{A user-viewable history of data processing steps should be kept as
part of the data. Where possible this should be retained when data is
imported from other packages.}

\requirement{Convert history to script}{Not started}{2}{}{}{}
{It should be possible to use the history information to create
template pipeline scripts for batch processing.}

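A minimal sketch of the idea, assuming history entries are stored as the
textual form of the commands that were run (an assumption, not the
current storage format):

\begin{verbatim}
# Turn a list of recorded history entries into a template batch script.
def history_to_script(history, filename='pipeline_template.py'):
    with open(filename, 'w') as f:
        f.write('# Template pipeline generated from processing history\n')
        for entry in history:
            f.write(entry + '\n')

# e.g. history_to_script(["scan.set_unit('km/s')",
#                         "scan.poly_baseline(order=1)"])
\end{verbatim}
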
\subsection{Multiple IFs}

\requirement{Transparently handle multi-IF data}{Done1}{}{}{}{}
{If multiple IFs are present (currently Tidbinbilla can produce two
IFs and the new wideband spectrometer for Mopra may have dozens of
IFs) the software should handle the data transparently. Potentially
each IF may have a significantly different sky frequency and be
observing a different molecule or transition with a different rest
frequency. From the user's point of view, simultaneously obtained IFs
should be kept within the same ``container'' (not split into a myriad
of separate ``containers'').}

\requirement{IFs with different number of spectral channels}{Not started}{2}{}{}{}
{Separate IFs may have a different number of spectral channels.}

\subsection{Multibeam}

\requirement{Handle multibeam data}{Done1}{}{}{}{}
{Basic handling of multibeam data should be possible (i.e. in general
each beam will be treated as a separate observation, but all within
the same container). The user should be able to view or process either
individual beams or all beams in parallel.}

\requirement{Multibeam simultaneous reference/signal}{Not started}{3}{}{}{}
{The use of a single beam observing a source and the rest of the beams
as reference beams for sky-subtraction should be investigated.}

\subsection{Robust fitting}

\requirement{Retain raw correlator integrations}{Done1}{}{}{}{}
{If robust fitting using median filtering is used, then the individual
integrations from the observations should {\em not} be averaged when
the data is imported, but retained within a single
container. It should be possible to inspect either the averaged or the
individual data.}

\subsection{Coordinate frames and units}

\requirement{Flexible coordinate frames}{Done1}{}{}{}{}
{Coordinate frame and unit selection and handling need to be
flexible and relatively transparent to the user (i.e. if the user's
preference is for LSRK velocities, they do not need to worry about the
reference frame in which the data was observed).}

\requirement{Specific reference frames}{Done1}{}{}{}{}
{At a minimum the following reference frames and conventions should be
handled: \setlength{\parindent}{0pt}

\smallskip
\anitem{Position}{(RA,Dec) in J2000 \& B1950 (as well as other
arbitrary epochs), Galactic, (Az,El).}

\anitem{Frequency}{Velocity (Topocentric, Geocentric, Barycentric,
Heliocentric, kinematical LSR, dynamical LSR, Rest), Frequency
(MHz, GHz), channel number.}

\anitem{Velocity}{Optical, Radio, Relativistic.}

\anitem{Flux}{Jansky, Kelvin (mJy etc).}}

\requirement{Data units and frames properly labelled}{Done1}{}{}{}{}
{All data should be internally labelled with the appropriate
coordinate frame and units. If this information is ambiguous for some
reason, it should be set when the data is imported and the user
should not have to worry about it again.}

\requirement{Current reference frames clear to user}{Done1}{}{}{}{}
{It should be clear to the user in what coordinate frame (velocity,
position etc) the data is being presented.}

\requirement{Positional Reference Frame}{Not started}{1}{0.2d}{}{} {The user
should be able to specify the reference frame (Epoch, Equinox etc)
which is used for exporting data, simple mapping output etc. J2000,
B1950 and Galactic should be supported. The default should be the frame
in which the data was recorded.}

\requirement{Non-conformist Positional Reference Frame}{Not started}{2}{}{}{}
{Non-conformist positional frames such as Az-El should be supported.}

\subsection{Meta-data}

A comprehensive set of header data should be read from the input data
files. In general all meta-data available in the RPFITS file should be
retained. The user may wish to enter some specific values by hand.

\requirement{View and edit header data}{Started}{1}{?}{}{}
{All header data should be viewable and editable by the user. This
includes changes such as scaling the given Tsys values.}

\requirement{Missing header data}{Done1}{}{}{}{}
{Missing header data should be handled gracefully, i.e. the software
should fill the values with ``blanks'' and be able to continue to
process the data if possible.}

\requirement{User add missing header data}{Not started}{2}{}{}{}
{The user must be able to add missing header data, which is not
present in the RPFITS file. It must be possible to add the same header
data to multiple scans simultaneously.}

\extendedrequirement{Itemised header items}{Started}{1}{}{}{}
{The following header data would be required per scan:
\begin{itemize}
\item Source name
\item Scan type (signal or reference)
\item Integration time
\item Scan length (actual time of observation, $\ge$ integration time)
\item Telescope
\item UT time and date of observation
\item Telescope elevation of observation
\item Parallactic angle
\item Beam size
\item Scan ID
\item Observer
\item Project
\item Polarisation
\item Receiver
\item Telescope coordinates
\item Weather info (temperature, pressure, humidity)
\item User axis display preference (LSR velocity, frequency etc).
\end{itemize}
}

\extendedrequirement{IF header items}{Started}{1}{}{}{}
{\label{req:if}
The following header data is required for each IF, beam etc:
\begin{itemize}
\item Source coordinates and coordinate frame
\item Frequency/velocity axis definition and type
\item System Temperature
\item Beam number
\item Molecule rest frequency$^\dagger$
\item Molecular name$^\dagger$
\item Molecular transition$^\dagger$
\item Molecular formula$^\dagger$
\end{itemize}
}

\requirement{Pretty print formula}{Not started}{3}{}{}{}
{The molecular formula could be stored with embedded superscript and
subscript symbols for ``pretty'' printing on plots, but
printed in plain text on the CLI or in ASCII output.}

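For illustration only (the markup convention here is an assumption, not
a decided format): a formula stored as \verb|NH$_3$| could be rendered
as NH$_3$ on plots, while the CLI and ASCII output would print the
plain-text form NH3 unchanged.
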
Some molecular line rest-frequencies are close enough that two or more
molecules or transitions may be observed in a single IF. Typical
examples include the 1665/1667~MHz OH maser pair, NH$_3$ transitions,
and many observations in the 3~mm band.
\vspace{\parskip}

\requirement{Multiple rest frequencies per IF}{Not started}{2}{}{}{}
{The software should optionally support multiple lines per IF, by
storing a set of rest frequencies per IF, rather than a single
value. The header values in requirement \reqref{req:if} marked with a
$\dagger$ would all have to be stored as an array of values rather
than a scalar. A simple mechanism must be provided to change the
currently ``active'' rest-frequency.}

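A minimal sketch of what such per-IF storage might look like, using the
OH 1665/1667~MHz pair mentioned above (the field names and the notion of
an ``active'' index are assumptions for illustration, not an agreed data
layout):

\begin{verbatim}
# Per-IF line information stored as parallel arrays plus an active index.
if_lines = {
    'rest_frequency': [1665.4018e6, 1667.3590e6],   # Hz
    'molecule_name':  ['OH', 'OH'],
    'transition':     ['F=1-1', 'F=2-2'],
    'active':         0,      # index of the currently active line
}

def active_rest_frequency(lines):
    return lines['rest_frequency'][lines['active']]
\end{verbatim}
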
\section{Installation}

\requirement{Easy installation}{Started}{1}{?}{}{}
{It must be possible for astronomers to install the software at their
own institute with either a moderate amount of OS experience or some
help from the local system administrators. This includes installation
on a central ``NFS'' server as well as local desktops.}

\requirement{Linux Support}{Started}{1}{3d}{}{}
{The software must run on major flavours of Linux
(Fedora/Redhat, Debian, etc).}

\subrequirement{Solaris Support}{Started}{1}{?}{}{}
{The software must run on Solaris.}

\requirement{Run on laptop}{Done1}{}{}{}{}
{It must be possible for users to install the software on their
laptops and run with no network connection.}

\requirement{Easy upgrade}{Done1}{}{}{}{}
{It should be relatively easy to upgrade to the latest version of the
software.}

\requirement{MacOS/X support}{Not started}{1}{3+d}{}{}
{The software should run on MacOS/X.}

\requirement{Windows support}{Not started}{3}{}{}{}
{It would be desirable for the software to run on Windows.}

\section{Known Issues}
\label{sec:issues}
The following issues are known problems with the data from ATNF
telescopes, which should probably be corrected for automatically if at
all possible. The best place to do this is while loading the data.

\subsection{General}

\begin{itemize}
\item All polarisations in the RPFITS files are labelled as
XX/YY. These need to be relabelled as LL/RR when appropriate.
\end{itemize}

\subsection{Mopra}

\begin{itemize}
\item Data obtained in 2002 \& 2003 (and probably before) have an
error in the frequency headers (this may be corrected by an external
program). \makenote{Nedd Ladd}

\item The (RA,Dec) positions in the data file are in date coordinates,
not J2000. This causes problems for packages like Class when
averaging the data. \makenote{Maria Hunt}

\item It is possible that the Tsys calibration is currently inconsistent.
\makenote{Cormac Purcell??}

\end{itemize}

\subsection{Parkes}

\begin{itemize}
\item For pulsar data the automatic gain control is disabled. This
means the nominal Tsys measurement does not change and Tsys per
integration is encoded in a non-standard way. \makenote{Simon
Johnston}
\end{itemize}

\subsection{Tidbinbilla}

\begin{itemize}
\item All 20-GHz data is calibrated in flux units of Kelvin.
\item Elevation is not written into the RPFITS file.
\end{itemize}


\section{Requirements Matrix}

\begin{longtable}{|l|l|l|c|l|}

\hline
\bf Requirement & \bf Summary & \bf Status & \bf Priority & \bf Effort \\
\hline
\endhead
\hline
\endfoot
\input{reqsum.tex}

\end{longtable}



\end{document}