Commit 34b7566f authored by Markus Demleitner

adding draft apertif archive RD

parent 4125432d
This is a draft RD (with a lot of "dachs start siap" egg shells still
clinging to it) for the APERTIF archive, quickly hacked together by
Markus in the expectation that a webdav mount of the data might become
available.

Meanwhile, we have switched to dumping the FITS file headers (primary
only, so far) on the archive machine, using the bin/dump_fits_headers.py
script. The RD as of 2020-01-10T11:34:00 already expects that dump in
its data/ directory.
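
For orientation, a minimal sketch of what such a dump looks like from
Python (assuming it has been copied to data/fitses.pickle, which is
where the sources pattern in the RD below picks it up):

    import pickle

    # each record is a dict {name, size, mtime, header} as written by
    # bin/dump_fits_headers.py; "header" holds the raw bytes of the
    # primary FITS header
    with open("data/fitses.pickle", "rb") as f:
        fitses = pickle.load(f)

    for fitsdesc in fitses[:3]:
        print(fitsdesc["name"], fitsdesc["size"], len(fitsdesc["header"]))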
<resource schema="svc_2019">
<meta name="creationDate">2020-01-09T10:47:35Z</meta>
<meta name="title">APERTIF Data 2019</meta>
<meta name="description">
%this should be a paragraph or two (take care to mention salient terms)%
</meta>
<!-- Take keywords from
http://astrothesaurus.org/thesaurus
if at all possible -->
<meta name="subject">%keywords; repeat the element as needed%</meta>
<meta name="creator">%authors in the format Last, F.I; Next, A.%</meta>
<meta name="instrument">APERTIF Correlator at Westerbork</meta>
<meta name="facility">Westerbork Radio Observatory</meta>
<meta name="source">%ideally, a bibcode%</meta>
<meta name="contentLevel">Research</meta>
<meta name="type">Catalog</meta> <!-- or Archive, Survey, Simulation -->
<meta name="coverage.waveband">Radio</meta>
<table id="main" onDisk="True" mixin="//siap#pgs" adql="False">
<!-- <mixin
calibLevel="2"
collectionName="'%a few letters identifying this data%'"
targetName="%column name of an object designation%"
expTime="%column name of an exposure time%"
targetClass="'%simbad target class%'"
>//obscore#publishSIAP</mixin>-->
<column name="beam_major"
unit="arcsec" ucd=""
tablehead="Beam (maj)"
description="Major axis of beam (perhaps half-sensitivity) size."
verbLevel="15"/>
<column name="beam_minor"
unit="arcsec" ucd=""
tablehead="Beam (min)"
description="Minor axis of beam (perhaps half-sensitivity) size."
verbLevel="15"/>
<column name="beam_pa"
unit="degree" ucd=""
tablehead="Beam (PA)"
description="Position agle of beam."
verbLevel="15"/>
<column name="object" type="text"
ucd=""
tablehead="Object"
description="Target object observed"
verbLevel="10"/>
</table>
<coverage>
<updater sourceTable="main"/>
</coverage>
<data id="import">
<sources pattern="data/*.pickle" recurse="True"/>
<embeddedGrammar>
<!-- this parses the pickle of server-side FITS headers generated
by a little script called dump_fits_headers.py (which hopefully
lives on the upstream server) -->
<iterator>
<code>
import pickle

from gavo.utils import fitstools

with open(self.sourceToken, "rb") as f:
    fitses = pickle.load(f)

for fitsdesc in fitses:
    # turn the header cards into a plain dict; dashes in FITS keywords
    # (e.g., DATE-OBS) become underscores so the rowmaker can reference
    # them as @DATE_OBS and the like
    rawdict = dict(
        (card.keyword.replace("-", "_"), card.value)
        for card in fitstools.parseCards(fitsdesc["header"]))
    rawdict["accref"] = fitsdesc["name"]
    rawdict["fsize"] = fitsdesc["size"]
    yield rawdict
</code>
</iterator>
<rowfilter procDef="//products#define">
<bind key="table">"\schema.data"</bind>
<bind key="accref">@accref</bind>
<bind key="fsize">@fsize</bind>
<bind key="path"
>"https://alta.astron.nl/webdav/SVC_2019_Imaging/"+accref</bind>
<bind key="preview"
>"https://alta.astron.nl/alta-static/media/"+(
accref[:-5]+".png")</bind>
<bind key="preview_mime">"image/png"</bind>
</rowfilter>
</embeddedGrammar>
<make table="main">
<rowmaker>
<map key="beam_major">@BMAJ</map>
<map key="beam_minor">@BMIN</map>
<map key="beam_pa">@BPA</map>
<map key="object">@OBJECT</map>
<map key="bandpassRefval">LIGHT_C/@CRVAL3</map>
<map key="bandpassLo">LIGHT_C/(@CRVAL3+@CDELT3)</map>
<map key="bandpassHi">LIGHT_C/(@CRVAL3-@CDELT3)</map>
<apply procDef="//siap#setMeta">
<bind key="dateObs">@DATE_OBS</bind>
<bind key="bandpassId">"L-Band"</bind>
<bind key="pixflags">"F"</bind>
<bind key="title">"APERTIF %s %s"%(@DATE_OBS, @OBJECT)</bind>
</apply>
<apply>
<code>
# the cubes apparently come with degenerate trailing axes
# (presumably frequency and Stokes); pretend they are not there
# so the image is treated as two-dimensional
if @NAXIS4==1:
    @NAXIS -= 1
if @NAXIS3==1:
    @NAXIS -= 1
</code>
</apply>
<apply procDef="//siap#computePGS"/>
<!-- any custom columns need to be mapped here; do *not* use
idmaps="*" with SIAP -->
</rowmaker>
</make>
</data>
<!-- if you want to build an attractive form-based service from
SIAP, you probably want to have a custom form service; for
just basic functionality, this should do, however. -->
<service id="i" allowed="form,siap.xml">
<meta name="shortName">%up to 16 characters%</meta>
<!-- other sia.types: Cutout, Mosaic, Atlas -->
<meta name="sia.type">Pointed</meta>
<meta name="testQuery.pos.ra">%ra one finds an image at%</meta>
<meta name="testQuery.pos.dec">%dec one finds an image at%</meta>
<meta name="testQuery.size.ra">0.1</meta>
<meta name="testQuery.size.dec">0.1</meta>
<!-- this is the VO publication -->
<publish render="scs.xml" sets="ivo_managed"/>
<!-- this puts the service on the root page -->
<publish render="form" sets="local,ivo_managed"/>
<!-- all publish elements only become active after you run
dachs pub q -->
<dbCore queriedTable="main">
<condDesc original="//siap#protoInput"/>
<condDesc original="//siap#humanInput"/>
<!-- enable further parameters like
<condDesc buildFrom="dateObs"/>
or
<condDesc>
<inputKey name="object" type="text"
tablehead="Target Object"
description="Object being observed, Simbad-resolvable form"
ucd="meta.name" verbLevel="5" required="True">
<values fromdb="object FROM lensunion.main"/>
</inputKey>
</condDesc> -->
</dbCore>
</service>
<regSuite title="svc_2019 regression">
<!-- see http://docs.g-vo.org/DaCHS/ref.html#regression-testing
for more info on these. -->
<regTest title="svc_2019 SIAP serves some data">
<url POS="%ra,dec that has a bit of data%" SIZE="0.1,0.1"
>i/siap.xml</url>
<code>
<!-- to figure out some good strings to use here, run
dachs test -D tmp.xml q
and look at tmp.xml -->
self.assertHasStrings(
"%some characteristic string returned by the query%",
"%another characteristic string returned by the query%")
</code>
</regTest>
<!-- add more tests: image actually delivered, form-based service
renders custom widgets, etc. -->
</regSuite>
</resource>

bin/dump_fits_headers.py:

#!/usr/bin/python3
"""
A minimal script to create a dump of the primary headers of the FITS
files below the directory in which it is started.

The result is a pickle of a list of dictionaries
{name, size, mtime, header}.
"""

import os
import pickle

CARD_SIZE = 80                          # FITS cards are fixed-width
END_CARD = b'END'+b' '*(CARD_SIZE-3)    # an END card, blank-padded to a card
FITS_BLOCK_SIZE = CARD_SIZE*36          # headers come in 2880-byte blocks

def read_header_bytes(f, maxHeaderBlocks=80):
    """returns the bytes belonging to a FITS header starting at the
    current position within the file f.

    If the header is not complete after reading maxHeaderBlocks blocks,
    an IOError is raised.
    """
    parts = []
    while True:
        block = f.read(FITS_BLOCK_SIZE)
        if not block:
            raise IOError('Premature end of file while reading header')
        parts.append(block)

        # only an END card aligned on a card boundary terminates the
        # header; "END" plus blanks can also occur as payload within
        # a card, so skip over unaligned matches
        endCardPos = block.find(END_CARD)
        while endCardPos!=-1 and endCardPos%CARD_SIZE:
            endCardPos = block.find(END_CARD, endCardPos+1)
        if endCardPos!=-1:
            break

        if len(parts)>=maxHeaderBlocks:
            raise IOError("No end card found within %d blocks"%maxHeaderBlocks)
    return b"".join(parts)

def make_fits_pack(fname):
    with open(fname, "rb") as f:
        header = read_header_bytes(f)
    return {
        "name": fname,
        "size": os.path.getsize(fname),
        "mtime": os.path.getmtime(fname),
        "header": header}

def iter_fitses():
    for dirpath, dirnames, filenames in os.walk("."):
        for fname in filenames:
            if fname.endswith(".fits"):
                yield make_fits_pack(
                    os.path.join(dirpath, fname))

def main():
    fitses = list(iter_fitses())
    with open("fitses.pickle", "wb") as f:
        pickle.dump(fitses, f)


if __name__=="__main__":
    main()
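
To sanity-check a dump, one can unpickle it and cut a header back into
its 80-byte cards, along these lines (a throwaway sketch, not part of
this commit):

    import pickle

    CARD_SIZE = 80

    with open("fitses.pickle", "rb") as f:
        rec = pickle.load(f)[0]

    # headers are dumped as raw bytes; show the first few cards
    header = rec["header"]
    for pos in range(0, min(len(header), 5*CARD_SIZE), CARD_SIZE):
        print(header[pos:pos+CARD_SIZE].decode("ascii").rstrip())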