# Fragment of a DaCHS datalink/rowmaker procedure (indentation lost in
# extraction; \srcstem, \rdId and the @var bindings are DaCHS macros,
# not plain Python).
# Build a map from file stem (name up to the first dot) to the
# inputs-relative path of that file, walking everything below root.
stemToSource = {}
for dirpath, dirnames, filenames in os.walk(
rd.getAbsPath(root)):
dirpath = getInputsRelativePath(dirpath)
for name in filenames:
stemToSource[name.split(".")[0]
] = os.path.join(dirpath, name)
# The raw frame's stem is the source stem with a leading "red_"
# stripped; look it up in the stem map (None when absent).
srcKey = re.sub("^red_", "", \srcstem).lower()
@raw_file = stemToSource.get(srcKey)
# The flat's name is taken from the HISTORY cards: the token on the
# line following 'FLATFIELD'.  Fall back to the literal token when the
# stem map has no entry for it.
mat = re.search("'FLATFIELD'.*?\\n *([a-z0-9._]+)",
str(vars["header_"]["HISTORY"]))
if mat:
@flat = stemToSource.get(
mat.group(1).split(".")[0], mat.group(1))
else:
@flat = None
# Datalink link to this record's own datalink document
# (contentLength=5000 is presumably a rough estimate — TODO confirm).
yield descriptor.makeLink(
makeAbsoluteURL("\rdId/raw-dl/dlmeta?ID="+descriptor.accref),
contentType="application/x-votable+xml;content=datalink",
description="Raw data and calibration files",
contentLength=5000)
def guessBiasVal(fName):
    """Return (shape, biasValue) guessed from the overscan pixels of
    the raw frame in the FITS file fName.

    Regrettably, the XOVERSC header is always 0.  We assume the
    10 border pixels are always overscan.  Experimentally, that
    seems to be the case (we've tried on almost all frames).

    Raises ValueError when the frame looks like already-reduced data
    (more than 2000 rows).
    """
    hdus = pyfits.open(fName)
    # try/finally so the HDU list is closed even when we raise below
    # (the original leaked the open file on the ValueError path).
    try:
        pixels = hdus[0].data
        if pixels.shape[0] > 2000:
            # that's probably data already reduced
            raise ValueError("No raw data available")
        # average of the 10-pixel overscan strips at both ends of
        # each row
        biasAvg = (numpy.average(pixels[:, :10])
            + numpy.average(pixels[:, -10:]))/2
        return pixels.shape, biasAvg
    finally:
        hdus.close()
# Regrettably, the XOVERSC header always has a 0 inside and is
# unreliable at best.
# Fragment of the make-bias service body (the enclosing def lies
# outside this chunk; indentation lost in extraction; \schema is a
# DaCHS macro).  Look up the raw file belonging to the requested
# accref in the provenance table.
with base.getTableConn() as conn:
rawPath = os.path.join(base.getConfig("inputsDir"),
next(conn.query("SELECT raw_file"
" FROM \schema.provenance"
" WHERE accref=%(accref)s",
{"accref": inputTable.args["accref"]}))[0])
# Synthesize a constant bias frame of the raw frame's shape, filled
# with the overscan-derived bias value.
shape, val = guessBiasVal(rawPath)
hdu = pyfits.PrimaryHDU(numpy.ones(shape, numpy.float32)*val)
# Assigning "history" twice appends two HISTORY cards (pyfits header
# semantics), it does not overwrite the first one.
hdu.header["history"] = "Bias frame for {}".format(
os.path.basename(rawPath))
hdu.header["history"] = ("Computed by maidanak/rawframes"
" make-bias based on the overscan pixels found in the file")
# Serialize to an in-memory FITS and hand back gzipped bytes with the
# media type the datalink declares.
serialized = io.BytesIO()
pyfits.HDUList([hdu]).writeto(serialized)
return "image/fits+gzip", gzip.compress(serialized.getvalue())
# Fragment of a datalink meta maker yielding the link set for one raw
# frame (indentation lost in extraction; \rdId is a DaCHS macro).
# #this: the raw, unreduced frame, served straight from inputsDir.
yield descriptor.makeLinkFromFile(
os.path.join(base.getConfig("inputsDir"),
descriptor.metadata["raw_file"]),
description="Original, unreduced observation",
contentType="image/fits+gzip",
contentQualifier="#image")
# #flat: the superflat recorded for this frame in the metadata.
yield descriptor.makeLinkFromFile(
os.path.join(base.getConfig("inputsDir"),
descriptor.metadata["flat"]),
description="Recommended superflat for the time at which #this"
" was taken",
contentType="image/fits+gzip")
# #bias: computed on the fly by the make-bias service; contentLength
# is a fixed estimate, not a measured size.
yield descriptor.makeLink(
makeAbsoluteURL("\rdId/make-bias/qp/"
+descriptor.metadata["accref"]),
description="Bias frame derived from the overscan regions"
" at both ends of the raw exposure",
contentType="image/fits+gzip",
contentLength=100000)
# The reduced product itself; here the length is the actual on-disk
# size of the file behind the accref.
accref = descriptor.metadata["accref"]
yield descriptor.makeLink(
makeAbsoluteURL("/getproduct/"+accref),
description="This dataset, reduced with #flat and #bias",
contentType="image/fits+gzip",
contentLength=os.path.getsize(
os.path.join(base.getConfig("inputsDir"),
accref)),
contentQualifier="#image")
')
# Test-method fragment: the SIAP response must echo the original
# request URL in an INFO element named "request".
self.assertXpath('//v:INFO[@name="request"]', {
"value": "/maidanak/res/rawframes/siap/siap.xml?POS=340.12%2C3.3586&SIZE=0.1&INTERSECT=OVERLAPS&RESPONSEFORMAT=votabletd&MAXREC=10",})
]]>
# Test-method fragments from several SIAP regression tests
# (their def headers lie outside this chunk).
self.assertHasStrings('STREAM encoding="base64">AAA')
self.assertHasStrings('<TD>AZT 22')
# An error response must carry QUERY_STATUS=ERROR; note the malformed
# SIZE=0.a1 below — presumably deliberate to provoke that error.
self.assertHasStrings('name="QUERY_STATUS" value="ERROR"')
self.assertXpath('//v:INFO[@name="request"]', {
"value": "/maidanak/res/rawframes/siap/siap.xml?POS=q2237%252B0305&SIZE=0.a1&INTERSECT=OVERLAPS&RESPONSEFORMAT=votabletd&MAXREC=10"})
# presumably the gzip magic bytes at the start of the payload
# (1f 8b 08 08) — verify against the product served.
self.assertHasStrings(b'\\x1f\\x8b\\x08\\x08')
# The WCS CD matrix column must be declared with the SIAP UCD,
# variable array size, and deg/pixel unit.
self.assertXpath("//v:FIELD[@name='wcs_cdmatrix']", {
"datatype": "double",
"ucd": "VOX:WCS_CDMatrix",
"arraysize": "*",
"unit": "deg/pixel"})
# None as key checks the element's text content in assertXpath.
self.assertXpath("//v:INFO[@name='QUERY_STATUS']", {
"value": "OK",
None: "OK",})
self.assertXpath("//v:PARAM[@name='INPUT:POS']", {
"datatype": "char",
"ucd": "pos.eq"})
', 'Matched:'
]]>
# Test-method fragments (headers outside this chunk): tar download,
# OAI GetRecord, XSD validity, preview image, and the raw-dl datalink.
self.assertHeader("content-disposition",
"attachment; filename=truncated_data.tar")
self.assertHeader("content-type", "application/x-tar")
self.assertHasStrings("dc_data/",
# end of tar header, start of gzipped file
b"\\0\\0\\0\\0\\0\\0\\x1f\\x8b\\x08\\x08")
self.assertHasStrings('oai:GetRecord>', 'dc:title')
self.assertValidatesXSD()
self.assertValidatesXSD()
self.assertValidatesXSD()
# "JFIF" indicates a JPEG preview in the response body.
self.assertHasStrings("JFIF")
from gavo.protocols import products
# Best-effort removal of a possibly stale cached preview so the test
# exercises preview generation; a missing cache file is fine.
try:
os.unlink(
products.PreviewCacheManager.getCacheName(
"maidanak/data/QSO_B1422p231"
"/Johnson_V/red_mf290044.fits.gz"))
except IOError:
pass
self.assertHasStrings("SIAP Query", "siap.xml", "form",
"Other services", "SIZE</td>", "Verb. Level")
# Index datalink rows by their semantics term for the checks below.
bySemantics = dict((r["semantics"], r) for r in self.getVOTableRows())
progRow = bySemantics["#progenitor"]
self.assertTrue(progRow["access_url"].endswith(
"/maidanak/res/rawframes/raw-dl/dlmeta?ID=maidanak/data/Q2237p0305/Johnson_R/red_kk050001.fits.gz"))
# NOTE(review): assertTrue's second argument is only a failure
# message, so the description is never actually compared;
# assertEqual was probably intended here — confirm and fix upstream.
self.assertTrue(progRow["description"], "Raw data and"
" calibration files")
# Test-method fragment: check url/type/length/qualifier of each
# datalink row (#this, #flat, #derivation, #bias) by semantics term.
bySemantics = dict((r["semantics"], r) for r in self.getVOTableRows())
self.assertEqual(bySemantics["#this"]["access_url"],
EqualingRE("http.*/raw-dl/static/cd005/nov0501/q2237/kk050001.gz"))
self.assertEqual(bySemantics["#this"]["content_type"],
"image/fits+gzip")
self.assertEqual(bySemantics["#this"]["content_length"], 1766879)
self.assertEqual(bySemantics["#this"]["content_qualifier"],
"#image")
self.assertEqual(bySemantics["#flat"]["access_url"],
EqualingRE("http.*/maidanak/res/rawframes/"
"raw-dl/static/cd010/superfla/sfr.fit.gz"))
self.assertEqual(bySemantics["#flat"]["content_type"],
"image/fits+gzip")
self.assertEqual(bySemantics["#flat"]["content_length"],
5025753)
self.assertEqual(bySemantics["#derivation"]["access_url"],
EqualingRE("http.*/getproduct/maidanak/data/Q2237p0305"
"/Johnson_R/red_kk050001.fits.gz"))
self.assertEqual(bySemantics["#derivation"]["content_type"],
"image/fits+gzip")
self.assertEqual(bySemantics["#derivation"]["content_length"],
5282044)
self.assertEqual(bySemantics["#bias"]["access_url"],
EqualingRE("http.*/make-bias/qp/maidanak/data/Q2237p0305"
"/Johnson_R/red_kk050001.fits.gz"))
self.assertEqual(bySemantics["#bias"]["content_type"],
"image/fits+gzip")
# 100000 matches the fixed contentLength estimate the datalink
# generator declares for the bias link.
self.assertEqual(bySemantics["#bias"]["content_length"],
100000)
# Test-method fragments: each decompresses the gzipped response and
# checks FITS header cards in the plain bytes.  The local
# "import gzip" suggests these methods run where gzip is not imported
# at module level.
import gzip
self.data = gzip.decompress(self.data)
# Generated bias frame: float32 image (BITPIX=-32) of the raw frame's
# dimensions, with the expected HISTORY card and pixel bytes.
self.assertHasStrings("BITPIX = -32",
"NAXIS1 = 2030",
"NAXIS2 = 800",
"Bias frame for kk050001.gz",
b"C\\xbb\\x07RC\\xbb\\x07")
import gzip
self.data = gzip.decompress(self.data)
self.assertHasStrings(
"NAXIS1 = 2000",
"DATE = '2001-12-09T20:22:10'")
import gzip
self.data = gzip.decompress(self.data)
self.assertHasStrings(
"NAXIS1 = 2030",
"DATE = '2001-11-05'")