# Rowfilter fragment (enclosing generator def not in view): from each
# incoming product row, emit two product-table entries -- a plain-text
# one and a datalink-generated VOTable one.
dlAccessURL = rd.getById("sdl").getURL("dlget", absolute=True)
baseAccref = os.path.splitext(
	row["prodtblPath"])[0]
# first entry: the raw text serialization, served through the gettxt
# renderer; \getConfig and \rdId are RD macros expanded by DaCHS
# before this code runs.
row["prodtblAccref"] = baseAccref+".txt"
row["prodtblPath"] = ("\getConfig{web}{serverURL}/\rdId/gettxt/qp/"
	+urllib.parse.quote(row["prodtblPath"]))
row["prodtblMime"] = "text/plain"
yield row
# second entry: a VOTable rendition fetched through the datalink
# access URL, identified by its standard publisher DID.
# NOTE(review): this mutates and re-yields the same dict object --
# assumes the consumer is done with each row before the next yield.
row["prodtblAccref"] = baseAccref+".vot"
row["prodtblPath"] = "%s?ID=%s"%(
	dlAccessURL,
	getStandardPubDID(row["prodtblAccref"]))
row["prodtblMime"] = "application/x-votable+xml"
yield row
# Rowfilter fragment: default optional SSA columns to None so rows
# lacking them still map without KeyError (dict.get returns None
# for missing keys).
for key in ["ssa_targname", "alpha", "delta", "ssa_aperture"]:
	row[key] = row.get(key)
yield row
# Var-setter fragment: fill in collection-specific metadata (the
# instrument label and the bibliographic reference) for TMAP spectra.
if vars["ssa_collection"]=="TMAP":
	vars["ssa_instrument"] = "TMAP NLTE model-atmosphere spectra"
	vars["ssa_reference"] = "2003ASPC..288..103R"
@ssa_binSize = (float(@specend)-float(@specstart)
)/float(@ssa_length)/1e10
import gzip
import io
import re
from gavo import rsc
from gavo import svcs
_ACCPATH_RE = re.compile("Access.Reference=([^\n]*)")


def getSrcForAccref(accref):
	"""Return the upstream URL for the data behind accref.

	The actual text data lives at a URL recorded in a .meta file
	sitting next to the (virtual) product below inputsDir; this
	derives the meta file name by swapping accref's extension for
	".meta" and pulls the URL out of its Access.Reference line.
	"""
	stem = os.path.splitext(accref)[0]
	metaName = os.path.join(base.getConfig("inputsDir"), stem+".meta")
	with open(metaName, "r", encoding="utf-8") as metaFile:
		metaContent = metaFile.read()
	return _ACCPATH_RE.search(metaContent).group(1)
def parseTMAPText(inBytes):
	"""Iterate over spectral points parsed from TMAP text data.

	inBytes is the raw (possibly gzip-compressed) content of a TMAP
	spectrum file: UTF-8 text where lines starting with "*" carry
	metadata and all other non-empty lines have 2 to 4 whitespace-
	separated numeric columns (wavelength, flux, and optionally
	normalized flux and continuum flux).

	Yields one dict per data line with keys spectral, flux,
	flux_norm, flux_cont; the latter two are None when the
	corresponding column is missing.
	"""
	# Transparently decompress gzipped input.  The gzip magic number
	# is the two bytes 0x1f 0x8b; the previous version had the escape
	# sequences doubled (b"\\x1f\\x8b", eight literal characters), so
	# gzipped files were never detected and decoding them failed.
	if inBytes.startswith(b"\x1f\x8b"):
		inBytes = gzip.decompress(inBytes)

	for line in inBytes.decode("utf-8").split("\n"):
		if not line.strip():
			# ignore empty lines
			continue
		if line.startswith("*"):
			# metadata line, ignore
			continue
		# all other lines must be 2..4 columns of data; extra
		# columns beyond the fourth are ignored
		parts = line.split()
		lam, flux = parts[:2]
		normalized = continuum = None
		if len(parts)>=3:
			normalized = float(parts[2])
		if len(parts)>=4:
			continuum = float(parts[3])
		yield {"spectral": float(lam),
			"flux": float(flux),
			"flux_norm": normalized,
			"flux_cont": continuum}
# Grammar-iterator fragment (enclosing method not in view): fetch
# the upstream text for the current source token, caching downloads
# below resdir/txcache, and hand the bytes to parseTMAPText.
cacheDir = os.path.join(parent.rd.resdir, "txcache")
accref = self.sourceToken["accref"]
try:
	srcURL = getSrcForAccref(accref)
	src = utils.getWithCache(srcURL, cacheDir)
except IOError as msg:
	# NOTE(review): if getSrcForAccref itself raises IOError,
	# srcURL is still unbound here, so this raises NameError rather
	# than UnknownURI; also, the two adjacent string literals below
	# concatenate to "...notavailable..." (missing space).  Both
	# look like genuine bugs -- confirm and fix upstream.
	raise svcs.UnknownURI("The upstream data at %s is not"
		"available or does not exist (%s)"%(srcURL, str(msg)))
return parseTMAPText(src)
=%%(%s)s"%(colName,
base.getSQLKey(minKey.name, minVal, outPars))
else:
yield "%s BETWEEN %%(%s)s AND %%(%s)s"%(colName,
base.getSQLKey(minKey.name, minVal, outPars),
base.getSQLKey(maxKey.name, maxVal, outPars))
]]>
# Condition-generator fragment (enclosing def not in view): fix the
# delivered format to VOTable by registering the value in outPars
# and yielding the corresponding query fragment.
yield "mime=%%(%s)s"%(
	base.getSQLKey("format", "application/x-votable+xml",
		outPars))
Field REQUEST: Missing or invalid value for REQUEST.')
]]>
# Test fragment (enclosing regression-test method not in view):
# assertions against an SSAP query response VOTable.
self.assertHasStrings(
	'xmlns:ssa="http://www.ivoa.net/xml/DalSsap/v1.0"')
# overall query status must be OK, protocol declared as SSAP
self.assertXpath(
	"v:RESOURCE[@type='results']/v:INFO[@name='QUERY_STATUS']", {
	"value": "OK"})
self.assertXpath("v:RESOURCE[@type='results']/v:INFO[@name="
	"'SERVICE_PROTOCOL']", {None: "SSAP"})
self.assertXpath('v:RESOURCE/v:TABLE/v:FIELD[@name="accref"]', {
	"utype": "ssa:Access.Reference"})
# the TD index is computed from the matching FIELD's position so
# these checks do not depend on column order
self.assertXpath("//v:TABLEDATA/v:TR[1]/v:TD["
	"count(//v:FIELD[@name='ssa_creatorDID']/preceding::v:FIELD)+1]", {
	None: "ivo://tmap.iaat/0038000_5.70_H_9.968E-01_HE_3.167E"
	"-03_02000-03000A_2008-08-02_07_20_01"})
self.assertXpath("//v:TABLEDATA/v:TR[1]/v:TD["
	"count(//v:FIELD[@name='ssa_pubDID']/preceding::v:FIELD)+1]", {
	None: "ivo://org.gavo.dc/~?theossa/spec/spec_HHe/0038000_5.70_H_9"
	".968E-01_HE_3.167E-03_02000-03000A_2008-08-02_07_20_01.txt"})
self.assertXpath("//v:TABLEDATA/v:TR[1]/v:TD["
	"count(//v:FIELD[@name='t_eff']/preceding::v:FIELD)+1]", {
	None: "38000.0"})
self.assertXpath('//v:FIELD[@utype="ssa:DataID.DataSource"]',
	{"name": "ssa_datasource"})
# "direct SODA" service
self.assertXpath(
	"v:RESOURCE[@utype='adhoc:service' and "
	"v:PARAM[@name='standardID']/@value='ivo://ivoa.net/std/soda#"
	"sync-1.0']/v:GROUP[@name='inputParams']/v:PARAM[@name='BAND']"
	"/v:VALUES/v:MIN",
	{"value": EqualingRE("2(.00000002)?e-07")})
# datalink meta service
self.assertXpath(
	"v:RESOURCE[@utype='adhoc:service' and "
	"v:PARAM[@name='standardID']/@value='ivo://ivoa.net/std/datalink"
	"#links-1.1']/v:GROUP[@name='inputParams']/v:PARAM[@name='ID']",
	{"ref": "ssa_pubDID"})
# Test fragments (enclosing test methods not in view): assertions
# on error responses, FORMAT=METADATA responses, and delivered
# spectrum VOTables.
self.assertXpath("*[@type='results']/v:INFO[@name='QUERY_STATUS']", {
	"value": "ERROR",
	None: "Field REQUEST: Missing or invalid value for REQUEST."})
self.assertXpath("v:RESOURCE/v:INFO[@name='QUERY_STATUS']", {
	"value": "OK"})
# metadata response: declared input parameters and their annotation
self.assertXpath("v:RESOURCE/v:PARAM[@name='INPUT:POS']", {
	"utype": "ssa:Char.SpatialAxis.Coverage.Location.Value",
	"datatype": "char",
	"unit": "deg"})
self.assertXpath(
	"v:RESOURCE/v:PARAM[@name='INPUT:POS']/v:DESCRIPTION", {
	None: "ICRS position of target object",})
self.assertXpath("v:RESOURCE/v:PARAM[@name='INPUT:log_g']", {
	"ucd": "phys.gravity",
	"unit": "cm/s**2"})
self.assertXpath(
	"v:RESOURCE/v:TABLE/v:FIELD[@utype='ssa:Access.Format']", {
	"name": "mime",
	"datatype": "char"})
self.assertXpath("v:RESOURCE/v:INFO[@name='QUERY_STATUS']", {
	"value": "OVERFLOW"})
# delivered spectrum: Spectral Data Model structure and annotation
self.assertXpath("v:RESOURCE[@utype='spec:Spectrum']", {})
self.assertXpath("v:RESOURCE/v:TABLE/v:GROUP["
	"@utype='spec:Spectrum.Target']", {})
self.assertXpath("//v:FIELD[@name='flux']", {
	"utype": "spec:Spectrum.Data.FluxAxis.Value",
	"unit": "0.1J/(m**2.s.m)"})
self.assertXpath("//v:PARAM[@name='accref']", {
	"value": EqualingRE(r"http://[^/]*/getproduct/theossa/spec/"
	"spec_HHe/0038000_5.70_H_9.968E-01_HE_3.167E-03_02000-03000A_"
	"2008-08-02_07_20_01.vot")})
self.assertXpath("//v:PARAM[@name='ssa_length']", {
	"value": "10787",})
self.assertXpath("v:RESOURCE/v:TABLE/v:DATA/v:BINARY/v:STREAM", {
	"encoding": "base64"})
# column order and UCDs of the spectral table
self.assertXpath("//v:FIELD[2]", {"name": "flux",
	"unit": "0.1J/(m**2.s.m)"})
self.assertXpath("//v:FIELD[3]", {"name": "flux_norm",
	"ucd": "phot.flux.density;arith.ratio"})
self.assertXpath("//v:FIELD[4]", {"name": "flux_cont",
	"ucd": "phot.flux.density;spect.continuum"})
# Test fragments (enclosing test methods not in view): checks on
# parsed row values and on various renderings/status codes.
row = self.getFirstVOTableRow(rejectExtras=False)
self.assertAlmostEqual(row["spectral"], 910.1)
self.assertAlmostEqual(row["flux_norm"], 1.0)
self.assertAlmostEqual(row["flux_cont"], 1.67424e+19)
# the text rendering keeps the "*" metadata comment lines
self.assertHasStrings("* Dataset.Length=10787",
	"* Mass.Fraction.HE=3.167E-03",
	"2.0000000E+03 6.204675E+17")
self.assertHasStrings("dc_data/", ".vot", "0000000", "<VOTABLE ",
	"spec:Spectrum")
# unauthenticated requests must be rejected
self.assertHTTPStatus(401)
self.assertHTTPStatus(401)
# the preview is a PNG with a palette chunk
self.assertHasStrings("PNG", "PLTE")
self.assertEqual(len(self.getVOTableRows()), 0)
self.assertEqual(self.data,
	b"Field File: Cannot enter broken.txt in database: At unspecified location: Not a key value pair: '---'")
self.assertHasStrings("2097.3", "5.304633e+17",
	'utype="spec:Spectrum.Curation.Reference"')
Spectrum id: 0020000_4.01_000001-400000_2016-04-06_10_25_33"
"",
# 1 in text line is # of records dropped.
'2 ')
]]>
# Test fragments: expected byte output of the upload endpoint.
# NOTE(review): the "\\n" in the first literal puts a literal
# backslash-n into the expected bytes while the trailing "\n" is a
# real newline -- verify against what the service actually emits.
self.assertEqual(self.data,
	b"Uploading 0020000_4.01_000001-400000_2016-04-06_10_25_33.meta"
	b" did not change data database.\\nThis"
	b" usually happens when the file already existed for an insert"
	b" or did not exist for an update.\n")
self.assertEqual(self.data,
	b"0020000_4.01_000001-400000_2016-04-06_10_25_33.meta"
	b" uploaded, 2 records modified\n")