Hide keyboard shortcuts

Hot-keys on this page

r m x p   toggle line displays

j k   next/prev highlighted chunk

0   (zero) top of page

1   (one) first highlighted chunk

1

2

3

4

5

6

7

8

9

10

11

12

13

14

15

16

17

18

19

20

21

22

23

24

25

26

27

28

29

30

31

32

33

34

35

36

37

38

39

40

41

42

43

44

45

46

47

48

49

50

51

52

53

54

55

56

57

58

59

60

61

62

63

64

65

66

67

68

69

70

71

72

73

74

75

76

77

78

79

80

81

82

83

84

85

86

87

88

89

90

91

92

93

94

95

96

97

98

99

100

101

102

103

104

105

106

107

108

109

110

111

112

113

114

115

116

117

118

119

120

121

122

123

124

125

126

127

128

129

130

131

132

133

134

135

136

137

138

139

140

141

142

143

144

145

146

147

148

149

150

151

152

153

154

155

156

157

158

159

160

161

162

163

164

165

166

167

168

169

170

171

172

173

174

175

176

177

178

179

180

181

182

183

184

185

186

187

188

189

190

191

192

193

194

195

196

197

198

199

200

201

202

203

204

205

206

207

208

209

210

211

212

213

214

215

216

217

218

219

220

221

222

223

224

225

226

227

228

229

230

231

232

233

234

235

236

237

238

239

240

241

242

243

244

245

246

247

248

249

250

251

252

253

254

255

256

257

258

259

260

261

262

263

264

265

266

267

268

269

270

271

272

273

274

275

276

277

278

279

280

281

282

283

284

285

286

287

288

289

290

291

292

293

294

295

296

297

298

299

300

301

302

303

304

305

306

307

308

309

310

311

312

313

314

315

316

import os 

import re 

import numpy as np 

 

from .utils import astropyHeaderToDict, astropyHeaderFromDict, inheritDocstrings 

from .masks import MaskHelper 

from .target import Target 

from .observations import Observations 

from .identity import Identity 

 

__all__ = ["PfsFiberArraySet"]  # explicit public API of this module

 

 

@inheritDocstrings
class PfsFiberArraySet:
    """A collection of spectra from a common source

    The collection may be from a single arm within a single spectrograph, or
    from the entire instrument. The only requirement is that the elements all
    have the same number of samples.

    Parameters
    ----------
    identity : `pfs.datamodel.Identity`
        Identity of the data.
    fiberId : `numpy.ndarray` of `int`
        Fiber identifiers for each spectrum.
    wavelength : `numpy.ndarray` of `float`
        Array of wavelengths for each spectrum.
    flux : `numpy.ndarray` of `float`
        Array of fluxes for each spectrum.
    mask : `numpy.ndarray` of `int`
        Array of mask pixels for each spectrum.
    sky : `numpy.ndarray` of `float`
        Array of sky values for each spectrum.
    covar : `numpy.ndarray` of `float`
        Array of covariances for each spectrum.
    flags : `dict`
        Mapping of symbolic mask names to mask planes.
    metadata : `dict`
        Keyword-value pairs for the header.
    """
    filenameFormat = None  # Subclasses must override
    """Format for filename (`str`)

    Should include formatting directives for the ``identity`` dict.
    """
    filenameRegex = None  # Subclasses must override
    """Regex for extracting dataId from filename (`str`)

    Should capture the necessary values for the ``identity`` dict.
    """
    filenameKeys = None  # Subclasses must override
    """Key name and type (`list` of `tuple` of `str` and `type`)

    Keys should be in the same order as for the regex.
    """

    def __init__(self, identity, fiberId, wavelength, flux, mask, sky, covar, flags, metadata):
        self.identity = identity
        self.fiberId = fiberId
        self.wavelength = wavelength
        self.flux = flux
        self.mask = mask
        self.sky = sky
        self.covar = covar
        self.flags = flags
        self.metadata = metadata

        # Cache the dimensions; all per-spectrum arrays must agree with these
        # (enforced by validate() below).
        self.numSpectra = wavelength.shape[0]
        self.length = wavelength.shape[1]
        self.validate()

    def validate(self):
        """Validate that all the arrays are of the expected shape"""
        # NOTE(review): these asserts are stripped when running under
        # ``python -O``; callers should not rely on them for input validation
        # in optimized mode.
        assert self.fiberId.shape == (self.numSpectra,)
        assert self.wavelength.shape == (self.numSpectra, self.length)
        assert self.flux.shape == (self.numSpectra, self.length)
        assert self.mask.shape == (self.numSpectra, self.length)
        assert self.sky.shape == (self.numSpectra, self.length)
        # covar carries 3 planes per spectrum (see the covar HDU read/written
        # in readFits/writeFits); plane 0 is the variance (see ``variance``).
        assert self.covar.shape == (self.numSpectra, 3, self.length)

    @property
    def variance(self):
        """Shortcut for variance

        Returns a view of plane 0 of ``covar`` (shape
        ``(numSpectra, length)``), not a copy.
        """
        return self.covar[:, 0, :]

    def __len__(self):
        """Return number of spectra"""
        return self.numSpectra

    def __str__(self):
        """Stringify"""
        return "%s{%d spectra of length %d}" % (self.__class__.__name__, self.numSpectra, self.length)

    @property
    def filename(self):
        """Filename, without directory"""
        return self.getFilename(self.identity)

    @classmethod
    def getFilename(cls, identity):
        """Calculate filename

        Parameters
        ----------
        identity : `pfs.datamodel.Identity`
            Identity of the data.

        Returns
        -------
        filename : `str`
            Filename, without directory.

        Raises
        ------
        TypeError
            If ``cls.filenameFormat`` has not been overridden by the subclass
            (``None %`` is not a valid operation).
        """
        return cls.filenameFormat % identity.getDict()

    @classmethod
    def _parseFilename(cls, path):
        """Parse filename to get the file's identity

        Uses the class attributes ``filenameRegex`` and ``filenameKeys`` to
        construct the identity from the filename.

        Parameters
        ----------
        path : `str`
            Path to the file of interest.

        Returns
        -------
        identity : `pfs.datamodel.Identity`
            Identity of the data of interest.

        Raises
        ------
        RuntimeError
            If the filename does not match ``cls.filenameRegex``.
        """
        # NOTE(review): dirName is unpacked but never used; only the basename
        # is matched against the regex.
        dirName, fileName = os.path.split(path)
        matches = re.search(cls.filenameRegex, fileName)
        if not matches:
            raise RuntimeError("Unable to parse filename: %s" % (fileName,))
        # Each captured group is coerced by its paired type from filenameKeys
        # (groups and keys are assumed to be in the same order).
        return Identity.fromDict({kk: tt(vv) for (kk, tt), vv in zip(cls.filenameKeys, matches.groups())})

    @classmethod
    def readFits(cls, filename):
        """Read from FITS file

        This API is intended for use by the LSST data butler, which handles
        translating the desired identity into a filename.

        Parameters
        ----------
        filename : `str`
            Filename of FITS file.

        Returns
        -------
        self : ``cls``
            Constructed instance, from FITS file.
        """
        data = {}
        # Imported here rather than at module scope, presumably to avoid a
        # hard module-level dependency on astropy — TODO confirm.
        import astropy.io.fits
        with astropy.io.fits.open(filename) as fd:
            # Header of the primary HDU carries the metadata (including the
            # mask-plane definitions consumed below).
            data["metadata"] = astropyHeaderToDict(fd[0].header)
            for attr in ("fiberId", "wavelength", "flux", "mask", "sky", "covar"):
                hduName = attr.upper()  # each array lives in an HDU named after it
                data[attr] = fd[hduName].data
            data["identity"] = Identity.fromFits(fd)

        # Mask-plane mapping is reconstructed from header keywords.
        data["flags"] = MaskHelper.fromFitsHeader(data["metadata"])
        return cls(**data)

    @classmethod
    def read(cls, identity, dirName="."):
        """Read file given an identity

        This API is intended for use by science users, as it allows selection
        of the correct file by identity (e.g., visit, arm, spectrograph),
        without knowing the file naming convention.

        Parameters
        ----------
        identity : `pfs.datamodel.Identity`
            Identification of the data of interest.
        dirName : `str`, optional
            Directory from which to read.

        Returns
        -------
        self : `PfsFiberArraySet`
            Spectra read from file.
        """
        filename = os.path.join(dirName, cls.getFilename(identity))
        return cls.readFits(filename)

    def writeFits(self, filename):
        """Write to FITS file

        This API is intended for use by the LSST data butler, which handles
        translating the desired identity into a filename.

        Parameters
        ----------
        filename : `str`
            Filename of FITS file.
        """
        self.validate()  # fail early rather than write malformed data
        import astropy.io.fits
        fits = astropy.io.fits.HDUList()
        # Copy so that merging the mask-plane keywords does not mutate
        # self.metadata.
        header = self.metadata.copy()
        header.update(self.flags.toFitsHeader())
        fits.append(astropy.io.fits.PrimaryHDU(header=astropyHeaderFromDict(header)))
        # HDU order mirrors what readFits expects: named image HDUs after the
        # primary, identity HDU(s) appended last.
        for attr in ("fiberId", "wavelength", "flux", "mask", "sky", "covar"):
            hduName = attr.upper()
            data = getattr(self, attr)
            fits.append(astropy.io.fits.ImageHDU(data, name=hduName))

        self.identity.toFits(fits)
        with open(filename, "wb") as fd:
            fits.writeto(fd)

    def write(self, dirName="."):
        """Write to file

        This API is intended for use by science users, as it allows setting the
        correct filename from parameters that make sense, such as which
        exposure, spectrograph, etc.

        Parameters
        ----------
        dirName : `str`, optional
            Directory to which to write.
        """
        filename = os.path.join(dirName, self.filename)
        return self.writeFits(filename)

    @classmethod
    def fromMerge(cls, spectraList, metadata=None):
        """Construct from merging multiple spectra

        Parameters
        ----------
        spectraList : iterable of `PfsFiberArraySet`
            Spectra to combine.
        metadata : `dict` (`str`: POD), optional
            Keyword-value pairs for the header.

        Returns
        -------
        self : `PfsFiberArraySet`
            Merged spectra.

        Raises
        ------
        RuntimeError
            If the inputs do not all share a single spectrum length.
        """
        num = sum(len(ss) for ss in spectraList)
        length = set([ss.length for ss in spectraList])
        if len(length) != 1:
            # NOTE(review): an empty spectraList also lands here (the set is
            # empty), in which case this message is misleading.
            raise RuntimeError("Multiple lengths when merging spectra: %s" % (length,))
        length = length.pop()
        # Pre-allocate the output arrays; every element is overwritten by the
        # copy loop below.
        fiberId = np.empty(num, dtype=int)
        wavelength = np.empty((num, length), dtype=float)
        flux = np.empty((num, length), dtype=float)
        mask = np.empty((num, length), dtype=int)
        sky = np.empty((num, length), dtype=float)
        covar = np.empty((num, 3, length), dtype=float)
        index = 0
        for ss in spectraList:
            select = slice(index, index + len(ss))
            fiberId[select] = ss.fiberId
            wavelength[select] = ss.wavelength
            flux[select] = ss.flux
            mask[select] = ss.mask
            sky[select] = ss.sky
            covar[select] = ss.covar
            index += len(ss)
        identity = Identity.fromMerge([ss.identity for ss in spectraList])
        flags = MaskHelper.fromMerge(list(ss.flags for ss in spectraList))
        return cls(identity, fiberId, wavelength, flux, mask, sky, covar, flags,
                   metadata if metadata else {})

    def extractFiber(self, FiberArrayClass, pfsConfig, fiberId):
        """Extract a single fiber

        Pulls a single fiber out into a subclass of `pfs.datamodel.PfsFiberArray`.

        Parameters
        ----------
        FiberArrayClass : `type`
            Subclass of `pfs.datamodel.PfsFiberArray` to which to export.
        pfsConfig : `pfs.datamodel.PfsConfig`
            PFS top-end configuration.
        fiberId : `int`
            Fiber ID to export.

        Returns
        -------
        spectrum : ``SpectrumClass``
            Extracted spectrum.

        Raises
        ------
        RuntimeError
            If ``fiberId`` does not match exactly one fiber in this set, or
            exactly one fiber in ``pfsConfig``.
        """
        # Locate the (required unique) row for this fiber in our arrays...
        ii = np.nonzero(self.fiberId == fiberId)[0]
        if len(ii) != 1:
            raise RuntimeError("Number of fibers in PfsFiberArraySet with fiberId = %d is not unity (%d)" %
                               (fiberId, len(ii)))
        ii = ii[0]
        # ...and the corresponding (required unique) row in the pfsConfig.
        jj = np.nonzero(pfsConfig.fiberId == fiberId)[0]
        if len(jj) != 1:
            raise RuntimeError("Number of fibers in PfsConfig with fiberId = %d is not unity (%d)" %
                               (fiberId, len(jj)))
        jj = jj[0]

        fiberFlux = dict(zip(pfsConfig.filterNames[jj], pfsConfig.fiberFlux[jj]))
        target = Target(pfsConfig.catId[jj], pfsConfig.tract[jj], pfsConfig.patch[jj],
                        pfsConfig.objId[jj], pfsConfig.ra[jj], pfsConfig.dec[jj],
                        pfsConfig.targetType[jj], fiberFlux)
        obs = Observations.makeSingle(self.identity, pfsConfig, fiberId)

        # XXX not dealing with covariance properly.
        # The per-pixel covariance planes are copied through, but the
        # inter-pixel covariance (covar2) is just a 1x1 zero placeholder.
        covar = np.zeros((3, self.length), dtype=self.covar.dtype)
        covar[:] = self.covar[ii]
        covar2 = np.zeros((1, 1), dtype=self.covar.dtype)
        return FiberArrayClass(target, obs, self.wavelength[ii], self.flux[ii], self.mask[ii], self.sky[ii],
                               covar, covar2, self.flags)