"""

License: GPLv2+

Authors:
........
* Henning O. Sorensen & Erik Knudsen:
  Center for Fundamental Research: Metal Structures in Four Dimensions;
  Risoe National Laboratory;
  Frederiksborgvej 399;
  DK-4000 Roskilde;
  email: erik.knudsen@risoe.dk
* Jon Wright & Jérôme Kieffer:
  European Synchrotron Radiation Facility;
  Grenoble (France)

"""
from __future__ import with_statement
import os, logging, types
logger = logging.getLogger("edfimage")
import numpy
from fabioimage import fabioimage
from fabioutils import isAscii, toAscii, nice_int
from compression import decBzip2, decGzip, decZlib


BLOCKSIZE = 512
DATA_TYPES = {"SignedByte": numpy.int8,
              "Signed8": numpy.int8,
              "UnsignedByte": numpy.uint8,
              "Unsigned8": numpy.uint8,
              "SignedShort": numpy.int16,
              "Signed16": numpy.int16,
              "UnsignedShort": numpy.uint16,
              "Unsigned16": numpy.uint16,
              "UnsignedShortInteger": numpy.uint16,
              "SignedInteger": numpy.int32,
              "Signed32": numpy.int32,
              "UnsignedInteger": numpy.uint32,
              "Unsigned32": numpy.uint32,
              "SignedLong": numpy.int32,
              "UnsignedLong": numpy.uint32,
              "Signed64": numpy.int64,
              "Unsigned64": numpy.uint64,
              "FloatValue": numpy.float32,
              "FLOATVALUE": numpy.float32,
              "FLOAT": numpy.float32,
              "Float": numpy.float32,
              "FloatIEEE32": numpy.float32,
              "Float32": numpy.float32,
              "Double": numpy.float64,
              "DoubleValue": numpy.float64,
              "FloatIEEE64": numpy.float64,
              "DoubleIEEE64": numpy.float64}

try:
    DATA_TYPES["FloatIEEE128"] = numpy.float128
    DATA_TYPES["DoubleIEEE128"] = numpy.float128
    DATA_TYPES["QuadrupleValue"] = numpy.float128
except AttributeError:
    # numpy.float128 is not available on all platforms / numpy builds
    pass

NUMPY_EDF_DTYPE = {"int8": "SignedByte",
                   "int16": "SignedShort",
                   "int32": "SignedInteger",
                   "int64": "Signed64",
                   "uint8": "UnsignedByte",
                   "uint16": "UnsignedShort",
                   "uint32": "UnsignedInteger",
                   "uint64": "Unsigned64",
                   "float32": "FloatValue",
                   "float64": "DoubleValue",
                   "float128": "QuadrupleValue",
                   }
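# Illustrative note (not part of the original source): DATA_TYPES maps the
# "DataType" strings found in EDF headers to numpy dtypes, and NUMPY_EDF_DTYPE
# is the reverse mapping used when writing, e.g.:
#
#     >>> DATA_TYPES["UnsignedShort"] is numpy.uint16
#     True
#     >>> NUMPY_EDF_DTYPE[str(numpy.dtype(numpy.uint16))]
#     'UnsignedShort'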

MINIMUM_KEYS = ['HEADERID',
                'IMAGE',
                'BYTEORDER',
                'DATATYPE',
                'DIM_1',
                'DIM_2',
                'SIZE']

DEFAULT_VALUES = {}
94 """
95 A class representing a single frame in an EDF file
96 """
97 - def __init__(self, data=None, header=None, header_keys=None, number=None):

    def parseheader(self, block):
        """
        Parse the header in some EDF format from an already open file

        @param block: string representing the header block
        @type block: string, should be full ascii
        @return: size of the binary blob
        """
        # Reset the state before parsing
        self.header = {}
        self.capsHeader = {}
        self.header_keys = []
        self.size = None
        calcsize = 1
        self.dims = []

        for line in block.split(';'):
            if '=' in line:
                key, val = line.split('=', 1)
                key = key.replace("\x00", " ").strip()
                self.header[key] = val.replace("\x00", " ").strip()
                self.capsHeader[key.upper()] = key
                self.header_keys.append(key)

        # Compute the expected size of the binary blob
        if "SIZE" in self.capsHeader:
            try:
                self.size = nice_int(self.header[self.capsHeader["SIZE"]])
            except ValueError:
                logger.warning("Unable to convert to integer : %s %s " % (self.capsHeader["SIZE"], self.header[self.capsHeader["SIZE"]]))
        if "DIM_1" in self.capsHeader:
            try:
                dim1 = nice_int(self.header[self.capsHeader['DIM_1']])
            except ValueError:
                logger.error("Unable to convert to integer Dim_1: %s %s" % (self.capsHeader["DIM_1"], self.header[self.capsHeader["DIM_1"]]))
            else:
                calcsize *= dim1
                self.dims.append(dim1)
        else:
            logger.error("No Dim_1 in headers !!!")
        if "DIM_2" in self.capsHeader:
            try:
                dim2 = nice_int(self.header[self.capsHeader['DIM_2']])
            except ValueError:
                logger.error("Unable to convert to integer Dim_2: %s %s" % (self.capsHeader["DIM_2"], self.header[self.capsHeader["DIM_2"]]))
            else:
                calcsize *= dim2
                self.dims.append(dim2)
        else:
            logger.error("No Dim_2 in headers !!!")
        iDim = 3
        # Handle DIM_3 and beyond (nD images)
        while iDim is not None:
            strDim = "DIM_%i" % iDim
            if strDim in self.capsHeader:
                try:
                    dim3 = nice_int(self.header[self.capsHeader[strDim]])
                except ValueError:
                    logger.error("Unable to convert to integer %s: %s %s"
                                 % (strDim, self.capsHeader[strDim], self.header[self.capsHeader[strDim]]))
                    dim3 = None
                    iDim = None
                else:
                    if dim3 > 1:
                        # A dimension of 1 does not contribute to the size
                        calcsize *= dim3
                        self.dims.append(dim3)
                    iDim += 1
            else:
                logger.debug("No Dim_3 -> it is a 2D image")
                iDim = None
        if self._bytecode is None:
            if "DATATYPE" in self.capsHeader:
                self._bytecode = DATA_TYPES[self.header[self.capsHeader['DATATYPE']]]
            else:
                self._bytecode = numpy.uint16
                logger.warning("Defaulting type to uint16")
        self.bpp = len(numpy.array(0, self._bytecode).tostring())
        calcsize *= self.bpp
        if (self.size is None):
            self.size = calcsize
        elif (self.size != calcsize):
            if ("COMPRESSION" in self.capsHeader) and (self.header[self.capsHeader['COMPRESSION']].upper().startswith("NO")):
                logger.info("Mismatch between the expected size %s and the calculated one %s" % (self.size, calcsize))
                self.size = calcsize

        for i, n in enumerate(self.dims):
            setattr(self, "dim%i" % (i + 1), n)

        return self.size

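    # Illustrative sketch (not part of the original source): parseheader() is fed
    # the text found between the braces of an EDF header, i.e. ";"-terminated
    # "key = value" pairs such as:
    #
    #     block = ("HeaderID = EH:000001:000000:000000 ;\n"
    #              "Image = 1 ;\n"
    #              "ByteOrder = LowByteFirst ;\n"
    #              "DataType = UnsignedShort ;\n"
    #              "Dim_1 = 2048 ;\n"
    #              "Dim_2 = 2048 ;\n"
    #              "Size = 8388608 ;\n")
    #     frame = Frame()
    #     frame.parseheader(block)   # returns 8388608 and fills header/dims/bpp
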
227 """
228 Decide if we need to byteswap
229 """
230 if ('Low' in self.header[self.capsHeader['BYTEORDER']] and numpy.little_endian) or \
231 ('High' in self.header[self.capsHeader['BYTEORDER']] and not numpy.little_endian):
232 return False
233 if ('High' in self.header[self.capsHeader['BYTEORDER']] and numpy.little_endian) or \
234 ('Low' in self.header[self.capsHeader['BYTEORDER']] and not numpy.little_endian):
235 if self.bpp in [2, 4, 8]:
236 return True
237 else:
238 return False
239
240
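    # Illustrative note (not from the original source): "ByteOrder" is
    # "LowByteFirst" for little-endian data and "HighByteFirst" for big-endian
    # data; swapping is only needed when the file and the machine disagree and
    # the element size is 2, 4 or 8 bytes, e.g.:
    #
    #     frame.header["ByteOrder"] = "HighByteFirst"
    #     frame.capsHeader["BYTEORDER"] = "ByteOrder"
    #     frame.bpp = 2
    #     frame.swap_needed()   # True on a little-endian machine
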
242 """
243 Unpack a binary blob according to the specification given in the header
244
245 @return: dataset as numpy.ndarray
246 """
247 data = None
248 if self._data is not None:
249 data = self._data
250 elif self.file is None:
251 data = self._data
252 else:
253 if self._bytecode is None:
254 if "DATATYPE" in self.capsHeader:
255 self._bytecode = DATA_TYPES[self.header[self.capsHeader["DATATYPE"]]]
256 else:
257 self._bytecode = numpy.uint16
258 dims = self.dims[:]
259 dims.reverse()
260 with self.file.lock:
261 if self.file.closed:
262 logger.error("file: %s from %s is closed. Cannot read data." % (self.file, self.file.filename))
263 return
264 else:
265 self.file.seek(self.start)
266 fileData = self.file.read(self.size)
267
268 if ("COMPRESSION" in self.capsHeader):
269 compression = self.header[self.capsHeader["COMPRESSION"]].upper()
270 uncompressed_size = self.bpp
271 for i in dims:
272 uncompressed_size *= i
273 if "OFFSET" in compression :
274 try:
275 import byte_offset
276 except ImportError, error:
277 logger.error("Unimplemented compression scheme: %s (%s)" % (compression, error))
278 else:
279 myData = byte_offset.analyseCython(fileData, size=uncompressed_size)
280 rawData = myData.astype(self._bytecode).tostring()
281 self.size = uncompressed_size
282 elif compression == "NONE":
283 rawData = fileData
284 elif "GZIP" in compression:
285 rawData = decGzip(fileData)
286 self.size = uncompressed_size
287 elif "BZ" in compression :
288 rawData = decBzip2(fileData)
289 self.size = uncompressed_size
290 elif "Z" in compression :
291 rawData = decZlib(fileData)
292 self.size = uncompressed_size
293 else:
294 logger.warning("Unknown compression scheme %s" % compression)
295 rawData = fileData
296
297 else:
298 rawData = fileData
299
300 expected = self.size
301 obtained = len(rawData)
302 if expected > obtained:
303 logger.error("Data stream is incomplete: %s < expected %s bytes" % (obtained, expected))
304 rawData += "\x00" * (expected - obtained)
305 elif expected < len(rawData):
306 logger.info("Data stream contains trailing junk : %s > expected %s bytes" % (obtained, expected))
307 rawData = rawData[:expected]
308 if self.swap_needed():
309 data = numpy.fromstring(rawData, self._bytecode).byteswap().reshape(tuple(dims))
310 else:
311 data = numpy.fromstring(rawData, self._bytecode).reshape(tuple(dims))
312 self._data = data
313 self._bytecode = data.dtype.type
314 return data
316 """Setter for data in edf frame"""
317 self._data = npa
318 data = property(getData, setData, "property: (edf)frame.data, uncompress the datablock when needed")

    def getByteCode(self):
        if self._bytecode is None:
            self._bytecode = self.data.dtype.type
        return self._bytecode

    def setByteCode(self, _iVal):
        self._bytecode = _iVal
    bytecode = property(getByteCode, setByteCode)

    def getEdfBlock(self, force_type=None, fit2dMode=False):
        """
        @param force_type: type of the dataset to be enforced like "float64" or "uint16"
        @type force_type: string or numpy.dtype
        @param fit2dMode: enforce compatibility with fit2d and starts counting number of images at 1
        @type fit2dMode: boolean
        @return: ascii header block followed by the binary data block
        @rtype: python string with the concatenation of the ascii header and the binary data block
        """
        if force_type is not None:
            data = self.data.astype(force_type)
        else:
            data = self.data
        fit2dMode = bool(fit2dMode)
        for key in self.header:
            KEY = key.upper()
            if KEY not in self.capsHeader:
                self.capsHeader[KEY] = key
            if key not in self.header_keys:
                self.header_keys.append(key)

        header = self.header.copy()
        header_keys = self.header_keys[:]
        capsHeader = self.capsHeader.copy()

        listHeader = ["{\n"]
        # Remove the keys which are rewritten below with canonical names
        for i in capsHeader:
            if "DIM_" in i:
                header.pop(capsHeader[i])
                header_keys.remove(capsHeader[i])
        for KEY in ["SIZE", "EDF_BINARYSIZE", "EDF_HEADERSIZE", "BYTEORDER", "DATATYPE", "HEADERID", "IMAGE"]:
            if KEY in capsHeader:
                header.pop(capsHeader[KEY])
                header_keys.remove(capsHeader[KEY])
        if "EDF_DATABLOCKID" in capsHeader:
            header_keys.remove(capsHeader["EDF_DATABLOCKID"])
            # EDF_DataBlockID stays in the header, only its spelling is normalized
            if capsHeader["EDF_DATABLOCKID"] != "EDF_DataBlockID":
                header["EDF_DataBlockID"] = header.pop(capsHeader["EDF_DATABLOCKID"])
                capsHeader["EDF_DATABLOCKID"] = "EDF_DataBlockID"

        # Then add the mandatory keys, which must come first
        header_keys.insert(0, "Size")
        header["Size"] = len(data.tostring())
        header_keys.insert(0, "HeaderID")
        header["HeaderID"] = "EH:%06d:000000:000000" % (self.iFrame + fit2dMode)
        header_keys.insert(0, "Image")
        header["Image"] = str(self.iFrame + fit2dMode)

        dims = list(data.shape)
        nbdim = len(dims)
        for i in dims:
            key = "Dim_%i" % nbdim
            header[key] = i
            header_keys.insert(0, key)
            nbdim -= 1
        header_keys.insert(0, "DataType")
        header["DataType"] = NUMPY_EDF_DTYPE[str(numpy.dtype(data.dtype))]
        header_keys.insert(0, "ByteOrder")
        if numpy.little_endian:
            header["ByteOrder"] = "LowByteFirst"
        else:
            header["ByteOrder"] = "HighByteFirst"
        approxHeaderSize = 100
        for key in header:
            approxHeaderSize += 7 + len(key) + len(str(header[key]))
        approxHeaderSize = BLOCKSIZE * (approxHeaderSize // BLOCKSIZE + 1)
        header_keys.insert(0, "EDF_HeaderSize")
        header["EDF_HeaderSize"] = str(BLOCKSIZE * (approxHeaderSize // BLOCKSIZE + 1))
        header_keys.insert(0, "EDF_BinarySize")
        header["EDF_BinarySize"] = len(data.tostring())
        header_keys.insert(0, "EDF_DataBlockID")
        if "EDF_DataBlockID" not in header:
            header["EDF_DataBlockID"] = "%i.Image.Psd" % (self.iFrame + fit2dMode)
        preciseSize = 4  # 2 bytes for the opening "{\n" and 2 for the closing "}\n"
        for key in header_keys:
            # Skip keys or values that are not plain ascii
            strKey = str(key)
            if not isAscii(strKey, listExcluded=["}", "{"]):
                logger.warning("Non ascii key %s, skipping" % strKey)
                continue
            strValue = str(header[key])
            if not isAscii(strValue, listExcluded=["}", "{"]):
                logger.warning("Non ascii value %s, skipping" % strValue)
                continue
            line = strKey + " = " + strValue + " ;\n"
            preciseSize += len(line)
            listHeader.append(line)
        if preciseSize > approxHeaderSize:
            logger.error("I expected the header block only at %s in fact it is %s" % (approxHeaderSize, preciseSize))
            for idx, line in enumerate(listHeader[:]):
                if line.startswith("EDF_HeaderSize"):
                    headerSize = BLOCKSIZE * (preciseSize // BLOCKSIZE + 1)
                    newline = "EDF_HeaderSize = %s ;\n" % headerSize
                    delta = len(newline) - len(line)
                    if (preciseSize // BLOCKSIZE) != ((preciseSize + delta) // BLOCKSIZE):
                        headerSize = BLOCKSIZE * ((preciseSize + delta) // BLOCKSIZE + 1)
                        newline = "EDF_HeaderSize = %s ;\n" % headerSize
                    preciseSize = preciseSize + delta
                    listHeader[idx] = newline
                    break
        else:
            headerSize = approxHeaderSize
        listHeader.append(" " * (headerSize - preciseSize) + "}\n")
        return "".join(listHeader) + data.tostring()

438 """ Read and try to write the ESRF edf data format """
439
440 - def __init__(self, data=None , header=None, header_keys=None, frames=None):
441 self.currentframe = 0
442 self.filesize = None
443 try:
444 dim = len(data.shape)
445 except Exception, error:
446 logger.debug("Data don't look like a numpy array (%s), resetting all!!" % error)
447 data = None
448 dim = 0
449 fabioimage.__init__(self, data, header)
450 if dim == 2:
451 fabioimage.__init__(self, data, header)
452 elif dim == 1 :
453 data.shape = (0, len(data))
454 fabioimage.__init__(self, data, header)
455 elif dim == 3 :
456 fabioimage.__init__(self, data[0, :, :], header)
457 elif dim == 4 :
458 fabioimage.__init__(self, data[0, 0, :, :], header)
459 elif dim == 5 :
460 fabioimage.__init__(self, data[0, 0, 0, :, :], header)
461
462 if frames is None:
463 frame = Frame(data=self.data, header=self.header,
464 header_keys=header_keys ,
465 number=self.currentframe)
466 self.__frames = [frame]
467 else:
468 self.__frames = frames
469
    @staticmethod
    def checkHeader(header=None):
        """
        Empty for fabioimage but may be populated by other classes
        """
        if type(header) != types.DictionaryType:
            return {}
        new = {}
        for key, value in header.items():
            new[toAscii(key, ";{}")] = toAscii(value, ";{}")
        return new

    @staticmethod
    def _readHeaderBlock(infile):
        """
        Read in a header in some EDF format from an already open file

        @param infile: file object open in read mode
        @return: string (or None if no header was found)
        """
        block = infile.read(BLOCKSIZE)
        if len(block) < BLOCKSIZE:
            logger.debug("Under-short header: only %i bytes in %s" % (len(block), infile.name))
            return
        if (block.find("{") < 0):
            # This does not look like an EDF file
            logger.warning("no opening {. Corrupt header of EDF file %s" % infile.name)
            return
        while '}' not in block:
            block = block + infile.read(BLOCKSIZE)
            if len(block) > BLOCKSIZE * 20:
                logger.warning("Runaway header in EDF file")
                return
        start = block.find("{") + 1
        end = block.find("}")

        # Move the file pointer to the start of the binary section
        if block[end: end + 3] == "}\r\n":
            offset = end + 3 - len(block)
        elif block[end: end + 2] == "}\n":
            offset = end + 2 - len(block)
        else:
            logger.error("Unable to locate start of the binary section")
            offset = None
        if offset is not None:
            infile.seek(offset, os.SEEK_CUR)
        return block[start:end]


521 """
522 Read all headers in a file and populate self.header
523 data is not yet populated
524 @type infile: file object open in read mode
525 """
526 self.__frames = []
527 bContinue = True
528 while bContinue:
529 block = self._readHeaderBlock(infile)
530 if block is None:
531 bContinue = False
532 break
533 frame = Frame(number=self.nframes)
534 size = frame.parseheader(block)
535 frame.file = infile
536 frame.start = infile.tell()
537 frame.size = size
538 self.__frames += [frame]
539 try:
540 infile.seek(size, os.SEEK_CUR)
541 except Exception, error:
542 logger.warning("infile is %s" % infile)
543 logger.warning("Position is %s" % infile.tell())
544 logger.warning("size is %s" % size)
545 logger.error("It seams this error occurs under windows when reading a (large-) file over network: %s ", error)
546 raise Exception(error)
547
548 if frame.start + size > infile.size:
549 logger.warning("Non complete datablock: got %s, expected %s" % (infile.size - frame.start, size))
550 bContinue = False
551 break
552
553 for i, frame in enumerate(self.__frames):
554 missing = []
555 for item in MINIMUM_KEYS:
556 if item not in frame.capsHeader:
557 missing.append(item)
558 if len(missing) > 0:
559 logger.info("EDF file %s frame %i misses mandatory keys: %s " % (self.filename, i, " ".join(missing)))
560 self.currentframe = 0
561
562
    def read(self, fname, frame=None):
        """
        Read in header into self.header and
        the data into self.data
        """
        self.resetvals()
        self.filename = fname

        infile = self._open(fname, "rb")
        self._readheader(infile)
        if frame is None:
            pass
        elif frame < self.nframes:
            self = self.getframe(frame)
        else:
            logger.error("Reading file %s: you requested frame %s but only %s frames are available", fname, frame, self.nframes)
            self.resetvals()

        self.pilimage = None
        return self
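
    # Illustrative usage sketch (assumes a file "run0001.edf" exists):
    #
    #     img = edfimage()
    #     img = img.read("run0001.edf")   # read() returns the image object
    #     print img.header["DataType"], img.data.shape
    #
    # or, going through the top-level factory:
    #
    #     import fabio
    #     img = fabio.open("run0001.edf")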

    def swap_needed(self):
        """
        Decide if we need to byteswap
        """
        if ('Low' in self.header[self.capsHeader['BYTEORDER']] and numpy.little_endian) or \
           ('High' in self.header[self.capsHeader['BYTEORDER']] and not numpy.little_endian):
            return False
        if ('High' in self.header[self.capsHeader['BYTEORDER']] and numpy.little_endian) or \
           ('Low' in self.header[self.capsHeader['BYTEORDER']] and not numpy.little_endian):
            if self.bpp in [2, 4, 8]:
                return True
            else:
                return False

599 """
600 Unpack a binary blob according to the specification given in the header and return the dataset
601
602 @return: dataset as numpy.ndarray
603 """
604 return self.__frames[self.currentframe].getData()
605
606
608 """ returns the file numbered 'num' in the series as a fabioimage """
609 newImage = None
610 if self.nframes == 1:
611 logger.debug("Single frame EDF; having fabioimage default behavour: %s" % num)
612 newImage = fabioimage.getframe(self, num)
613 elif num in xrange(self.nframes):
614 logger.debug("Multi frame EDF; having edfimage specific behavour: %s/%s" % (num, self.nframes))
615 newImage = edfimage(frames=self.__frames)
616 newImage.currentframe = num
617 newImage.filename = self.filename
618 else:
619 txt = "Cannot access frame: %s/%s" % (num, self.nframes)
620 logger.error(txt)
621 raise ValueError("edfimage.getframe:" + txt)
622 return newImage
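
    # Illustrative sketch (assumes "stack.edf" is a multi-frame EDF file):
    #
    #     stack = edfimage().read("stack.edf")
    #     for idx in range(stack.nframes):
    #         frame = stack.getframe(idx)   # an edfimage positioned on frame idx
    #         print idx, frame.data.mean()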

    def previous(self):
        """ returns the previous file in the series as a fabioimage """
        newImage = None
        if self.nframes == 1:
            newImage = fabioimage.previous(self)
        else:
            newFrameId = self.currentframe - 1
            newImage = self.getframe(newFrameId)
        return newImage

    def next(self):
        """ returns the next file in the series as a fabioimage """
        newImage = None
        if self.nframes == 1:
            newImage = fabioimage.next(self)
        else:
            newFrameId = self.currentframe + 1
            newImage = self.getframe(newFrameId)
        return newImage

    def write(self, fname, force_type=None, fit2dMode=False):
        """
        Try to write a file
        check we can write zipped also
        mimics that fabian was writing uint16 (we sometimes want floats)

        @param force_type: can be numpy.uint16 or simply "float"
        @return: None
        """
        outfile = self._open(fname, mode="wb")
        for i, frame in enumerate(self.__frames):
            frame.iFrame = i
            outfile.write(frame.getEdfBlock(force_type=force_type, fit2dMode=fit2dMode))
        outfile.close()
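
    # Illustrative sketch (not part of the original source): build a two-frame
    # EDF file from numpy arrays.
    #
    #     ary = numpy.arange(100, dtype="uint16").reshape(10, 10)
    #     out = edfimage(data=ary, header={"Title": "first frame"})
    #     out.appendFrame(data=ary * 2, header={"Title": "second frame"})
    #     out.write("twoframes.edf")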

    def appendFrame(self, frame=None, data=None, header=None):
        """
        Method used to add a frame to an EDF file
        @param frame: frame to append to edf image
        @type frame: instance of Frame
        @return: None
        """
        if isinstance(frame, Frame):
            self.__frames.append(frame)
        else:
            self.__frames.append(Frame(data, header))

    def deleteFrame(self, frameNb=None):
        """
        Method used to remove a frame from an EDF image. By default the last one is removed.
        @param frameNb: frame number to remove, by default the last.
        @type frameNb: integer
        @return: None
        """
        if frameNb is None:
            self.__frames.pop()
        else:
            self.__frames.pop(frameNb)

691 """
692 This is a special method that will read and return the data from another file ...
693 The aim is performances, ... but only supports uncompressed files.
694
695 @return: data from another file using positions from current edfimage
696 """
697 if (filename is None) or not os.path.isfile(filename):
698 raise RuntimeError("edfimage.fastReadData is only valid with another file: %s does not exist" % (filename))
699 data = None
700 frame = self.__frames[self.currentframe]
701 with open(filename, "rb")as f:
702 f.seek(frame.start)
703 raw = f.read(frame.size)
704 try:
705 data = numpy.fromstring(raw, dtype=self.bytecode)
706 data.shape = self.data.shape
707 except Exception, err :
708 logger.error("unable to convert file content to numpy array: %s", err)
709 return data
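
    # Illustrative sketch: reuse the offsets of the already-parsed current image
    # to load the raw data of another, identically formatted uncompressed file.
    #
    #     ref = edfimage().read("run0001.edf")
    #     ary = ref.fastReadData("run0002.edf")   # same shape/dtype as ref.data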

    def fastReadROI(self, filename=None, coords=None):
        """
        Method reading a Region of Interest of another file based on metadata available in the current edfimage.
        The aim is performance; it only works on uncompressed files.

        @return: ROI-data from another file using positions from current edfimage
        @rtype: numpy 2darray
        """
        if (filename is None) or not os.path.isfile(filename):
            raise RuntimeError("edfimage.fastReadROI is only valid with another file: %s does not exist" % (filename))
        data = None
        frame = self.__frames[self.currentframe]

        if len(coords) == 4:
            slice1 = self.make_slice(coords)
        elif len(coords) == 2 and isinstance(coords[0], slice) and \
             isinstance(coords[1], slice):
            slice1 = coords
        else:
            logger.warning('readROI: Unable to understand Region Of Interest: got %s', coords)
            return
        d1 = self.data.shape[-1]
        start0 = slice1[0].start
        start1 = slice1[1].start
        slice2 = (slice(0, slice1[0].stop - start0, slice1[0].step),
                  slice(0, slice1[1].stop - start1, slice1[1].step))
        start = frame.start + self.bpp * (d1 * start0 + start1)
        size = self.bpp * ((slice2[0].stop) * d1)
        with open(filename, "rb") as f:
            f.seek(start)
            raw = f.read(size)
        try:
            data = numpy.fromstring(raw, dtype=self.bytecode)
            data.shape = -1, d1
        except Exception, err:
            logger.error("unable to convert file content to numpy array: %s", err)
        return data[slice2]

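    # Illustrative sketch: read only a 256x256 region of "run0002.edf" using the
    # geometry of the already-parsed "run0001.edf".
    #
    #     ref = edfimage().read("run0001.edf")
    #     roi = ref.fastReadROI("run0002.edf", coords=(slice(0, 256), slice(0, 256)))
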
754 """
755 Getter for number of frames
756 """
757 return len(self.__frames)
759 """
760 Setter for number of frames ... should do nothing. Here just to avoid bugs
761 """
762 if val != len(self.__frames):
763 logger.warning("trying to set the number of frames ")
764 nframes = property(getNbFrames, setNbFrames, "property: number of frames in EDF file")
765
    def getHeader(self):
        """
        Getter for the headers; used by the property `header`
        """
        return self.__frames[self.currentframe].header

    def setHeader(self, _dictHeader):
        """
        Enforces the propagation of the header to the list of frames
        """
        try:
            self.__frames[self.currentframe].header = _dictHeader
        except AttributeError:
            self.__frames = [Frame(header=_dictHeader)]
        except IndexError:
            if self.currentframe < len(self.__frames):
                self.__frames.append(Frame(header=_dictHeader))

    def delHeader(self):
        """
        Deleter for edf header
        """
        self.__frames[self.currentframe].header = {}
    header = property(getHeader, setHeader, delHeader, "property: header of EDF file")

791 """
792 Getter for edf header_keys
793 """
794 return self.__frames[self.currentframe].header_keys
796 """
797 Enforces the propagation of the header_keys to the list of frames
798 @param _listtHeader: list of the (ordered) keys in the header
799 @type _listtHeader: python list
800 """
801 try:
802 self.__frames[self.currentframe].header_keys = _listtHeader
803 except AttributeError:
804 self.__frames = [Frame(header_keys=_listtHeader)]
805 except IndexError:
806 if self.currentframe < len(self.__frames):
807 self.__frames.append(Frame(header_keys=_listtHeader))
809 """
810 Deleter for edf header_keys
811 """
812 self.__frames[self.currentframe].header_keys = []
813 header_keys = property(getHeaderKeys, setHeaderKeys, delHeaderKeys, "property: header_keys of EDF file")
814
816 """
817 getter for edf Data
818 @return: data for current frame
819 @rtype: numpy.ndarray
820 """
821 npaData = None
822 try:
823 npaData = self.__frames[self.currentframe].data
824 except AttributeError:
825 self.__frames = [Frame()]
826 npaData = self.__frames[self.currentframe].data
827 except IndexError:
828 if self.currentframe < len(self.__frames):
829 self.__frames.append(Frame())
830 npaData = self.__frames[self.currentframe].data
831 return npaData
832
834 """
835 Enforces the propagation of the data to the list of frames
836 @param _data: numpy array representing data
837 """
838 try:
839 self.__frames[self.currentframe].data = _data
840 except AttributeError:
841 self.__frames = [Frame(data=_data)]
842 except IndexError:
843 if self.currentframe < len(self.__frames):
844 self.__frames.append(Frame(data=_data))
846 """
847 deleter for edf Data
848 """
849 self.__frames[self.currentframe].data = None
850 data = property(getData, setData, delData, "property: data of EDF file")
851
853 """
854 getter for edf headers keys in upper case
855 @return: data for current frame
856 @rtype: dict
857 """
858 return self.__frames[self.currentframe].capsHeader
860 """
861 Enforces the propagation of the header_keys to the list of frames
862 @param _data: numpy array representing data
863 """
864 self.__frames[self.currentframe].capsHeader = _data
866 """
867 deleter for edf capsHeader
868 """
869 self.__frames[self.currentframe].capsHeader = {}
870 capsHeader = property(getCapsHeader, setCapsHeader, delCapsHeader, "property: capsHeader of EDF file, i.e. the keys of the header in UPPER case.")
871
    def getDim1(self):
        return self.__frames[self.currentframe].dim1

    def setDim1(self, _iVal):
        try:
            self.__frames[self.currentframe].dim1 = _iVal
        except AttributeError:
            self.__frames = [Frame()]
        except IndexError:
            if self.currentframe < len(self.__frames):
                self.__frames.append(Frame())
                self.__frames[self.currentframe].dim1 = _iVal
    dim1 = property(getDim1, setDim1)

    def getDim2(self):
        return self.__frames[self.currentframe].dim2

    def setDim2(self, _iVal):
        try:
            self.__frames[self.currentframe].dim2 = _iVal
        except AttributeError:
            self.__frames = [Frame()]
        except IndexError:
            if self.currentframe < len(self.__frames):
                self.__frames.append(Frame())
                self.__frames[self.currentframe].dim2 = _iVal
    dim2 = property(getDim2, setDim2)

    def getDims(self):
        return self.__frames[self.currentframe].dims
    dims = property(getDims)

    def getByteCode(self):
        return self.__frames[self.currentframe].bytecode

    def setByteCode(self, _iVal):
        try:
            self.__frames[self.currentframe].bytecode = _iVal
        except AttributeError:
            self.__frames = [Frame()]
        except IndexError:
            if self.currentframe < len(self.__frames):
                self.__frames.append(Frame())
                self.__frames[self.currentframe].bytecode = _iVal
    bytecode = property(getByteCode, setByteCode)

    def getBpp(self):
        return self.__frames[self.currentframe].bpp

    def setBpp(self, _iVal):
        try:
            self.__frames[self.currentframe].bpp = _iVal
        except AttributeError:
            self.__frames = [Frame()]
        except IndexError:
            if self.currentframe < len(self.__frames):
                self.__frames.append(Frame())
                self.__frames[self.currentframe].bpp = _iVal
    bpp = property(getBpp, setBpp)