1 "table definitions"
2 import os
3 import sys
4 import csv
5 import codecs
6 import locale
7 import unicodedata
8 import weakref
9 from array import array
10 from bisect import bisect_left, bisect_right
11 from decimal import Decimal
12 from shutil import copyfileobj
13 from dbf import _io as io
14 from dbf.dates import Date, DateTime, Time
15 from dbf.exceptions import Bof, Eof, DbfError, DataOverflow, FieldMissing, NonUnicode, DoNotIndex
16
17 input_decoding = locale.getdefaultlocale()[1]
18 default_codepage = 'cp1252'
19 return_ascii = True
20
21 version_map = {
22 '\x02' : 'FoxBASE',
23 '\x03' : 'dBase III Plus',
24 '\x04' : 'dBase IV',
25 '\x05' : 'dBase V',
26 '\x30' : 'Visual FoxPro',
27 '\x31' : 'Visual FoxPro (auto increment field)',
28 '\x43' : 'dBase IV SQL',
29 '\x7b' : 'dBase IV w/memos',
30 '\x83' : 'dBase III Plus w/memos',
31 '\x8b' : 'dBase IV w/memos',
32 '\x8e' : 'dBase IV w/SQL table',
33 '\xf5' : 'FoxPro w/memos'}
34
35 code_pages = {
36 '\x00' : ('ascii', "plain ol' ascii"),
37 '\x01' : ('cp437', 'U.S. MS-DOS'),
38 '\x02' : ('cp850', 'International MS-DOS'),
39 '\x03' : ('cp1252', 'Windows ANSI'),
40 '\x04' : ('mac_roman', 'Standard Macintosh'),
41 '\x08' : ('cp865', 'Danish OEM'),
42 '\x09' : ('cp437', 'Dutch OEM'),
43 '\x0A' : ('cp850', 'Dutch OEM (secondary)'),
44 '\x0B' : ('cp437', 'Finnish OEM'),
45 '\x0D' : ('cp437', 'French OEM'),
46 '\x0E' : ('cp850', 'French OEM (secondary)'),
47 '\x0F' : ('cp437', 'German OEM'),
48 '\x10' : ('cp850', 'German OEM (secondary)'),
49 '\x11' : ('cp437', 'Italian OEM'),
50 '\x12' : ('cp850', 'Italian OEM (secondary)'),
51 '\x13' : ('cp932', 'Japanese Shift-JIS'),
52 '\x14' : ('cp850', 'Spanish OEM (secondary)'),
53 '\x15' : ('cp437', 'Swedish OEM'),
54 '\x16' : ('cp850', 'Swedish OEM (secondary)'),
55 '\x17' : ('cp865', 'Norwegian OEM'),
56 '\x18' : ('cp437', 'Spanish OEM'),
57 '\x19' : ('cp437', 'English OEM (Britain)'),
58 '\x1A' : ('cp850', 'English OEM (Britain) (secondary)'),
59 '\x1B' : ('cp437', 'English OEM (U.S.)'),
60 '\x1C' : ('cp863', 'French OEM (Canada)'),
61 '\x1D' : ('cp850', 'French OEM (secondary)'),
62 '\x1F' : ('cp852', 'Czech OEM'),
63 '\x22' : ('cp852', 'Hungarian OEM'),
64 '\x23' : ('cp852', 'Polish OEM'),
65 '\x24' : ('cp860', 'Portuguese OEM'),
66 '\x25' : ('cp850', 'Portuguese OEM (secondary)'),
67 '\x26' : ('cp866', 'Russian OEM'),
68 '\x37' : ('cp850', 'English OEM (U.S.) (secondary)'),
69 '\x40' : ('cp852', 'Romanian OEM'),
70 '\x4D' : ('cp936', 'Chinese GBK (PRC)'),
71 '\x4E' : ('cp949', 'Korean (ANSI/OEM)'),
72 '\x4F' : ('cp950', 'Chinese Big 5 (Taiwan)'),
73 '\x50' : ('cp874', 'Thai (ANSI/OEM)'),
74 '\x57' : ('cp1252', 'ANSI'),
75 '\x58' : ('cp1252', 'Western European ANSI'),
76 '\x59' : ('cp1252', 'Spanish ANSI'),
77 '\x64' : ('cp852', 'Eastern European MS-DOS'),
78 '\x65' : ('cp866', 'Russian MS-DOS'),
79 '\x66' : ('cp865', 'Nordic MS-DOS'),
80 '\x67' : ('cp861', 'Icelandic MS-DOS'),
81 '\x68' : (None, 'Kamenicky (Czech) MS-DOS'),
82 '\x69' : (None, 'Mazovia (Polish) MS-DOS'),
83 '\x6a' : ('cp737', 'Greek MS-DOS (437G)'),
84 '\x6b' : ('cp857', 'Turkish MS-DOS'),
85 '\x78' : ('cp950', 'Traditional Chinese (Hong Kong SAR, Taiwan) Windows'),
86 '\x79' : ('cp949', 'Korean Windows'),
87 '\x7a' : ('cp936', 'Chinese Simplified (PRC, Singapore) Windows'),
88 '\x7b' : ('cp932', 'Japanese Windows'),
89 '\x7c' : ('cp874', 'Thai Windows'),
90 '\x7d' : ('cp1255', 'Hebrew Windows'),
91 '\x7e' : ('cp1256', 'Arabic Windows'),
92 '\xc8' : ('cp1250', 'Eastern European Windows'),
93 '\xc9' : ('cp1251', 'Russian Windows'),
94 '\xca' : ('cp1254', 'Turkish Windows'),
95 '\xcb' : ('cp1253', 'Greek Windows'),
96 '\x96' : ('mac_cyrillic', 'Russian Macintosh'),
97 '\x97' : ('mac_latin2', 'Macintosh EE'),
98 '\x98' : ('mac_greek', 'Greek Macintosh') }
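# Illustrative sketch: the two maps above turn the header's version byte and the
# codepage byte into a readable name and a Python codec; _codepage_lookup(),
# referenced later in this module, wraps the same lookup with error handling.
_example_codec, _example_cp_name = code_pages['\x03']       # ('cp1252', 'Windows ANSI')
_example_decoder = codecs.getdecoder(_example_codec)
_example_version = version_map.get('\x83', 'Unknown')       # 'dBase III Plus w/memos'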
99
100 if sys.version_info[:2] < (2, 6):
103 "Emulate PyProperty_Type() in Objects/descrobject.c"
104
105 def __init__(self, fget=None, fset=None, fdel=None, doc=None):
106 self.fget = fget
107 self.fset = fset
108 self.fdel = fdel
109 self.__doc__ = doc or fget.__doc__
111 self.fget = func
112 if not self.__doc__:
113 self.__doc__ = fget.__doc__
114 def __get__(self, obj, objtype=None):
115 if obj is None:
116 return self
117 if self.fget is None:
118 raise AttributeError, "unreadable attribute"
119 return self.fget(obj)
121 if self.fset is None:
122 raise AttributeError, "can't set attribute"
123 self.fset(obj, value)
125 if self.fdel is None:
126 raise AttributeError, "can't delete attribute"
127 self.fdel(obj)
129 self.fset = func
130 return self
132 self.fdel = func
133 return self
134
136 """Provides routines to extract and save data within the fields of a dbf record."""
137 __slots__ = ['_recnum', '_layout', '_data', '_dirty', '__weakref__']
139 """calls appropriate routine to fetch value stored in field from array
140 @param record_data: the data portion of the record
141 @type record_data: array of characters
142 @param fielddef: description of the field definition
143 @type fielddef: dictionary with keys 'type', 'start', 'length', 'end', 'decimals', and 'flags'
144 @returns: python data stored in field"""
145
146 field_type = fielddef['type']
147 retrieve = yo._layout.fieldtypes[field_type]['Retrieve']
148 datum = retrieve(record_data, fielddef, yo._layout.memo)
149 if field_type in yo._layout.character_fields:
150 datum = yo._layout.decoder(datum)[0]
151 if yo._layout.return_ascii:
152 try:
153 datum = yo._layout.output_encoder(datum)[0]
154 except UnicodeEncodeError:
155 datum = unicodedata.normalize('NFD', datum).encode('ascii','ignore')
156 return datum
158 "calls appropriate routine to convert value to ascii bytes, and save it in record"
159 field_type = fielddef['type']
160 update = yo._layout.fieldtypes[field_type]['Update']
161 if field_type in yo._layout.character_fields:
162 if not isinstance(value, unicode):
163 if yo._layout.input_decoder is None:
164 raise NonUnicode("String not in unicode format, no default encoding specified")
165 value = yo._layout.input_decoder(value)[0]
166 value = yo._layout.encoder(value)[0]
167 bytes = array('c', update(value, fielddef, yo._layout.memo))
168 size = fielddef['length']
169 if len(bytes) > size:
170 raise DataOverflow("tried to store %d bytes in %d byte field" % (len(bytes), size))
171 blank = array('c', ' ' * size)
172 start = fielddef['start']
173 end = start + size
174 blank[:len(bytes)] = bytes[:]
175 yo._data[start:end] = blank[:]
176 yo._dirty = True
191 results = []
192 if not specs:
193 specs = yo._layout.index
194 specs = _normalize_tuples(tuples=specs, length=2, filler=[_nop])
195 for field, func in specs:
196 results.append(func(yo[field]))
197 return tuple(results)
198
204 if name[0:2] == '__' and name[-2:] == '__':
205 raise AttributeError, 'Method %s is not implemented.' % name
206 elif not name in yo._layout.fields:
207 raise FieldMissing(name)
208 try:
209 fielddef = yo._layout[name]
210 value = yo._retrieveFieldValue(yo._data[fielddef['start']:fielddef['end']], fielddef)
211 return value
212 except DbfError, error:
213 error.message = "field --%s-- is %s -> %s" % (name, yo._layout.fieldtypes[fielddef['type']]['Type'], error.message)
214 raise
231 def __new__(cls, recnum, layout, kamikaze='', _fromdisk=False):
270 if type(name) == str:
271 yo.__setattr__(name, value)
272 elif type(name) in (int, long):
273 yo.__setattr__(yo._layout.fields[name], value)
274 elif type(name) == slice:
275 sequence = []
276 for field in yo._layout.fields[name]:
277 sequence.append(field)
278 if len(sequence) != len(value):
279 raise DbfError("length of slices not equal")
280 for field, val in zip(sequence, value):
281 yo[field] = val
282 else:
283 raise TypeError("%s is not a field name" % name)
285 result = []
286 for seq, field in enumerate(yo.field_names):
287 result.append("%3d - %-10s: %s" % (seq, field, yo[field]))
288 return '\n'.join(result)
290 return yo._data.tostring()
292 "creates a blank record data chunk"
293 layout = yo._layout
294 ondisk = layout.ondisk
295 layout.ondisk = False
296 yo._data = array('c', ' ' * layout.header.record_length)
297 layout.memofields = []
298 for field in layout.fields:
299 yo._updateFieldValue(layout[field], layout.fieldtypes[layout[field]['type']]['Blank']())
300 if layout[field]['type'] in layout.memotypes:
301 layout.memofields.append(field)
302 layout.blankrecord = yo._data[:]
303 layout.ondisk = ondisk
305 "marks record as deleted"
306 yo._data[0] = '*'
307 yo._dirty = True
308 return yo
309 @property
314 "saves a dictionary into a record's fields\nkeys with no matching field will raise a FieldMissing exception unless drop_missing = True"
315 old_data = yo._data[:]
316 try:
317 for key in dictionary:
318 if not key in yo.field_names:
319 if drop:
320 continue
321 raise FieldMissing(key)
322 yo.__setattr__(key, dictionary[key])
323 except:
324 yo._data[:] = old_data
325 raise
326 return yo
327 @property
329 "marked for deletion?"
330 return yo._data[0] == '*'
339 @property
341 "physical record number"
342 return yo._recnum
343 @property
345 table = yo._layout.table()
346 if table is None:
347 raise DbfError("table is no longer available")
348 return table
350 for dbfindex in yo._layout.table()._indexen:
351 dbfindex(yo)
353 "blanks record"
354 if keep_fields is None:
355 keep_fields = []
356 keep = {}
357 for field in keep_fields:
358 keep[field] = yo[field]
359 if yo._layout.blankrecord is None:
360 yo._createBlankRecord()
361 yo._data[:] = yo._layout.blankrecord[:]
362 for field in keep_fields:
363 yo[field] = keep[field]
364 yo._dirty = True
365 return yo
367 "returns a dictionary of fieldnames and values which can be used with gather_fields(). if blank is True, values are empty."
368 keys = yo._layout.fields
369 if blank:
370 values = [yo._layout.fieldtypes[yo._layout[key]['type']]['Blank']() for key in keys]
371 else:
372 values = [yo[field] for field in keys]
373 return dict(zip(keys, values))
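# Usage sketch: the dictionary built above pairs with gather_fields() to copy
# values between records; the dict-returning method is assumed here to be exposed
# as scatter_fields(), and source_record/target_record are placeholder records.
values = source_record.scatter_fields()            # e.g. {'name': u'Ada', 'age': 36}
target_record.gather_fields(values, drop=True)     # keys with no matching field are skipped
target_record.write()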
375 "marks record as active"
376 yo._data[0] = ' '
377 yo._dirty = True
378 return yo
379 def write(yo, **kwargs):
386 """Provides access to memo fields as dictionaries
387 must override _init, _get_memo, and _put_memo to
388 store memo contents to disk"""
390 "initialize disk file usage"
392 "retrieve memo contents from disk"
394 "store memo contents to disk"
396 ""
397 yo.meta = meta
398 yo.memory = {}
399 yo.nextmemo = 1
400 yo._init()
401 yo.meta.newmemofile = False
403 "gets the memo in block"
404 if yo.meta.ignorememos or not block:
405 return ''
406 if yo.meta.ondisk:
407 return yo._get_memo(block)
408 else:
409 return yo.memory[block]
411 "stores data in memo file, returns block number"
412 if yo.meta.ignorememos or data == '':
413 return 0
414 if yo.meta.inmemory:
415 thismemo = yo.nextmemo
416 yo.nextmemo += 1
417 yo.memory[thismemo] = data
418 else:
419 thismemo = yo._put_memo(data)
420 return thismemo
438 block = int(block)
439 yo.meta.mfd.seek(block * yo.meta.memo_size)
440 eom = -1
441 data = ''
442 while eom == -1:
443 newdata = yo.meta.mfd.read(yo.meta.memo_size)
444 if not newdata:
445 return data
446 data += newdata
447 eom = data.find('\x1a\x1a')
448 return data[:eom].rstrip()
450 data = data.rstrip()
451 length = len(data) + yo.record_header_length
452 blocks = length // yo.meta.memo_size
453 if length % yo.meta.memo_size:
454 blocks += 1
455 thismemo = yo.nextmemo
456 yo.nextmemo = thismemo + blocks
457 yo.meta.mfd.seek(0)
458 yo.meta.mfd.write(io.packLongInt(yo.nextmemo))
459 yo.meta.mfd.seek(thismemo * yo.meta.memo_size)
460 yo.meta.mfd.write(data)
461 yo.meta.mfd.write('\x1a\x1a')
462 double_check = yo._get_memo(thismemo)
463 if len(double_check) != len(data):
464 uhoh = open('dbf_memo_dump.err','wb')
465 uhoh.write('thismemo: %d' % thismemo)
466 uhoh.write('nextmemo: %d' % yo.nextmemo)
467 uhoh.write('saved: %d bytes' % len(data))
468 uhoh.write(data)
469 uhoh.write('retrieved: %d bytes' % len(double_check))
470 uhoh.write(double_check)
471 uhoh.close()
472 raise DbfError("unknown error: memo not saved")
473 return thismemo
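# Worked example of the block math above: with the usual 512-byte dBase III memo
# blocks, saving 1000 bytes of stripped data plus the record header (assumed here
# to be the two '\x1a' terminator bytes) needs (1000 + 2) // 512 + 1 == 2 blocks,
# so nextmemo advances by 2 and the data is written at offset thismemo * 512.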
476 "Visual Foxpro 6 specific"
477 if yo.meta.ondisk and not yo.meta.ignorememos:
478 yo.record_header_length = 8
479 if yo.meta.newmemofile:
480 if yo.meta.memo_size == 0:
481 yo.meta.memo_size = 1
482 elif 1 < yo.meta.memo_size < 33:
483 yo.meta.memo_size *= 512
484 yo.meta.mfd = open(yo.meta.memoname, 'w+b')
485 nextmemo = 512 // yo.meta.memo_size
486 if nextmemo * yo.meta.memo_size < 512:
487 nextmemo += 1
488 yo.nextmemo = nextmemo
489 yo.meta.mfd.write(io.packLongInt(nextmemo, bigendian=True) + '\x00\x00' + \
490 io.packShortInt(yo.meta.memo_size, bigendian=True) + '\x00' * 504)
491 else:
492 try:
493 yo.meta.mfd = open(yo.meta.memoname, 'r+b')
494 yo.meta.mfd.seek(0)
495 header = yo.meta.mfd.read(512)
496 yo.nextmemo = io.unpackLongInt(header[:4], bigendian=True)
497 yo.meta.memo_size = io.unpackShortInt(header[6:8], bigendian=True)
498 except:
499 raise DbfError("memo file appears to be corrupt")
501 yo.meta.mfd.seek(block * yo.meta.memo_size)
502 header = yo.meta.mfd.read(8)
503 length = io.unpackLongInt(header[4:], bigendian=True)
504 return yo.meta.mfd.read(length)
506 data = data.rstrip()
507 yo.meta.mfd.seek(0)
508 thismemo = io.unpackLongInt(yo.meta.mfd.read(4), bigendian=True)
509 yo.meta.mfd.seek(0)
510 length = len(data) + yo.record_header_length
511 blocks = length // yo.meta.memo_size
512 if length % yo.meta.memo_size:
513 blocks += 1
514 yo.meta.mfd.write(io.packLongInt(thismemo+blocks, bigendian=True))
515 yo.meta.mfd.seek(thismemo*yo.meta.memo_size)
516 yo.meta.mfd.write('\x00\x00\x00\x01' + io.packLongInt(len(data), bigendian=True) + data)
517 return thismemo
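# Block layout implied by _get_memo/_put_memo above (FPT-style memo files):
#   bytes 0-3   block signature, big-endian ('\x00\x00\x00\x01' is a text memo)
#   bytes 4-7   data length, big-endian
#   bytes 8-    the memo text itself
# so a 70-byte memo with memo_size 64 occupies 78 bytes, rounded up to 2 blocks.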
518
520 """Provides a framework for dbf style tables."""
521 _version = 'basic memory table'
522 _versionabbv = 'dbf'
523 _fieldtypes = {
524 'D' : { 'Type':'Date', 'Init':io.addDate, 'Blank':Date.today, 'Retrieve':io.retrieveDate, 'Update':io.updateDate, },
525 'L' : { 'Type':'Logical', 'Init':io.addLogical, 'Blank':bool, 'Retrieve':io.retrieveLogical, 'Update':io.updateLogical, },
526 'M' : { 'Type':'Memo', 'Init':io.addMemo, 'Blank':str, 'Retrieve':io.retrieveMemo, 'Update':io.updateMemo, } }
527 _memoext = ''
528 _memotypes = ('M',)
529 _memoClass = _DbfMemo
530 _yesMemoMask = ''
531 _noMemoMask = ''
532 _fixed_fields = ('M','D','L')
533 _variable_fields = tuple()
534 _character_fields = ('M',)
535 _decimal_fields = tuple()
536 _numeric_fields = tuple()
537 _dbfTableHeader = array('c', '\x00' * 32)
538 _dbfTableHeader[0] = '\x00'
539 _dbfTableHeader[8:10] = array('c', io.packShortInt(33))
540 _dbfTableHeader[10] = '\x01'
541 _dbfTableHeader[29] = '\x00'
542 _dbfTableHeader = _dbfTableHeader.tostring()
543 _dbfTableHeaderExtra = ''
544 _supported_tables = []
545 _read_only = False
546 _meta_only = False
547 _use_deleted = True
548 _backed_up = False
550 "implements the weakref structure for DbfLists"
554 yo._lists = set([s for s in yo._lists if s() is not None])
555 return (s() for s in yo._lists if s() is not None)
557 yo._lists = set([s for s in yo._lists if s() is not None])
558 return len(yo._lists)
559 def add(yo, new_list):
560 yo._lists.add(weakref.ref(new_list))
561 yo._lists = set([s for s in yo._lists if s() is not None])
563 "implements the weakref structure for seperate indexes"
567 yo._indexen = set([s for s in yo._indexen if s() is not None])
568 return (s() for s in yo._indexen if s() is not None)
570 yo._indexen = set([s for s in yo._indexen if s() is not None])
571 return len(yo._indexen)
572 def add(yo, new_list):
573 yo._indexen.add(weakref.ref(new_list))
574 yo._indexen = set([s for s in yo._indexen if s() is not None])
589 if len(data) != 32:
590 raise DbfError('table header should be 32 bytes, but is %d bytes' % len(data))
591 yo._data = array('c', data + '\x0d')
593 "get/set code page of table"
594 if cp is None:
595 return yo._data[29]
596 else:
597 cp, sd, ld = _codepage_lookup(cp)
598 yo._data[29] = cp
599 return cp
600 @property
606 @data.setter
608 if len(bytes) < 32:
609 raise DbfError("length for data of %d is less than 32" % len(bytes))
610 yo._data[:] = array('c', bytes)
611 @property
613 "extra dbf info (located after headers, before data records)"
614 fieldblock = yo._data[32:]
615 for i in range(len(fieldblock)//32+1):
616 cr = i * 32
617 if fieldblock[cr] == '\x0d':
618 break
619 else:
620 raise DbfError("corrupt field structure")
621 cr += 33
622 return yo._data[cr:].tostring()
623 @extra.setter
625 fieldblock = yo._data[32:]
626 for i in range(len(fieldblock)//32+1):
627 cr = i * 32
628 if fieldblock[cr] == '\x0d':
629 break
630 else:
631 raise DbfError("corrupt field structure")
632 cr += 33
633 yo._data[cr:] = array('c', data)
634 yo._data[8:10] = array('c', io.packShortInt(len(yo._data)))
635 @property
637 "number of fields (read-only)"
638 fieldblock = yo._data[32:]
639 for i in range(len(fieldblock)//32+1):
640 cr = i * 32
641 if fieldblock[cr] == '\x0d':
642 break
643 else:
644 raise DbfError("corrupt field structure")
645 return len(fieldblock[:cr]) // 32
646 @property
648 "field block structure"
649 fieldblock = yo._data[32:]
650 for i in range(len(fieldblock)//32+1):
651 cr = i * 32
652 if fieldblock[cr] == '\x0d':
653 break
654 else:
655 raise DbfError("corrupt field structure")
656 return fieldblock[:cr].tostring()
657 @fields.setter
659 fieldblock = yo._data[32:]
660 for i in range(len(fieldblock)//32+1):
661 cr = i * 32
662 if fieldblock[cr] == '\x0d':
663 break
664 else:
665 raise DbfError("corrupt field structure")
666 cr += 32
667 fieldlen = len(block)
668 if fieldlen % 32 != 0:
669 raise DbfError("fields structure corrupt: %d is not a multiple of 32" % fieldlen)
670 yo._data[32:cr] = array('c', block)
671 yo._data[8:10] = array('c', io.packShortInt(len(yo._data)))
672 fieldlen = fieldlen // 32
673 recordlen = 1
674 for i in range(fieldlen):
675 recordlen += ord(block[i*32+16])
676 yo._data[10:12] = array('c', io.packShortInt(recordlen))
677 @property
679 "number of records (maximum 16,777,215)"
680 return io.unpackLongInt(yo._data[4:8].tostring())
681 @record_count.setter
684 @property
686 "length of a record (read_only) (max of 65,535)"
687 return io.unpackShortInt(yo._data[10:12].tostring())
688 @property
690 "starting position of first record in file (must be within first 64K)"
691 return io.unpackShortInt(yo._data[8:10].tostring())
692 @start.setter
695 @property
697 "date of last table modification (read-only)"
698 return io.unpackDate(yo._data[1:4].tostring())
699 @property
701 "dbf version"
702 return yo._data[0]
703 @version.setter
707 "implements the weakref table for records"
709 yo._meta = meta
710 yo._weakref_list = [weakref.ref(lambda x: None)] * count
724 yo._weakref_list.append(weakref.ref(record))
726 yo._weakref_list[:] = []
728 "returns records using current index"
730 yo._table = table
731 yo._index = -1
732 yo._more_records = True
736 while yo._more_records:
737 yo._index += 1
738 if yo._index >= len(yo._table):
739 yo._more_records = False
740 continue
741 record = yo._table[yo._index]
742 if not yo._table.use_deleted and record.has_been_deleted:
743 continue
744 return record
745 else:
746 raise StopIteration
748 "constructs fieldblock for disk table"
749 fieldblock = array('c', '')
750 memo = False
751 yo._meta.header.version = chr(ord(yo._meta.header.version) & ord(yo._noMemoMask))
752 for field in yo._meta.fields:
753 if yo._meta.fields.count(field) > 1:
754 raise DbfError("corrupted field structure (noticed in _buildHeaderFields)")
755 fielddef = array('c', '\x00' * 32)
756 fielddef[:11] = array('c', io.packStr(field))
757 fielddef[11] = yo._meta[field]['type']
758 fielddef[12:16] = array('c', io.packLongInt(yo._meta[field]['start']))
759 fielddef[16] = chr(yo._meta[field]['length'])
760 fielddef[17] = chr(yo._meta[field]['decimals'])
761 fielddef[18] = chr(yo._meta[field]['flags'])
762 fieldblock.extend(fielddef)
763 if yo._meta[field]['type'] in yo._meta.memotypes:
764 memo = True
765 yo._meta.header.fields = fieldblock.tostring()
766 if memo:
767 yo._meta.header.version = chr(ord(yo._meta.header.version) | ord(yo._yesMemoMask))
768 if yo._meta.memo is None:
769 yo._meta.memo = yo._memoClass(yo._meta)
771 "dBase III specific"
772 if yo._meta.header.version == '\x83':
773 try:
774 yo._meta.memo = yo._memoClass(yo._meta)
775 except:
776 yo._meta.dfd.close()
777 yo._meta.dfd = None
778 raise
779 if not yo._meta.ignorememos:
780 for field in yo._meta.fields:
781 if yo._meta[field]['type'] in yo._memotypes:
782 if yo._meta.header.version != '\x83':
783 yo._meta.dfd.close()
784 yo._meta.dfd = None
785 raise DbfError("Table structure corrupt: memo fields exist, header declares no memos")
786 elif not os.path.exists(yo._meta.memoname):
787 yo._meta.dfd.close()
788 yo._meta.dfd = None
789 raise DbfError("Table structure corrupt: memo fields exist without memo file")
790 break
792 "builds the FieldList of names, types, and descriptions from the disk file"
793 yo._meta.fields[:] = []
794 offset = 1
795 fieldsdef = yo._meta.header.fields
796 if len(fieldsdef) % 32 != 0:
797 raise DbfError("field definition block corrupt: %d bytes in size" % len(fieldsdef))
798 if len(fieldsdef) // 32 != yo.field_count:
799 raise DbfError("Header shows %d fields, but field definition block has %d fields" % (yo.field_count, len(fieldsdef)//32))
800 for i in range(yo.field_count):
801 fieldblock = fieldsdef[i*32:(i+1)*32]
802 name = io.unpackStr(fieldblock[:11])
803 type = fieldblock[11]
804 if not type in yo._meta.fieldtypes:
805 raise DbfError("Unknown field type: %s" % type)
806 start = offset
807 length = ord(fieldblock[16])
808 offset += length
809 end = start + length
810 decimals = ord(fieldblock[17])
811 flags = ord(fieldblock[18])
812 if name in yo._meta.fields:
813 raise DbfError('Duplicate field name found: %s' % name)
814 yo._meta.fields.append(name)
815 yo._meta[name] = {'type':type,'start':start,'length':length,'end':end,'decimals':decimals,'flags':flags}
817 "Returns field information Name Type(Length[,Decimals])"
818 name = yo._meta.fields[i]
819 type = yo._meta[name]['type']
820 length = yo._meta[name]['length']
821 decimals = yo._meta[name]['decimals']
822 if type in yo._decimal_fields:
823 description = "%s %s(%d,%d)" % (name, type, length, decimals)
824 elif type in yo._fixed_fields:
825 description = "%s %s" % (name, type)
826 else:
827 description = "%s %s(%d)" % (name, type, length)
828 return description
830 "loads the records from disk to memory"
831 if yo._meta_only:
832 raise DbfError("%s has been closed, records are unavailable" % yo.filename)
833 dfd = yo._meta.dfd
834 header = yo._meta.header
835 dfd.seek(header.start)
836 allrecords = dfd.read()
837 dfd.seek(0)
838 length = header.record_length
839 for i in range(header.record_count):
840 record_data = allrecords[length*i:length*i+length]
841 yo._table.append(_DbfRecord(i, yo._meta, record_data, _fromdisk=True))
842 dfd.seek(0)
844 if specs is None:
845 specs = yo.field_names
846 elif isinstance(specs, str):
847 specs = specs.split(sep)
848 else:
849 specs = list(specs)
850 specs = [s.strip() for s in specs]
851 return specs
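# Usage sketch for the helper above (the same spec handling backs add_fields,
# delete_fields, export, and structure; `table` stands for any open table):
wanted = table._list_fields('name; age; joined', sep=';')   # ['name', 'age', 'joined']
everything = table._list_fields(None)                        # all field names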
853 "synchronizes the disk file with current data"
854 if yo._meta.inmemory:
855 return
856 fd = yo._meta.dfd
857 fd.seek(0)
858 fd.write(yo._meta.header.data)
859 if not headeronly:
860 for record in yo._table:
861 record._update_disk()
862 fd.flush()
863 fd.truncate(yo._meta.header.start + yo._meta.header.record_count * yo._meta.header.record_length)
871 if name == '_table':
872 if yo._meta.ondisk:
873 yo._table = yo._Table(len(yo), yo._meta)
874 else:
875 yo._table = []
876 yo._loadtable()
877 return object.__getattribute__(yo, name)
879 if type(value) == int:
880 if not -yo._meta.header.record_count <= value < yo._meta.header.record_count:
881 raise IndexError("Record %d is not in table." % value)
882 return yo._table[value]
883 elif type(value) == slice:
884 sequence = List(desc='%s --> %s' % (yo.filename, value))
885 yo._dbflists.add(sequence)
886 for index in range(len(yo))[value]:
887 record = yo._table[index]
888 if yo.use_deleted is True or not record.has_been_deleted:
889 sequence.append(record)
890 return sequence
891 else:
892 raise TypeError('type <%s> not valid for indexing' % type(value))
893 def __init__(yo, filename=':memory:', field_specs=None, memo_size=128, ignore_memos=False,
894 read_only=False, keep_memos=False, meta_only=False, codepage=None):
895 """open/create dbf file
896 filename should include path if needed
897 field_specs can be either a ;-delimited string or a list of strings
898 memo_size is always 512 for db3 memos
899 ignore_memos is useful if the memo file is missing or corrupt
900 read_only will load records into memory, then close the disk file
901 keep_memos will also load any memo fields into memory
902 meta_only will ignore all records, keeping only basic table information
903 codepage will override whatever is set in the table itself"""
904 if filename[0] == filename[-1] == ':':
905 if field_specs is None:
906 raise DbfError("field list must be specified for memory tables")
907 elif type(yo) is DbfTable:
908 raise DbfError("only memory tables supported")
909 yo._dbflists = yo._DbfLists()
910 yo._indexen = yo._Indexen()
911 yo._meta = meta = yo._MetaData()
912 meta.table = weakref.ref(yo)
913 meta.filename = filename
914 meta.fields = []
915 meta.fieldtypes = yo._fieldtypes
916 meta.fixed_fields = yo._fixed_fields
917 meta.variable_fields = yo._variable_fields
918 meta.character_fields = yo._character_fields
919 meta.decimal_fields = yo._decimal_fields
920 meta.numeric_fields = yo._numeric_fields
921 meta.memotypes = yo._memotypes
922 meta.ignorememos = ignore_memos
923 meta.memo_size = memo_size
924 meta.input_decoder = codecs.getdecoder(input_decoding)
925 meta.output_encoder = codecs.getencoder(input_decoding)
926 meta.return_ascii = return_ascii
927 meta.header = header = yo._TableHeader(yo._dbfTableHeader)
928 header.extra = yo._dbfTableHeaderExtra
929 header.data
930 if filename[0] == filename[-1] == ':':
931 yo._table = []
932 meta.ondisk = False
933 meta.inmemory = True
934 meta.memoname = filename
935 else:
936 base, ext = os.path.splitext(filename)
937 if ext == '':
938 meta.filename = base + '.dbf'
939 meta.memoname = base + yo._memoext
940 meta.ondisk = True
941 meta.inmemory = False
942 if field_specs:
943 if meta.ondisk:
944 meta.dfd = open(meta.filename, 'w+b')
945 meta.newmemofile = True
946 yo.add_fields(field_specs)
947 header.codepage(codepage or default_codepage)
948 cp, sd, ld = _codepage_lookup(meta.header.codepage())
949 meta.decoder = codecs.getdecoder(sd)
950 meta.encoder = codecs.getencoder(sd)
951 return
952 dfd = meta.dfd = open(meta.filename, 'r+b')
953 dfd.seek(0)
954 meta.header = header = yo._TableHeader(dfd.read(32))
955 if not header.version in yo._supported_tables:
956 dfd.close()
957 dfd = None
958 raise DbfError("Unsupported dbf type: %s [%x]" % (version_map.get(meta.header.version, 'Unknown: %s' % meta.header.version), ord(meta.header.version)))
959 cp, sd, ld = _codepage_lookup(meta.header.codepage())
960 yo._meta.decoder = codecs.getdecoder(sd)
961 yo._meta.encoder = codecs.getencoder(sd)
962 fieldblock = dfd.read(header.start - 32)
963 for i in range(len(fieldblock)//32+1):
964 fieldend = i * 32
965 if fieldblock[fieldend] == '\x0d':
966 break
967 else:
968 raise DbfError("corrupt field structure in header")
969 if len(fieldblock[:fieldend]) % 32 != 0:
970 raise DbfError("corrupt field structure in header")
971 header.fields = fieldblock[:fieldend]
972 header.extra = fieldblock[fieldend+1:]
973 yo._initializeFields()
974 yo._checkMemoIntegrity()
975 meta.current = -1
976 if len(yo) > 0:
977 meta.current = 0
978 dfd.seek(0)
979 if meta_only:
980 yo.close(keep_table=False, keep_memos=False)
981 elif read_only:
982 yo.close(keep_table=True, keep_memos=keep_memos)
983 if codepage is not None:
984 cp, sd, ld = _codepage_lookup(codepage)
985 yo._meta.decoder = codecs.getdecoder(sd)
986 yo._meta.encoder = codecs.getencoder(sd)
987
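# Usage sketch for the constructor above; Db3Table (defined further down) is
# assumed to be the exported dBase III subclass, and field specs use the
# 'Name Type(Length,Decimals)' format described under add_fields.
staff = Db3Table('staff.dbf', 'name C(30); hired D; salary N(8,2)')   # create on disk
archive = Db3Table('archive.dbf', read_only=True)                      # load records, close file
scratch = Db3Table(':scratch:', 'note C(50)')                          # in-memory table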
995 if yo._read_only:
996 return __name__ + ".Table('%s', read_only=True)" % yo._meta.filename
997 elif yo._meta_only:
998 return __name__ + ".Table('%s', meta_only=True)" % yo._meta.filename
999 else:
1000 return __name__ + ".Table('%s')" % yo._meta.filename
1002 if yo._read_only:
1003 status = "read-only"
1004 elif yo._meta_only:
1005 status = "meta-only"
1006 else:
1007 status = "read/write"
1008 str = """
1009 Table: %s
1010 Type: %s
1011 Codepage: %s
1012 Status: %s
1013 Last updated: %s
1014 Record count: %d
1015 Field count: %d
1016 Record length: %d """ % (yo.filename, version_map.get(yo._meta.header.version,
1017 'unknown - ' + hex(ord(yo._meta.header.version))), yo.codepage, status,
1018 yo.last_update, len(yo), yo.field_count, yo.record_length)
1019 str += "\n --Fields--\n"
1020 for i in range(len(yo._meta.fields)):
1021 str += "%11d) %s\n" % (i, yo._fieldLayout(i))
1022 return str
1023 @property
1025 return "%s (%s)" % code_pages[yo._meta.header.codepage()]
1026 @codepage.setter
1027 def codepage(yo, cp):
1028 cp = code_pages[yo._meta.header.codepage(cp)][0]
1029 yo._meta.decoder = codecs.getdecoder(cp)
1030 yo._meta.encoder = codecs.getencoder(cp)
1031 yo._update_disk(headeronly=True)
1032 @property
1034 "the number of fields in the table"
1035 return yo._meta.header.field_count
1036 @property
1038 "a list of the fields in the table"
1039 return yo._meta.fields[:]
1040 @property
1042 "table's file name, including path (if specified on open)"
1043 return yo._meta.filename
1044 @property
1046 "date of last update"
1047 return yo._meta.header.update
1048 @property
1050 "table's memo name (if path included in filename on open)"
1051 return yo._meta.memoname
1052 @property
1054 "number of bytes in a record"
1055 return yo._meta.header.record_length
1056 @property
1058 "index number of the current record"
1059 return yo._meta.current
1060 @property
1064 @property
1066 "process or ignore deleted records"
1067 return yo._use_deleted
1068 @use_deleted.setter
1071 @property
1073 "returns the dbf type of the table"
1074 return yo._version
1076 """adds field(s) to the table layout; format is Name Type(Length,Decimals)[; Name Type(Length,Decimals)[...]]
1077 backup table is created with _backup appended to name
1078 then modifies current structure"""
1079 all_records = [record for record in yo]
1080 if yo:
1081 yo.create_backup()
1082 yo._meta.blankrecord = None
1083 meta = yo._meta
1084 offset = meta.header.record_length
1085 fields = yo._list_fields(field_specs, sep=';')
1086 for field in fields:
1087 try:
1088 name, format = field.split()
1089 if name[0] == '_' or name[0].isdigit() or not name.replace('_','').isalnum():
1090 raise DbfError("Field names cannot start with _ or digits, and can only contain the _, letters, and digits")
1091 name = name.lower()
1092 if name in meta.fields:
1093 raise DbfError("Field '%s' already exists" % name)
1094 field_type = format[0].upper()
1095 if len(name) > 10:
1096 raise DbfError("Maximum field name length is 10. '%s' is %d characters long." % (name, len(name)))
1097 if not field_type in meta.fieldtypes.keys():
1098 raise DbfError("Unknown field type: %s" % field_type)
1099 length, decimals = yo._meta.fieldtypes[field_type]['Init'](format)
1100 except ValueError:
1101 raise DbfError("invalid field specifier: %s" % field)
1102 start = offset
1103 end = offset + length
1104 offset = end
1105 meta.fields.append(name)
1106 meta[name] = {'type':field_type, 'start':start, 'length':length, 'end':end, 'decimals':decimals, 'flags':0}
1107 if meta[name]['type'] in yo._memotypes and meta.memo is None:
1108 meta.memo = yo._memoClass(meta)
1109 for record in yo:
1110 record[name] = meta.fieldtypes[field_type]['Blank']()
1111 yo._buildHeaderFields()
1112 yo._update_disk()
1113 def append(yo, kamikaze='', drop=False, multiple=1):
1114 "adds <multiple> blank records, and fills fields with dict/tuple values if present"
1115 if not yo.field_count:
1116 raise DbfError("No fields defined, cannot append")
1117 empty_table = len(yo) == 0
1118 dictdata = False
1119 tupledata = False
1120 if not isinstance(kamikaze, _DbfRecord):
1121 if isinstance(kamikaze, dict):
1122 dictdata = kamikaze
1123 kamikaze = ''
1124 elif isinstance(kamikaze, tuple):
1125 tupledata = kamikaze
1126 kamikaze = ''
1127 newrecord = _DbfRecord(recnum=yo._meta.header.record_count, layout=yo._meta, kamikaze=kamikaze)
1128 yo._table.append(newrecord)
1129 yo._meta.header.record_count += 1
1130 if dictdata:
1131 newrecord.gather_fields(dictdata, drop=drop)
1132 elif tupledata:
1133 for index, item in enumerate(tupledata):
1134 newrecord[index] = item
1135 elif kamikaze == str:
1136 for field in yo._meta.memofields:
1137 newrecord[field] = ''
1138 elif kamikaze:
1139 for field in yo._meta.memofields:
1140 newrecord[field] = kamikaze[field]
1141 newrecord.write()
1142 multiple -= 1
1143 if multiple:
1144 data = newrecord._data
1145 single = yo._meta.header.record_count
1146 total = single + multiple
1147 while single < total:
1148 multi_record = _DbfRecord(single, yo._meta, kamikaze=data)
1149 yo._table.append(multi_record)
1150 for field in yo._meta.memofields:
1151 multi_record[field] = newrecord[field]
1152 single += 1
1153 multi_record.write()
1154 yo._meta.header.record_count = total
1155 yo._meta.current = yo._meta.header.record_count - 1
1156 newrecord = multi_record
1157 yo._update_disk(headeronly=True)
1158 if empty_table:
1159 yo._meta.current = 0
1160 return newrecord
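# Usage sketch for append() above (using the `staff` table from the earlier
# sketch): a dict routes through gather_fields, a tuple fills fields in order,
# and multiple=N clones the freshly added record N times.
staff.append({'name': 'Ada', 'hired': Date.today(), 'salary': 1500})
staff.append(('Grace', Date.today(), 2000))
staff.append(multiple=10)          # ten blank records in one call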
1162 "moves record pointer to previous usable record; returns True if no more usable records"
1163 while yo._meta.current > 0:
1164 yo._meta.current -= 1
1165 if yo.use_deleted or not yo.current().has_been_deleted:
1166 break
1167 else:
1168 yo._meta.current = -1
1169 return True
1170 return False
1171 def bottom(yo, get_record=False):
1172 """sets record pointer to bottom of table
1173 if get_record, seeks to and returns last (non-deleted) record
1174 DbfError if table is empty
1175 Eof if all records deleted and use_deleted is False"""
1176 yo._meta.current = yo._meta.header.record_count
1177 if get_record:
1178 try:
1179 return yo.prev()
1180 except Bof:
1181 yo._meta.current = yo._meta.header.record_count
1182 raise Eof()
1183 def close(yo, keep_table=False, keep_memos=False):
1184 """closes disk files
1185 ensures table data is available if keep_table
1186 ensures memo data is available if keep_memos"""
1187 yo._meta.inmemory = True
1188 if '_table' in dir(yo):
1189 del yo._table
1190 if keep_table:
1191 yo._table
1192 yo._read_only = True
1193 else:
1194 if yo._meta.ondisk:
1195 yo._meta.dfd.close()
1196 yo._meta.dfd = None
1197 yo._meta_only = True
1198 if yo._meta.mfd is not None:
1199 if not keep_memos:
1200 yo._meta.ignorememos = True
1201 else:
1202 memo_fields = []
1203 for field in yo.field_names:
1204 if yo.is_memotype(field):
1205 memo_fields.append(field)
1206 for record in yo:
1207 for field in memo_fields:
1208 record[field] = record[field]
1209 yo._meta.mfd.close()
1210 yo._meta.mfd = None
1211 yo._meta.ondisk = False
1213 "creates a backup table -- ignored if memory table"
1214 if yo.filename[0] == yo.filename[-1] == ':':
1215 return
1216 if new_name is None:
1217 new_name = os.path.splitext(yo.filename)[0] + '_backup.dbf'
1218 else:
1219 overwrite = True
1220 if overwrite or not yo._backed_up:
1221 bkup = open(new_name, 'wb')
1222 try:
1223 yo._meta.dfd.seek(0)
1224 copyfileobj(yo._meta.dfd, bkup)
1225 yo._backed_up = True
1226 finally:
1227 bkup.close()
1231 "returns current logical record, or its index"
1232 if yo._meta.current < 0:
1233 raise Bof()
1234 elif yo._meta.current >= yo._meta.header.record_count:
1235 raise Eof()
1236 if index:
1237 return yo._meta.current
1238 return yo._table[yo._meta.current]
1240 """removes field(s) from the table
1241 creates backup files with _backup appended to the file name,
1242 then modifies current structure"""
1243 doomed = yo._list_fields(doomed)
1244 for victim in doomed:
1245 if victim not in yo._meta.fields:
1246 raise DbfError("field %s not in table -- delete aborted" % victim)
1247 all_records = [record for record in yo]
1248 yo.create_backup()
1249 for victim in doomed:
1250 yo._meta.fields.pop(yo._meta.fields.index(victim))
1251 start = yo._meta[victim]['start']
1252 end = yo._meta[victim]['end']
1253 for record in yo:
1254 record._data = record._data[:start] + record._data[end:]
1255 for field in yo._meta.fields:
1256 if yo._meta[field]['start'] == end:
1257 end = yo._meta[field]['end']
1258 yo._meta[field]['start'] = start
1259 yo._meta[field]['end'] = start + yo._meta[field]['length']
1260 start = yo._meta[field]['end']
1261 yo._buildHeaderFields()
1262 yo._update_disk()
1273 def export(yo, records=None, filename=None, field_specs=None, format='csv', header=True):
1274 """writes the table using CSV or tab-delimited format, using the filename
1275 given if specified, otherwise the table name"""
1276 if filename is not None:
1277 path, filename = os.path.split(filename)
1278 else:
1279 path, filename = os.path.split(yo.filename)
1280 filename = os.path.join(path, filename)
1281 field_specs = yo._list_fields(field_specs)
1282 if records is None:
1283 records = yo
1284 format = format.lower()
1285 if format not in ('csv', 'tab', 'fixed'):
1286 raise DbfError("export format: csv, tab, or fixed -- not %s" % format)
1287 if format == 'fixed':
1288 format = 'txt'
1289 base, ext = os.path.splitext(filename)
1290 if ext.lower() in ('', '.dbf'):
1291 filename = base + "." + format[:3]
1292 fd = open(filename, 'w')
1293 try:
1294 if format == 'csv':
1295 csvfile = csv.writer(fd, dialect='dbf')
1296 if header:
1297 csvfile.writerow(field_specs)
1298 for record in records:
1299 fields = []
1300 for fieldname in field_specs:
1301 fields.append(record[fieldname])
1302 csvfile.writerow(fields)
1303 elif format == 'tab':
1304 if header:
1305 fd.write('\t'.join(field_specs) + '\n')
1306 for record in records:
1307 fields = []
1308 for fieldname in field_specs:
1309 fields.append(str(record[fieldname]))
1310 fd.write('\t'.join(fields) + '\n')
1311 else:
1312 header = open("%s_layout.txt" % os.path.splitext(filename)[0], 'w')
1313 header.write("%-15s Size\n" % "Field Name")
1314 header.write("%-15s ----\n" % ("-" * 15))
1315 sizes = []
1316 for field in field_specs:
1317 size = yo.size(field)[0]
1318 sizes.append(size)
1319 header.write("%-15s %3d\n" % (field, size))
1320 header.write('\nTotal Records in file: %d\n' % len(records))
1321 header.close()
1322 for record in records:
1323 fields = []
1324 for i, field_name in enumerate(field_specs):
1325 fields.append("%-*s" % (sizes[i], record[field_name]))
1326 fd.write(''.join(fields) + '\n')
1327 finally:
1328 fd.close()
1329 fd = None
1330 return len(records)
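# Usage sketch for export() above: format may be 'csv', 'tab', or 'fixed';
# 'fixed' also writes a <name>_layout.txt file describing the column widths.
staff.export(filename='staff.csv', field_specs=['name', 'salary'])
staff.export(format='fixed')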
1332 "returns record at physical_index[recno]"
1333 return yo._table[recno]
1334 def goto(yo, criteria):
1335 """changes the record pointer to the first matching (non-deleted) record
1336 criteria should be either a tuple of tuple(value, field, func) triples,
1337 or an integer to go to"""
1338 if isinstance(criteria, int):
1339 if not -yo._meta.header.record_count <= criteria < yo._meta.header.record_count:
1340 raise IndexError("Record %d does not exist" % criteria)
1341 if criteria < 0:
1342 criteria += yo._meta.header.record_count
1343 yo._meta.current = criteria
1344 return yo.current()
1345 criteria = _normalize_tuples(tuples=criteria, length=3, filler=[_nop])
1346 specs = tuple([(field, func) for value, field, func in criteria])
1347 match = tuple([value for value, field, func in criteria])
1348 current = yo.current(index=True)
1349 matchlen = len(match)
1350 while not yo.Eof():
1351 record = yo.current()
1352 results = record(*specs)
1353 if results == match:
1354 return record
1355 return yo.goto(current)
1357 "returns True if name is a variable-length field type"
1358 return yo._meta[name]['type'] in yo._decimal_fields
1360 "returns True if name is a memo type field"
1361 return yo._meta[name]['type'] in yo._memotypes
1362 def new(yo, filename, field_specs=None):
1363 "returns a new table of the same type"
1364 if field_specs is None:
1365 field_specs = yo.structure()
1366 if not (filename[0] == filename[-1] == ':'):
1367 path, name = os.path.split(filename)
1368 if path == "":
1369 filename = os.path.join(os.path.split(yo.filename)[0], filename)
1370 elif name == "":
1371 filename = os.path.join(path, os.path.split(yo.filename)[1])
1372 return yo.__class__(filename, field_specs)
1374 "set record pointer to next (non-deleted) record, and return it"
1375 if yo.eof():
1376 raise Eof()
1377 return yo.current()
1413
1414 def pack(yo, _pack=True):
1415 "physically removes all deleted records"
1416 for dbfindex in yo._indexen:
1417 dbfindex.clear()
1418 newtable = []
1419 index = 0
1420 offset = 0
1421 for record in yo._table:
1422 found = False
1423 if record.has_been_deleted and _pack:
1424 for dbflist in yo._dbflists:
1425 if dbflist._purge(record, record.record_number - offset, 1):
1426 found = True
1427 record._recnum = -1
1428 else:
1429 record._recnum = index
1430 newtable.append(record)
1431 index += 1
1432 if found:
1433 offset += 1
1434 found = False
1435 yo._table.clear()
1436 for record in newtable:
1437 yo._table.append(record)
1438 yo._meta.header.record_count = index
1439 yo._current = -1
1440 yo._update_disk()
1441 yo.reindex()
1443 "set record pointer to previous (non-deleted) record, and return it"
1444 if yo.bof():
1445 raise Bof
1446 return yo.current()
1447 def query(yo, sql_command=None, python=None):
1448 "uses exec to perform queries on the table"
1449 if sql_command:
1450 return sql(yo, sql_command)
1451 elif python is None:
1452 raise DbfError("query: python parameter must be specified")
1453 possible = List(desc="%s --> %s" % (yo.filename, python))
1454 yo._dbflists.add(possible)
1455 query_result = {}
1456 select = 'query_result["keep"] = %s' % python
1457 g = {}
1458 use_deleted = yo.use_deleted
1459 for record in yo:
1460 query_result['keep'] = False
1461 g['query_result'] = query_result
1462 exec select in g, record
1463 if query_result['keep']:
1464 possible.append(record)
1465 record.write()
1466 return possible
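# Usage sketch for query() above: the python argument is an expression evaluated
# once per record (field names are looked up on the record itself), and matching
# records are collected into a List.
well_paid = staff.query(python="salary >= 2000")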
1468 for dbfindex in yo._indexen:
1469 dbfindex.reindex()
1471 "renames an existing field"
1472 if yo:
1473 yo.create_backup()
1474 if not oldname in yo._meta.fields:
1475 raise DbfError("field --%s-- does not exist -- cannot rename it." % oldname)
1476 if newname[0] == '_' or newname[0].isdigit() or not newname.replace('_','').isalnum():
1477 raise DbfError("field names cannot start with _ or digits, and can only contain the _, letters, and digits")
1478 newname = newname.lower()
1479 if newname in yo._meta.fields:
1480 raise DbfError("field --%s-- already exists" % newname)
1481 if len(newname) > 10:
1482 raise DbfError("maximum field name length is 10. '%s' is %d characters long." % (newname, len(newname)))
1483 yo._meta[newname] = yo._meta[oldname]
1484 yo._meta.fields[yo._meta.fields.index(oldname)] = newname
1485 yo._buildHeaderFields()
1486 yo._update_disk(headeronly=True)
1487 def size(yo, field):
1488 "returns size of field as a tuple of (length, decimals)"
1489 if field in yo:
1490 return (yo._meta[field]['length'], yo._meta[field]['decimals'])
1491 raise DbfError("%s is not a field in %s" % (field, yo.filename))
1493 """return list of fields suitable for creating same table layout
1494 @param fields: list of fields or None for all fields"""
1495 field_specs = []
1496 fields = yo._list_fields(fields)
1497 try:
1498 for name in fields:
1499 field_specs.append(yo._fieldLayout(yo.field_names.index(name)))
1500 except ValueError:
1501 raise DbfError("field --%s-- does not exist" % name)
1502 return field_specs
1503 def top(yo, get_record=False):
1504 """sets record pointer to top of table; if get_record, seeks to and returns first (non-deleted) record
1505 DbfError if table is empty
1506 Bof if all records are deleted and use_deleted is False"""
1507 yo._meta.current = -1
1508 if get_record:
1509 try:
1510 return yo.next()
1511 except Eof:
1512 yo._meta.current = -1
1513 raise Bof()
1514 def type(yo, field):
1515 "returns type of field"
1516 if field in yo:
1517 return yo._meta[field]['type']
1518 raise DbfError("%s is not a field in %s" % (field, yo.filename))
1519 def zap(yo, areyousure=False):
1520 """removes all records from table -- this cannot be undone!
1521 areyousure must be True, else error is raised"""
1522 if areyousure:
1523 if yo._meta.inmemory:
1524 yo._table = []
1525 else:
1526 yo._table.clear()
1527 yo._meta.header.record_count = 0
1528 yo._current = -1
1529 yo._update_disk()
1530 else:
1531 raise DbfError("You must say you are sure to wipe the table")
1532
1534 """Provides an interface for working with dBase III tables."""
1535 _version = 'dBase III Plus'
1536 _versionabbv = 'db3'
1537 _fieldtypes = {
1538 'C' : {'Type':'Character', 'Retrieve':io.retrieveCharacter, 'Update':io.updateCharacter, 'Blank':str, 'Init':io.addCharacter},
1539 'D' : {'Type':'Date', 'Retrieve':io.retrieveDate, 'Update':io.updateDate, 'Blank':Date.today, 'Init':io.addDate},
1540 'L' : {'Type':'Logical', 'Retrieve':io.retrieveLogical, 'Update':io.updateLogical, 'Blank':bool, 'Init':io.addLogical},
1541 'M' : {'Type':'Memo', 'Retrieve':io.retrieveMemo, 'Update':io.updateMemo, 'Blank':str, 'Init':io.addMemo},
1542 'N' : {'Type':'Numeric', 'Retrieve':io.retrieveNumeric, 'Update':io.updateNumeric, 'Blank':int, 'Init':io.addNumeric} }
1543 _memoext = '.dbt'
1544 _memotypes = ('M',)
1545 _memoClass = _Db3Memo
1546 _yesMemoMask = '\x80'
1547 _noMemoMask = '\x7f'
1548 _fixed_fields = ('D','L','M')
1549 _variable_fields = ('C','N')
1550 _character_fields = ('C','M')
1551 _decimal_fields = ('N',)
1552 _numeric_fields = ('N',)
1553 _dbfTableHeader = array('c', '\x00' * 32)
1554 _dbfTableHeader[0] = '\x03'
1555 _dbfTableHeader[8:10] = array('c', io.packShortInt(33))
1556 _dbfTableHeader[10] = '\x01'
1557 _dbfTableHeader[29] = '\x03'
1558 _dbfTableHeader = _dbfTableHeader.tostring()
1559 _dbfTableHeaderExtra = ''
1560 _supported_tables = ['\x03', '\x83']
1561 _read_only = False
1562 _meta_only = False
1563 _use_deleted = True
1565 "dBase III specific"
1566 if yo._meta.header.version == '\x83':
1567 try:
1568 yo._meta.memo = yo._memoClass(yo._meta)
1569 except:
1570 yo._meta.dfd.close()
1571 yo._meta.dfd = None
1572 raise
1573 if not yo._meta.ignorememos:
1574 for field in yo._meta.fields:
1575 if yo._meta[field]['type'] in yo._memotypes:
1576 if yo._meta.header.version != '\x83':
1577 yo._meta.dfd.close()
1578 yo._meta.dfd = None
1579 raise DbfError("Table structure corrupt: memo fields exist, header declares no memos")
1580 elif not os.path.exists(yo._meta.memoname):
1581 yo._meta.dfd.close()
1582 yo._meta.dfd = None
1583 raise DbfError("Table structure corrupt: memo fields exist without memo file")
1584 break
1586 "builds the FieldList of names, types, and descriptions"
1587 yo._meta.fields[:] = []
1588 offset = 1
1589 fieldsdef = yo._meta.header.fields
1590 if len(fieldsdef) % 32 != 0:
1591 raise DbfError("field definition block corrupt: %d bytes in size" % len(fieldsdef))
1592 if len(fieldsdef) // 32 != yo.field_count:
1593 raise DbfError("Header shows %d fields, but field definition block has %d fields" % (yo.field_count, len(fieldsdef)//32))
1594 for i in range(yo.field_count):
1595 fieldblock = fieldsdef[i*32:(i+1)*32]
1596 name = io.unpackStr(fieldblock[:11])
1597 type = fieldblock[11]
1598 if not type in yo._meta.fieldtypes:
1599 raise DbfError("Unknown field type: %s" % type)
1600 start = offset
1601 length = ord(fieldblock[16])
1602 offset += length
1603 end = start + length
1604 decimals = ord(fieldblock[17])
1605 flags = ord(fieldblock[18])
1606 yo._meta.fields.append(name)
1607 yo._meta[name] = {'type':type,'start':start,'length':length,'end':end,'decimals':decimals,'flags':flags}
1609 'Provides an interface for working with FoxPro 2 tables'
1610 _version = 'Foxpro'
1611 _versionabbv = 'fp'
1612 _fieldtypes = {
1613 'C' : {'Type':'Character', 'Retrieve':io.retrieveCharacter, 'Update':io.updateCharacter, 'Blank':str, 'Init':io.addCharacter},
1614 'F' : {'Type':'Float', 'Retrieve':io.retrieveNumeric, 'Update':io.updateNumeric, 'Blank':float, 'Init':io.addVfpNumeric},
1615 'N' : {'Type':'Numeric', 'Retrieve':io.retrieveNumeric, 'Update':io.updateNumeric, 'Blank':int, 'Init':io.addVfpNumeric},
1616 'L' : {'Type':'Logical', 'Retrieve':io.retrieveLogical, 'Update':io.updateLogical, 'Blank':bool, 'Init':io.addLogical},
1617 'D' : {'Type':'Date', 'Retrieve':io.retrieveDate, 'Update':io.updateDate, 'Blank':Date.today, 'Init':io.addDate},
1618 'M' : {'Type':'Memo', 'Retrieve':io.retrieveMemo, 'Update':io.updateMemo, 'Blank':str, 'Init':io.addVfpMemo},
1619 'G' : {'Type':'General', 'Retrieve':io.retrieveMemo, 'Update':io.updateMemo, 'Blank':str, 'Init':io.addMemo},
1620 'P' : {'Type':'Picture', 'Retrieve':io.retrieveMemo, 'Update':io.updateMemo, 'Blank':str, 'Init':io.addMemo},
1621 '0' : {'Type':'_NullFlags', 'Retrieve':io.unsupportedType, 'Update':io.unsupportedType, 'Blank':int, 'Init':None} }
1622 _memoext = '.fpt'
1623 _memotypes = ('G','M','P')
1624 _memoClass = _VfpMemo
1625 _yesMemoMask = '\xf5'
1626 _noMemoMask = '\x03'
1627 _fixed_fields = ('B','D','G','I','L','M','P','T','Y')
1628 _variable_fields = ('C','F','N')
1629 _character_fields = ('C','M')
1630 _decimal_fields = ('F','N')
1631 _numeric_fields = ('B','F','I','N','Y')
1632 _supported_tables = ('\x03', '\xf5')
1633 _dbfTableHeader = array('c', '\x00' * 32)
1634 _dbfTableHeader[0] = '\x30'
1635 _dbfTableHeader[8:10] = array('c', io.packShortInt(33+263))
1636 _dbfTableHeader[10] = '\x01'
1637 _dbfTableHeader[29] = '\x03'
1638 _dbfTableHeader = _dbfTableHeader.tostring()
1639 _dbfTableHeaderExtra = '\x00' * 263
1640 _use_deleted = True
1642 if os.path.exists(yo._meta.memoname):
1643 try:
1644 yo._meta.memo = yo._memoClass(yo._meta)
1645 except:
1646 yo._meta.dfd.close()
1647 yo._meta.dfd = None
1648 raise
1649 if not yo._meta.ignorememos:
1650 for field in yo._meta.fields:
1651 if yo._meta[field]['type'] in yo._memotypes:
1652 if not os.path.exists(yo._meta.memoname):
1653 yo._meta.dfd.close()
1654 yo._meta.dfd = None
1655 raise DbfError("Table structure corrupt: memo fields exist without memo file")
1656 break
1658 "builds the FieldList of names, types, and descriptions"
1659 yo._meta.fields[:] = []
1660 offset = 1
1661 fieldsdef = yo._meta.header.fields
1662 if len(fieldsdef) % 32 != 0:
1663 raise DbfError("field definition block corrupt: %d bytes in size" % len(fieldsdef))
1664 if len(fieldsdef) // 32 != yo.field_count:
1665 raise DbfError("Header shows %d fields, but field definition block has %d fields" % (yo.field_count, len(fieldsdef)//32))
1666 for i in range(yo.field_count):
1667 fieldblock = fieldsdef[i*32:(i+1)*32]
1668 name = io.unpackStr(fieldblock[:11])
1669 type = fieldblock[11]
1670 if not type in yo._meta.fieldtypes:
1671 raise DbfError("Unknown field type: %s" % type)
1672 elif type == '0':
1673 return
1674 start = offset
1675 length = ord(fieldblock[16])
1676 offset += length
1677 end = start + length
1678 decimals = ord(fieldblock[17])
1679 flags = ord(fieldblock[18])
1680 yo._meta.fields.append(name)
1681 yo._meta[name] = {'type':type,'start':start,'length':length,'end':end,'decimals':decimals,'flags':flags}
1682
1684 'Provides an interface for working with Visual FoxPro 6 tables'
1685 _version = 'Visual Foxpro v6'
1686 _versionabbv = 'vfp'
1687 _fieldtypes = {
1688 'C' : {'Type':'Character', 'Retrieve':io.retrieveCharacter, 'Update':io.updateCharacter, 'Blank':str, 'Init':io.addCharacter},
1689 'Y' : {'Type':'Currency', 'Retrieve':io.retrieveCurrency, 'Update':io.updateCurrency, 'Blank':Decimal(), 'Init':io.addVfpCurrency},
1690 'B' : {'Type':'Double', 'Retrieve':io.retrieveDouble, 'Update':io.updateDouble, 'Blank':float, 'Init':io.addVfpDouble},
1691 'F' : {'Type':'Float', 'Retrieve':io.retrieveNumeric, 'Update':io.updateNumeric, 'Blank':float, 'Init':io.addVfpNumeric},
1692 'N' : {'Type':'Numeric', 'Retrieve':io.retrieveNumeric, 'Update':io.updateNumeric, 'Blank':int, 'Init':io.addVfpNumeric},
1693 'I' : {'Type':'Integer', 'Retrieve':io.retrieveInteger, 'Update':io.updateInteger, 'Blank':int, 'Init':io.addVfpInteger},
1694 'L' : {'Type':'Logical', 'Retrieve':io.retrieveLogical, 'Update':io.updateLogical, 'Blank':bool, 'Init':io.addLogical},
1695 'D' : {'Type':'Date', 'Retrieve':io.retrieveDate, 'Update':io.updateDate, 'Blank':Date.today, 'Init':io.addDate},
1696 'T' : {'Type':'DateTime', 'Retrieve':io.retrieveVfpDateTime, 'Update':io.updateVfpDateTime, 'Blank':DateTime.now, 'Init':io.addVfpDateTime},
1697 'M' : {'Type':'Memo', 'Retrieve':io.retrieveVfpMemo, 'Update':io.updateVfpMemo, 'Blank':str, 'Init':io.addVfpMemo},
1698 'G' : {'Type':'General', 'Retrieve':io.retrieveVfpMemo, 'Update':io.updateVfpMemo, 'Blank':str, 'Init':io.addVfpMemo},
1699 'P' : {'Type':'Picture', 'Retrieve':io.retrieveVfpMemo, 'Update':io.updateVfpMemo, 'Blank':str, 'Init':io.addVfpMemo},
1700 '0' : {'Type':'_NullFlags', 'Retrieve':io.unsupportedType, 'Update':io.unsupportedType, 'Blank':int, 'Init':None} }
1701 _memoext = '.fpt'
1702 _memotypes = ('G','M','P')
1703 _memoClass = _VfpMemo
1704 _yesMemoMask = '\x30'
1705 _noMemoMask = '\x30'
1706 _fixed_fields = ('B','D','G','I','L','M','P','T','Y')
1707 _variable_fields = ('C','F','N')
1708 _character_fields = ('C','M')
1709 _decimal_fields = ('F','N')
1710 _numeric_fields = ('B','F','I','N','Y')
1711 _supported_tables = ('\x30',)
1712 _dbfTableHeader = array('c', '\x00' * 32)
1713 _dbfTableHeader[0] = '\x30'
1714 _dbfTableHeader[8:10] = array('c', io.packShortInt(33+263))
1715 _dbfTableHeader[10] = '\x01'
1716 _dbfTableHeader[29] = '\x03'
1717 _dbfTableHeader = _dbfTableHeader.tostring()
1718 _dbfTableHeaderExtra = '\x00' * 263
1719 _use_deleted = True
1721 if os.path.exists(yo._meta.memoname):
1722 try:
1723 yo._meta.memo = yo._memoClass(yo._meta)
1724 except:
1725 yo._meta.dfd.close()
1726 yo._meta.dfd = None
1727 raise
1728 if not yo._meta.ignorememos:
1729 for field in yo._meta.fields:
1730 if yo._meta[field]['type'] in yo._memotypes:
1731 if not os.path.exists(yo._meta.memoname):
1732 yo._meta.dfd.close()
1733 yo._meta.dfd = None
1734 raise DbfError("Table structure corrupt: memo fields exist without memo file")
1735 break
1737 "builds the FieldList of names, types, and descriptions"
1738 yo._meta.fields[:] = []
1739 offset = 1
1740 fieldsdef = yo._meta.header.fields
1741 for i in range(yo.field_count):
1742 fieldblock = fieldsdef[i*32:(i+1)*32]
1743 name = io.unpackStr(fieldblock[:11])
1744 type = fieldblock[11]
1745 if not type in yo._meta.fieldtypes:
1746 raise DbfError("Unknown field type: %s" % type)
1747 elif type == '0':
1748 return
1749 start = io.unpackLongInt(fieldblock[12:16])
1750 length = ord(fieldblock[16])
1751 offset += length
1752 end = start + length
1753 decimals = ord(fieldblock[17])
1754 flags = ord(fieldblock[18])
1755 yo._meta.fields.append(name)
1756 yo._meta[name] = {'type':type,'start':start,'length':length,'end':end,'decimals':decimals,'flags':flags}
1757 class List(object):
1758 "list of Dbf records, with set-like behavior"
1759 _desc = ''
1760 def __init__(yo, new_records=None, desc=None, key=None):
1761 yo._list = []
1762 yo._set = set()
1763 if key is not None:
1764 yo.key = key
1765 if key.__doc__ is None:
1766 key.__doc__ = 'unknown'
1767 key = yo.key
1768 yo._current = -1
1769 if isinstance(new_records, yo.__class__) and key is new_records.key:
1770 yo._list = new_records._list[:]
1771 yo._set = new_records._set.copy()
1772 yo._current = 0
1773 elif new_records is not None:
1774 for record in new_records:
1775 value = key(record)
1776 item = (record.record_table, record.record_number, value)
1777 if value not in yo._set:
1778 yo._set.add(value)
1779 yo._list.append(item)
1780 yo._current = 0
1781 if desc is not None:
1782 yo._desc = desc
1784 key = yo.key
1785 if isinstance(other, (DbfTable, list)):
1786 other = yo.__class__(other, key=key)
1787 if isinstance(other, yo.__class__):
1788 result = yo.__class__()
1789 result._set = yo._set.copy()
1790 result._list[:] = yo._list[:]
1791 result.key = yo.key
1792 if key is other.key:
1793 for item in other._list:
1794 if item[2] not in result._set:
1795 result._set.add(item[2])
1796 result._list.append(item)
1797 else:
1798 for rec in other:
1799 value = key(rec)
1800 if value not in result._set:
1801 result._set.add(value)
1802 result._list.append((rec.record_table, rec.record_number, value))
1803 result._current = 0 if result else -1
1804 return result
1805 return NotImplemented
1807 if isinstance(record, tuple):
1808 item = record
1809 else:
1810 item = record.record_table, record.record_number, yo.key(record)
1811 return item in yo._set
1813 if isinstance(key, int):
1814 item = yo._list.pop(key)
1815 yo._set.remove(item[2])
1816 elif isinstance(key, slice):
1817 yo._set.difference_update([item[2] for item in yo._list[key]])
1818 yo._list.__delitem__(key)
1819 else:
1820 raise TypeError
        if isinstance(key, int):
            count = len(yo._list)
            if not -count <= key < count:
                raise IndexError("Record %d is not in list." % key)
            return yo._get_record(*yo._list[key])
        elif isinstance(key, slice):
            result = yo.__class__()
            result._list[:] = yo._list[key]
            result._set = set([item[2] for item in result._list])
            result.key = yo.key
            result._current = 0 if result else -1
            return result
        else:
            raise TypeError('indices must be integers')
        return (table.get_record(recno) for table, recno, value in yo._list)
        return len(yo._list)
        if yo._desc:
            return "%s(key=%s - %s - %d records)" % (yo.__class__, yo.key.__doc__, yo._desc, len(yo._list))
        else:
            return "%s(key=%s - %d records)" % (yo.__class__, yo.key.__doc__, len(yo._list))
        key = yo.key
        if isinstance(other, (DbfTable, list)):
            other = yo.__class__(other, key=key)
        if isinstance(other, yo.__class__):
            result = yo.__class__()
            result._list[:] = other._list[:]
            result._set = other._set.copy()
            result.key = key
            lost = set()
            if key is other.key:
                for item in yo._list:
                    if item[2] in result._set:
                        result._set.remove(item[2])
                        lost.add(item)
            else:
                for rec in other:
                    value = key(rec)
                    if value in result._set:
                        result._set.remove(value)
                        lost.add((rec.record_table, rec.record_number, value))
            result._list = [item for item in result._list if item not in lost]
            result._current = 0 if result else -1
            return result
        return NotImplemented
        key = yo.key
        if isinstance(other, (DbfTable, list)):
            other = yo.__class__(other, key=key)
        if isinstance(other, yo.__class__):
            result = yo.__class__()
            result._list[:] = yo._list[:]
            result._set = yo._set.copy()
            result.key = key
            lost = set()
            if key is other.key:
                for item in other._list:
                    if item[2] in result._set:
                        result._set.remove(item[2])
                        lost.add(item[2])
            else:
                for rec in other:
                    value = key(rec)
                    if value in result._set:
                        result._set.remove(value)
                        lost.add(value)
            result._list = [item for item in result._list if item[2] not in lost]
            result._current = 0 if result else -1
            return result
        return NotImplemented
        if item[2] not in yo._set:
            yo._set.add(item[2])
            yo._list.append(item)
    def _get_record(yo, table=None, rec_no=None, value=None):
        if table is rec_no is None:
            table, rec_no, value = yo._list[yo._current]
        return table.get_record(rec_no)
    def _purge(yo, record, old_record_number, offset):
        partial = record.record_table, old_record_number
        records = sorted(yo._list, key=lambda item: (item[0], item[1]))
        for item in records:
            if partial == item[:2]:
                found = True
                break
            elif partial[0] is item[0] and partial[1] < item[1]:
                found = False
                break
        else:
            found = False
        if found:
            yo._list.pop(yo._list.index(item))
            yo._set.remove(item[2])
        start = records.index(item) + found
        for item in records[start:]:
            if item[0] is not partial[0]:
                break
            i = yo._list.index(item)
            yo._set.remove(item[2])
            item = item[0], (item[1] - offset), item[2]
            yo._list[i] = item
            yo._set.add(item[2])
        return found
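        # Hedged illustration of _purge: once a record has been removed from its
        # table, the matching (table, record_number, value) entry is dropped and
        # later entries for the same table are renumbered, e.g. with offset=1 an
        # entry (table, 7, value) becomes (table, 6, value).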
        if yo._list:
            yo._current = len(yo._list) - 1
            return yo._get_record()
        raise DbfError("dbf.List is empty")
        yo._list = []
        yo._set = set()
        yo._current = -1
        if yo._current < 0:
            raise Bof()
        elif yo._current == len(yo._list):
            raise Eof()
        return yo._get_record()
    def extend(yo, new_records):
    def goto(yo, index_number):
        if yo._list:
            if 0 <= index_number < len(yo._list):
                yo._current = index_number
                return yo._get_record()
            raise DbfError("index %d not in dbf.List of %d records" % (index_number, len(yo._list)))
        raise DbfError("dbf.List is empty")
    def index(yo, sort=None, reverse=False):
        "sort= ((field_name, func), (field_name, func),) | 'ORIGINAL'"
        if sort is None:
            results = []
            for field, func in yo._meta.index:
                results.append("%s(%s)" % (func.__name__, field))
            return ', '.join(results + ['reverse=%s' % yo._meta.index_reversed])
        yo._meta.index_reversed = reverse
        if sort == 'ORIGINAL':
            yo._index = range(yo._meta.header.record_count)
            yo._meta.index = 'ORIGINAL'
            if reverse:
                yo._index.reverse()
            return
        new_sort = _normalize_tuples(tuples=sort, length=2, filler=[_nop])
        yo._meta.index = tuple(new_sort)
        yo._meta.orderresults = [''] * len(yo)
        for record in yo:
            yo._meta.orderresults[record.record_number] = record()
        yo._index.sort(key=lambda i: yo._meta.orderresults[i], reverse=reverse)
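        # Hedged sketch of the sort specification accepted above (field names
        # are hypothetical); each inner tuple is padded to (field_name, func)
        # by _normalize_tuples, with _nop filling in when no func is given:
        #
        #     >>> records.index(sort=(('lastname', str.lower), ('hiredate', )))
        #     >>> records.index(sort='ORIGINAL', reverse=True)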
    def index(yo, record, start=None, stop=None):
    def key(yo, record):
        if yo._current < len(yo._list):
            yo._current += 1
            if yo._current < len(yo._list):
                return yo._get_record()
        raise Eof()
    def pop(yo, index=None):
        if index is None:
            table, recno, value = yo._list.pop()
        else:
            table, recno, value = yo._list.pop(index)
        yo._set.remove(value)
        return yo._get_record(table, recno, value)
        if yo._current >= 0:
            yo._current -= 1
            if yo._current > -1:
                return yo._get_record()
        raise Bof()
        if yo._list:
            yo._current = 0
            return yo._get_record()
        raise DbfError("dbf.List is empty")
    def sort(yo, key=None, reverse=False):

        "returns records using this index"
            yo.table = table
            yo.records = records
            yo.index = 0
        yo._table = table
        yo._values = []
        yo._rec_by_val = []
        yo._records = {}
        yo.__doc__ = key.__doc__ or 'unknown'
        yo.key = key
        for record in table:
            value = key(record)
            if value is DoNotIndex:
                continue
            rec_num = record.record_number
            if not isinstance(value, tuple):
                value = (value, )
            vindex = bisect_right(yo._values, value)
            yo._values.insert(vindex, value)
            yo._rec_by_val.insert(vindex, rec_num)
            yo._records[rec_num] = value
        table._indexen.add(yo)
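        # Hedged usage sketch (table and field are hypothetical): key values are
        # kept as tuples in yo._values, sorted so lookups can use bisect, with
        # the matching record numbers held in yo._rec_by_val.
        #
        #     >>> def by_zipcode(record):
        #     ...     "zipcode"
        #     ...     return record.zipcode
        #     >>> zips = Index(table, by_zipcode)
        #     >>> zips[0]          # record with the lowest zipcode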
        rec_num = record.record_number
        if rec_num in yo._records:
            value = yo._records[rec_num]
            vindex = bisect_left(yo._values, value)
            yo._values.pop(vindex)
            yo._rec_by_val.pop(vindex)
        value = yo.key(record)
        if value is DoNotIndex:
            return
        if not isinstance(value, tuple):
            value = (value, )
        vindex = bisect_right(yo._values, value)
        yo._values.insert(vindex, value)
        yo._rec_by_val.insert(vindex, rec_num)
        yo._records[rec_num] = value
        if isinstance(match, _DbfRecord):
            if match.record_table is yo._table:
                return match.record_number in yo._records
            match = yo.key(match)
        elif not isinstance(match, tuple):
            match = (match, )
        return yo.find(match) != -1
        if isinstance(key, int):
            count = len(yo._values)
            if not -count <= key < count:
                raise IndexError("Record %d is not in list." % key)
            rec_num = yo._rec_by_val[key]
            return yo._table.get_record(rec_num)
        elif isinstance(key, slice):
            result = List()
            yo._table._dbflists.add(result)
            start, stop, step = key.start, key.stop, key.step
            if start is None: start = 0
            if stop is None: stop = len(yo._rec_by_val)
            if step is None: step = 1
            for loc in range(start, stop, step):
                record = yo._table.get_record(yo._rec_by_val[loc])
                result._maybe_add(item=(yo._table, yo._rec_by_val[loc], result.key(record)))
            result._current = 0 if result else -1
            return result
        elif isinstance(key, (str, unicode, tuple, _DbfRecord)):
            if isinstance(key, _DbfRecord):
                key = yo.key(key)
            elif not isinstance(key, tuple):
                key = (key, )
            loc = yo.find(key)
            if loc == -1:
                raise KeyError(key)
            return yo._table.get_record(yo._rec_by_val[loc])
        else:
            raise TypeError('indices must be integers, match objects must be strings or tuples')
        yo._table.close()
        yo._values[:] = []
        yo._rec_by_val[:] = []
        yo._records.clear()
        return False
        return len(yo._records)
        target = target[:len(match)]
        if isinstance(match[-1], (str, unicode)):
            target = list(target)
            target[-1] = target[-1][:len(match[-1])]
            target = tuple(target)
        return target == match
        value = yo._records.get(rec_num)
        if value is not None:
            vindex = bisect_left(yo._values, value)
            del yo._records[rec_num]
            yo._values.pop(vindex)
            yo._rec_by_val.pop(vindex)
    def _search(yo, match, lo=0, hi=None):
        if hi is None:
            hi = len(yo._values)
        return bisect_left(yo._values, match, lo, hi)
        "removes all entries from index"
        yo._values[:] = []
        yo._rec_by_val[:] = []
        yo._records.clear()
    close = __exit__
    def find(yo, match, partial=False):
        "returns numeric index of (partial) match, or -1"
        if isinstance(match, _DbfRecord):
            if match.record_number in yo._records:
                return yo._values.index(yo._records[match.record_number])
            else:
                return -1
        if not isinstance(match, tuple):
            match = (match, )
        loc = yo._search(match)
        while loc < len(yo._values) and yo._values[loc] == match:
            if not yo._table.use_deleted and yo._table.get_record(yo._rec_by_val[loc]).has_been_deleted:
                loc += 1
                continue
            return loc
        if partial:
            while loc < len(yo._values) and yo._partial_match(yo._values[loc], match):
                if not yo._table.use_deleted and yo._table.get_record(yo._rec_by_val[loc]).has_been_deleted:
                    loc += 1
                    continue
                return loc
        return -1
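        # Hedged example (values hypothetical): an exact match compares whole
        # key tuples, while partial=True also accepts a leading substring in the
        # final element via _partial_match.
        #
        #     >>> idx.find(('Smith', 'John'))           # exact match, or -1
        #     >>> idx.find(('Smi', ), partial=True)     # prefix match, or -1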
2203 "returns numeric index of either (partial) match, or position of where match would be"
2204 if isinstance(match, _DbfRecord):
2205 if match.record_number in yo._records:
2206 return yo._values.index(yo._records[match.record_number])
2207 else:
2208 match = yo.key(match)
2209 if not isinstance(match, tuple):
2210 match = (match, )
2211 loc = yo._search(match)
2212 return loc
2213 - def index(yo, match, partial=False):
2214 "returns numeric index of (partial) match, or raises ValueError"
2215 loc = yo.find(match, partial)
2216 if loc == -1:
2217 if isinstance(match, _DbfRecord):
2218 raise ValueError("table <%s> record [%d] not in index <%s>" % (yo._table.filename, match.record_number, yo.__doc__))
2219 else:
2220 raise ValueError("match criteria <%s> not in index" % (match, ))
2221 return loc
2223 "reindexes all records"
2224 for record in yo._table:
2225 yo(record)
    def query(yo, sql_command=None, python=None):
        """recognized sql commands are SELECT, UPDATE, INSERT, DELETE, and RECALL"""
        if sql_command:
            return sql(yo, sql_command)
        elif python is None:
            raise DbfError("query: python parameter must be specified")
        possible = List(desc="%s --> %s" % (yo._table.filename, python))
        yo._table._dbflists.add(possible)
        query_result = {}
        select = 'query_result["keep"] = %s' % python
        g = {}
        for record in yo:
            query_result['keep'] = False
            g['query_result'] = query_result
            exec select in g, record
            if query_result['keep']:
                possible.append(record)
            record.write()
        return possible
    def search(yo, match, partial=False):
        "returns dbf.List of all (partially) matching records"
        result = List()
        yo._table._dbflists.add(result)
        if not isinstance(match, tuple):
            match = (match, )
        loc = yo._search(match)
        if loc == len(yo._values):
            return result
        while loc < len(yo._values) and yo._values[loc] == match:
            record = yo._table.get_record(yo._rec_by_val[loc])
            if not yo._table.use_deleted and record.has_been_deleted:
                loc += 1
                continue
            result._maybe_add(item=(yo._table, yo._rec_by_val[loc], result.key(record)))
            loc += 1
        if partial:
            while loc < len(yo._values) and yo._partial_match(yo._values[loc], match):
                record = yo._table.get_record(yo._rec_by_val[loc])
                if not yo._table.use_deleted and record.has_been_deleted:
                    loc += 1
                    continue
                result._maybe_add(item=(yo._table, yo._rec_by_val[loc], result.key(record)))
                loc += 1
        return result
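        # Hedged example (values hypothetical): unlike find(), search() gathers
        # every match into a dbf.List.
        #
        #     >>> matches = idx.search(('Smi', ), partial=True)
        #     >>> for rec in matches:
        #     ...     print rec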

csv.register_dialect('dbf', DbfCsv)

def sql(records, command):
    """recognized sql commands are SELECT, UPDATE, INSERT, DELETE, and RECALL"""
    table = records[0].record_table
    sql_command = command
    if ' for ' in command:
        command, condition = command.split(' for ')
    else:
        condition = 'True'
    name, command = command.split(' ', 1)
    name = name.lower()
    if name not in ('delete','insert','recall','select','update'):
        raise DbfError("unrecognized sql command: %s" % name.upper())
    if name == 'insert' and condition != 'True':
        raise DbfError("FOR clause not allowed with INSERT")
    possible = List(desc=sql_command)
    tables = set()

    query_result = {}
    select = 'query_result["keep"] = %s' % condition
    g = {}
    if name == 'insert':

        record = table.append()
        exec command in {}, record
        record.write()
        record.reindex()
        possible.append(record)
    else:
        for record in records:
            query_result['keep'] = False
            g['query_result'] = query_result
            exec select in g, record
            if query_result['keep']:
                possible.append(record)
                tables.add(record.record_table)
                if name == 'delete':
                    record.delete_record()
                elif name == 'recall':
                    record.undelete_record()
                elif name == 'select':
                    pass
                elif name == 'update':
                    exec command in g, record
                else:
                    raise DbfError("unrecognized sql command: %s" % name.upper())
                record.write()
    if name == 'select':
        fields = command.replace(' ','').split(',')
        field_sizes = dict([(field, (0, 0)) for field in fields])
        for t in tables:
            for field in fields:
                field_sizes[field] = max(field_sizes[field], t.size(field))
        field_specs = []
        for field in fields:
            type = table.type(field)
            length, decimals = field_sizes[field]
            if type in table._decimal_fields:
                description = "%s %s(%d,%d)" % (field, type, length, decimals)
            elif type in table._fixed_fields:
                description = "%s %s" % (field, type)
            else:
                description = "%s %s(%d)" % (field, type, length)
            field_specs.append(description)
        select = table.new(filename=':%s:' % sql_command, field_specs=field_specs)
        for record in possible:
            select.append(record.scatter_fields(), drop=True)
        return select
    else:
        for list_table in tables:
            list_table._dbflists.add(possible)
        return possible
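# Hedged examples of the sql() mini-language above (fields and values are
# hypothetical): the first word picks the action, and an optional
# " for <condition>" clause filters the records it applies to.
#
#     >>> sql(records, "select name, age for age > 30")
#     >>> sql(records, "update age = age + 1 for name == 'Ethan'")
#     >>> sql(records, "delete for age < 0")
#     >>> sql(records, "insert name = 'new person'")
#
# INSERT rejects a FOR clause, and SELECT returns a new in-memory table built
# from the listed fields; the other commands return the matching records.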
2345 "returns parameter unchanged"
2346 return value
2348 "ensures each tuple is the same length, using filler[-missing] for the gaps"
2349 final = []
2350 for t in tuples:
2351 if len(t) < length:
2352 final.append( tuple([item for item in t] + filler[len(t)-length:]) )
2353 else:
2354 final.append(t)
2355 return tuple(final)
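# Hedged example: tuples shorter than `length` are padded with trailing entries
# from `filler` so the result is uniform.
#
#     >>> _normalize_tuples(tuples=(('a', 1), ('b', )), length=2, filler=[0])
#     (('a', 1), ('b', 0))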
    if cp not in code_pages:
        for code_page in sorted(code_pages.keys()):
            sd, ld = code_pages[code_page]
            if cp == sd or cp == ld:
                if sd is None:
                    raise DbfError("Unsupported codepage: %s" % ld)
                cp = code_page
                break
        else:
            raise DbfError("Unsupported codepage: %s" % cp)
    sd, ld = code_pages[cp]
    return cp, sd, ld
def ascii(new_setting=None):
def codepage(cp=None):
    "get/set default codepage for any new tables"
    global default_codepage
    cp, sd, ld = _codepage_lookup(cp or default_codepage)
    default_codepage = sd
    return "%s (LDID: 0x%02x - %s)" % (sd, ord(cp), ld)
    version = 'dBase IV w/memos (non-functional)'
    _versionabbv = 'db4'
    _fieldtypes = {
            'C' : {'Type':'Character', 'Retrieve':io.retrieveCharacter, 'Update':io.updateCharacter, 'Blank':str, 'Init':io.addCharacter},
            'Y' : {'Type':'Currency', 'Retrieve':io.retrieveCurrency, 'Update':io.updateCurrency, 'Blank':Decimal(), 'Init':io.addVfpCurrency},
            'B' : {'Type':'Double', 'Retrieve':io.retrieveDouble, 'Update':io.updateDouble, 'Blank':float, 'Init':io.addVfpDouble},
            'F' : {'Type':'Float', 'Retrieve':io.retrieveNumeric, 'Update':io.updateNumeric, 'Blank':float, 'Init':io.addVfpNumeric},
            'N' : {'Type':'Numeric', 'Retrieve':io.retrieveNumeric, 'Update':io.updateNumeric, 'Blank':int, 'Init':io.addVfpNumeric},
            'I' : {'Type':'Integer', 'Retrieve':io.retrieveInteger, 'Update':io.updateInteger, 'Blank':int, 'Init':io.addVfpInteger},
            'L' : {'Type':'Logical', 'Retrieve':io.retrieveLogical, 'Update':io.updateLogical, 'Blank':bool, 'Init':io.addLogical},
            'D' : {'Type':'Date', 'Retrieve':io.retrieveDate, 'Update':io.updateDate, 'Blank':Date.today, 'Init':io.addDate},
            'T' : {'Type':'DateTime', 'Retrieve':io.retrieveVfpDateTime, 'Update':io.updateVfpDateTime, 'Blank':DateTime.now, 'Init':io.addVfpDateTime},
            'M' : {'Type':'Memo', 'Retrieve':io.retrieveMemo, 'Update':io.updateMemo, 'Blank':str, 'Init':io.addMemo},
            'G' : {'Type':'General', 'Retrieve':io.retrieveMemo, 'Update':io.updateMemo, 'Blank':str, 'Init':io.addMemo},
            'P' : {'Type':'Picture', 'Retrieve':io.retrieveMemo, 'Update':io.updateMemo, 'Blank':str, 'Init':io.addMemo},
            '0' : {'Type':'_NullFlags', 'Retrieve':io.unsupportedType, 'Update':io.unsupportedType, 'Blank':int, 'Init':None} }
    _memoext = '.dbt'
    _memotypes = ('G','M','P')
    _memoClass = _VfpMemo
    _yesMemoMask = '\x8b'
    _noMemoMask = '\x04'
    _fixed_fields = ('B','D','G','I','L','M','P','T','Y')
    _variable_fields = ('C','F','N')
    _character_fields = ('C','M')
    _decimal_fields = ('F','N')
    _numeric_fields = ('B','F','I','N','Y')
    _supported_tables = ('\x04', '\x8b')
    _dbfTableHeader = ['\x00'] * 32
    _dbfTableHeader[0] = '\x8b'
    _dbfTableHeader[10] = '\x01'
    _dbfTableHeader[29] = '\x03'
    _dbfTableHeader = ''.join(_dbfTableHeader)
    _dbfTableHeaderExtra = ''
    _use_deleted = True
2424 "dBase III specific"
2425 if yo._meta.header.version == '\x8b':
2426 try:
2427 yo._meta.memo = yo._memoClass(yo._meta)
2428 except:
2429 yo._meta.dfd.close()
2430 yo._meta.dfd = None
2431 raise
2432 if not yo._meta.ignorememos:
2433 for field in yo._meta.fields:
2434 if yo._meta[field]['type'] in yo._memotypes:
2435 if yo._meta.header.version != '\x8b':
2436 yo._meta.dfd.close()
2437 yo._meta.dfd = None
2438 raise DbfError("Table structure corrupt: memo fields exist, header declares no memos")
2439 elif not os.path.exists(yo._meta.memoname):
2440 yo._meta.dfd.close()
2441 yo._meta.dfd = None
2442 raise DbfError("Table structure corrupt: memo fields exist without memo file")
2443 break
2444