Move exceptions to their own file.

Signed-off-by: brian m. carlson <sandals@crustytoothpaste.net>
This commit is contained in:
brian m. carlson 2013-10-12 18:27:56 +00:00
parent 071825a302
commit d692439a08
No known key found for this signature in database
GPG key ID: BF535D811F52F68B
3 changed files with 46 additions and 39 deletions

14
lib/newfol/exception.py Normal file
View file

@ -0,0 +1,14 @@
class NewfolError(Exception):
    """Base exception for application-level newfol errors."""


class FilemanipError(Exception):
    """Base exception for file-manipulation errors."""


class UpgradeNeededError(FilemanipError):
    """Raised when stored data is in an older format and needs upgrading."""


class DataError(FilemanipError):
    """Raised when stored data fails validation."""


class RepairError(FilemanipError):
    """Raised when a record cannot be repaired automatically."""

View file

@ -11,17 +11,7 @@ import subprocess
import sys
import uuid
class FilemanipError(Exception):
pass
class UpgradeNeededError(FilemanipError):
pass
class DataError(FilemanipError):
pass
class RepairError(FilemanipError):
pass
import newfol.exception
class Record:
"""A record containing one or more fields."""
@ -138,8 +128,8 @@ class GitTransactionStore(TransactionStore):
if self._options["forgiving"]:
return
else:
raise UpgradeNeededError("git transactions not " +
"initialized")
raise newfol.exception.UpgradeNeededError("git " +
"transactions not initialized")
self._call_git("checkout", self._name)
def commit_close(self):
with self.DirectoryChanger(self._prefix) as dc:
@ -181,11 +171,12 @@ class SHA256TransactionStore(TransactionStore):
line = fp.readline()[:-1]
fp.close()
if line != result:
raise FilemanipError("dtb is corrupt: " + result + " != " +
line)
raise newfol.exception.FilemanipError("dtb is corrupt: " +
result + " != " + line)
except IOError:
if not self._options["forgiving"]:
raise UpgradeNeededError("dtb is missing checksum")
raise newfol.exception.UpgradeNeededError("dtb is missing "+
"checksum")
def commit_close(self):
if "w" in self._mode:
result = self._hash_file(self._filename)
@ -272,7 +263,8 @@ class PickleFile(FileFormat):
"""Store the records to this file."""
for rec in records:
if rec.version() == 0:
raise FilemanipError("cannot pickle version 0 records")
raise newfol.exception.FilemanipError("cannot pickle version 0"+
" records")
self._txnstore.prepare_open(self._filename, "wb")
if self._isfp:
fp = self._file
@ -295,12 +287,15 @@ class PickleFile(FileFormat):
self._txnstore.commit_open(self._filename, "rb")
recs = pickle.load(fp, encoding="UTF-8")
if not isinstance(recs, list):
raise FilemanipError("pickle file is corrupted (not a list)")
raise newfol.exception.FilemanipError("pickle file is corrupted "+
"(not a list)")
for rec in recs:
if not isinstance(rec, Record):
raise FilemanipError("pickle file is corrupted (not a record)")
raise newfol.exception.FilemanipError("pickle file is corrupted"+
" (not a record)")
if rec.version() == 0:
raise FilemanipError("pickle file is corrupted (old record)")
raise newfol.exception.FilemanipError("pickle file is "+
"corrupted (old record)")
self._txnstore.prepare_close()
if not self._isfp:
fp.close()

36
newfol
View file

@ -17,6 +17,7 @@ import subprocess
import tempfile
import urwid
import newfol.exception
import newfol.filemanip as filemanip
__version__ = "v4~pre"
@ -32,9 +33,6 @@ def log(*args):
print(*args, file=logfd)
logfd.flush()
class NewfolError(Exception):
pass
class KeyboardShortcuts(dict):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
@ -1172,7 +1170,7 @@ def fix_up_layout(x):
try:
return int(x)+1
except:
raise NewfolError("Invalid dpy value in schema file")
raise newfol.exception.NewfolError("Invalid dpy value in schema file")
def do_import(l):
for i in l:
@ -1198,7 +1196,7 @@ def load_schemata(path):
else:
raise
if recs[0].fields[0] != "fmt":
raise NewfolError("Schema file does not have format declaration")
raise newfol.exception.NewfolError("Schema file does not have format declaration")
if recs[0].fields[1] == "0":
upgrade_schemata_v0(vault, recs)
return load_schemata(path)
@ -1209,7 +1207,7 @@ def load_schemata(path):
upgrade_schemata_v2(vault, recs)
return load_schemata(path)
elif recs[0].fields[1] != "3":
raise NewfolError("Schema file has unknown format")
raise newfol.exception.NewfolError("Schema file has unknown format")
mapping = {}
keyfields = []
nfields = 0
@ -1228,8 +1226,8 @@ def load_schemata(path):
continue
if rectype == "def":
if nfields == 0:
raise NewfolError("Schema does not specify a fixed number "+
"of fields")
raise newfol.exception.NewfolError("Schema does not specify a "+
"fixed number of fields")
mapping[dbname] = Mapping(dbname, nfields)
elif rectype == "fld":
external = int(i.fields[2])
@ -1301,14 +1299,14 @@ def import_into(dtbname, dbtype, minfields=0, strict=False, identity=False):
continue
if minfields > 0 and strict:
if len(rec.fields) != minfields:
raise NewfolError("Record {0} has {1} fields, not {2}".format(
raise newfol.exception.NewfolError("Record {0} has {1} fields, not {2}".format(
recno, len(rec.fields), minfields))
try:
newrec = filemanip.Record(mapobj.map_fields_forward(rec.fields))
newrec.dbname = dtbname
DatabaseData().records.append(newrec)
except TypeError:
raise NewfolError("Data has too many fields or corrupt mapping")
raise newfol.exception.NewfolError("Data has too many fields or corrupt mapping")
def export_from(dtbname):
vault = filemanip.FileStorage('csv', sys.stdout)
@ -1402,13 +1400,13 @@ def lock_database(path):
try:
fd = os.open(lfile, os.O_RDWR | os.O_CREAT | os.O_EXCL, 0o600)
except:
raise NewfolError("Failed to create database lock")
raise newfol.exception.NewfolError("Failed to create database lock")
else:
fd = os.open(lfile, os.O_RDWR)
try:
fcntl.lockf(fd, fcntl.LOCK_EX | fcntl.LOCK_NB)
except IOError:
raise NewfolError("Can't lock database; is it in use?")
raise newfol.exception.NewfolError("Can't lock database; is it in use?")
DatabaseData().lock = fd
def unlock_database(path):
@ -1419,14 +1417,14 @@ def unlock_database(path):
def validate_database(strict=False):
for rec in DatabaseData().records:
if rec.version() < 2:
raise filemanip.UpgradeNeededError("records missing deleted attribute")
raise newfol.exception.UpgradeNeededError("records missing deleted attribute")
if not strict:
return
for rec in DatabaseData().records:
actual = len(rec.fields)
expected = DatabaseData().schemata.nfields
if actual != expected:
raise filemanip.DataError("a record has %d fields, not %d" %
raise newfol.exception.DataError("a record has %d fields, not %d" %
(actual, expected))
def repair_database():
@ -1437,7 +1435,7 @@ def repair_database():
rec.fields.append("")
while len(rec.fields) > expected:
if rec.fields[-1] != "":
raise filemanip.RepairError("record %r has junk beyond end of record" %
raise newfol.exception.RepairError("record %r has junk beyond end of record" %
rec)
rec.fields[:] = rec.fields[0:-1]
newrecs.append(rec)
@ -1469,7 +1467,7 @@ def main(args):
validate_database()
except IOError:
pass
except filemanip.UpgradeNeededError as e:
except newfol.exception.UpgradeNeededError as e:
if argobj.cmd == "upgrade":
# Ignore the error or we won't be able to upgrade at all.
pass
@ -1501,13 +1499,13 @@ def main(args):
try:
locale.setlocale(locale.LC_ALL, '')
main(sys.argv[1:])
except NewfolError as e:
except newfol.exception.NewfolError as e:
print("E: {0}".format(str(e)), file=sys.stderr)
sys.exit(2)
except filemanip.UpgradeNeededError as e:
except newfol.exception.UpgradeNeededError as e:
print("E: {0}; try upgrading".format(str(e)), file=sys.stderr)
sys.exit(2)
except filemanip.FilemanipError as e:
except newfol.exception.FilemanipError as e:
print("E: {0}".format(str(e)), file=sys.stderr)
sys.exit(2)