# Copyright 2010-2012 Gentoo Foundation
# Distributed under the terms of the GNU General Public License v2

__all__ = ['MtimeDB']

import copy
try:
	import cPickle as pickle
except ImportError:
	import pickle

import errno
import io
import json
import sys

import portage
from portage import _encodings
from portage import _unicode_decode
from portage import _unicode_encode
from portage.data import portage_gid, uid
from portage.localization import _
from portage.util import apply_secpass_permissions, atomic_ofstream, writemsg

class MtimeDB(dict):
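	"""A dict that persists selected Portage state (the "info", "ldpath",
	"resume", "resume_backup", "starttime", "updates" and "version"
	entries) in a single file.

	The data is loaded from filename on construction and written back by
	commit(), but only when the contents have changed since the last load
	or commit.  On-disk data is read as pickle with a JSON fallback;
	writes use pickle until _json_write is enabled.
	"""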

	# Enable this after JSON read has been supported for some time.
	_json_write = False

	_json_write_opts = {
		"ensure_ascii": False,
		"indent": "\t",
		"sort_keys": True
	}
	if sys.hexversion < 0x3020000:
		# indent only supports int number of spaces
		_json_write_opts["indent"] = 4

	def __init__(self, filename):
		dict.__init__(self)
		self.filename = filename
		self._load(filename)

	def _load(self, filename):
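		"""Read filename and populate this dict from its contents.

		A missing or unreadable file is treated as empty.  The content is
		unpickled first and decoded as JSON if that fails.  Legacy keys
		are migrated, defaults are filled in, unrecognized keys are
		discarded, and a deep copy of the result is kept in
		self._clean_data so that commit() can detect changes.
		"""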
		f = None
		content = None
		try:
			f = open(_unicode_encode(filename), 'rb')
			content = f.read()
		except EnvironmentError as e:
			if getattr(e, 'errno', None) in (errno.ENOENT, errno.EACCES):
				pass
			else:
				writemsg(_("!!! Error loading '%s': %s\n") % \
					(filename, e), noiselevel=-1)
		finally:
			if f is not None:
				f.close()

		d = None
		if content:
			try:
				mypickle = pickle.Unpickler(io.BytesIO(content))
				try:
					mypickle.find_global = None
				except AttributeError:
					# TODO: If py3k, override Unpickler.find_class().
					pass
				d = mypickle.load()
			except SystemExit:
				raise
			except Exception as e:
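				# Fall back to JSON, the format that will be
				# written once _json_write is enabled.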
				try:
					d = json.loads(_unicode_decode(content,
						encoding=_encodings['repo.content'], errors='strict'))
				except SystemExit:
					raise
				except Exception:
					writemsg(_("!!! Error loading '%s': %s\n") % \
						(filename, e), noiselevel=-1)

		if d is None:
			d = {}

		if "old" in d:
			d["updates"] = d["old"]
			del d["old"]
		if "cur" in d:
			del d["cur"]

		d.setdefault("starttime", 0)
		d.setdefault("version", "")
		for k in ("info", "ldpath", "updates"):
			d.setdefault(k, {})

		mtimedbkeys = set(("info", "ldpath", "resume", "resume_backup",
			"starttime", "updates", "version"))

		for k in list(d):
			if k not in mtimedbkeys:
				writemsg(_("Deleting invalid mtimedb key: %s\n") % str(k))
				del d[k]
		self.update(d)
		self._clean_data = copy.deepcopy(d)

	def commit(self):
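		"""Write the current contents back to self.filename.

		The write is skipped when nothing has changed since the last
		load or commit.  Otherwise the data is written atomically (as
		pickle, or as JSON once _json_write is enabled), permissions are
		normalized, and self._clean_data is refreshed.
		"""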
		if not self.filename:
			return
		d = {}
		d.update(self)
		# Only commit if the internal state has changed.
		if d != self._clean_data:
			d["version"] = str(portage.VERSION)
			try:
				f = atomic_ofstream(self.filename, mode='wb')
			except EnvironmentError:
				pass
			else:
				if self._json_write:
					f.write(_unicode_encode(
						json.dumps(d, **self._json_write_opts),
						encoding=_encodings['repo.content'], errors='strict'))
				else:
					pickle.dump(d, f, protocol=2)
				f.close()
				apply_secpass_permissions(self.filename,
					uid=uid, gid=portage_gid, mode=0o644)
				self._clean_data = copy.deepcopy(d)
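
# Illustrative usage sketch (not part of this module): the path below is an
# assumption -- Portage itself derives the mtimedb location from its cache
# directory rather than hard-coding it.
#
#     from portage.util.mtimedb import MtimeDB
#
#     mtimedb = MtimeDB("/var/cache/edb/mtimedb")
#     mtimedb["resume"] = {"mergelist": []}  # mutate like a normal dict
#     mtimedb.commit()  # rewritten atomically only if the contents changed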