Mercurial > libervia-backend
comparison src/memory/sqlite.py @ 993:301b342c697a
core: use of the new core.log module:
/!\ this is a massive refactoring and was largely automated; it probably introduced some bugs /!\
author | Goffi <goffi@goffi.org> |
---|---|
date | Sat, 19 Apr 2014 19:19:19 +0200 |
parents | 6404df5305e3 |
children | 6a16ec17a458 |
992:f51a1895275c | 993:301b342c697a |
---|---|
17 # You should have received a copy of the GNU Affero General Public License | 17 # You should have received a copy of the GNU Affero General Public License |
18 # along with this program. If not, see <http://www.gnu.org/licenses/>. | 18 # along with this program. If not, see <http://www.gnu.org/licenses/>. |
19 | 19 |
20 from sat.core.i18n import _ | 20 from sat.core.i18n import _ |
21 from sat.core import exceptions | 21 from sat.core import exceptions |
22 from logging import debug, info, warning, error | 22 from sat.core.log import getLogger |
| 23 log = getLogger(__name__) |
23 from twisted.enterprise import adbapi | 24 from twisted.enterprise import adbapi |
24 from twisted.internet import defer | 25 from twisted.internet import defer |
25 from collections import OrderedDict | 26 from collections import OrderedDict |
26 from time import time | 27 from time import time |
27 import re | 28 import re |
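This import hunk is the heart of the changeset: instead of importing debug/info/warning/error straight from the stdlib, each module now asks sat.core.log for a named logger once and calls methods on it. The real sat.core.log module is not part of this diff; the stand-in below is only a sketch of the interface the refactored calls assume.

```python
# Hypothetical minimal stand-in for sat.core.log, shown only to illustrate the
# interface the refactored calls rely on (the real module adds SàT-specific
# configuration on top of the stdlib logger).
import logging
logging.basicConfig()

def getLogger(name):
    # returns an object exposing debug/info/warning/error, the same names the
    # removed "from logging import ..." line used to provide directly
    return logging.getLogger(name)

log = getLogger(__name__)
log.error("Can't activate foreign keys")   # was: error(_("Can't activate foreign keys"))
```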
78 """Connect to the given database | 79 """Connect to the given database |
79 @param db_filename: full path to the Sqlite database""" | 80 @param db_filename: full path to the Sqlite database""" |
80 self.initialized = defer.Deferred() # triggered when memory is fully initialised and ready | 81 self.initialized = defer.Deferred() # triggered when memory is fully initialised and ready |
81 self.profiles = {} # we keep cache for the profiles (key: profile name, value: profile id) | 82 self.profiles = {} # we keep cache for the profiles (key: profile name, value: profile id) |
82 | 83 |
83 info(_("Connecting database")) | 84 log.info(_("Connecting database")) |
84 new_base = not os.path.exists(db_filename) # do we have to create the database ? | 85 new_base = not os.path.exists(db_filename) # do we have to create the database ? |
85 if new_base: # the dir may not exist if it's not the XDG recommended one | 86 if new_base: # the dir may not exist if it's not the XDG recommended one |
86 dir_ = os.path.dirname(db_filename) | 87 dir_ = os.path.dirname(db_filename) |
87 if not os.path.exists(dir_): | 88 if not os.path.exists(dir_): |
88 os.makedirs(dir_, 0700) | 89 os.makedirs(dir_, 0700) |
90 | 91 |
91 # init_defer is the initialisation deferred, initialisation is ok when all its callbacks have been done | 92 # init_defer is the initialisation deferred, initialisation is ok when all its callbacks have been done |
92 # XXX: foreign_keys activation doesn't seem to work, probably because of the multi-threading | 93 # XXX: foreign_keys activation doesn't seem to work, probably because of the multi-threading |
93 # All the requests that need to use this feature should be run with runInteraction instead, | 94 # All the requests that need to use this feature should be run with runInteraction instead, |
94 # so you can set the PRAGMA as it is done in self.deleteProfile | 95 # so you can set the PRAGMA as it is done in self.deleteProfile |
95 init_defer = self.dbpool.runOperation("PRAGMA foreign_keys = ON").addErrback(lambda x: error(_("Can't activate foreign keys"))) | 96 init_defer = self.dbpool.runOperation("PRAGMA foreign_keys = ON").addErrback(lambda x: log.error(_("Can't activate foreign keys"))) |
96 | 97 |
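The comment above points at the reason the runOperation PRAGMA is unreliable: adbapi dispatches each query to a worker thread that may pick up a different pooled sqlite3 connection, and foreign_keys is a per-connection setting. The in-tree fix is visible in deleteProfile further down; the standalone sketch below shows the same idea with a made-up in-memory database and schema.

```python
# Sketch only: run the PRAGMA and the statement that depends on it inside a
# single runInteraction, so both execute on the same pooled connection.
from twisted.enterprise import adbapi
from twisted.internet import reactor

dbpool = adbapi.ConnectionPool("sqlite3", ":memory:", check_same_thread=False)

def _delete_profile(txn, name):
    # everything in this function runs on one connection
    txn.execute("CREATE TABLE IF NOT EXISTS profiles (id INTEGER PRIMARY KEY, name TEXT)")
    txn.execute("PRAGMA foreign_keys = ON")   # per-connection setting
    txn.execute("DELETE FROM profiles WHERE name = ?", (name,))

d = dbpool.runInteraction(_delete_profile, "some_profile")
d.addBoth(lambda _: reactor.stop())
reactor.run()
```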
97 def getNewBaseSql(): | 98 def getNewBaseSql(): |
98 info(_("The database is new, creating the tables")) | 99 log.info(_("The database is new, creating the tables")) |
99 database_creation = ["PRAGMA user_version=%d" % CURRENT_DB_VERSION] | 100 database_creation = ["PRAGMA user_version=%d" % CURRENT_DB_VERSION] |
100 database_creation.extend(Updater.createData2Raw(DATABASE_SCHEMAS['current']['CREATE'])) | 101 database_creation.extend(Updater.createData2Raw(DATABASE_SCHEMAS['current']['CREATE'])) |
101 database_creation.extend(Updater.insertData2Raw(DATABASE_SCHEMAS['current']['INSERT'])) | 102 database_creation.extend(Updater.insertData2Raw(DATABASE_SCHEMAS['current']['INSERT'])) |
102 return database_creation | 103 return database_creation |
103 | 104 |
107 | 108 |
108 def commitStatements(statements): | 109 def commitStatements(statements): |
109 | 110 |
110 if statements is None: | 111 if statements is None: |
111 return defer.succeed(None) | 112 return defer.succeed(None) |
112 debug("===== COMMITING STATEMENTS =====\n%s\n============\n\n" % '\n'.join(statements)) | 113 log.debug("===== COMMITING STATEMENTS =====\n%s\n============\n\n" % '\n'.join(statements)) |
113 d = self.dbpool.runInteraction(self._updateDb, tuple(statements)) | 114 d = self.dbpool.runInteraction(self._updateDb, tuple(statements)) |
114 return d | 115 return d |
115 | 116 |
116 init_defer.addCallback(lambda ignore: getNewBaseSql() if new_base else getUpdateSql()) | 117 init_defer.addCallback(lambda ignore: getNewBaseSql() if new_base else getUpdateSql()) |
117 init_defer.addCallback(commitStatements) | 118 init_defer.addCallback(commitStatements) |
164 def deleteProfile(self, name): | 165 def deleteProfile(self, name): |
165 """Delete profile | 166 """Delete profile |
166 @param name: name of the profile | 167 @param name: name of the profile |
167 @return: deferred triggered once profile is actually deleted""" | 168 @return: deferred triggered once profile is actually deleted""" |
168 def deletionError(failure): | 169 def deletionError(failure): |
169 error(_("Can't delete profile [%s]") % name) | 170 log.error(_("Can't delete profile [%s]") % name) |
170 return failure | 171 return failure |
171 | 172 |
172 def delete(txn): | 173 def delete(txn): |
173 del self.profiles[name] | 174 del self.profiles[name] |
174 txn.execute("PRAGMA foreign_keys = ON") | 175 txn.execute("PRAGMA foreign_keys = ON") |
175 txn.execute("DELETE FROM profiles WHERE name = ?", (name,)) | 176 txn.execute("DELETE FROM profiles WHERE name = ?", (name,)) |
176 return None | 177 return None |
177 | 178 |
178 d = self.dbpool.runInteraction(delete) | 179 d = self.dbpool.runInteraction(delete) |
179 d.addCallback(lambda ignore: info(_("Profile [%s] deleted") % name)) | 180 d.addCallback(lambda ignore: log.info(_("Profile [%s] deleted") % name)) |
180 d.addErrback(deletionError) | 181 d.addErrback(deletionError) |
181 return d | 182 return d |
182 | 183 |
183 #Params | 184 #Params |
184 def loadGenParams(self, params_gen): | 185 def loadGenParams(self, params_gen): |
188 | 189 |
189 def fillParams(result): | 190 def fillParams(result): |
190 for param in result: | 191 for param in result: |
191 category, name, value = param | 192 category, name, value = param |
192 params_gen[(category, name)] = value | 193 params_gen[(category, name)] = value |
193 debug(_("loading general parameters from database")) | 194 log.debug(_("loading general parameters from database")) |
194 return self.dbpool.runQuery("SELECT category,name,value FROM param_gen").addCallback(fillParams) | 195 return self.dbpool.runQuery("SELECT category,name,value FROM param_gen").addCallback(fillParams) |
195 | 196 |
196 def loadIndParams(self, params_ind, profile): | 197 def loadIndParams(self, params_ind, profile): |
197 """Load individual parameters | 198 """Load individual parameters |
198 @param params_ind: dictionary to fill | 199 @param params_ind: dictionary to fill |
201 | 202 |
202 def fillParams(result): | 203 def fillParams(result): |
203 for param in result: | 204 for param in result: |
204 category, name, value = param | 205 category, name, value = param |
205 params_ind[(category, name)] = value | 206 params_ind[(category, name)] = value |
206 debug(_("loading individual parameters from database")) | 207 log.debug(_("loading individual parameters from database")) |
207 d = self.dbpool.runQuery("SELECT category,name,value FROM param_ind WHERE profile_id=?", (self.profiles[profile], )) | 208 d = self.dbpool.runQuery("SELECT category,name,value FROM param_ind WHERE profile_id=?", (self.profiles[profile], )) |
208 d.addCallback(fillParams) | 209 d.addCallback(fillParams) |
209 return d | 210 return d |
210 | 211 |
211 def getIndParam(self, category, name, profile): | 212 def getIndParam(self, category, name, profile): |
223 @param category: category of the parameter | 224 @param category: category of the parameter |
224 @param name: name of the parameter | 225 @param name: name of the parameter |
225 @param value: value to set | 226 @param value: value to set |
226 @return: deferred""" | 227 @return: deferred""" |
227 d = self.dbpool.runQuery("REPLACE INTO param_gen(category,name,value) VALUES (?,?,?)", (category, name, value)) | 228 d = self.dbpool.runQuery("REPLACE INTO param_gen(category,name,value) VALUES (?,?,?)", (category, name, value)) |
228 d.addErrback(lambda ignore: error(_("Can't set general parameter (%(category)s/%(name)s) in database" % {"category": category, "name": name}))) | 229 d.addErrback(lambda ignore: log.error(_("Can't set general parameter (%(category)s/%(name)s) in database" % {"category": category, "name": name}))) |
229 return d | 230 return d |
230 | 231 |
231 def setIndParam(self, category, name, value, profile): | 232 def setIndParam(self, category, name, value, profile): |
232 """Save the individual parameters in database | 233 """Save the individual parameters in database |
233 @param category: category of the parameter | 234 @param category: category of the parameter |
234 @param name: name of the parameter | 235 @param name: name of the parameter |
235 @param value: value to set | 236 @param value: value to set |
236 @param profile: a profile which *must* exist | 237 @param profile: a profile which *must* exist |
237 @return: deferred""" | 238 @return: deferred""" |
238 d = self.dbpool.runQuery("REPLACE INTO param_ind(category,name,profile_id,value) VALUES (?,?,?,?)", (category, name, self.profiles[profile], value)) | 239 d = self.dbpool.runQuery("REPLACE INTO param_ind(category,name,profile_id,value) VALUES (?,?,?,?)", (category, name, self.profiles[profile], value)) |
239 d.addErrback(lambda ignore: error(_("Can't set individual parameter (%(category)s/%(name)s) for [%(profile)s] in database" % {"category": category, "name": name, "profile": profile}))) | 240 d.addErrback(lambda ignore: log.error(_("Can't set individual parameter (%(category)s/%(name)s) for [%(profile)s] in database" % {"category": category, "name": name, "profile": profile}))) |
240 return d | 241 return d |
241 | 242 |
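setGenParam and setIndParam above rely on SQLite's REPLACE INTO behaving as an insert-or-update, which only works because the target table has a uniqueness constraint on the key columns. The schema is not part of this hunk, so the table definition in the demonstration below is an assumption.

```python
# Minimal demonstration of REPLACE INTO acting as an upsert (assumed schema).
import sqlite3

conn = sqlite3.connect(":memory:")
conn.execute("CREATE TABLE param_gen (category TEXT, name TEXT, value TEXT, "
             "PRIMARY KEY (category, name))")
conn.execute("REPLACE INTO param_gen(category,name,value) VALUES (?,?,?)",
             ("General", "lang", "en"))
conn.execute("REPLACE INTO param_gen(category,name,value) VALUES (?,?,?)",
             ("General", "lang", "fr"))   # conflicts with the previous row on (category, name)
print(conn.execute("SELECT value FROM param_gen").fetchall())   # only 'fr' remains
```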
242 #History | 243 #History |
243 def addToHistory(self, from_jid, to_jid, message, _type='chat', extra=None, timestamp=None, profile=None): | 244 def addToHistory(self, from_jid, to_jid, message, _type='chat', extra=None, timestamp=None, profile=None): |
244 """Store a new message in history | 245 """Store a new message in history |
254 extra = {} | 255 extra = {} |
255 extra_ = pickle.dumps({k: v.encode('utf-8') for k, v in extra.items()}, 0).decode('utf-8') | 256 extra_ = pickle.dumps({k: v.encode('utf-8') for k, v in extra.items()}, 0).decode('utf-8') |
256 d = self.dbpool.runQuery("INSERT INTO history(source, source_res, dest, dest_res, timestamp, message, type, extra, profile_id) VALUES (?,?,?,?,?,?,?,?,?)", | 257 d = self.dbpool.runQuery("INSERT INTO history(source, source_res, dest, dest_res, timestamp, message, type, extra, profile_id) VALUES (?,?,?,?,?,?,?,?,?)", |
257 (from_jid.userhost(), from_jid.resource, to_jid.userhost(), to_jid.resource, timestamp or time(), | 258 (from_jid.userhost(), from_jid.resource, to_jid.userhost(), to_jid.resource, timestamp or time(), |
258 message, _type, extra_, self.profiles[profile])) | 259 message, _type, extra_, self.profiles[profile])) |
259 d.addErrback(lambda ignore: error(_("Can't save following message in history: from [%(from_jid)s] to [%(to_jid)s] ==> [%(message)s]" % | 260 d.addErrback(lambda ignore: log.error(_("Can't save following message in history: from [%(from_jid)s] to [%(to_jid)s] ==> [%(message)s]" % |
260 {"from_jid": from_jid.full(), "to_jid": to_jid.full(), "message": message}))) | 261 {"from_jid": from_jid.full(), "to_jid": to_jid.full(), "message": message}))) |
261 return d | 262 return d |
262 | 263 |
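addToHistory serialises the extra dictionary as a protocol-0 pickle of utf-8 encoded values and decodes the result so it can be stored as text (presumably a TEXT column; the schema is not shown). The reading side is not in this hunk; the round-trip below mirrors the pickle.loads(str(value)) calls used for the *_bin tables later in the file and is only an assumption about how the column is read back.

```python
# Round-trip sketch of the "extra" column (assumed read path).
import pickle

extra = {u"thread": u"1234"}
stored = pickle.dumps({k: v.encode('utf-8') for k, v in extra.items()}, 0).decode('utf-8')
restored = {k: v.decode('utf-8') for k, v in pickle.loads(stored.encode('utf-8')).items()}
assert restored == extra
```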
263 def getHistory(self, from_jid, to_jid, limit=0, between=True, profile=None): | 264 def getHistory(self, from_jid, to_jid, limit=0, between=True, profile=None): |
264 """Store a new message in history | 265 """Store a new message in history |
319 | 320 |
320 def fillPrivates(result): | 321 def fillPrivates(result): |
321 for private in result: | 322 for private in result: |
322 key, value = private | 323 key, value = private |
323 private_gen[key] = value | 324 private_gen[key] = value |
324 debug(_("loading general private values [namespace: %s] from database") % (namespace, )) | 325 log.debug(_("loading general private values [namespace: %s] from database") % (namespace, )) |
325 d = self.dbpool.runQuery("SELECT key,value FROM private_gen WHERE namespace=?", (namespace, )).addCallback(fillPrivates) | 326 d = self.dbpool.runQuery("SELECT key,value FROM private_gen WHERE namespace=?", (namespace, )).addCallback(fillPrivates) |
326 return d.addErrback(lambda x: debug(_("No data present in database for namespace %s") % namespace)) | 327 return d.addErrback(lambda x: log.debug(_("No data present in database for namespace %s") % namespace)) |
327 | 328 |
328 def loadIndPrivates(self, private_ind, namespace, profile): | 329 def loadIndPrivates(self, private_ind, namespace, profile): |
329 """Load individual private values | 330 """Load individual private values |
330 @param private_ind: dictionary to fill | 331 @param private_ind: dictionary to fill |
331 @param namespace: namespace of the values | 332 @param namespace: namespace of the values |
334 | 335 |
335 def fillPrivates(result): | 336 def fillPrivates(result): |
336 for private in result: | 337 for private in result: |
337 key, value = private | 338 key, value = private |
338 private_ind[key] = value | 339 private_ind[key] = value |
339 debug(_("loading individual private values [namespace: %s] from database") % (namespace, )) | 340 log.debug(_("loading individual private values [namespace: %s] from database") % (namespace, )) |
340 d = self.dbpool.runQuery("SELECT key,value FROM private_ind WHERE namespace=? AND profile_id=?", (namespace, self.profiles[profile])) | 341 d = self.dbpool.runQuery("SELECT key,value FROM private_ind WHERE namespace=? AND profile_id=?", (namespace, self.profiles[profile])) |
341 d.addCallback(fillPrivates) | 342 d.addCallback(fillPrivates) |
342 return d.addErrback(lambda x: debug(_("No data present in database for namespace %s") % namespace)) | 343 return d.addErrback(lambda x: log.debug(_("No data present in database for namespace %s") % namespace)) |
343 | 344 |
344 def setGenPrivate(self, namespace, key, value): | 345 def setGenPrivate(self, namespace, key, value): |
345 """Save the general private value in database | 346 """Save the general private value in database |
346 @param namespace: namespace of the private value | 347 @param namespace: namespace of the private value |
347 @param key: key of the private value | 348 @param key: key of the private value |
348 @param value: value to set | 349 @param value: value to set |
349 @return: deferred""" | 350 @return: deferred""" |
350 d = self.dbpool.runQuery("REPLACE INTO private_gen(namespace,key,value) VALUES (?,?,?)", (namespace, key, value)) | 351 d = self.dbpool.runQuery("REPLACE INTO private_gen(namespace,key,value) VALUES (?,?,?)", (namespace, key, value)) |
351 d.addErrback(lambda ignore: error(_("Can't set general private value (%(key)s) [namespace:%(namespace)s] in database" % | 352 d.addErrback(lambda ignore: log.error(_("Can't set general private value (%(key)s) [namespace:%(namespace)s] in database" % |
352 {"namespace": namespace, "key": key}))) | 353 {"namespace": namespace, "key": key}))) |
353 return d | 354 return d |
354 | 355 |
355 def setIndPrivate(self, namespace, key, value, profile): | 356 def setIndPrivate(self, namespace, key, value, profile): |
356 """Save the individual private value in database | 357 """Save the individual private value in database |
358 @param key: key of the private value | 359 @param key: key of the private value |
359 @param value: value to set | 360 @param value: value to set |
360 @param profile: a profile which *must* exist | 361 @param profile: a profile which *must* exist |
361 @return: deferred""" | 362 @return: deferred""" |
362 d = self.dbpool.runQuery("REPLACE INTO private_ind(namespace,key,profile_id,value) VALUES (?,?,?,?)", (namespace, key, self.profiles[profile], value)) | 363 d = self.dbpool.runQuery("REPLACE INTO private_ind(namespace,key,profile_id,value) VALUES (?,?,?,?)", (namespace, key, self.profiles[profile], value)) |
363 d.addErrback(lambda ignore: error(_("Can't set individual private value (%(key)s) [namespace: %(namespace)s] for [%(profile)s] in database" % | 364 d.addErrback(lambda ignore: log.error(_("Can't set individual private value (%(key)s) [namespace: %(namespace)s] for [%(profile)s] in database" % |
364 {"namespace": namespace, "key": key, "profile": profile}))) | 365 {"namespace": namespace, "key": key, "profile": profile}))) |
365 return d | 366 return d |
366 | 367 |
367 def delGenPrivate(self, namespace, key): | 368 def delGenPrivate(self, namespace, key): |
368 """Delete the general private value from database | 369 """Delete the general private value from database |
369 @param namespace: namespace of the private value | 370 @param namespace: namespace of the private value |
370 @param key: key of the private value | 371 @param key: key of the private value |
371 @return: deferred""" | 372 @return: deferred""" |
372 d = self.dbpool.runQuery("DELETE FROM private_gen WHERE namespace=? AND key=?", (namespace, key)) | 373 d = self.dbpool.runQuery("DELETE FROM private_gen WHERE namespace=? AND key=?", (namespace, key)) |
373 d.addErrback(lambda ignore: error(_("Can't delete general private value (%(key)s) [namespace:%(namespace)s] in database" % | 374 d.addErrback(lambda ignore: log.error(_("Can't delete general private value (%(key)s) [namespace:%(namespace)s] in database" % |
374 {"namespace": namespace, "key": key}))) | 375 {"namespace": namespace, "key": key}))) |
375 return d | 376 return d |
376 | 377 |
377 def delIndPrivate(self, namespace, key, profile): | 378 def delIndPrivate(self, namespace, key, profile): |
378 """Delete the individual private value from database | 379 """Delete the individual private value from database |
379 @param namespace: namespace of the value | 380 @param namespace: namespace of the value |
380 @param key: key of the private value | 381 @param key: key of the private value |
381 @param profile: a profile which *must* exist | 382 @param profile: a profile which *must* exist |
382 @return: deferred""" | 383 @return: deferred""" |
383 d = self.dbpool.runQuery("DELETE FROM private_ind WHERE namespace=? AND key=? AND profile=?)", (namespace, key, self.profiles[profile])) | 384 d = self.dbpool.runQuery("DELETE FROM private_ind WHERE namespace=? AND key=? AND profile=?)", (namespace, key, self.profiles[profile])) |
384 d.addErrback(lambda ignore: error(_("Can't delete individual private value (%(key)s) [namespace: %(namespace)s] for [%(profile)s] in database" % | 385 d.addErrback(lambda ignore: log.error(_("Can't delete individual private value (%(key)s) [namespace: %(namespace)s] for [%(profile)s] in database" % |
385 {"namespace": namespace, "key": key, "profile": profile}))) | 386 {"namespace": namespace, "key": key, "profile": profile}))) |
386 return d | 387 return d |
387 | 388 |
388 def loadGenPrivatesBinary(self, private_gen, namespace): | 389 def loadGenPrivatesBinary(self, private_gen, namespace): |
389 """Load general private binary values | 390 """Load general private binary values |
393 | 394 |
394 def fillPrivates(result): | 395 def fillPrivates(result): |
395 for private in result: | 396 for private in result: |
396 key, value = private | 397 key, value = private |
397 private_gen[key] = pickle.loads(str(value)) | 398 private_gen[key] = pickle.loads(str(value)) |
398 debug(_("loading general private binary values [namespace: %s] from database") % (namespace, )) | 399 log.debug(_("loading general private binary values [namespace: %s] from database") % (namespace, )) |
399 d = self.dbpool.runQuery("SELECT key,value FROM private_gen_bin WHERE namespace=?", (namespace, )).addCallback(fillPrivates) | 400 d = self.dbpool.runQuery("SELECT key,value FROM private_gen_bin WHERE namespace=?", (namespace, )).addCallback(fillPrivates) |
400 return d.addErrback(lambda x: debug(_("No binary data present in database for namespace %s") % namespace)) | 401 return d.addErrback(lambda x: log.debug(_("No binary data present in database for namespace %s") % namespace)) |
401 | 402 |
402 def loadIndPrivatesBinary(self, private_ind, namespace, profile): | 403 def loadIndPrivatesBinary(self, private_ind, namespace, profile): |
403 """Load individual private binary values | 404 """Load individual private binary values |
405 @param private_ind: dictionary to fill | 406 @param private_ind: dictionary to fill |
405 @param namespace: namespace of the values | 406 @param namespace: namespace of the values |
408 | 409 |
409 def fillPrivates(result): | 410 def fillPrivates(result): |
410 for private in result: | 411 for private in result: |
411 key, value = private | 412 key, value = private |
412 private_ind[key] = pickle.loads(str(value)) | 413 private_ind[key] = pickle.loads(str(value)) |
413 debug(_("loading individual private binary values [namespace: %s] from database") % (namespace, )) | 414 log.debug(_("loading individual private binary values [namespace: %s] from database") % (namespace, )) |
414 d = self.dbpool.runQuery("SELECT key,value FROM private_ind_bin WHERE namespace=? AND profile_id=?", (namespace, self.profiles[profile])) | 415 d = self.dbpool.runQuery("SELECT key,value FROM private_ind_bin WHERE namespace=? AND profile_id=?", (namespace, self.profiles[profile])) |
415 d.addCallback(fillPrivates) | 416 d.addCallback(fillPrivates) |
416 return d.addErrback(lambda x: debug(_("No binary data present in database for namespace %s") % namespace)) | 417 return d.addErrback(lambda x: log.debug(_("No binary data present in database for namespace %s") % namespace)) |
417 | 418 |
418 def setGenPrivateBinary(self, namespace, key, value): | 419 def setGenPrivateBinary(self, namespace, key, value): |
419 """Save the general private binary value in database | 420 """Save the general private binary value in database |
421 @param namespace: namespace of the private value | 422 @param namespace: namespace of the private value |
421 @param key: key of the private value | 422 @param key: key of the private value |
422 @param value: value to set | 423 @param value: value to set |
423 @return: deferred""" | 424 @return: deferred""" |
424 d = self.dbpool.runQuery("REPLACE INTO private_gen_bin(namespace,key,value) VALUES (?,?,?)", (namespace, key, pickle.dumps(value, 0))) | 425 d = self.dbpool.runQuery("REPLACE INTO private_gen_bin(namespace,key,value) VALUES (?,?,?)", (namespace, key, pickle.dumps(value, 0))) |
425 d.addErrback(lambda ignore: error(_("Can't set general private binary value (%(key)s) [namespace:%(namespace)s] in database" % | 426 d.addErrback(lambda ignore: log.error(_("Can't set general private binary value (%(key)s) [namespace:%(namespace)s] in database" % |
426 {"namespace": namespace, "key": key}))) | 427 {"namespace": namespace, "key": key}))) |
427 return d | 428 return d |
428 | 429 |
429 def setIndPrivateBinary(self, namespace, key, value, profile): | 430 def setIndPrivateBinary(self, namespace, key, value, profile): |
430 """Save the individual private binary value in database | 431 """Save the individual private binary value in database |
432 @param key: key of the private value | 433 @param key: key of the private value |
433 @param value: value to set | 434 @param value: value to set |
434 @param profile: a profile which *must* exist | 435 @param profile: a profile which *must* exist |
435 @return: deferred""" | 436 @return: deferred""" |
436 d = self.dbpool.runQuery("REPLACE INTO private_ind_bin(namespace,key,profile_id,value) VALUES (?,?,?,?)", (namespace, key, self.profiles[profile], pickle.dumps(value, 0))) | 437 d = self.dbpool.runQuery("REPLACE INTO private_ind_bin(namespace,key,profile_id,value) VALUES (?,?,?,?)", (namespace, key, self.profiles[profile], pickle.dumps(value, 0))) |
437 d.addErrback(lambda ignore: error(_("Can't set individual binary private value (%(key)s) [namespace: %(namespace)s] for [%(profile)s] in database" % | 438 d.addErrback(lambda ignore: log.error(_("Can't set individual binary private value (%(key)s) [namespace: %(namespace)s] for [%(profile)s] in database" % |
438 {"namespace": namespace, "key": key, "profile": profile}))) | 439 {"namespace": namespace, "key": key, "profile": profile}))) |
439 return d | 440 return d |
440 | 441 |
441 def delGenPrivateBinary(self, namespace, key): | 442 def delGenPrivateBinary(self, namespace, key): |
442 """Delete the general private binary value from database | 443 """Delete the general private binary value from database |
444 @param namespace: namespace of the private value | 445 @param namespace: namespace of the private value |
444 @param key: key of the private value | 445 @param key: key of the private value |
445 @return: deferred""" | 446 @return: deferred""" |
446 d = self.dbpool.runQuery("DELETE FROM private_gen_bin WHERE namespace=? AND key=?", (namespace, key)) | 447 d = self.dbpool.runQuery("DELETE FROM private_gen_bin WHERE namespace=? AND key=?", (namespace, key)) |
447 d.addErrback(lambda ignore: error(_("Can't delete general private binary value (%(key)s) [namespace:%(namespace)s] in database" % | 448 d.addErrback(lambda ignore: log.error(_("Can't delete general private binary value (%(key)s) [namespace:%(namespace)s] in database" % |
448 {"namespace": namespace, "key": key}))) | 449 {"namespace": namespace, "key": key}))) |
449 return d | 450 return d |
450 | 451 |
451 def delIndPrivateBinary(self, namespace, key, profile): | 452 def delIndPrivateBinary(self, namespace, key, profile): |
452 """Delete the individual private binary value from database | 453 """Delete the individual private binary value from database |
453 @param namespace: namespace of the value | 454 @param namespace: namespace of the value |
454 @param key: key of the private value | 455 @param key: key of the private value |
455 @param profile: a profile which *must* exist | 456 @param profile: a profile which *must* exist |
456 @return: deferred""" | 457 @return: deferred""" |
457 d = self.dbpool.runQuery("DELETE FROM private_ind_bin WHERE namespace=? AND key=? AND profile_id=?", (namespace, key, self.profiles[profile])) | 458 d = self.dbpool.runQuery("DELETE FROM private_ind_bin WHERE namespace=? AND key=? AND profile_id=?", (namespace, key, self.profiles[profile])) |
458 d.addErrback(lambda ignore: error(_("Can't delete individual private binary value (%(key)s) [namespace: %(namespace)s] for [%(profile)s] in database" % | 459 d.addErrback(lambda ignore: log.error(_("Can't delete individual private binary value (%(key)s) [namespace: %(namespace)s] for [%(profile)s] in database" % |
459 {"namespace": namespace, "key": key, "profile": profile}))) | 460 {"namespace": namespace, "key": key, "profile": profile}))) |
460 return d | 461 return d |
461 ##Helper methods## | 462 ##Helper methods## |
462 | 463 |
463 def __getFirstResult(self, result): | 464 def __getFirstResult(self, result): |
519 local_hash = self.statementHash(local_sch) | 520 local_hash = self.statementHash(local_sch) |
520 current_hash = self.statementHash(current_sch) | 521 current_hash = self.statementHash(current_sch) |
521 | 522 |
522 if local_hash == current_hash: | 523 if local_hash == current_hash: |
523 if local_version != CURRENT_DB_VERSION: | 524 if local_version != CURRENT_DB_VERSION: |
524 warning(_("Your local schema is up-to-date, but database versions mismatch, fixing it...")) | 525 log.warning(_("Your local schema is up-to-date, but database versions mismatch, fixing it...")) |
525 yield self._setLocalVersion(CURRENT_DB_VERSION) | 526 yield self._setLocalVersion(CURRENT_DB_VERSION) |
526 else: | 527 else: |
527 # an update is needed | 528 # an update is needed |
528 | 529 |
529 if local_version == CURRENT_DB_VERSION: | 530 if local_version == CURRENT_DB_VERSION: |
530 # Database mismatch and we have the latest version | 531 # Database mismatch and we have the latest version |
531 if self._sat_version.endswith('D'): | 532 if self._sat_version.endswith('D'): |
532 # we are in a development version | 533 # we are in a development version |
533 update_data = self.generateUpdateData(local_sch, current_sch, False) | 534 update_data = self.generateUpdateData(local_sch, current_sch, False) |
534 warning(_("There is a schema mismatch, but as we are on a dev version, database will be updated")) | 535 log.warning(_("There is a schema mismatch, but as we are on a dev version, database will be updated")) |
535 update_raw = self.update2raw(update_data, True) | 536 update_raw = self.update2raw(update_data, True) |
536 defer.returnValue(update_raw) | 537 defer.returnValue(update_raw) |
537 else: | 538 else: |
538 error(_(u"schema version is up-to-date, but local schema differ from expected current schema")) | 539 log.error(_(u"schema version is up-to-date, but local schema differ from expected current schema")) |
539 update_data = self.generateUpdateData(local_sch, current_sch, True) | 540 update_data = self.generateUpdateData(local_sch, current_sch, True) |
540 warning(_(u"Here are the commands that should fix the situation, use at your own risk (do a backup before modifying database), you can go to SàT's MUC room at sat@chat.jabberfr.org for help\n### SQL###\n%s\n### END SQL ###\n") % u'\n'.join(("%s;" % statement for statement in self.update2raw(update_data)))) | 541 log.warning(_(u"Here are the commands that should fix the situation, use at your own risk (do a backup before modifying database), you can go to SàT's MUC room at sat@chat.jabberfr.org for help\n### SQL###\n%s\n### END SQL ###\n") % u'\n'.join(("%s;" % statement for statement in self.update2raw(update_data)))) |
541 raise exceptions.DatabaseError("Database mismatch") | 542 raise exceptions.DatabaseError("Database mismatch") |
542 else: | 543 else: |
543 # Database is not up-to-date, we'll do the update | 544 # Database is not up-to-date, we'll do the update |
544 info(_("Database schema has changed, local database will be updated")) | 545 log.info(_("Database schema has changed, local database will be updated")) |
545 update_raw = [] | 546 update_raw = [] |
546 for version in xrange(local_version+1, CURRENT_DB_VERSION+1): | 547 for version in xrange(local_version+1, CURRENT_DB_VERSION+1): |
547 try: | 548 try: |
548 update_data = DATABASE_SCHEMAS[version] | 549 update_data = DATABASE_SCHEMAS[version] |
549 except KeyError: | 550 except KeyError: |
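The hunk stops here, but the loop that begins above walks every schema version between the local one and CURRENT_DB_VERSION and accumulates the statements each step contributes. The sketch below shows that walk in isolation; the per-version layout of DATABASE_SCHEMAS it uses is invented for the example.

```python
# Stepwise migration walk, with a made-up DATABASE_SCHEMAS layout.
CURRENT_DB_VERSION = 2
DATABASE_SCHEMAS = {
    2: {'raw': ("ALTER TABLE history ADD COLUMN extra TEXT",)},   # hypothetical entry
}

def collect_updates(local_version):
    update_raw = []
    for version in range(local_version + 1, CURRENT_DB_VERSION + 1):
        try:
            update_data = DATABASE_SCHEMAS[version]
        except KeyError:
            raise Exception("Missing update definition for version %d" % version)
        update_raw.extend(update_data['raw'])
    return update_raw

print(collect_updates(1))   # statements to bring a version-1 database to version 2
```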
610 | 611 |
611 """ | 612 """ |
612 schema_dict = {} | 613 schema_dict = {} |
613 for create_statement in raw_statements: | 614 for create_statement in raw_statements: |
614 if not create_statement.startswith("CREATE TABLE "): | 615 if not create_statement.startswith("CREATE TABLE "): |
615 warning("Unexpected statement, ignoring it") | 616 log.warning("Unexpected statement, ignoring it") |
616 continue | 617 continue |
617 _create_statement = create_statement[13:] | 618 _create_statement = create_statement[13:] |
618 table, raw_col_stats = _create_statement.split(' ',1) | 619 table, raw_col_stats = _create_statement.split(' ',1) |
619 if raw_col_stats[0] != '(' or raw_col_stats[-1] != ')': | 620 if raw_col_stats[0] != '(' or raw_col_stats[-1] != ')': |
620 warning("Unexpected statement structure, ignoring it") | 621 log.warning("Unexpected statement structure, ignoring it") |
621 continue | 622 continue |
622 col_stats = [stmt.strip() for stmt in self.stmnt_regex.findall(raw_col_stats[1:-1])] | 623 col_stats = [stmt.strip() for stmt in self.stmnt_regex.findall(raw_col_stats[1:-1])] |
623 col_defs = [] | 624 col_defs = [] |
624 constraints = [] | 625 constraints = [] |
625 for col_stat in col_stats: | 626 for col_stat in col_stats: |
666 for table in tables_to_check: | 667 for table in tables_to_check: |
667 old_col_defs, old_constraints = old_data[table] | 668 old_col_defs, old_constraints = old_data[table] |
668 new_col_defs, new_constraints = new_data[table] | 669 new_col_defs, new_constraints = new_data[table] |
669 for obj in old_col_defs, old_constraints, new_col_defs, new_constraints: | 670 for obj in old_col_defs, old_constraints, new_col_defs, new_constraints: |
670 if not isinstance(obj, tuple): | 671 if not isinstance(obj, tuple): |
671 raise InternalError("Columns definitions must be tuples") | 672 raise exceptions.InternalError("Columns definitions must be tuples") |
672 defs_create, defs_delete, ignore = getChanges(set(old_col_defs), set(new_col_defs)) | 673 defs_create, defs_delete, ignore = getChanges(set(old_col_defs), set(new_col_defs)) |
673 constraints_create, constraints_delete, ignore = getChanges(set(old_constraints), set(new_constraints)) | 674 constraints_create, constraints_delete, ignore = getChanges(set(old_constraints), set(new_constraints)) |
674 created_col_names = set([name.split(' ',1)[0] for name in defs_create]) | 675 created_col_names = set([name.split(' ',1)[0] for name in defs_create]) |
675 deleted_col_names = set([name.split(' ',1)[0] for name in defs_delete]) | 676 deleted_col_names = set([name.split(' ',1)[0] for name in defs_delete]) |
676 if (created_col_names.intersection(deleted_col_names or constraints_create or constraints_delete) or | 677 if (created_col_names.intersection(deleted_col_names or constraints_create or constraints_delete) or |
704 drop = [] | 705 drop = [] |
705 for table in update.get('delete', tuple()): | 706 for table in update.get('delete', tuple()): |
706 drop.append(self.DROP_SQL % table) | 707 drop.append(self.DROP_SQL % table) |
707 if dev_version: | 708 if dev_version: |
708 if drop: | 709 if drop: |
709 info("Dev version, SQL NOT EXECUTED:\n--\n%s\n--\n" % "\n".join(drop)) | 710 log.info("Dev version, SQL NOT EXECUTED:\n--\n%s\n--\n" % "\n".join(drop)) |
710 else: | 711 else: |
711 ret.extend(drop) | 712 ret.extend(drop) |
712 | 713 |
713 cols_create = update.get('cols create', {}) | 714 cols_create = update.get('cols create', {}) |
714 for table in cols_create: | 715 for table in cols_create: |
715 for col_def in cols_create[table]: | 716 for col_def in cols_create[table]: |
716 ret.append(self.ALTER_SQL % (table, col_def)) | 717 ret.append(self.ALTER_SQL % (table, col_def)) |
717 | 718 |
718 cols_delete = update.get('cols delete', {}) | 719 cols_delete = update.get('cols delete', {}) |
719 for table in cols_delete: | 720 for table in cols_delete: |
720 info("Following columns in table [%s] are not needed anymore, but are kept for dev version: %s" % (table, ", ".join(cols_delete[table]))) | 721 log.info("Following columns in table [%s] are not needed anymore, but are kept for dev version: %s" % (table, ", ".join(cols_delete[table]))) |
721 | 722 |
722 cols_modify = update.get('cols modify', {}) | 723 cols_modify = update.get('cols modify', {}) |
723 for table in cols_modify: | 724 for table in cols_modify: |
724 ret.append(self.RENAME_TABLE_SQL % (table, self.TMP_TABLE)) | 725 ret.append(self.RENAME_TABLE_SQL % (table, self.TMP_TABLE)) |
725 main, extra = DATABASE_SCHEMAS['current']['CREATE'][table] | 726 main, extra = DATABASE_SCHEMAS['current']['CREATE'][table] |
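The 'cols modify' branch relies on the usual SQLite workaround for changing a column definition, since SQLite has no ALTER COLUMN: the visible lines rename the table to a temporary name and look up its current definition, and the natural continuation of the pattern (outside this hunk) recreates the table, copies the rows back and drops the temporary copy. The schema change below is invented purely to illustrate that sequence.

```python
# Illustration of the rename / recreate / copy / drop sequence (made-up schema).
import sqlite3

conn = sqlite3.connect(":memory:")
conn.executescript("""
    CREATE TABLE profiles (id INTEGER PRIMARY KEY, name TEXT);
    INSERT INTO profiles(name) VALUES ('default');

    ALTER TABLE profiles RENAME TO tmp_update;
    CREATE TABLE profiles (id INTEGER PRIMARY KEY ASC, name TEXT UNIQUE NOT NULL);
    INSERT INTO profiles (id, name) SELECT id, name FROM tmp_update;
    DROP TABLE tmp_update;
""")
print(conn.execute("SELECT * FROM profiles").fetchall())   # the existing row survives
```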