/usr/share/pyshared/hamster/db.py is in hamster-applet 2.91.3+git20120514.b9fec3e1-1ubuntu1.
This file is owned by root:root, with mode 0o644.
The actual contents of the file can be viewed below.
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 100 101 102 103 104 105 106 107 108 109 110 111 112 113 114 115 116 117 118 119 120 121 122 123 124 125 126 127 128 129 130 131 132 133 134 135 136 137 138 139 140 141 142 143 144 145 146 147 148 149 150 151 152 153 154 155 156 157 158 159 160 161 162 163 164 165 166 167 168 169 170 171 172 173 174 175 176 177 178 179 180 181 182 183 184 185 186 187 188 189 190 191 192 193 194 195 196 197 198 199 200 201 202 203 204 205 206 207 208 209 210 211 212 213 214 215 216 217 218 219 220 221 222 223 224 225 226 227 228 229 230 231 232 233 234 235 236 237 238 239 240 241 242 243 244 245 246 247 248 249 250 251 252 253 254 255 256 257 258 259 260 261 262 263 264 265 266 267 268 269 270 271 272 273 274 275 276 277 278 279 280 281 282 283 284 285 286 287 288 289 290 291 292 293 294 295 296 297 298 299 300 301 302 303 304 305 306 307 308 309 310 311 312 313 314 315 316 317 318 319 320 321 322 323 324 325 326 327 328 329 330 331 332 333 334 335 336 337 338 339 340 341 342 343 344 345 346 347 348 349 350 351 352 353 354 355 356 357 358 359 360 361 362 363 364 365 366 367 368 369 370 371 372 373 374 375 376 377 378 379 380 381 382 383 384 385 386 387 388 389 390 391 392 393 394 395 396 397 398 399 400 401 402 403 404 405 406 407 408 409 410 411 412 413 414 415 416 417 418 419 420 421 422 423 424 425 426 427 428 429 430 431 432 433 434 435 436 437 438 439 440 441 442 443 444 445 446 447 448 449 450 451 452 453 454 455 456 457 458 459 460 461 462 463 464 465 466 467 468 469 470 471 472 473 474 475 476 477 478 479 480 481 482 483 484 485 486 487 488 489 490 491 492 493 494 495 496 497 498 499 500 501 502 503 504 505 506 507 508 509 510 511 512 513 514 515 516 517 518 519 520 521 522 523 524 525 526 527 
528 529 530 531 532 533 534 535 536 537 538 539 540 541 542 543 544 545 546 547 548 549 550 551 552 553 554 555 556 557 558 559 560 561 562 563 564 565 566 567 568 569 570 571 572 573 574 575 576 577 578 579 580 581 582 583 584 585 586 587 588 589 590 591 592 593 594 595 596 597 598 599 600 601 602 603 604 605 606 607 608 609 610 611 612 613 614 615 616 617 618 619 620 621 622 623 624 625 626 627 628 629 630 631 632 633 634 635 636 637 638 639 640 641 642 643 644 645 646 647 648 649 650 651 652 653 654 655 656 657 658 659 660 661 662 663 664 665 666 667 668 669 670 671 672 673 674 675 676 677 678 679 680 681 682 683 684 685 686 687 688 689 690 691 692 693 694 695 696 697 698 699 700 701 702 703 704 705 706 707 708 709 710 711 712 713 714 715 716 717 718 719 720 721 722 723 724 725 726 727 728 729 730 731 732 733 734 735 736 737 738 739 740 741 742 743 744 745 746 747 748 749 750 751 752 753 754 755 756 757 758 759 760 761 762 763 764 765 766 767 768 769 770 771 772 773 774 775 776 777 778 779 780 781 782 783 784 785 786 787 788 789 790 791 792 793 794 795 796 797 798 799 800 801 802 803 804 805 806 807 808 809 810 811 812 813 814 815 816 817 818 819 820 821 822 823 824 825 826 827 828 829 830 831 832 833 834 835 836 837 838 839 840 841 842 843 844 845 846 847 848 849 850 851 852 853 854 855 856 857 858 859 860 861 862 863 864 865 866 867 868 869 870 871 872 873 874 875 876 877 878 879 880 881 882 883 884 885 886 887 888 889 890 891 892 893 894 895 896 897 898 899 900 901 902 903 904 905 906 907 908 909 910 911 912 913 914 915 916 917 918 919 920 921 922 923 924 925 926 927 928 929 930 931 932 933 934 935 936 937 938 939 940 941 942 943 944 945 946 947 948 949 950 951 952 953 954 955 956 957 958 959 960 961 962 963 964 965 966 967 968 969 970 971 972 973 974 975 976 977 978 979 980 981 982 983 984 985 986 987 988 989 990 991 992 993 994 995 996 997 998 999 1000 1001 1002 1003 1004 1005 1006 1007 1008 1009 1010 1011 1012 1013 1014 1015 1016 1017 1018 1019 1020 1021 
1022 1023 1024 1025 1026 1027 1028 1029 1030 1031 1032 1033 1034 1035 1036 1037 1038 1039 1040 1041 1042 1043 1044 1045 1046 1047 1048 1049 1050 1051 1052 1053 1054 1055 1056 1057 1058 1059 1060 1061 1062 1063 1064 1065 1066 1067 1068 1069 1070 1071 1072 1073 1074 1075 1076 1077 1078 1079 1080 1081 1082 1083 1084 1085 1086 1087 1088 1089 1090 1091 1092 1093 1094 1095 1096 1097 1098 1099 1100 1101 1102 1103 1104 1105 1106 1107 1108 1109 1110 1111 1112 1113 1114 1115 1116 1117 1118 1119 1120 1121 1122 1123 1124 1125 1126 1127 1128 1129 1130 1131 1132 1133 1134 1135 1136 1137 1138 1139 1140 1141 1142 1143 1144 1145 1146 1147 1148 1149 1150 1151 1152 1153 1154 1155 1156 1157 1158 1159 1160 1161 1162 1163 1164 1165 1166 1167 1168 1169 1170 1171 1172 1173 1174 1175 1176 1177 1178 1179 1180 1181 1182 1183 1184 1185 1186 1187 1188 1189 1190 1191 1192 1193 1194 1195 1196 1197 1198 1199 1200 1201 1202 1203 | # - coding: utf-8 -
# Copyright (C) 2007-2009 Toms Bauģis <toms.baugis at gmail.com>
# Copyright (C) 2007 Patryk Zawadzki <patrys at pld-linux.org>
# This file is part of Project Hamster.
# Project Hamster is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# Project Hamster is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with Project Hamster. If not, see <http://www.gnu.org/licenses/>.
"""separate file for database operations"""
import logging
try:
import sqlite3 as sqlite
except ImportError:
try:
logging.warn("Using sqlite2")
from pysqlite2 import dbapi2 as sqlite
except ImportError:
logging.error("Neither sqlite3 nor pysqlite2 found")
raise
import os, time
import datetime
import storage
from shutil import copy as copyfile
import itertools
import datetime as dt
import gio
from xdg.BaseDirectory import xdg_data_home
from lib import stuff, trophies
class Storage(storage.Storage):
con = None # Connection will be created on demand
def __init__(self, loop):
    """
    Delayed setup so we don't do everything at the same time
    """
    storage.Storage.__init__(self, loop)
    # private connection/cursor are only non-None while an explicit
    # transaction is open (see start_transaction / end_transaction)
    self.__con = None
    self.__cur = None
    # etag of the db file after our own last write; used to tell our
    # modifications apart from external ones
    self.__last_etag = None
    self.db_path = self.__init_db_file()
    # add file monitoring so the app does not have to be restarted
    # when db file is rewritten
    def on_db_file_change(monitor, gio_file, event_uri, event):
        if event == gio.FILE_MONITOR_EVENT_CHANGES_DONE_HINT:
            if gio_file.query_info(gio.FILE_ATTRIBUTE_ETAG_VALUE).get_etag() == self.__last_etag:
                # ours - etag matches the one we recorded on our last write
                return
        elif event == gio.FILE_MONITOR_EVENT_CREATED:
            # treat case when instead of a move, a remove and create has been performed
            # drop the cached connection - it points at the old file
            self.con = None
        if event in (gio.FILE_MONITOR_EVENT_CHANGES_DONE_HINT, gio.FILE_MONITOR_EVENT_CREATED):
            print "DB file has been modified externally. Calling all stations"
            self.dispatch_overwrite()
            # plan "b" - synchronize the time tracker's database from external source while the tracker is running
            trophies.unlock("plan_b")
    self.__database_file = gio.File(self.db_path)
    self.__db_monitor = self.__database_file.monitor_file()
    self.__db_monitor.connect("changed", on_db_file_change)
    self.run_fixtures()
def __init_db_file(self):
    """locate hamster.db (installing the default one if absent) and
    return its full path; also migrates the pre-XDG ~/.gnome2 location"""
    home_data_dir = os.path.realpath(os.path.join(xdg_data_home, "hamster-applet"))
    if not os.path.exists(home_data_dir):
        os.makedirs(home_data_dir, 0744)
    # handle the move to xdg_data_home
    old_db_file = os.path.expanduser("~/.gnome2/hamster-applet/hamster.db")
    new_db_file = os.path.join(home_data_dir, "hamster.db")
    if os.path.exists(old_db_file):
        if os.path.exists(new_db_file):
            # both locations populated - keep the new one, just report
            logging.info("Have two database %s and %s" % (new_db_file, old_db_file))
        else:
            os.rename(old_db_file, new_db_file)
    db_path = os.path.join(home_data_dir, "hamster.db")
    # check if we have a database at all
    if not os.path.exists(db_path):
        # if not there, copy from the defaults
        try:
            import defs
            data_dir = os.path.join(defs.DATA_DIR, "hamster-applet")
        except:
            # if defs is not there, we are running from sources
            module_dir = os.path.dirname(os.path.realpath(__file__))
            data_dir = os.path.join(module_dir, '..', '..', 'data')
        data_dir = os.path.realpath(data_dir)
        logging.info("Database not found in %s - installing default from %s!" % (db_path, data_dir))
        copyfile(os.path.join(data_dir, 'hamster.db'), db_path)
        #change also permissions - sometimes they are 444
        os.chmod(db_path, 0664)
    return db_path
def register_modification(self):
    # db.execute calls this so we know that we were the ones
    # that modified the DB and no extra refresh is needed
    # (the file monitor compares against this etag)
    self.__last_etag = self.__database_file.query_info(gio.FILE_ATTRIBUTE_ETAG_VALUE).get_etag()
#tags, here we come!
def __get_tags(self, only_autocomplete = False):
    """return all tags ordered by name; when only_autocomplete is set,
    restrict to tags that should show up in autocomplete"""
    query = "select * from tags"
    if only_autocomplete:
        query += " where autocomplete != 'false'"
    return self.fetchall(query + " order by name")
def __get_tag_ids(self, tags):
    """look up tags by their name. create if not found

    returns tuple (tag rows, changes_made) where changes_made is True
    when any tag was created or resurrected for autocomplete
    """
    db_tags = self.fetchall("select * from tags where name in (%s)"
                            % ",".join(["?"] * len(tags)), tags) # bit of magic here - using sqlites bind variables
    changes = False
    # check if any of tags needs resurrection
    set_complete = [str(tag["id"]) for tag in db_tags if tag["autocomplete"] == "false"]
    if set_complete:
        changes = True
        self.execute("update tags set autocomplete='true' where id in (%s)" % ", ".join(set_complete))
    found_tags = [tag["name"] for tag in db_tags]
    add = set(tags) - set(found_tags)
    if add:
        statement = "insert into tags(name) values(?)"
        self.execute([statement] * len(add), [(tag,) for tag in add])
        # re-read so the freshly inserted tags get their ids
        return self.__get_tag_ids(tags)[0], True # all done, recurse
    else:
        return db_tags, changes
def __update_autocomplete_tags(self, tags):
    """sync the autocomplete tag list with the comma-separated `tags`
    string; returns True when anything changed"""
    tags = [tag.strip() for tag in tags.split(",") if tag.strip()] # split by comma
    #first we will create new ones
    tags, changes = self.__get_tag_ids(tags)
    tags = [tag["id"] for tag in tags]
    #now we will find which ones are gone from the list
    query = """
                SELECT b.id as id, b.autocomplete, count(a.fact_id) as occurences
                  FROM tags b
             LEFT JOIN fact_tags a on a.tag_id = b.id
                 WHERE b.id not in (%s)
              GROUP BY b.id
            """ % ",".join(["?"] * len(tags)) # bit of magic here - using sqlites bind variables
    gone = self.fetchall(query, tags)
    to_delete = [str(tag["id"]) for tag in gone if tag["occurences"] == 0]
    to_uncomplete = [str(tag["id"]) for tag in gone if tag["occurences"] > 0 and tag["autocomplete"] == "true"]
    if to_delete:
        # tags no fact refers to can be dropped for real
        self.execute("delete from tags where id in (%s)" % ", ".join(to_delete))
    if to_uncomplete:
        # tags still in use are only hidden from autocomplete
        self.execute("update tags set autocomplete='false' where id in (%s)" % ", ".join(to_uncomplete))
    return changes or len(to_delete + to_uncomplete) > 0
def __get_categories(self):
    """all categories as (id, name) rows, sorted case-insensitively"""
    query = "SELECT id, name FROM categories ORDER BY lower(name)"
    return self.fetchall(query)
def __update_activity(self, id, name, category_id):
    """rename an activity and/or move it to another category;
    facts of the activity are dropped from the search index"""
    statement = """
               UPDATE activities
                  SET name = ?,
                      search_name = ?,
                      category_id = ?
                WHERE id = ?
    """
    self.execute(statement, (name, name.lower(), category_id, id))
    # invalidate index entries of every fact that uses this activity
    rows = self.fetchall("select id from facts where activity_id = ?", (id,))
    self.__remove_index([row[0] for row in rows])
def __change_category(self, id, category_id):
    """move activity `id` into `category_id`; if an activity with the
    same name already lives there, merge into it instead.
    returns True when anything was changed"""
    # first check if we don't have an activity with same name before us
    activity = self.fetchone("select name from activities where id = ?", (id, ))
    existing_activity = self.__get_activity_by_name(activity['name'], category_id)
    if existing_activity and id == existing_activity['id']: # we are already there, go home
        return False
    if existing_activity: #ooh, we have something here!
        # first move all facts that belong to movable activity to the new one
        update = """
                   UPDATE facts
                      SET activity_id = ?
                    WHERE activity_id = ?
        """
        self.execute(update, (existing_activity['id'], id))
        # and now get rid of our friend
        self.__remove_activity(id)
    else: #just moving
        statement = """
                   UPDATE activities
                      SET category_id = ?
                    WHERE id = ?
        """
        self.execute(statement, (category_id, id))
    # facts of both the moved and the merged-into activity need reindexing
    affected_ids = [res[0] for res in self.fetchall("select id from facts where activity_id = ?", (id,))]
    if existing_activity:
        affected_ids.extend([res[0] for res in self.fetchall("select id from facts where activity_id = ?", (existing_activity['id'],))])
    self.__remove_index(affected_ids)
    return True
def __add_category(self, name):
    """insert a new category; search_name holds the lowercased name
    used for lookups. returns the id of the fresh row"""
    insert = """
               INSERT INTO categories (name, search_name)
                    VALUES (?, ?)
    """
    self.execute(insert, (name, name.lower()))
    return self.__last_insert_rowid()
def __update_category(self, id, name):
    """rename a category; the built-in unsorted bucket (id -1) and below
    are ignored, if that was somehow triggered"""
    if id <= -1:
        return
    self.execute("""
               UPDATE categories
                   SET name = ?, search_name = ?
                 WHERE id = ?
    """, (name, name.lower(), id))
    # facts of every activity in this category need reindexing
    affected_query = """
        SELECT id
          FROM facts
         WHERE activity_id in (SELECT id FROM activities where category_id=?)
    """
    affected = [row[0] for row in self.fetchall(affected_query, (id,))]
    self.__remove_index(affected)
def __get_activity_by_name(self, name, category_id = None, resurrect = True):
    """get most recent, preferably not deleted activity by it's name

    returns dict with id/name/deleted/category keys or None when not found.
    NOTE: when the found activity is deleted and resurrect is True, it is
    un-deleted and moved to the unsorted category as a side effect.
    """
    if category_id:
        query = """
                   SELECT a.id, a.name, a.deleted, coalesce(b.name, ?) as category
                     FROM activities a
                LEFT JOIN categories b ON category_id = b.id
                    WHERE lower(a.name) = lower(?)
                      AND category_id = ?
                 ORDER BY a.deleted, a.id desc
                    LIMIT 1
        """
        res = self.fetchone(query, (_("Unsorted"), name, category_id))
    else:
        query = """
                   SELECT a.id, a.name, a.deleted, coalesce(b.name, ?) as category
                     FROM activities a
                LEFT JOIN categories b ON category_id = b.id
                    WHERE lower(a.name) = lower(?)
                 ORDER BY a.deleted, a.id desc
                    LIMIT 1
        """
        res = self.fetchone(query, (_("Unsorted"), name, ))
    if res:
        keys = ('id', 'name', 'deleted', 'category')
        res = dict([(key, res[key]) for key in keys])
        res['deleted'] = res['deleted'] or False
        # if the activity was marked as deleted, resurrect on first call
        # and put in the unsorted category
        if res['deleted'] and resurrect:
            update = """
                       UPDATE activities
                          SET deleted = null, category_id = -1
                        WHERE id = ?
            """
            self.execute(update, (res['id'], ))
        return res
    return None
def __get_category_id(self, name):
    """case-insensitive lookup of a category id by name;
    returns None when there is no such category"""
    query = """
               SELECT id from categories
                WHERE lower(name) = lower(?)
             ORDER BY id desc
                LIMIT 1
    """
    row = self.fetchone(query, (name, ))
    return row['id'] if row else None
def __get_fact(self, id):
    """fetch a single fact by id; category falls back to the localized
    "Unsorted" and the per-tag rows are folded into one record"""
    query = """
               SELECT a.id AS id,
                      a.start_time AS start_time,
                      a.end_time AS end_time,
                      a.description as description,
                      b.name AS name, b.id as activity_id,
                      coalesce(c.name, ?) as category, coalesce(c.id, -1) as category_id,
                      e.name as tag
                 FROM facts a
            LEFT JOIN activities b ON a.activity_id = b.id
            LEFT JOIN categories c ON b.category_id = c.id
            LEFT JOIN fact_tags d ON d.fact_id = a.id
            LEFT JOIN tags e ON e.id = d.tag_id
                WHERE a.id = ?
             ORDER BY e.name
    """
    # the join yields one row per tag; __group_tags collapses them
    return self.__group_tags(self.fetchall(query, (_("Unsorted"), id)))[0]
def __group_tags(self, facts):
    """collapse the one-row-per-tag result set into one record per fact,
    with all tag names collected in a `tags` list"""
    if not facts:
        return facts # preserve None / empty input as-is
    grouped = []
    for fact_id, rows in itertools.groupby(facts, lambda row: row["id"]):
        rows = list(rows)
        # apart from the tag column all rows are identical, so the first
        # one is as good as any. copy into a dict since sqlite.Row is
        # read only; keys are hardcoded because python 2.5's sqlite rows
        # have no keys()
        keys = ("id", "start_time", "end_time", "description", "name",
                "activity_id", "category", "tag")
        record = dict([(key, rows[0][key]) for key in keys])
        record["tags"] = [row["tag"] for row in rows if row["tag"]]
        grouped.append(record)
    return grouped
def __touch_fact(self, fact, end_time):
    """close the ongoing fact at end_time (defaults to now); facts
    shorter than a minute are discarded instead"""
    end_time = end_time or dt.datetime.now()
    if end_time - fact['start_time'] < dt.timedelta(minutes = 1):
        # tasks under one minute do not count
        self.__remove_fact(fact['id'])
        return
    self.execute("""
               UPDATE facts
                  SET end_time = ?
                WHERE id = ?
    """, (end_time.replace(microsecond = 0), fact['id']))
def __squeeze_in(self, start_time):
    """ tries to put task in the given date
    if there are conflicts, we will only truncate the ongoing task
    and replace it's end part with our activity

    returns the end time the new fact should get (the start of the
    next fact) or None when nothing follows """
    # we are checking if our start time is in the middle of anything
    # or maybe there is something after us - so we know to adjust end time
    # in the latter case go only few hours ahead. everything else is madness, heh
    query = """
               SELECT a.*, b.name
                 FROM facts a
            LEFT JOIN activities b on b.id = a.activity_id
                WHERE ((start_time < ? and end_time > ?)
                       OR (start_time > ? and start_time < ? and end_time is null)
                       OR (start_time > ? and start_time < ?))
             ORDER BY start_time
                LIMIT 1
    """
    fact = self.fetchone(query, (start_time, start_time,
                                 start_time - dt.timedelta(hours = 12),
                                 start_time, start_time,
                                 start_time + dt.timedelta(hours = 12)))
    end_time = None
    if fact:
        if start_time > fact["start_time"]:
            #we are in middle of a fact - truncate it to our start
            self.execute("UPDATE facts SET end_time=? WHERE id=?",
                         (start_time, fact["id"]))
        else: #otherwise we have found a task that is after us
            end_time = fact["start_time"]
    return end_time
def __solve_overlaps(self, start_time, end_time):
    """finds facts that happen in given interval and shifts them to
    make room for new fact
    """
    if end_time is None or start_time is None:
        return
    # possible combinations and the OR clauses that catch them
    # (the side of the number marks if it catches the end or start time)
    #             |----------------- NEW -----------------|
    # |--- old --- 1|   |2 --- old --- 1|   |2 --- old ---|
    # |3 ----------------------- big old ------------------------ 3|
    query = """
               SELECT a.*, b.name, c.name as category
                 FROM facts a
            LEFT JOIN activities b on b.id = a.activity_id
            LEFT JOIN categories c on b.category_id = c.id
                WHERE (end_time > ? and end_time < ?)
                   OR (start_time > ? and start_time < ?)
                   OR (start_time < ? and end_time > ?)
             ORDER BY start_time
    """
    conflicts = self.fetchall(query, (start_time, end_time,
                                      start_time, end_time,
                                      start_time, end_time))
    for fact in conflicts:
        # fact fully engulfed by the new interval (case 3):
        # won't eliminate as it is better to have overlapping entries than loosing data
        if start_time < fact["start_time"] and end_time > fact["end_time"]:
            continue
        # split - truncate until beginning of new entry and create new activity for end
        if fact["start_time"] < start_time < fact["end_time"] and \
           fact["start_time"] < end_time < fact["end_time"]:
            logging.info("splitting %s" % fact["name"])
            # truncate until beginning of the new entry
            self.execute("""UPDATE facts
                               SET end_time = ?
                             WHERE id = ?""", (start_time, fact["id"]))
            fact_name = fact["name"]
            # create new fact for the end
            new_fact = stuff.Fact(fact["name"],
                                  category = fact["category"],
                                  description = fact["description"],
                                  )
            new_fact_id = self.__add_fact(new_fact.serialized_name(), end_time, fact["end_time"])
            # copy tags
            tag_update = """INSERT INTO fact_tags(fact_id, tag_id)
                                 SELECT ?, tag_id
                                   FROM fact_tags
                                  WHERE fact_id = ?"""
            self.execute(tag_update, (new_fact_id, fact["id"])) #clone tags
            trophies.unlock("split")
        # overlap start
        elif start_time < fact["start_time"] < end_time:
            logging.info("Overlapping start of %s" % fact["name"])
            self.execute("UPDATE facts SET start_time=? WHERE id=?",
                         (end_time, fact["id"]))
        # overlap end
        elif start_time < fact["end_time"] < end_time:
            logging.info("Overlapping end of %s" % fact["name"])
            self.execute("UPDATE facts SET end_time=? WHERE id=?",
                         (start_time, fact["id"]))
def __add_fact(self, serialized_fact, start_time, end_time = None, temporary = False):
    """parse serialized_fact, resolve tags/category/activity (creating
    any that are missing), sort out conflicts with neighbouring facts
    and insert the new fact.

    returns the new fact's id, 0 on invalid input, or None when the
    currently ongoing fact is already the same activity+tags+description
    """
    fact = stuff.Fact(serialized_fact,
                      start_time = start_time,
                      end_time = end_time)
    start_time = start_time or fact.start_time
    end_time = end_time or fact.end_time
    if not fact.activity or start_time is None: # sanity check
        return 0
    # get tags from database - this will create any missing tags too
    tags = [dict(zip(('id', 'name', 'autocomplete'), row))
            for row in self.GetTagIds(fact.tags)]
    now = datetime.datetime.now()
    # if in future - roll back to past
    if start_time > datetime.datetime.now():
        start_time = dt.datetime.combine(now.date(), start_time.time())
        if start_time > now:
            start_time -= dt.timedelta(days = 1)
    if end_time and end_time > now:
        end_time = dt.datetime.combine(now.date(), end_time.time())
        if end_time > now:
            end_time -= dt.timedelta(days = 1)
    # now check if maybe there is also a category
    category_id = None
    if fact.category:
        category_id = self.__get_category_id(fact.category)
        if not category_id:
            category_id = self.__add_category(fact.category)
            trophies.unlock("no_hands")
    # try to find activity, resurrect if not temporary
    activity_id = self.__get_activity_by_name(fact.activity,
                                              category_id,
                                              resurrect = not temporary)
    if not activity_id:
        activity_id = self.__add_activity(fact.activity,
                                          category_id, temporary)
    else:
        activity_id = activity_id['id']
    # if we are working on +/- current day - check the last_activity
    if (dt.datetime.now() - start_time <= dt.timedelta(days=1)):
        # pull in previous facts
        facts = self.__get_todays_facts()
        previous = None
        if facts and facts[-1]["end_time"] == None:
            previous = facts[-1]
        if previous and previous['start_time'] <= start_time:
            # check if maybe that is the same one, in that case no need to restart
            if previous["activity_id"] == activity_id \
               and set(previous["tags"]) == set([tag["name"] for tag in tags]) \
               and (previous["description"] or "") == (fact.description or ""):
                return None
            # if no description is added
            # see if maybe previous was too short to qualify as an activity
            if not previous["description"] \
               and 60 >= (start_time - previous['start_time']).seconds >= 0:
                self.__remove_fact(previous['id'])
                # now that we removed the previous one, see if maybe the one
                # before that is actually same as the one we want to start
                # (glueing)
                if len(facts) > 1 and 60 >= (start_time - facts[-2]['end_time']).seconds >= 0:
                    before = facts[-2]
                    if before["activity_id"] == activity_id \
                       and set(before["tags"]) == set([tag["name"] for tag in tags]):
                        # resume and return
                        update = """
                                   UPDATE facts
                                      SET end_time = null
                                    WHERE id = ?
                        """
                        self.execute(update, (before["id"],))
                        return before["id"]
            else:
                # otherwise stop the ongoing fact at our start time
                update = """
                           UPDATE facts
                              SET end_time = ?
                            WHERE id = ?
                """
                self.execute(update, (start_time, previous["id"]))
    # done with the current activity, now we can solve overlaps
    if not end_time:
        end_time = self.__squeeze_in(start_time)
    else:
        self.__solve_overlaps(start_time, end_time)
    # finally add the new entry
    insert = """
                INSERT INTO facts (activity_id, start_time, end_time, description)
                       VALUES (?, ?, ?, ?)
    """
    self.execute(insert, (activity_id, start_time, end_time, fact.description))
    fact_id = self.__last_insert_rowid()
    #now link tags
    insert = ["insert into fact_tags(fact_id, tag_id) values(?, ?)"] * len(tags)
    params = [(fact_id, tag["id"]) for tag in tags]
    self.execute(insert, params)
    self.__remove_index([fact_id])
    return fact_id
def __last_insert_rowid(self):
    """id generated by the most recent INSERT on this connection"""
    row = self.fetchone("SELECT last_insert_rowid();")
    return row[0]
def __get_todays_facts(self):
    """facts of the current hamster day, honouring the configured
    day start offset"""
    from configuration import conf
    minutes = conf.get("day_start_minutes")
    day_start = dt.time(minutes / 60, minutes % 60)
    shift = dt.timedelta(hours = day_start.hour, minutes = day_start.minute)
    today = (dt.datetime.now() - shift).date()
    return self.__get_facts(today)
def __get_facts(self, date, end_date = None, search_terms = ""):
from configuration import conf
day_start = conf.get("day_start_minutes")
day_start = dt.time(day_start / 60, day_start % 60)
split_time = day_start
datetime_from = dt.datetime.combine(date, split_time)
end_date = end_date or date
datetime_to = dt.datetime.combine(end_date, split_time) + dt.timedelta(days = 1)
query = """
SELECT a.id AS id,
a.start_time AS start_time,
a.end_time AS end_time,
a.description as description,
b.name AS name, b.id as activity_id,
coalesce(c.name, ?) as category,
e.name as tag
FROM facts a
LEFT JOIN activities b ON a.activity_id = b.id
LEFT JOIN categories c ON b.category_id = c.id
LEFT JOIN fact_tags d ON d.fact_id = a.id
LEFT JOIN tags e ON e.id = d.tag_id
WHERE (a.end_time >= ? OR a.end_time IS NULL) AND a.start_time <= ?
"""
if search_terms:
# check if we need changes to the index
self.__check_index(datetime_from, datetime_to)
search_terms = search_terms.replace('\\', '\\\\').replace('%', '\\%').replace('_', '\\_').replace("'", "''")
query += """ AND a.id in (SELECT id
FROM fact_index
WHERE fact_index MATCH '%s')""" % search_terms
query += " ORDER BY a.start_time, e.name"
facts = self.fetchall(query, (_("Unsorted"),
datetime_from,
datetime_to))
#first let's put all tags in an array
facts = self.__group_tags(facts)
res = []
for fact in facts:
# heuristics to assign tasks to proper days
# if fact has no end time, set the last minute of the day,
# or current time if fact has happened in last 24 hours
if fact["end_time"]:
fact_end_time = fact["end_time"]
elif (dt.date.today() - fact["start_time"].date()) <= dt.timedelta(days=1):
fact_end_time = dt.datetime.now().replace(microsecond = 0)
else:
fact_end_time = fact["start_time"]
fact_start_date = fact["start_time"].date() \
- dt.timedelta(1 if fact["start_time"].time() < split_time else 0)
fact_end_date = fact_end_time.date() \
- dt.timedelta(1 if fact_end_time.time() < split_time else 0)
fact_date_span = fact_end_date - fact_start_date
# check if the task spans across two dates
if fact_date_span.days == 1:
datetime_split = dt.datetime.combine(fact_end_date, split_time)
start_date_duration = datetime_split - fact["start_time"]
end_date_duration = fact_end_time - datetime_split
if start_date_duration > end_date_duration:
# most of the task was done during the previous day
fact_date = fact_start_date
else:
fact_date = fact_end_date
else:
# either doesn't span or more than 24 hrs tracked
# (in which case we give up)
fact_date = fact_start_date
if fact_date < date or fact_date > end_date:
# due to spanning we've jumped outside of given period
continue
fact["date"] = fact_date
fact["delta"] = fact_end_time - fact["start_time"]
res.append(fact)
return res
def __remove_fact(self, fact_id):
    """delete a fact together with its tag links and its entry in the
    full text search index"""
    self.execute(["DELETE FROM fact_tags where fact_id = ?",
                  "DELETE FROM facts where id = ?"],
                 [(fact_id,), (fact_id,)])
    self.__remove_index([fact_id])
def __get_category_activities(self, category_id):
    """not-deleted activities of the given category, sorted
    case-insensitively by name"""
    query = """
               SELECT a.id, a.name, a.category_id, b.name as category
                 FROM activities a
            LEFT JOIN categories b on coalesce(b.id, -1) = a.category_id
                WHERE category_id = ?
                  AND deleted is null
             ORDER BY lower(a.name)
    """
    return self.fetchall(query, (category_id, ))
def __get_activities(self, search):
    """autocomplete helper: up to 50 not-deleted activities whose
    lowercased name starts with `search`, most recently used first"""
    query = """
               SELECT a.name AS name, b.name AS category
                 FROM activities a
            LEFT JOIN categories b ON coalesce(b.id, -1) = a.category_id
            LEFT JOIN facts f ON a.id = f.activity_id
                WHERE deleted IS NULL
                  AND a.search_name LIKE ? ESCAPE '\\'
             GROUP BY a.id
             ORDER BY max(f.start_time) DESC, lower(a.name)
                LIMIT 50
    """
    # escape LIKE wildcards; the backslash itself must go first
    pattern = search.lower()
    for char in ('\\', '%', '_'):
        pattern = pattern.replace(char, '\\' + char)
    return self.fetchall(query, (u'%s%%' % pattern, ))
def __remove_activity(self, id):
    """ check if we have any facts with this activity and behave accordingly
    if there are facts - sets activity to deleted = True
    else, just remove it"""
    row = self.fetchone("select count(*) as count from facts where activity_id = ?", (id,))
    bound_facts = row['count']
    if bound_facts:
        # keep the row so history stays intact, just hide it
        self.execute("UPDATE activities SET deleted = 1 WHERE id = ?", (id,))
    else:
        self.execute("delete from activities where id = ?", (id,))
    if bound_facts >= 50:
        # Finished! - deleted an activity with more than 50 facts on it
        trophies.unlock("finished")
def __remove_category(self, id):
    """move all activities to unsorted and remove category"""
    affected_query = """
        SELECT id
          FROM facts
         WHERE activity_id in (SELECT id FROM activities where category_id=?)
    """
    affected = [row[0] for row in self.fetchall(affected_query, (id,))]
    self.execute("update activities set category_id = -1 where category_id = ?", (id, ))
    self.execute("delete from categories where id = ?", (id, ))
    self.__remove_index(affected)
def __add_activity(self, name, category_id = None, temporary = False):
    """create an activity unless an equally named one already exists in
    the category; returns the activity id. temporary activities are born
    flagged deleted so they don't linger in pickers"""
    existing = self.__get_activity_by_name(name, category_id)
    if existing:
        return existing['id']
    query = """
               INSERT INTO activities (name, search_name, category_id, deleted)
                    VALUES (?, ?, ?, ?)
    """
    self.execute(query, (name, name.lower(),
                         category_id or -1,
                         1 if temporary else None))
    return self.__last_insert_rowid()
def __remove_index(self, ids):
    """drop the given fact ids from the full text search index"""
    if not ids:
        return
    # ids come from the database, so plain string interpolation is safe
    id_list = ",".join(str(fact_id) for fact_id in ids)
    self.execute("DELETE FROM fact_index where id in (%s)" % id_list)
def __check_index(self, start_date, end_date):
    """check if maybe index needs rebuilding in the time span"""
    # facts in the span that have no entry in the search index yet
    index_query = """SELECT id
                       FROM facts
                      WHERE (end_time >= ? OR end_time IS NULL)
                        AND start_time <= ?
                        AND id not in(select id from fact_index)"""
    rebuild_ids = ",".join([str(res[0]) for res in self.fetchall(index_query, (start_date, end_date))])
    if rebuild_ids:
        query = """
                   SELECT a.id AS id,
                          a.start_time AS start_time,
                          a.end_time AS end_time,
                          a.description as description,
                          b.name AS name, b.id as activity_id,
                          coalesce(c.name, ?) as category,
                          e.name as tag
                     FROM facts a
                LEFT JOIN activities b ON a.activity_id = b.id
                LEFT JOIN categories c ON b.category_id = c.id
                LEFT JOIN fact_tags d ON d.fact_id = a.id
                LEFT JOIN tags e ON e.id = d.tag_id
                    WHERE a.id in (%s)
                 ORDER BY a.id
        """ % rebuild_ids
        facts = self.__group_tags(self.fetchall(query, (_("Unsorted"), )))
        insert = """INSERT INTO fact_index (id, name, category, description, tag)
                         VALUES (?, ?, ?, ?, ?)"""
        # tags are flattened to one space-separated string per fact
        params = [(fact['id'], fact['name'], fact['category'], fact['description'], " ".join(fact['tags'])) for fact in facts]
        self.executemany(insert, params)
""" Here be dragons (lame connection/cursor wrappers) """
def get_connection(self):
    """lazily open the sqlite connection on first use and cache it"""
    if self.con is None:
        con = sqlite.connect(self.db_path,
                             detect_types = sqlite.PARSE_DECLTYPES | sqlite.PARSE_COLNAMES)
        # rows become addressable by column name
        con.row_factory = sqlite.Row
        self.con = con
    return self.con
connection = property(get_connection, None)
def fetchall(self, query, params = None):
    """run a read-only query and return all resulting rows"""
    cursor = self.connection.cursor()
    logging.debug("%s %s" % (query, params))
    if params:
        cursor.execute(query, params)
    else:
        cursor.execute(query)
    rows = cursor.fetchall()
    cursor.close()
    return rows
def fetchone(self, query, params = None):
    """first row of the query result, or None when there are no rows"""
    rows = self.fetchall(query, params)
    return rows[0] if rows else None
def execute(self, statement, params = ()):
    """
    execute sql statement. optionally you can give multiple statements
    to save on cursor creation and closure: pass a list of statements
    and a matching list of parameter tuples
    """
    con = self.__con or self.connection
    cur = self.__cur or con.cursor()
    # fix: idiomatic negation instead of "isinstance(...) == False"
    if not isinstance(statement, list): # we expect to receive instructions in list
        statement = [statement]
        params = [params]
    for state, param in zip(statement, params):
        logging.debug("%s %s" % (state, param))
        cur.execute(state, param)
    if not self.__con:
        # no explicit transaction open - commit and clean up right away
        con.commit()
        cur.close()
    self.register_modification()
def executemany(self, statement, params = ()):
    """run one statement against every parameter tuple in params.

    fix: default changed from the mutable [] to an immutable () - same
    behavior (an empty sequence executes nothing), no shared-state risk
    """
    con = self.__con or self.connection
    cur = self.__cur or con.cursor()
    logging.debug("%s %s" % (statement, params))
    cur.executemany(statement, params)
    if not self.__con:
        # no explicit transaction open - commit and clean up right away
        con.commit()
        cur.close()
    self.register_modification()
def start_transaction(self):
    """switch execute()/executemany() into deferred-commit mode by
    pinning a shared connection and cursor until end_transaction()"""
    con = self.connection
    self.__con = con
    self.__cur = con.cursor()
def end_transaction(self):
    """Commit the pending transaction and release the shared handles."""
    self.__con.commit()
    self.__cur.close()
    self.__con = None
    self.__cur = None
    self.register_modification()
def run_fixtures(self):
    # Bring the database schema up to `current_version` by applying each
    # version-gated migration in order, then — if the database is empty —
    # seed it with localized default categories/activities.  The whole
    # upgrade runs inside a single manual transaction.
    self.start_transaction()

    # defaults
    work_category = {"name": _("Work"),
                     "entries": [_("Reading news"),
                                 _("Checking stocks"),
                                 _("Super secret project X"),
                                 _("World domination")]}

    nonwork_category = {"name": _("Day-to-day"),
                        "entries": [_("Lunch"),
                                    _("Watering flowers"),
                                    _("Doing handstands")]}

    """upgrade DB to hamster version"""
    version = self.fetchone("SELECT version FROM version")["version"]
    current_version = 9  # bump when appending a new "if version < N" step

    if version < 2:
        """moving from fact_date, fact_time to start_time, end_time"""

        # sqlite has no ALTER COLUMN, so migrations rebuild the table:
        # create new, copy, drop old, rename — the same pattern recurs below
        self.execute("""
            CREATE TABLE facts_new
                         (id integer primary key,
                          activity_id integer,
                          start_time varchar2(12),
                          end_time varchar2(12))
        """)

        self.execute("""
            INSERT INTO facts_new
                        (id, activity_id, start_time)
                 SELECT id, activity_id, fact_date || fact_time
                   FROM facts
        """)

        self.execute("DROP TABLE facts")
        self.execute("ALTER TABLE facts_new RENAME TO facts")

        # run through all facts and set the end time
        # if previous fact is not on the same date, then it means that it was the
        # last one in previous, so remove it
        # this logic saves our last entry from being deleted, which is good
        facts = self.fetchall("""
            SELECT id, activity_id, start_time,
                   substr(start_time,1, 8) start_date
              FROM facts
          ORDER BY start_time
        """)
        prev_fact = None

        for fact in facts:
            if prev_fact:
                if prev_fact['start_date'] == fact['start_date']:
                    # same day: previous fact ends when this one starts
                    self.execute("UPDATE facts SET end_time = ? where id = ?",
                                 (fact['start_time'], prev_fact['id']))
                else:
                    #otherwise that's the last entry of the day - remove it
                    self.execute("DELETE FROM facts WHERE id = ?", (prev_fact["id"],))

            prev_fact = fact

    #it was kind of silly not to have datetimes in first place
    if version < 3:
        self.execute("""
            CREATE TABLE facts_new
                         (id integer primary key,
                          activity_id integer,
                          start_time timestamp,
                          end_time timestamp)
        """)

        # reformat YYYYMMDDHHMM strings into "YYYY-MM-DD HH:MM:00"
        # timestamps via substr concatenation
        self.execute("""
            INSERT INTO facts_new
                        (id, activity_id, start_time, end_time)
                 SELECT id, activity_id,
                        substr(start_time,1,4) || "-"
                        || substr(start_time, 5, 2) || "-"
                        || substr(start_time, 7, 2) || " "
                        || substr(start_time, 9, 2) || ":"
                        || substr(start_time, 11, 2) || ":00",
                        substr(end_time,1,4) || "-"
                        || substr(end_time, 5, 2) || "-"
                        || substr(end_time, 7, 2) || " "
                        || substr(end_time, 9, 2) || ":"
                        || substr(end_time, 11, 2) || ":00"
                   FROM facts;
        """)

        self.execute("DROP TABLE facts")
        self.execute("ALTER TABLE facts_new RENAME TO facts")

    #adding categories table to categorize activities
    if version < 4:
        #adding the categories table
        self.execute("""
            CREATE TABLE categories
                         (id integer primary key,
                          name varchar2(500),
                          color_code varchar2(50),
                          category_order integer)
        """)

        # adding default categories, and make sure that uncategorized stays on bottom for starters
        # set order to 2 in case, if we get work in next lines
        self.execute("""
            INSERT INTO categories
                        (id, name, category_order)
                 VALUES (1, ?, 2);
        """, (nonwork_category["name"],))

        #check if we have to create work category - consider work everything that has been determined so, and is not deleted
        work_activities = self.fetchone("""
            SELECT count(*) as work_activities
              FROM activities
             WHERE deleted is null and work=1;
        """)['work_activities']

        if work_activities > 0:
            self.execute("""
                INSERT INTO categories
                            (id, name, category_order)
                     VALUES (2, ?, 1);
            """, (work_category["name"],))

        # now add category field to activities, before starting the move
        self.execute("""
            ALTER TABLE activities
             ADD COLUMN category_id integer;
        """)

        # starting the move

        # first remove all deleted activities with no instances in facts
        self.execute("""
            DELETE FROM activities
                  WHERE deleted = 1
                    AND id not in(select activity_id from facts);
        """)

        # moving work / non-work to appropriate categories
        # exploit false/true = 0/1 thing
        self.execute("""
            UPDATE activities
               SET category_id = work + 1
             WHERE deleted is null
        """)

        #finally, set category to -1 where there is none
        self.execute("""
            UPDATE activities
               SET category_id = -1
             WHERE category_id is null
        """)

        # drop work column and forget value of deleted
        # previously deleted records are now unsorted ones
        # user will be able to mark them as deleted again, in which case
        # they won't appear in autocomplete, or in categories
        # resurrection happens, when user enters the exact same name
        self.execute("""
            CREATE TABLE activities_new (id integer primary key,
                                         name varchar2(500),
                                         activity_order integer,
                                         deleted integer,
                                         category_id integer);
        """)

        self.execute("""
            INSERT INTO activities_new
                        (id, name, activity_order, category_id)
                 SELECT id, name, activity_order, category_id
                   FROM activities;
        """)

        self.execute("DROP TABLE activities")
        self.execute("ALTER TABLE activities_new RENAME TO activities")

    if version < 5:
        self.execute("ALTER TABLE facts add column description varchar2")

    if version < 6:
        # facts table could use an index
        self.execute("CREATE INDEX idx_facts_start_end ON facts(start_time, end_time)")
        self.execute("CREATE INDEX idx_facts_start_end_activity ON facts(start_time, end_time, activity_id)")

        # adding tags
        self.execute("""CREATE TABLE tags (id INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL,
                                           name TEXT NOT NULL,
                                           autocomplete BOOL DEFAULT true)""")
        self.execute("CREATE INDEX idx_tags_name ON tags(name)")

        self.execute("CREATE TABLE fact_tags(fact_id integer, tag_id integer)")
        self.execute("CREATE INDEX idx_fact_tags_fact ON fact_tags(fact_id)")
        self.execute("CREATE INDEX idx_fact_tags_tag ON fact_tags(tag_id)")

    if version < 7:
        # rebuild facts with an AUTOINCREMENT primary key so deleted ids
        # are never reused
        self.execute("""CREATE TABLE increment_facts (id integer primary key autoincrement,
                                                      activity_id integer,
                                                      start_time timestamp,
                                                      end_time timestamp,
                                                      description varchar2)""")
        self.execute("""INSERT INTO increment_facts(id, activity_id, start_time, end_time, description)
                             SELECT id, activity_id, start_time, end_time, description from facts""")
        self.execute("DROP table facts")
        self.execute("ALTER TABLE increment_facts RENAME TO facts")

    if version < 8:
        # working around sqlite's utf-8 case sensitivity (bug 624438)
        # more info: http://www.gsak.net/help/hs23820.htm
        # store a lowercased copy of each name for case-insensitive lookups
        self.execute("ALTER TABLE activities ADD COLUMN search_name varchar2")

        activities = self.fetchall("select * from activities")
        statement = "update activities set search_name = ? where id = ?"
        for activity in activities:
            self.execute(statement, (activity['name'].lower(), activity['id']))

        # same for categories
        self.execute("ALTER TABLE categories ADD COLUMN search_name varchar2")
        categories = self.fetchall("select * from categories")
        statement = "update categories set search_name = ? where id = ?"
        for category in categories:
            self.execute(statement, (category['name'].lower(), category['id']))

    if version < 9:
        # adding full text search
        self.execute("""CREATE VIRTUAL TABLE fact_index
                                       USING fts3(id, name, category, description, tag)""")

    # at the happy end, update version number
    if version < current_version:
        #lock down current version
        self.execute("UPDATE version SET version = %d" % current_version)
        print "updated database from version %d to %d" % (version, current_version)

        # oldtimer – database version structure had been performed on startup (thus we know that he has been on at least 2 versions)
        trophies.unlock("oldtimer")

    """we start with an empty database and then populate with default
       values. This way defaults can be localized!"""

    category_count = self.fetchone("select count(*) from categories")[0]

    if category_count == 0:
        work_cat_id = self.__add_category(work_category["name"])
        for entry in work_category["entries"]:
            self.__add_activity(entry, work_cat_id)

        nonwork_cat_id = self.__add_category(nonwork_category["name"])
        for entry in nonwork_category["entries"]:
            self.__add_activity(entry, nonwork_cat_id)

    self.end_transaction()
|