# -*- coding: utf-8 -*-

from datetime import datetime, timedelta
from collections import defaultdict

from trac.core import *
from trac.env import IEnvironmentSetupParticipant
from trac.perm import PermissionSystem
from trac.db import Table, Column, Index
from trac.db.util import sql_escape_percent
from trac.util.datefmt import utc, to_timestamp
from trac.util.text import to_unicode
from trac.util.datefmt import get_timezone, format_time, localtz
from trac.util.translation import _

from dateutil.rrule import *
from babel.core import Locale

from ctdotools.utils import validate_id, gen_wiki_page
from tracrendezvous.location.model import ItemLocation

__all__ = ['Event', 'EventRRule', 'EventRDate', 'EventModelProvider']

def rrule_to_ical(rrule):
    """Serialize a dateutil rrule into an iCalendar RRULE line, covering
    FREQ, INTERVAL and either UNTIL or COUNT."""
    ttypes = ("YEARLY", "MONTHLY", "WEEKLY", "DAILY")
    s = "RRULE:FREQ=%s;INTERVAL=%s" % (ttypes[rrule._freq], rrule._interval)
    if rrule._until:
        s += ";UNTIL=%s" % rrule._until.strftime("%Y%m%dT%H%M%SZ")
    elif rrule._count:
        s += ";COUNT=%s" % rrule._count
    return s


def unfold(d):
    """Split a whitespace separated string of numbers into a list of ints."""
    return map(int, d.split())


def unfold_weekdays(d):
    """Split a whitespace separated string of comma separated numbers into
    lists of ints, e.g. "0,1 3" -> [[0, 1], [3]]."""
    return map(lambda s: map(int, s.split(",")), d.split())
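

# Illustrative sketch (not used by the plugin itself): what rrule_to_ical()
# produces for a simple recurrence.  The dateutil rrule below is a made-up
# example; only the private _freq/_interval/_count/_until attributes that
# rrule_to_ical() reads are relied upon.
def _example_weekly_rrule_to_ical():
    demo = rrule(WEEKLY, interval=2, count=10,
                 dtstart=datetime(2009, 5, 7, 17, 0, tzinfo=utc))
    # Expected result: "RRULE:FREQ=WEEKLY;INTERVAL=2;COUNT=10"
    return rrule_to_ical(demo)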


class EventRDate(object):
    """A single additional or excluded date (RDATE/EXDATE) of an event,
    stored in the event_rdates table."""

    def __init__(self, env, erd_id=0, e_id=0, erd_exclude=False,
                 erd_datetime=None):
        self.env = env
        self.erd_id = erd_id
        self.e_id = e_id
        self.erd_exclude = erd_exclude
        self.erd_datetime = erd_datetime

    @staticmethod
    def fetch_one(env, erd_id):
        db = env.get_db_cnx()
        cursor = db.cursor()
        cursor.execute("SELECT * "
                       "FROM event_rdates "
                       "WHERE erd_id=%s", (erd_id,))
        row = cursor.fetchone()
        if not row:
            return None
        return EventRDate(env, row[0], row[1], bool(row[2]),
                          datetime.fromtimestamp(row[3], utc))

    @staticmethod
    def fetch_by_event(env, e_id):
        db = env.get_db_cnx()
        cursor = db.cursor()
        cursor.execute("SELECT * "
                       "FROM event_rdates "
                       "WHERE e_id=%s", (e_id,))
        rows = cursor.fetchall()
        res = list()
        for row in rows:
            res.append(EventRDate(env, row[0], row[1], bool(row[2]),
                                  datetime.fromtimestamp(row[3], utc)))
        return res

    def commit(self):
        db = self.env.get_db_cnx()
        cursor = db.cursor()
        cursor.execute("INSERT INTO event_rdates "
                       "(e_id, erd_exclude, erd_datetime) VALUES (%s,%s,%s);",
                       (int(self.e_id), int(self.erd_exclude),
                        to_timestamp(self.erd_datetime)))
        db.commit()
        self.erd_id = db.get_last_id(cursor, 'event_rdates')

    def update(self):
        db = self.env.get_db_cnx()
        cursor = db.cursor()
        cursor.execute("UPDATE event_rdates SET "
                       "erd_datetime=%s "
                       "WHERE erd_id=%s;",
                       (to_timestamp(self.erd_datetime), int(self.erd_id)))
        db.commit()

    @staticmethod
    def delete(env, erd_id):
        db = env.get_db_cnx()
        cursor = db.cursor()
        try:
            cursor.execute("DELETE FROM event_rdates "
                           "WHERE erd_id=%s", (erd_id,))
            db.commit()
        except Exception:
            db.rollback()


class EventWikiPage(object):
    """Maps a row of the event_wikipages table: the wiki page attached to a
    single occurrence of an event."""

    def __init__(self, env, ewp_id=0, e_id=0, time_begin=None, wikipage=None):
        self.env = env
        self.ewp_id = ewp_id
        self.e_id = e_id
        self.time_begin = time_begin
        self.wikipage = wikipage

    @staticmethod
    def fetch_by_event_occurrence(env, e_id, time_begin=None):
        db = env.get_db_cnx()
        cursor = db.cursor()
        if not time_begin:
            cursor.execute("SELECT * "
                           "FROM event_wikipages "
                           "WHERE e_id=%s", (e_id,))
        else:
            cursor.execute("SELECT * "
                           "FROM event_wikipages "
                           "WHERE e_id=%s and time_begin=%s", (e_id, time_begin))
        rows = cursor.fetchall()
        res = list()
        for row in rows:
            res.append(EventWikiPage(env, *row))
        return res

    def commit(self):
        db = self.env.get_db_cnx()
        cursor = db.cursor()
        cursor.execute("INSERT INTO event_wikipages "
                       "(e_id, time_begin, wikipage) VALUES (%s,%s,%s);",
                       (int(self.e_id), to_timestamp(self.time_begin), self.wikipage))
        db.commit()
        self.ewp_id = db.get_last_id(cursor, 'event_wikipages')

    def update(self, env):
        db = self.env.get_db_cnx()
        cursor = db.cursor()
        cursor.execute("UPDATE event_wikipages "
                       "SET e_id=%s, time_begin=%s, wikipage=%s "
                       "WHERE ewp_id=%s;",
                       (int(self.e_id), to_timestamp(self.time_begin),
                        self.wikipage, int(self.ewp_id)))
        db.commit()

    @staticmethod
    def delete(env, ewp_id):
        db = env.get_db_cnx()
        cursor = db.cursor()
        try:
            cursor.execute("DELETE FROM event_wikipages "
                           "WHERE ewp_id=%s", (ewp_id,))
            db.commit()
        except Exception:
            db.rollback()


class EventRRule(object):
    """A recurrence rule of an Event, mapped to a row of the event_rrules
    table."""

    freq_enum = (YEARLY, MONTHLY, WEEKLY, DAILY)
    day_enum = (MO, TU, WE, TH, FR, SA, SU)
    # TODO: porting to babel
    day_abr_names = (_("MO"), _("TU"), _("WE"), _("TH"), _("FR"), _("SA"), _("SU"))
    day_names = (_("Monday"), _("Tuesday"), _("Wednesday"), _("Thursday"),
                 _("Friday"), _("Saturday"), _("Sunday"))
    monthday_names = ["%d." % i for i in xrange(1, 32)] + ['last', ] + \
                     ["%d-last" % i for i in xrange(1, 32)]
    month_names = (_("January"), _("February"), _("March"), _("April"),
                   _("May"), _("June"), _("July"), _("August"),
                   _("September"), _("October"), _("November"), _("December"))
    monthday_enum = range(1, 32) + range(-1, -33, -1)
    weekday_names = (_("1st"), _("2nd"), _("3rd"), _("4th"), _("5th"),
                     _("last"), _("2-last"), _("3-last"), _("4-last"), _("5-last"))
    selectkeys = [1, 2, 3, 4, 5, -1, -2, -3, -4, -5, -6]

    def __init__(self, env, err_id=0, e_id=None, exclude=None, freq=None, interval=None,
                 count=None, until=None, bysetpos=None,
                 bymonth=None, bymonthday=None, byyearday=None,
                 byweeknumber=None, byweekday=set(), byweekdayocc=None):
        self.env = env
        self.err_id = err_id
        self.e_id = e_id
        self.exclude = exclude
        self.freq = freq
        self.interval = interval
        self.count = count
        self.until = until
        self.bysetpos = bysetpos
        self.bymonth = bymonth
        self.bymonthday = bymonthday
        self.byyearday = byyearday
        self.byweeknumber = byweeknumber
        self.byweekday = byweekday
        self.byweekdayocc = byweekdayocc

    @staticmethod
    def _fetch_data(env, e_id):
        db = env.get_db_cnx()
        cursor = db.cursor()
        cursor.execute("""SELECT *
                          FROM event_rrules
                          WHERE e_id=%s""", (e_id,))
        return cursor.fetchall()

    @staticmethod
    def fetch_by_event(env, e_id):
        """Return a list of EventRRule objects that can be used to display
        and edit the raw recurrence data."""
        rows = EventRRule._fetch_data(env, e_id)
        if not rows:
            return list()
        res = list()
        for row in rows:
            err_id, e_id, exclude, freq, interval, count, until, bysetpos, \
                bymonth, bymonthday, byyearday, byweeknumber, byweekday, \
                byweekdayocc = row
            if byweekday:
                try:
                    byweekday = set(map(int, byweekday.split(" ")))
                except AttributeError:
                    byweekday = set((byweekday, ))
            else:
                byweekday = set()
            res.append(EventRRule(env,
                                  err_id,
                                  e_id,
                                  exclude,
                                  freq,
                                  interval,
                                  count,
                                  until and datetime.fromtimestamp(until, utc) or None,
                                  bysetpos,
                                  bymonth,
                                  bymonthday,
                                  byyearday,
                                  byweeknumber,
                                  byweekday,
                                  byweekdayocc))
        return res

    @staticmethod
    def fetch_by_event_rrules(env, e_id, time_begin=None):
        """Return a dateutil rruleset that can be used to actually expand the
        occurrence dates."""
        rows = EventRRule._fetch_data(env, e_id)
        if not rows:
            return rruleset()
        res = rruleset()
        for row in rows:
            res.rrule(EventRRule.to_rrule(row, time_begin))
        return res

    @staticmethod
    def fetch_by_event_full(env, e_id, time_begin=None):
        """Return both the raw EventRRule objects and the combined rruleset
        of an event, as a (list, rruleset) tuple."""
        rows = EventRRule._fetch_data(env, e_id)
        if not rows:
            return list(), rruleset()
        lst = list()
        rs = rruleset()
        for row in rows:
            rs.rrule(EventRRule.to_rrule(row, time_begin))
            err_id, e_id, exclude, freq, interval, count, until, bysetpos, \
                bymonth, bymonthday, byyearday, byweeknumber, byweekday, \
                byweekdayocc = row
            if byweekday:
                try:
                    byweekday = set(map(int, byweekday.split(" ")))
                except AttributeError:
                    byweekday = set((byweekday, ))
            else:
                byweekday = set()
            lst.append(EventRRule(env, err_id, e_id, exclude, freq, interval,
                                  count,
                                  until and datetime.fromtimestamp(until, utc) or None,
                                  bysetpos, bymonth, bymonthday, byyearday,
                                  byweeknumber, byweekday, byweekdayocc))
        return lst, rs

    @staticmethod
    def fetch_by_event_ical(env, e_id, time_begin=None):
        """Return a list of iCalendar formatted RRULE strings."""
        rows = EventRRule._fetch_data(env, e_id)
        lst = list()
        for row in rows:
            lst.append(EventRRule.to_ical(row))
        return lst

    def commit(self, conn=None):
        db = conn and conn or self.env.get_db_cnx()
        cursor = db.cursor()

        try:
            cursor.execute("INSERT INTO event_rrules "
                           "(e_id, exclude, freq, interval, count, until, "
                           "bysetpos, bymonth, bymonthday, byyearday, "
                           "byweeknumber, byweekday, byweekdayocc) "
                           "VALUES (%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s);",
                           (self.e_id,
                            self.exclude,
                            self.freq,
                            self.interval,
                            self.count,
                            to_timestamp(self.until),
                            self.bysetpos,
                            self.bymonth,
                            self.bymonthday,
                            self.byyearday,
                            self.byweeknumber,
                            self.byweekday and " ".join(map(str, self.byweekday)) or None,
                            self.byweekdayocc))
            db.commit()
            self.err_id = db.get_last_id(cursor, 'event_rrules')
        except Exception, e:
            db.rollback()
            raise

    @staticmethod
    def delete(env, e_id):
        db = env.get_db_cnx()
        cursor = db.cursor()
        try:
            cursor.execute("DELETE FROM event_rrules WHERE e_id=%s", (e_id,))
            db.commit()
        except Exception, e:
            db.rollback()
            raise

    def update(self, conn=None):
        db = conn and conn or self.env.get_db_cnx()
        cursor = db.cursor()
        weekdays = self.byweekday and " ".join(map(str, self.byweekday)) or None
        cursor.execute("UPDATE event_rrules "
                       "SET exclude=%s, "
                       "freq=%s, "
                       "interval=%s, "
                       "count=%s, "
                       "until=%s, "
                       "bysetpos=%s, "
                       "bymonth=%s, "
                       "bymonthday=%s, "
                       "byyearday=%s, "
                       "byweeknumber=%s, "
                       "byweekday=%s, "
                       "byweekdayocc=%s "
                       "WHERE e_id=%s", (self.exclude,
                                         self.freq,
                                         self.interval,
                                         self.count,
                                         self.until and to_timestamp(self.until) or None,
                                         self.bysetpos,
                                         self.bymonth,
                                         self.bymonthday,
                                         self.byyearday,
                                         self.byweeknumber,
                                         weekdays,
                                         self.byweekdayocc,
                                         self.e_id))
        if not conn:
            db.commit()

    @staticmethod
    def to_rrule(row, time_begin):
        """Build a dateutil rrule from a raw event_rrules row, anchored at
        time_begin."""
        err_id, e_id, exclude, freq, interval, count, until, bysetpos, \
            bymonth, bymonthday, byyearday, byweeknumber, byweekday, \
            byweekdayocc = row
        args = {"dtstart": time_begin, "interval": interval}
        if count != None:
            args["count"] = count
        if until:
            args["until"] = datetime.fromtimestamp(until, utc)
        if bysetpos != None:
            args["bysetpos"] = bysetpos
        if bymonth != None:
            args["bymonth"] = bymonth
        if bymonthday != None:
            args["bymonthday"] = bymonthday
        if byyearday != None:
            args["byyearday"] = byyearday
        if byweeknumber != None:
            args["byweekno"] = byweeknumber
        if byweekday != None:
            if byweekdayocc != None:
                try:
                    byweekdayocc = byweekdayocc.split()
                except Exception:
                    byweekdayocc = [byweekdayocc, ]
            else:
                byweekdayocc = []
            try:
                byweekday = byweekday.split()
            except Exception:
                byweekday = [byweekday, ]
            args["byweekday"] = map(
                lambda x: EventRRule.day_enum[x[0]](x[1] != None and
                                                    EventRRule.selectkeys[x[1]] or None),
                map(None, *[map(int, byweekday), map(int, byweekdayocc)]))
        return rrule(freq, **args)

    @staticmethod
    def to_ical(row):
        """Serialize a raw event_rrules row into an iCalendar RRULE string."""
        ttypes = ("YEARLY", "MONTHLY", "WEEKLY", "DAILY")
        err_id, e_id, exclude, freq, interval, count, until, bysetpos, \
            bymonth, bymonthday, byyearday, byweeknumber, byweekday, \
            byweekdayocc = row
        s = ["RRULE:FREQ=%s;INTERVAL=%s" % (ttypes[freq], interval), ]
        if until:
            s.append("UNTIL=%s" % datetime.fromtimestamp(until, utc).strftime("%Y%m%dT%H%M%SZ"))
        if count != None:
            s.append("COUNT=%s" % count)
        if bysetpos != None:
            s.append("BYSETPOS=%s" % bysetpos)
        if bymonth != None:
            s.append("BYMONTH=%s" % bymonth)
        if bymonthday != None:
            s.append("BYMONTHDAY=%s" % bymonthday)
        if byyearday != None:
            s.append("BYYEARDAY=%s" % byyearday)
        if byweeknumber != None:
            s.append("BYWEEKNO=%s" % byweeknumber)
        if byweekday != None:
            if byweekdayocc != None:
                try:
                    byweekdayocc = byweekdayocc.split()
                except Exception:
                    byweekdayocc = [str(byweekdayocc), ]
            else:
                byweekdayocc = []
            try:
                byweekday = byweekday.split()
            except Exception:
                byweekday = [str(byweekday), ]
            data = map(None, map(int, byweekday), map(int, byweekdayocc))
            s.append("BYDAY=%s" % ",".join(
                map(lambda x: "%s%s" % (x[1] != None and EventRRule.selectkeys[x[1]] or '',
                                        EventRRule.day_enum[x[0]]),
                    data)))
        return ";".join(s)

    def explain(self):
        """Return a human readable description of this recurrence rule."""
        freq = (_("year(s)"), _("month(s)"), _("week(s)"), _("day(s)"))
        expl = []
        expl.append(_("Repeat every %d %s") % (self.interval, freq[self.freq]))
        if self.byweekday:
            if self.byweekdayocc != None:
                try:
                    byweekdayocc = map(int, self.byweekdayocc.split())
                except Exception:
                    byweekdayocc = [int(self.byweekdayocc), ]
            else:
                byweekdayocc = []
            byweekday = self.byweekday
            if not byweekday:
                byweekday = []
            tmp = map(None, byweekday, byweekdayocc)
            tpl = _("on %s")
            tmp = tpl % ", ".join(
                map(lambda x: x[1] and _("%s %s") %
                    (unicode(self.weekday_names[x[1]]), unicode(self.day_names[x[0]])) or
                    unicode(self.day_names[x[0]]), tmp))
            expl.append(tmp)
        elif self.bymonthday:
            expl.append(_("on day %s") % self.bymonthday)
        elif self.bymonth:
            expl.append(_("in %s") % self.month_names[self.bymonth - 1])
        elif self.byyearday:
            expl.append(_("on day %s") % self.byyearday)
        #else:
        #    raise NotImplementedError("could not provide a sane explanation of event:\n%s" % self.__str__())

        if self.count and self.count > 0:
            expl.append(_("for %d times") % self.count)
        if self.until:
            expl.append(_("until %s") % self.until.strftime('%Y-%m-%d'))
        return " ".join(expl)

    def __str__(self):
        return "\n".join(("self.e_id %s" % self.e_id,
                          "self.exclude %s" % self.exclude,
                          "self.freq %s" % self.freq,
                          "self.interval %s" % self.interval,
                          "self.count %s" % self.count,
                          "self.until %s" % to_timestamp(self.until),
                          "self.bysetpos %s" % self.bysetpos,
                          "self.bymonth %s" % self.bymonth,
                          "self.bymonthday %s" % self.bymonthday,
                          "self.byyearday %s" % self.byyearday,
                          "self.byweeknumber %s" % self.byweeknumber,
                          "self.byweekday %s" % self.byweekday,
                          "self.byweekdayocc %s" % self.byweekdayocc))


class Event(object):

    def __init__(self, env, e_id, name, author, time_created, time_modified,
                 time_begin, time_end, location_id, initial_e_id=None,
                 tags=None, attendees=None, is_periodic=False, wikipage=None):
        """Maps a row of the 'events' table to a Python object.

        @type e_id: int
        @param e_id: primary key

        @type name: string
        @param name: name of the event

        @type author: string
        @param author: name of the event's creator

        @type time_begin: datetime
        @param time_begin: begin of the event

        @type time_end: datetime
        @param time_end: end of the event

        @type time_created: datetime
        @param time_created: creation timestamp of the event

        @type time_modified: datetime
        @param time_modified: timestamp of the last modification

        @type location_id: int
        @param location_id: primary key of the location where the event takes place

        @type initial_e_id: int
        @param initial_e_id: gets the same value as e_id, but only if this is
            not the first occurrence of a recurring event

        @type tags: unicode
        @param tags: space separated list of tags

        @type attendees: unicode
        @param attendees: space separated list of attendee names

        @type is_periodic: bool
        @param is_periodic: shows whether this event is recurring

        @type wikipage: unicode
        @param wikipage: the link as plain text without the 'wiki' prefix,
            e.g. "events/wikipage-of-that-event" or "foo"

        An illustrative construction sketch follows the class definition.
        """
        self.env = env
        self.e_id = e_id
        self.name = unicode(name)
        self.author = unicode(author)
        self.time_created = time_created
        self.time_modified = time_modified
        self.time_begin = time_begin
        self.time_end = time_end
        self.location_id = location_id
        # Reference to another event of the same kind.  If this is not None,
        # it is a follow-up occurrence with a wiki page created for it.
        self.initial_e_id = initial_e_id
        self.tags = tags              # space separated list of strings
        self.attendees = attendees    # space separated list of strings
        self.is_periodic = is_periodic
        self.location = ItemLocation.fetch_one(env, location_id)
        self.wikipage = wikipage

    def periodic(self):
        """True if this event recurs or is an occurrence of a recurring one."""
        return self.is_periodic or self.initial_e_id

    @staticmethod
    def fetch_one(env, event_id, show_next=False, show_all=False, days=365):
        """Return the Event with primary key event_id.

        @type event_id: int
        @param event_id: primary key of the Event

        @type show_next: bool
        @param show_next: if True and the event is periodic, the next
            occurrence after now is returned instead of the stored event

        @type show_all: bool
        @param show_all: if True and the event is periodic, the event gets an
            additional member list 'Event.followups' holding the occurrences
            between now and now + days
        """
        db = env.get_db_cnx()
        cursor = db.cursor()
        cursor.execute("SELECT * "
                       "FROM events "
                       "WHERE e_id=%s", (event_id,))
        row = cursor.fetchone()
        if not row:
            return None
        e_id, name, author, time_created, time_modified, time_begin, time_end, \
            location_id, initial_e_id, tags, attendees, is_periodic, wikipage = row
        event = Event(env, e_id, name, author,
                      datetime.fromtimestamp(time_created, utc),
                      datetime.fromtimestamp(time_modified, utc),
                      datetime.fromtimestamp(time_begin, utc),
                      datetime.fromtimestamp(time_end, utc),
                      location_id, initial_e_id, tags, attendees,
                      bool(is_periodic), wikipage)
        if show_next and is_periodic:
            rrules = EventRRule.fetch_by_event_rrules(env, e_id, event.time_begin)
            if rrules:
                dt = rrules.after(datetime.now(utc), True)
                if not dt:
                    return event
                delta = event.time_end - event.time_begin
                s = Event(env,
                          e_id,
                          name,
                          author,
                          event.time_created,
                          event.time_modified,
                          dt,
                          dt + delta,
                          location_id,
                          e_id,
                          tags,
                          attendees,
                          False,
                          wikipage)
                return s
        if show_all and is_periodic:
            event.followups = []
            event.rrules_explained = None
            ls, rrules = EventRRule.fetch_by_event_full(env, e_id, event.time_begin)
            if rrules:
                text = ls[0].explain()
                event.rrules_explained = text
                n = datetime.now(utc)
                e = n + timedelta(days)
                followups = rrules.between(n, e, True)
                delta = event.time_end - event.time_begin
                for i in followups:
                    dt = datetime(i.year, i.month, i.day, event.time_begin.hour,
                                  event.time_begin.minute, tzinfo=utc)
                    s = Event(env,
                              e_id,
                              name,
                              author,
                              datetime.fromtimestamp(time_created, utc),
                              datetime.fromtimestamp(time_modified, utc),
                              dt,
                              dt + delta,
                              location_id,
                              e_id,
                              tags,
                              attendees,
                              False,
                              wikipage)
                    s.rrules_explained = text
                    event.followups.append(s)
        return event
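
    # Illustrative sketch (comment only, not executed): typical fetch_one()
    # calls, assuming ``env`` is a Trac Environment and ``7`` a hypothetical
    # event id.
    #
    #     ev = Event.fetch_one(env, 7)                   # stored row as-is
    #     nxt = Event.fetch_one(env, 7, show_next=True)  # next occurrence, if periodic
    #     full = Event.fetch_one(env, 7, show_all=True)  # full.followups / full.rrules_explained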

    @staticmethod
    def fetch_all(env):
        db = env.get_db_cnx()
        cursor = db.cursor()
        cursor.execute("SELECT * from events;")
        rows = cursor.fetchall()
        if not rows:
            return []
        res = []
        for row in rows:
            e_id, name, author, time_created, time_modified, time_begin, time_end, \
                location_id, initial_e_id, tags, attendees, is_periodic, wikipage = row
            time_begin = datetime.fromtimestamp(time_begin, utc)
            event = Event(env, e_id, name, author,
                          datetime.fromtimestamp(time_created, utc),
                          datetime.fromtimestamp(time_modified, utc),
                          time_begin, datetime.fromtimestamp(time_end, utc),
                          location_id, initial_e_id,
                          tags, attendees, is_periodic, wikipage)
            if is_periodic:
                rrules = EventRRule.fetch_by_event_rrules(env, e_id, event.time_begin)
                if rrules:
                    dt = rrules.after(datetime.now(utc), inc=True)
                    if not dt:
                        res.append(event)
                        continue
                    delta = event.time_end - event.time_begin
                    s = Event(env,
                              e_id,
                              name,
                              author,
                              event.time_created,
                              event.time_modified,
                              dt,
                              dt + delta,
                              location_id,
                              e_id,
                              tags,
                              attendees,
                              False,
                              wikipage)
                    res.append(s)
            else:
                res.append(event)
        return res

    @staticmethod
    def fetch_all_with_rrule(env):
        db = env.get_db_cnx()
        cursor = db.cursor()
        cursor.execute("SELECT * from events;")
        rows = cursor.fetchall()
        if not rows:
            return []
        res = []
        for row in rows:
            e_id, name, author, time_created, time_modified, time_begin, time_end, \
                location_id, initial_e_id, tags, attendees, is_periodic, wikipage = row
            time_begin = datetime.fromtimestamp(time_begin, utc)
            event = Event(env, e_id, name, author,
                          datetime.fromtimestamp(time_created, utc),
                          datetime.fromtimestamp(time_modified, utc),
                          time_begin, datetime.fromtimestamp(time_end, utc),
                          location_id, initial_e_id,
                          tags, attendees, is_periodic, wikipage)
            res.append(event)
            if is_periodic:
                try:
                    event.rrule = EventRRule.fetch_by_event(env, e_id)[0]
                except Exception:
                    pass
        return res

    @staticmethod
    def fetch_as_ical(env):
        db = env.get_db_cnx()
        cursor = db.cursor()
        cursor.execute("SELECT * from events;")
        rows = cursor.fetchall()
        if not rows:
            return []
        res = []
        for row in rows:
            e_id, name, author, time_created, time_modified, time_begin, time_end, \
                location_id, initial_e_id, tags, attendees, is_periodic, wikipage = row
            time_begin = datetime.fromtimestamp(time_begin, utc)
            event = Event(env, e_id, name, author,
                          datetime.fromtimestamp(time_created, utc),
                          datetime.fromtimestamp(time_modified, utc),
                          time_begin, datetime.fromtimestamp(time_end, utc),
                          location_id, initial_e_id,
                          tags, attendees, is_periodic, wikipage)
            event.rrules = EventRRule.fetch_by_event_ical(env, int(e_id))
            # Two display alarms per event: 10 minutes and 2 days before it
            # starts ("Gleich beginnt das Event" / "In 2 Tagen beginnt das
            # Event" = "The event is about to start" / "The event starts in
            # 2 days").
            event.alarms = [
                u"""
BEGIN:VALARM
DESCRIPTION:Gleich beginnt das Event '%s'
ACTION:DISPLAY
TRIGGER;VALUE=DURATION:-PT10M
END:VALARM
BEGIN:VALARM
DESCRIPTION:In 2 Tagen beginnt das Event '%s'
ACTION:DISPLAY
TRIGGER;VALUE=DURATION:-P2D
END:VALARM""" % (event.name, event.name)]
            res.append(event)
        return res

    @staticmethod
    def get_recurrency_data(env, e_id):
        """Return the EventRRule and EventRDate records of an event as one list."""
        data = EventRRule.fetch_by_event(env, e_id)
        data.extend(EventRDate.fetch_by_event(env, e_id))
        return data

    @staticmethod
    def _data_fetch_by_period(env, start_dt, end_dt, is_periodic=False, locations=[]):
        db = env.get_db_cnx()
        cursor = db.cursor()
        if is_periodic:
            cursor.execute("SELECT * from events where is_periodic=1;")
            return cursor.fetchall()
        else:
            query = ("SELECT * from events where is_periodic='0' and "
                     "(time_begin between %s and %s or time_end between %s and %s)")
            if locations:
                query += " and location_id in (%s)" % ",".join(map(sql_escape_percent, locations))
            query += ";"
            s = to_timestamp(start_dt)
            e = to_timestamp(end_dt)
            cursor.execute(query, (s, e, s, e))
            return cursor.fetchall()

    @staticmethod
    def fetch_by_period_dict(env, start_dt, end_dt, locations=[]):
        """Return a dictionary with dates as keys and lists of events as values.

        Some events may be periodic; their recurrence rulesets are built and
        expanded here, so this method is somewhat CPU intensive.  To tell
        singular and recurring events apart, check Event.is_periodic.
        """
        rows = Event._data_fetch_by_period(env, start_dt, end_dt, locations=locations)
        res = defaultdict(list)
        if rows:
            for row in rows:
                e_id, name, author, time_created, time_modified, time_begin, time_end, \
                    location_id, initial_e_id, tags, attendees, is_periodic, wikipage = row
                time_begin = datetime.fromtimestamp(time_begin, utc)
                res[time_begin.date()].append(
                    Event(env, e_id, name, author,
                          datetime.fromtimestamp(time_created, utc),
                          datetime.fromtimestamp(time_modified, utc),
                          time_begin, datetime.fromtimestamp(time_end, utc),
                          location_id, initial_e_id,
                          tags, attendees, False, wikipage))
        rows = Event._data_fetch_by_period(env, start_dt, end_dt, is_periodic=True,
                                           locations=locations)
        if not rows:
            return res
        for row in rows:
            e_id, name, author, time_created, time_modified, time_begin, time_end, \
                location_id, initial_e_id, tags, attendees, is_periodic, wikipage = row
            time_begin = datetime.fromtimestamp(time_begin, utc)
            time_end = datetime.fromtimestamp(time_end, utc)
            delta = time_end - time_begin
            rrules = EventRRule.fetch_by_event_rrules(env, e_id, time_begin)
            if not rrules:
                raise ValueError("missing rruleset")
            try:
                myrrule = EventRRule.fetch_by_event(env, e_id)[0]
            except Exception, e:
                myrrule = None
            try:
                excluding = EventRDate.fetch_by_event(env, e_id)
            except Exception, e:
                excluding = []
            for i in excluding:
                rrules.exdate(i.erd_datetime.replace(hour=time_begin.hour,
                                                     minute=time_begin.minute))
            followups = rrules.between(start_dt, end_dt, True)
            for i in followups:
                dt = datetime(i.year, i.month, i.day, time_begin.hour,
                              time_begin.minute, tzinfo=utc)
                s = Event(env,
                          e_id,
                          name,
                          author,
                          datetime.fromtimestamp(time_created, utc),
                          datetime.fromtimestamp(time_modified, utc),
                          dt,
                          dt + delta,
                          location_id,
                          e_id,
                          tags,
                          attendees,
                          False,
                          wikipage)
                if myrrule:
                    s.rrule = myrrule
                res[i.date()].append(s)
        return res
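
    # Illustrative sketch (comment only): a month view would typically call
    # fetch_by_period_dict() with the visible range and then iterate day by
    # day.  ``env`` is assumed to be a Trac Environment.
    #
    #     per_day = Event.fetch_by_period_dict(env,
    #                                          datetime(2009, 5, 1, tzinfo=utc),
    #                                          datetime(2009, 6, 1, tzinfo=utc))
    #     for day, events in sorted(per_day.items()):
    #         ...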

    @staticmethod
    def fetch_by_period_list(env, start_dt, end_dt, locations=[]):
        """Return a flat list of events between start_dt and end_dt.

        This is cheaper than Event.fetch_by_period_dict when no per-day
        grouping is needed.
        """
        rows = Event._data_fetch_by_period(env, start_dt, end_dt, locations=locations)
        res = list()
        if rows:
            for row in rows:
                e_id, name, author, time_created, time_modified, time_begin, time_end, \
                    location_id, initial_e_id, tags, attendees, is_periodic, wikipage = row
                time_begin = datetime.fromtimestamp(time_begin, utc)
                res.append(Event(env, e_id, name, author,
                                 datetime.fromtimestamp(time_created, utc),
                                 datetime.fromtimestamp(time_modified, utc),
                                 time_begin, datetime.fromtimestamp(time_end, utc),
                                 location_id, initial_e_id, tags, attendees,
                                 False, wikipage))
        rows = Event._data_fetch_by_period(env, start_dt, end_dt, True,
                                           locations=locations)
        if not rows:
            return res
        for row in rows:
            e_id, name, author, time_created, time_modified, time_begin, time_end, \
                location_id, initial_e_id, tags, attendees, is_periodic, wikipage = row
            time_begin = datetime.fromtimestamp(time_begin, utc)
            time_end = datetime.fromtimestamp(time_end, utc)
            time_created = datetime.fromtimestamp(time_created, utc)
            time_modified = datetime.fromtimestamp(time_modified, utc)
            delta = time_end - time_begin
            rrules = EventRRule.fetch_by_event_rrules(env, e_id, time_begin)
            followups = rrules.between(start_dt, end_dt, inc=True)
            try:
                myrrule = EventRRule.fetch_by_event(env, e_id)[0]
            except Exception, e:
                myrrule = None
            for i in followups:
                dt = datetime(i.year, i.month, i.day, time_begin.hour,
                              time_begin.minute, tzinfo=utc)
                s = Event(env,
                          e_id,
                          name,
                          author,
                          time_created,
                          time_modified,
                          dt,
                          dt + delta,
                          location_id,
                          e_id,
                          tags,
                          attendees,
                          False,
                          wikipage)
                if myrrule:
                    s.rrule = myrrule
                res.append(s)
        return res
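
    # Illustrative sketch (comment only): an upcoming-events box that only
    # needs a flat list for the next two weeks.
    #
    #     now = datetime.now(utc)
    #     upcoming = Event.fetch_by_period_list(env, now, now + timedelta(days=14))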

    def commit(self, conn=None):
        db = conn and conn or self.env.get_db_cnx()
        cursor = db.cursor()
        try:
            cursor.execute("INSERT INTO events "
                           "(name,author,time_created,time_modified,time_begin,"
                           "time_end,location_id,initial_e_id,tags,attendees,"
                           "is_periodic,wikipage) "
                           "VALUES(%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s);",
                           (self.name,
                            self.author,
                            to_timestamp(self.time_created),
                            to_timestamp(self.time_modified),
                            to_timestamp(self.time_begin),
                            to_timestamp(self.time_end),
                            self.location_id,
                            self.initial_e_id,
                            self.tags,
                            self.attendees,
                            int(self.is_periodic),
                            self.wikipage))
            db.commit()
            self.e_id = db.get_last_id(cursor, 'events')
        except Exception, e:
            db.rollback()
            raise

    @staticmethod
    def delete(env, e_id):
        db = env.get_db_cnx()
        cursor = db.cursor()
        try:
            cursor.execute("DELETE FROM events WHERE e_id = %s", (e_id,))
            db.commit()
        except Exception, e:
            db.rollback()
            raise

    def update(self, conn=None):
        db = conn and conn or self.env.get_db_cnx()
        cursor = db.cursor()
        try:
            cursor.execute("UPDATE events "
                           "SET name=%s, "
                           "author=%s, "
                           "time_created=%s, "
                           "time_modified=%s, "
                           "time_begin=%s, "
                           "time_end=%s, "
                           "initial_e_id=%s, "
                           "location_id=%s, "
                           "tags=%s, "
                           "attendees=%s, "
                           "is_periodic=%s, "
                           "wikipage=%s "
                           "WHERE e_id=%s",
                           (self.name, self.author,
                            to_timestamp(self.time_created),
                            to_timestamp(self.time_modified),
                            to_timestamp(self.time_begin),
                            to_timestamp(self.time_end),
                            self.initial_e_id, self.location_id,
                            self.tags, self.attendees,
                            int(self.is_periodic), self.wikipage, self.e_id))
            db.commit()
        except Exception, e:
            db.rollback()
            raise

    def __str__(self):
        return "<Event: %d, %s, %s, %s>" % (self.e_id, self.name,
                                            self.time_begin, self.time_end)


class EventModelProvider(Component):
    """Sets up and upgrades the database tables used by the event model."""

    implements(IEnvironmentSetupParticipant)

    SCHEMA = [
        Table('events', key='e_id')[
            Column('e_id', auto_increment=True),
            Column('name'),
            Column('author'),
            Column('time_created', type='int'),
            Column('time_modified', type='int'),
            Column('time_begin', type='int'),
            Column('time_end', type='int'),
            Column('location_id', type='int'),
            Column('initial_e_id', type='int'),
            Column('tags'),
            Column('attendees'),
            Column('is_periodic', type="int"),
            Column('wikipage'),
            Index(['name'])],

        Table('event_wikipages', key='ewp_id')[
            Column('ewp_id', auto_increment=True),
            Column('e_id', type="int"),
            Column('time_begin', type="int"),
            Column('wikipage')],

        Table('event_rrules', key='err_id')[
            Column('err_id', auto_increment=True),
            Column('e_id', type="int"),
            Column('exclude', type="int"),
            Column('freq', type="int"),
            Column('interval', type="int"),
            Column('count', type="int"),
            Column('until', type="int"),
            Column('bysetpos', type="int"),
            Column('bymonth', type="int"),
            Column('bymonthday', type="int"),
            Column('byyearday', type="int"),
            Column('byweeknumber'),
            Column('byweekday'),
            Column('byweekdayocc')],

        Table('event_rdates', key='erd_id')[
            Column('erd_id', auto_increment=True),
            Column('e_id', type="int"),
            Column('erd_exclude', type="int"),
            Column('erd_datetime', type="int")]]

    #TERMINE_DATA = (
        #(u"Offizielles Treffen",
        #to_timestamp(datetime.now(utc)),
        #to_timestamp(datetime.now(utc)),
        #to_timestamp(datetime(2009,5,7,17,0, tzinfo=utc)),
        #to_timestamp(datetime(2009,5,7,20,0, tzinfo=utc)),
        #1,
        #None,
        #"foo bar",
        #"heinz horst elke peter",
        #True),
        #(u"Topic Treffen",
        #to_timestamp(datetime.now(utc)),
        #to_timestamp(datetime.now(utc)),
        #to_timestamp(datetime(2009,5,8,17,0, tzinfo=utc)),
        #to_timestamp(datetime(2009,5,8,20,0, tzinfo=utc)),
        #1,
        #None,
        #"foo bar",
        #"heinz horst elke peter",
        #True),
        #(u"Zombies Beamer Action",
        #to_timestamp(datetime.now(utc)),
        #to_timestamp(datetime.now(utc)),
        #to_timestamp(datetime(2009,7,2,17,0, tzinfo=utc)),
        #to_timestamp(datetime(2009,7,2,23,0, tzinfo=utc)),
        #1,
        #None,
        #"gamez beamer",
        #"syn knuddel kalle lucifer schnarchnase",
        #False))

    TERMINE_PERIOD_DATA = (
        (1, 0, 3, 15, 30, None),
        (2, 0, 3, 30, 15, None))

    def environment_created(self):
        self._create_models(self.env.get_db_cnx())

    def environment_needs_upgrade(self, db):
        """First version - nothing to migrate, but the tables may still have
        to be created."""
        cursor = db.cursor()
        try:
            cursor.execute("select count(*) from events")
            cursor.fetchone()
            cursor.execute("select count(*) from event_rrules")
            cursor.fetchone()
            cursor.execute("select count(*) from event_rdates")
            cursor.fetchone()
            cursor.execute("select count(*) from event_wikipages")
            cursor.fetchone()
            return False
        except:
            db.rollback()
            return True

    def upgrade_environment(self, db):
        """No schema migrations yet; just create any missing tables."""
        self._create_models(db)

    def _create_models(self, db):
        """Create the model tables.  Called when a new Trac environment is
        created and from upgrade_environment()."""
        db_backend = None
        try:
            from trac.db import DatabaseManager
            db_backend, _ = DatabaseManager(self.env)._get_connector()
        except ImportError:
            db_backend = self.env.get_db_cnx()
        try:
            cursor = db.cursor()
            for table in self.SCHEMA:
                try:
                    for stmt in db_backend.to_sql(table):
                        self.env.log.debug(stmt)
                        cursor.execute(stmt)
                except Exception, e:
                    self.env.log.exception(e)
            db.commit()
            #cursor.executemany("""INSERT INTO 'events'
            #    (name, time_created, time_modified, time_begin, time_end, location_id, initial_e_id, tags, attendees, is_periodic)
            #    VALUES(%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)""", self.TERMINE_DATA)
            #cursor.executemany("""INSERT INTO 'event_rrules'
            #    (e_id,exclude,freq,interval,count,until)
            #    VALUES(%s,%s,%s,%s,%s,%s)""", self.TERMINE_PERIOD_DATA)
            #db.commit()
            #gen_wiki_page(self.env, "hotshelf", "events/1", u"= Offizielles Treffen = \nDiese Page kann mit Ideen und Inhalten des Events/Treffs gefüllt werden", "localhost")
            #gen_wiki_page(self.env, "hotshelf", "events/2", u"= Topic Treffen = \nDiese Page kann mit Ideen und Inhalten des Events/Treffs gefüllt werden", "localhost")
        except Exception, e:
            db.rollback()
            raise