Coverage for Database.py: 21%
2287 statements
« prev ^ index » next coverage.py v7.6.1, created at 2024-09-28 16:41 +0000
1#!/usr/bin/env python
2# -*- coding: utf-8 -*-
3# # Copyright 2008-2011, Ray E. Barker
4#
5# This program is free software; you can redistribute it and/or modify
6# it under the terms of the GNU General Public License as published by
7# the Free Software Foundation; either version 2 of the License, or
8# (at your option) any later version.
9#
10# This program is distributed in the hope that it will be useful,
11# but WITHOUT ANY WARRANTY; without even the implied warranty of
12# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
13# GNU General Public License for more details.
14#
15# You should have received a copy of the GNU General Public License
16# along with this program; if not, write to the Free Software
17# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
18"""Database.py
20Create and manage the database objects.
21"""
22from __future__ import print_function
23from __future__ import division
24import future
26from past.utils import old_div
28# #import L10n
29# #_ = L10n.get_translation()
31########################################################################
33# TODO: - rebuild indexes / vacuum option
34# - check speed of get_stats_from_hand() - add log info
35# - check size of db, seems big? (mysql)
36######### investigate size of mysql db (200K for just 7K hands? 2GB for 140K hands?)
38# postmaster -D /var/lib/pgsql/data
40# Standard Library modules
41import os
42import sys
43import traceback
44from datetime import datetime, date, time, timedelta
45from time import time, strftime, sleep
46from decimal_wrapper import Decimal
47import string
48import re
51import math
52import pytz
53import csv
54import logging
55import random
# Matches every character that is NOT an ASCII letter (used to strip
# non-letter characters from strings).
re_char = re.compile('[^a-zA-Z]')
# Parses "insert into <table> (<cols>) values" out of an INSERT statement;
# DOTALL because the column list may span multiple lines.
re_insert = re.compile(r"insert\sinto\s(?P<TABLENAME>[A-Za-z]+)\s(?P<COLUMNS>\(.+?\))\s+values", re.DOTALL)
61# FreePokerTools modules
62import SQL
63import Card
64import Charset
65from Exceptions import *
66import Configuration
if __name__ == "__main__":
    # Run stand-alone: no host application has configured logging yet,
    # so direct the log to a local file.
    Configuration.set_logfile("fpdb-log.txt")
# logging has been set up in fpdb.py or HUD_main.py, use their settings:
log = logging.getLogger("db")
# Other library modules
try:
    # sqlalchemy's pool module is used below to wrap the raw DB-API modules
    # in a connection pool; optional.
    import sqlalchemy.pool as pool
    use_pool = True
except ImportError:
    log.info(("Not using sqlalchemy connection pool."))
    use_pool = False

try:
    # numpy's var() backs the VARIANCE aggregate registered with sqlite below;
    # optional.
    from numpy import var
    use_numpy = True
except ImportError:
    log.info(("Not using numpy to define variance in sqlite."))
    use_numpy = False
# Schema version this code expects; compared against the Settings table in
# check_version() and bumped whenever the database schema changes.
DB_VERSION = 216
91# Variance created as sqlite has a bunch of undefined aggregate functions.
# Variance created as sqlite has a bunch of undefined aggregate functions.
class VARIANCE(object):
    """Custom sqlite aggregate computing the (population) variance via numpy."""

    def __init__(self):
        # Values accumulated so far, one per step() call.
        self._samples = []

    def step(self, value):
        """Accumulate one row's value (called by sqlite per row)."""
        self._samples.append(value)

    def finalize(self):
        """Return the variance of all accumulated values as a plain float."""
        return float(var(self._samples))
class sqlitemath(object):
    """Holds a mod() implementation for registration as a sqlite function."""

    def mod(self, a, b):
        """Return a modulo b (Python semantics: result takes the sign of b)."""
        remainder = a % b
        return remainder
def adapt_decimal(d):
    """sqlite adapter: store Decimal values as their string representation."""
    text = str(d)
    return text
def convert_decimal(s):
    """sqlite converter: turn a stored column value (bytes) back into a Decimal.

    sqlite hands converters the raw value as bytes, so decode to text first.
    The debug print() calls that were here fired on every decimal column
    read and have been removed.
    """
    return Decimal(s.decode())
# These are for appendStats. Insert new stats at the right place, because
# SQL needs strict order.
# Keys used to index into player data in storeHandsPlayers.
# NOTE: the order here must match the column order of the HandsPlayers
# insert statement in SQL.py; the list is reversed below so later code can
# pop() values off the "front" efficiently.
HANDS_PLAYERS_KEYS = [
    'startCash',
    'effStack',
    'startBounty',
    'endBounty',
    'seatNo',
    'sitout',
    'card1',
    'card2',
    'card3',
    'card4',
    'card5',
    'card6',
    'card7',
    'card8',
    'card9',
    'card10',
    'card11',
    'card12',
    'card13',
    'card14',
    'card15',
    'card16',
    'card17',
    'card18',
    'card19',
    'card20',
    'common',
    'committed',
    'winnings',
    'rake',
    'rakeDealt',
    'rakeContributed',
    'rakeWeighted',
    'totalProfit',
    'allInEV',
    'street0VPIChance',
    'street0VPI',
    'street1Seen',
    'street2Seen',
    'street3Seen',
    'street4Seen',
    'sawShowdown',
    'showed',
    'street0AllIn',
    'street1AllIn',
    'street2AllIn',
    'street3AllIn',
    'street4AllIn',
    'wentAllIn',
    'street0AggrChance',
    'street0Aggr',
    'street1Aggr',
    'street2Aggr',
    'street3Aggr',
    'street4Aggr',
    'street1CBChance',
    'street2CBChance',
    'street3CBChance',
    'street4CBChance',
    'street1CBDone',
    'street2CBDone',
    'street3CBDone',
    'street4CBDone',
    'wonWhenSeenStreet1',
    'wonWhenSeenStreet2',
    'wonWhenSeenStreet3',
    'wonWhenSeenStreet4',
    'wonAtSD',
    'position',
    'street0InPosition',
    'street1InPosition',
    'street2InPosition',
    'street3InPosition',
    'street4InPosition',
    'street0FirstToAct',
    'street1FirstToAct',
    'street2FirstToAct',
    'street3FirstToAct',
    'street4FirstToAct',
    'tourneysPlayersId',
    'startCards',
    'street0CalledRaiseChance',
    'street0CalledRaiseDone',
    'street0_2BChance',
    'street0_2BDone',
    'street0_3BChance',
    'street0_3BDone',
    'street0_4BChance',
    'street0_4BDone',
    'street0_C4BChance',
    'street0_C4BDone',
    'street0_FoldTo2BChance',
    'street0_FoldTo2BDone',
    'street0_FoldTo3BChance',
    'street0_FoldTo3BDone',
    'street0_FoldTo4BChance',
    'street0_FoldTo4BDone',
    'street0_SqueezeChance',
    'street0_SqueezeDone',
    'raiseToStealChance',
    'raiseToStealDone',
    'stealChance',
    'stealDone',
    'success_Steal',
    'otherRaisedStreet0',
    'otherRaisedStreet1',
    'otherRaisedStreet2',
    'otherRaisedStreet3',
    'otherRaisedStreet4',
    'foldToOtherRaisedStreet0',
    'foldToOtherRaisedStreet1',
    'foldToOtherRaisedStreet2',
    'foldToOtherRaisedStreet3',
    'foldToOtherRaisedStreet4',
    'raiseFirstInChance',
    'raisedFirstIn',
    'foldBbToStealChance',
    'foldedBbToSteal',
    'foldSbToStealChance',
    'foldedSbToSteal',
    'foldToStreet1CBChance',
    'foldToStreet1CBDone',
    'foldToStreet2CBChance',
    'foldToStreet2CBDone',
    'foldToStreet3CBChance',
    'foldToStreet3CBDone',
    'foldToStreet4CBChance',
    'foldToStreet4CBDone',
    'street1CheckCallRaiseChance',
    'street1CheckCallDone',
    'street1CheckRaiseDone',
    'street2CheckCallRaiseChance',
    'street2CheckCallDone',
    'street2CheckRaiseDone',
    'street3CheckCallRaiseChance',
    'street3CheckCallDone',
    'street3CheckRaiseDone',
    'street4CheckCallRaiseChance',
    'street4CheckCallDone',
    'street4CheckRaiseDone',
    'street0Calls',
    'street1Calls',
    'street2Calls',
    'street3Calls',
    'street4Calls',
    'street0Bets',
    'street1Bets',
    'street2Bets',
    'street3Bets',
    'street4Bets',
    'street0Raises',
    'street1Raises',
    'street2Raises',
    'street3Raises',
    'street4Raises',
    'street1Discards',
    'street2Discards',
    'street3Discards',
    'handString'
]

# Just like STATS_KEYS, this lets us efficiently add data at the
# "beginning" later.
HANDS_PLAYERS_KEYS.reverse()
# Keys for the per-player aggregate caches (HudCache / SessionsCache /
# TourneysCache).  As with HANDS_PLAYERS_KEYS, the order must match the
# corresponding SQL column order.
CACHE_KEYS = [
    'n',
    'street0VPIChance',
    'street0VPI',
    'street0AggrChance',
    'street0Aggr',
    'street0CalledRaiseChance',
    'street0CalledRaiseDone',
    'street0_2BChance',
    'street0_2BDone',
    'street0_3BChance',
    'street0_3BDone',
    'street0_4BChance',
    'street0_4BDone',
    'street0_C4BChance',
    'street0_C4BDone',
    'street0_FoldTo2BChance',
    'street0_FoldTo2BDone',
    'street0_FoldTo3BChance',
    'street0_FoldTo3BDone',
    'street0_FoldTo4BChance',
    'street0_FoldTo4BDone',
    'street0_SqueezeChance',
    'street0_SqueezeDone',
    'raiseToStealChance',
    'raiseToStealDone',
    'stealChance',
    'stealDone',
    'success_Steal',
    'street1Seen',
    'street2Seen',
    'street3Seen',
    'street4Seen',
    'sawShowdown',
    'street1Aggr',
    'street2Aggr',
    'street3Aggr',
    'street4Aggr',
    'otherRaisedStreet0',
    'otherRaisedStreet1',
    'otherRaisedStreet2',
    'otherRaisedStreet3',
    'otherRaisedStreet4',
    'foldToOtherRaisedStreet0',
    'foldToOtherRaisedStreet1',
    'foldToOtherRaisedStreet2',
    'foldToOtherRaisedStreet3',
    'foldToOtherRaisedStreet4',
    'wonWhenSeenStreet1',
    'wonWhenSeenStreet2',
    'wonWhenSeenStreet3',
    'wonWhenSeenStreet4',
    'wonAtSD',
    'raiseFirstInChance',
    'raisedFirstIn',
    'foldBbToStealChance',
    'foldedBbToSteal',
    'foldSbToStealChance',
    'foldedSbToSteal',
    'street1CBChance',
    'street1CBDone',
    'street2CBChance',
    'street2CBDone',
    'street3CBChance',
    'street3CBDone',
    'street4CBChance',
    'street4CBDone',
    'foldToStreet1CBChance',
    'foldToStreet1CBDone',
    'foldToStreet2CBChance',
    'foldToStreet2CBDone',
    'foldToStreet3CBChance',
    'foldToStreet3CBDone',
    'foldToStreet4CBChance',
    'foldToStreet4CBDone',
    'common',
    'committed',
    'winnings',
    'rake',
    'rakeDealt',
    'rakeContributed',
    'rakeWeighted',
    'totalProfit',
    'allInEV',
    'showdownWinnings',
    'nonShowdownWinnings',
    'street1CheckCallRaiseChance',
    'street1CheckCallDone',
    'street1CheckRaiseDone',
    'street2CheckCallRaiseChance',
    'street2CheckCallDone',
    'street2CheckRaiseDone',
    'street3CheckCallRaiseChance',
    'street3CheckCallDone',
    'street3CheckRaiseDone',
    'street4CheckCallRaiseChance',
    'street4CheckCallDone',
    'street4CheckRaiseDone',
    'street0Calls',
    'street1Calls',
    'street2Calls',
    'street3Calls',
    'street4Calls',
    'street0Bets',
    'street1Bets',
    'street2Bets',
    'street3Bets',
    'street4Bets',
    'street0Raises',
    'street1Raises',
    'street2Raises',
    'street3Raises',
    'street4Raises',
    'street1Discards',
    'street2Discards',
    'street3Discards'
    ]
407class Database(object):
    # Numeric backend codes; these double as indexes into the `indexes` and
    # `foreignKeys` lists below, which is why they start at 2.
    MYSQL_INNODB = 2
    PGSQL = 3
    SQLITE = 4

    hero_hudstart_def = '1999-12-31'     # default for length of Hero's stats in HUD
    villain_hudstart_def = '1999-12-31'  # default for length of Villain's stats in HUD

    # Data Structures for index and foreign key creation
    # drop_code is an int with possible values:  0 - don't drop for bulk import
    #                                            1 - drop during bulk import
    # db differences:
    # - note that mysql automatically creates indexes on constrained columns when
    #   foreign keys are created, while postgres does not. Hence the much longer list
    #   of indexes is required for postgres.
    # all primary keys are left on all the time
    #
    #             table     column           drop_code
    # Per-backend plain-index definitions, indexed by backend code
    # (MYSQL_INNODB=2, PGSQL=3, SQLITE=4); 'drop' flags indexes that are
    # dropped during bulk imports and recreated afterwards.
    indexes = [
        [ ] # no db with index 0
        , [ ] # no db with index 1
        , [ # indexes for mysql (list index 2) (foreign keys not here, in next data structure)
            # {'tab':'Players', 'col':'name', 'drop':0} unique indexes not dropped
            # {'tab':'Hands', 'col':'siteHandNo', 'drop':0} unique indexes not dropped
            #, {'tab':'Tourneys', 'col':'siteTourneyNo', 'drop':0} unique indexes not dropped
          ]
        , [ # indexes for postgres (list index 3)
            {'tab':'Gametypes', 'col':'siteId', 'drop':0}
            , {'tab':'Hands', 'col':'tourneyId', 'drop':0} # mct 22/3/09
            , {'tab':'Hands', 'col':'gametypeId', 'drop':0} # mct 22/3/09
            , {'tab':'Hands', 'col':'sessionId', 'drop':0} # mct 22/3/09
            , {'tab':'Hands', 'col':'fileId', 'drop':0} # mct 22/3/09
            #, {'tab':'Hands', 'col':'siteHandNo', 'drop':0} unique indexes not dropped
            , {'tab':'HandsActions', 'col':'handId', 'drop':1}
            , {'tab':'HandsActions', 'col':'playerId', 'drop':1}
            , {'tab':'HandsActions', 'col':'actionId', 'drop':1}
            , {'tab':'HandsStove', 'col':'handId', 'drop':1}
            , {'tab':'HandsStove', 'col':'playerId', 'drop':1}
            , {'tab':'HandsStove', 'col':'hiLo', 'drop':1}
            , {'tab':'HandsPots', 'col':'handId', 'drop':1}
            , {'tab':'HandsPots', 'col':'playerId', 'drop':1}
            , {'tab':'Boards', 'col':'handId', 'drop':1}
            , {'tab':'HandsPlayers', 'col':'handId', 'drop':1}
            , {'tab':'HandsPlayers', 'col':'playerId', 'drop':1}
            , {'tab':'HandsPlayers', 'col':'tourneysPlayersId', 'drop':0}
            , {'tab':'HandsPlayers', 'col':'startCards', 'drop':1}
            , {'tab':'HudCache', 'col':'gametypeId', 'drop':1}
            , {'tab':'HudCache', 'col':'playerId', 'drop':0}
            , {'tab':'HudCache', 'col':'tourneyTypeId', 'drop':0}
            , {'tab':'Sessions', 'col':'weekId', 'drop':1}
            , {'tab':'Sessions', 'col':'monthId', 'drop':1}
            , {'tab':'SessionsCache', 'col':'sessionId', 'drop':1}
            , {'tab':'SessionsCache', 'col':'gametypeId', 'drop':1}
            , {'tab':'SessionsCache', 'col':'playerId', 'drop':0}
            , {'tab':'TourneysCache', 'col':'sessionId', 'drop':1}
            , {'tab':'TourneysCache', 'col':'tourneyId', 'drop':1}
            , {'tab':'TourneysCache', 'col':'playerId', 'drop':0}
            , {'tab':'Players', 'col':'siteId', 'drop':1}
            #, {'tab':'Players', 'col':'name', 'drop':0} unique indexes not dropped
            , {'tab':'Tourneys', 'col':'tourneyTypeId', 'drop':1}
            , {'tab':'Tourneys', 'col':'sessionId', 'drop':1}
            #, {'tab':'Tourneys', 'col':'siteTourneyNo', 'drop':0} unique indexes not dropped
            , {'tab':'TourneysPlayers', 'col':'playerId', 'drop':0}
            #, {'tab':'TourneysPlayers', 'col':'tourneyId', 'drop':0} unique indexes not dropped
            , {'tab':'TourneyTypes', 'col':'siteId', 'drop':0}
            , {'tab':'Backings', 'col':'tourneysPlayersId', 'drop':0}
            , {'tab':'Backings', 'col':'playerId', 'drop':0}
            , {'tab':'RawHands', 'col':'id', 'drop':0}
            , {'tab':'RawTourneys', 'col':'id', 'drop':0}
          ]
        , [ # indexes for sqlite (list index 4)
            {'tab':'Hands', 'col':'tourneyId', 'drop':0}
            , {'tab':'Hands', 'col':'gametypeId', 'drop':0}
            , {'tab':'Hands', 'col':'sessionId', 'drop':0}
            , {'tab':'Hands', 'col':'fileId', 'drop':0}
            , {'tab':'Boards', 'col':'handId', 'drop':0}
            , {'tab':'Gametypes', 'col':'siteId', 'drop':0}
            , {'tab':'HandsPlayers', 'col':'handId', 'drop':0}
            , {'tab':'HandsPlayers', 'col':'playerId', 'drop':0}
            , {'tab':'HandsPlayers', 'col':'tourneysPlayersId', 'drop':0}
            , {'tab':'HandsActions', 'col':'handId', 'drop':0}
            , {'tab':'HandsActions', 'col':'playerId', 'drop':0}
            , {'tab':'HandsActions', 'col':'actionId', 'drop':1}
            , {'tab':'HandsStove', 'col':'handId', 'drop':0}
            , {'tab':'HandsStove', 'col':'playerId', 'drop':0}
            , {'tab':'HandsPots', 'col':'handId', 'drop':0}
            , {'tab':'HandsPots', 'col':'playerId', 'drop':0}
            , {'tab':'HudCache', 'col':'gametypeId', 'drop':1}
            , {'tab':'HudCache', 'col':'playerId', 'drop':0}
            , {'tab':'HudCache', 'col':'tourneyTypeId', 'drop':0}
            , {'tab':'Sessions', 'col':'weekId', 'drop':1}
            , {'tab':'Sessions', 'col':'monthId', 'drop':1}
            , {'tab':'SessionsCache', 'col':'sessionId', 'drop':1}
            , {'tab':'SessionsCache', 'col':'gametypeId', 'drop':1}
            , {'tab':'SessionsCache', 'col':'playerId', 'drop':0}
            , {'tab':'TourneysCache', 'col':'sessionId', 'drop':1}
            , {'tab':'TourneysCache', 'col':'tourneyId', 'drop':1}
            , {'tab':'TourneysCache', 'col':'playerId', 'drop':0}
            , {'tab':'Players', 'col':'siteId', 'drop':1}
            , {'tab':'Tourneys', 'col':'tourneyTypeId', 'drop':1}
            , {'tab':'Tourneys', 'col':'sessionId', 'drop':1}
            , {'tab':'TourneysPlayers', 'col':'playerId', 'drop':0}
            , {'tab':'TourneyTypes', 'col':'siteId', 'drop':0}
            , {'tab':'Backings', 'col':'tourneysPlayersId', 'drop':0}
            , {'tab':'Backings', 'col':'playerId', 'drop':0}
            , {'tab':'RawHands', 'col':'id', 'drop':0}
            , {'tab':'RawTourneys', 'col':'id', 'drop':0}
          ]
        ]
    # Per-backend foreign-key definitions, indexed by backend code like
    # `indexes` above.  sqlite gets none (list index 4): its FK support is
    # not used here.  The mysql (2) and postgres (3) lists are identical.
    foreignKeys = [
        [ ] # no db with index 0
        , [ ] # no db with index 1
        , [ # foreign keys for mysql (index 2)
            {'fktab':'Hands', 'fkcol':'tourneyId', 'rtab':'Tourneys', 'rcol':'id', 'drop':1}
            , {'fktab':'Hands', 'fkcol':'gametypeId', 'rtab':'Gametypes', 'rcol':'id', 'drop':1}
            , {'fktab':'Hands', 'fkcol':'sessionId', 'rtab':'Sessions', 'rcol':'id', 'drop':1}
            , {'fktab':'Hands', 'fkcol':'fileId', 'rtab':'Files', 'rcol':'id', 'drop':1}
            , {'fktab':'Boards', 'fkcol':'handId', 'rtab':'Hands', 'rcol':'id', 'drop':1}
            , {'fktab':'HandsPlayers', 'fkcol':'handId', 'rtab':'Hands', 'rcol':'id', 'drop':1}
            , {'fktab':'HandsPlayers', 'fkcol':'playerId', 'rtab':'Players', 'rcol':'id', 'drop':1}
            , {'fktab':'HandsPlayers', 'fkcol':'tourneysPlayersId','rtab':'TourneysPlayers','rcol':'id', 'drop':1}
            , {'fktab':'HandsPlayers', 'fkcol':'startCards', 'rtab':'StartCards', 'rcol':'id', 'drop':1}
            , {'fktab':'HandsActions', 'fkcol':'handId', 'rtab':'Hands', 'rcol':'id', 'drop':1}
            , {'fktab':'HandsActions', 'fkcol':'playerId', 'rtab':'Players', 'rcol':'id', 'drop':1}
            , {'fktab':'HandsActions', 'fkcol':'actionId', 'rtab':'Actions', 'rcol':'id', 'drop':1}
            , {'fktab':'HandsStove', 'fkcol':'handId', 'rtab':'Hands', 'rcol':'id', 'drop':1}
            , {'fktab':'HandsStove', 'fkcol':'playerId', 'rtab':'Players', 'rcol':'id', 'drop':1}
            , {'fktab':'HandsStove', 'fkcol':'rankId', 'rtab':'Rank', 'rcol':'id', 'drop':1}
            , {'fktab':'HandsPots', 'fkcol':'handId', 'rtab':'Hands', 'rcol':'id', 'drop':1}
            , {'fktab':'HandsPots', 'fkcol':'playerId', 'rtab':'Players', 'rcol':'id', 'drop':1}
            , {'fktab':'HudCache', 'fkcol':'gametypeId', 'rtab':'Gametypes', 'rcol':'id', 'drop':1}
            , {'fktab':'HudCache', 'fkcol':'playerId', 'rtab':'Players', 'rcol':'id', 'drop':0}
            , {'fktab':'HudCache', 'fkcol':'tourneyTypeId', 'rtab':'TourneyTypes', 'rcol':'id', 'drop':1}
            , {'fktab':'Sessions', 'fkcol':'weekId', 'rtab':'Weeks', 'rcol':'id', 'drop':1}
            , {'fktab':'Sessions', 'fkcol':'monthId', 'rtab':'Months', 'rcol':'id', 'drop':1}
            , {'fktab':'SessionsCache', 'fkcol':'sessionId', 'rtab':'Sessions', 'rcol':'id', 'drop':1}
            , {'fktab':'SessionsCache', 'fkcol':'gametypeId', 'rtab':'Gametypes', 'rcol':'id', 'drop':1}
            , {'fktab':'SessionsCache', 'fkcol':'playerId', 'rtab':'Players', 'rcol':'id', 'drop':0}
            , {'fktab':'TourneysCache', 'fkcol':'sessionId', 'rtab':'Sessions', 'rcol':'id', 'drop':1}
            , {'fktab':'TourneysCache', 'fkcol':'tourneyId', 'rtab':'Tourneys', 'rcol':'id', 'drop':1}
            , {'fktab':'TourneysCache', 'fkcol':'playerId', 'rtab':'Players', 'rcol':'id', 'drop':0}
            , {'fktab':'Tourneys', 'fkcol':'tourneyTypeId', 'rtab':'TourneyTypes', 'rcol':'id', 'drop':1}
            , {'fktab':'Tourneys', 'fkcol':'sessionId', 'rtab':'Sessions', 'rcol':'id', 'drop':1}
          ]
        , [ # foreign keys for postgres (index 3)
            {'fktab':'Hands', 'fkcol':'tourneyId', 'rtab':'Tourneys', 'rcol':'id', 'drop':1}
            , {'fktab':'Hands', 'fkcol':'gametypeId', 'rtab':'Gametypes', 'rcol':'id', 'drop':1}
            , {'fktab':'Hands', 'fkcol':'sessionId', 'rtab':'Sessions', 'rcol':'id', 'drop':1}
            , {'fktab':'Hands', 'fkcol':'fileId', 'rtab':'Files', 'rcol':'id', 'drop':1}
            , {'fktab':'Boards', 'fkcol':'handId', 'rtab':'Hands', 'rcol':'id', 'drop':1}
            , {'fktab':'HandsPlayers', 'fkcol':'handId', 'rtab':'Hands', 'rcol':'id', 'drop':1}
            , {'fktab':'HandsPlayers', 'fkcol':'playerId', 'rtab':'Players', 'rcol':'id', 'drop':1}
            , {'fktab':'HandsPlayers', 'fkcol':'tourneysPlayersId','rtab':'TourneysPlayers','rcol':'id', 'drop':1}
            , {'fktab':'HandsPlayers', 'fkcol':'startCards', 'rtab':'StartCards', 'rcol':'id', 'drop':1}
            , {'fktab':'HandsActions', 'fkcol':'handId', 'rtab':'Hands', 'rcol':'id', 'drop':1}
            , {'fktab':'HandsActions', 'fkcol':'playerId', 'rtab':'Players', 'rcol':'id', 'drop':1}
            , {'fktab':'HandsActions', 'fkcol':'actionId', 'rtab':'Actions', 'rcol':'id', 'drop':1}
            , {'fktab':'HandsStove', 'fkcol':'handId', 'rtab':'Hands', 'rcol':'id', 'drop':1}
            , {'fktab':'HandsStove', 'fkcol':'playerId', 'rtab':'Players', 'rcol':'id', 'drop':1}
            , {'fktab':'HandsStove', 'fkcol':'rankId', 'rtab':'Rank', 'rcol':'id', 'drop':1}
            , {'fktab':'HandsPots', 'fkcol':'handId', 'rtab':'Hands', 'rcol':'id', 'drop':1}
            , {'fktab':'HandsPots', 'fkcol':'playerId', 'rtab':'Players', 'rcol':'id', 'drop':1}
            , {'fktab':'HudCache', 'fkcol':'gametypeId', 'rtab':'Gametypes', 'rcol':'id', 'drop':1}
            , {'fktab':'HudCache', 'fkcol':'playerId', 'rtab':'Players', 'rcol':'id', 'drop':0}
            , {'fktab':'HudCache', 'fkcol':'tourneyTypeId', 'rtab':'TourneyTypes', 'rcol':'id', 'drop':1}
            , {'fktab':'Sessions', 'fkcol':'weekId', 'rtab':'Weeks', 'rcol':'id', 'drop':1}
            , {'fktab':'Sessions', 'fkcol':'monthId', 'rtab':'Months', 'rcol':'id', 'drop':1}
            , {'fktab':'SessionsCache', 'fkcol':'sessionId', 'rtab':'Sessions', 'rcol':'id', 'drop':1}
            , {'fktab':'SessionsCache', 'fkcol':'gametypeId', 'rtab':'Gametypes', 'rcol':'id', 'drop':1}
            , {'fktab':'SessionsCache', 'fkcol':'playerId', 'rtab':'Players', 'rcol':'id', 'drop':0}
            , {'fktab':'TourneysCache', 'fkcol':'sessionId', 'rtab':'Sessions', 'rcol':'id', 'drop':1}
            , {'fktab':'TourneysCache', 'fkcol':'tourneyId', 'rtab':'Tourneys', 'rcol':'id', 'drop':1}
            , {'fktab':'TourneysCache', 'fkcol':'playerId', 'rtab':'Players', 'rcol':'id', 'drop':0}
            , {'fktab':'Tourneys', 'fkcol':'tourneyTypeId', 'rtab':'TourneyTypes', 'rcol':'id', 'drop':1}
            , {'fktab':'Tourneys', 'fkcol':'sessionId', 'rtab':'Sessions', 'rcol':'id', 'drop':1}
          ]
        , [ # no foreign keys in sqlite (index 4)
          ]
        ]
592 # MySQL Notes:
593 # "FOREIGN KEY (handId) REFERENCES Hands(id)" - requires index on Hands.id
594 # - creates index handId on <thistable>.handId
595 # alter table t drop foreign key fk
596 # alter table t add foreign key (fkcol) references tab(rcol)
597 # alter table t add constraint c foreign key (fkcol) references tab(rcol)
598 # (fkcol is used for foreigh key name)
600 # mysql to list indexes: (CG - "LIST INDEXES" should work too)
601 # SELECT table_name, index_name, non_unique, column_name
602 # FROM INFORMATION_SCHEMA.STATISTICS
603 # WHERE table_name = 'tbl_name'
604 # AND table_schema = 'db_name'
605 # ORDER BY table_name, index_name, seq_in_index
606 #
607 # ALTER TABLE Tourneys ADD INDEX siteTourneyNo(siteTourneyNo)
608 # ALTER TABLE tab DROP INDEX idx
610 # mysql to list fks:
611 # SELECT constraint_name, table_name, column_name, referenced_table_name, referenced_column_name
612 # FROM information_schema.KEY_COLUMN_USAGE
613 # WHERE REFERENCED_TABLE_SCHEMA = (your schema name here)
614 # AND REFERENCED_TABLE_NAME is not null
615 # ORDER BY TABLE_NAME, COLUMN_NAME;
617 # this may indicate missing object
618 # _mysql_exceptions.OperationalError: (1025, "Error on rename of '.\\fpdb\\hands' to '.\\fpdb\\#sql2-7f0-1b' (errno: 152)")
621 # PG notes:
623 # To add a foreign key constraint to a table:
624 # ALTER TABLE tab ADD CONSTRAINT c FOREIGN KEY (col) REFERENCES t2(col2) MATCH FULL;
625 # ALTER TABLE tab DROP CONSTRAINT zipchk
626 #
627 # Note: index names must be unique across a schema
628 # CREATE INDEX idx ON tab(col)
629 # DROP INDEX idx
630 # SELECT * FROM PG_INDEXES
632 # SQLite notes:
634 # To add an index:
635 # create index indexname on tablename (col);
    def __init__(self, c, sql = None, autoconnect = True):
        """Set up the Database wrapper.

        c           -- Configuration object supplying db/import/general parameters.
        sql         -- optional shared SQL.Sql instance; one is created if None.
        autoconnect -- if True, connect immediately and, for an in-memory
                       sqlite database, create the schema.
        """
        self.config = c
        self.__connected = False
        self.settings = {}
        self.settings['os'] = "linuxmac" if os.name != "nt" else "windows"
        db_params = c.get_db_parameters()
        self.import_options = c.get_import_parameters()
        self.backend = db_params['db-backend']
        self.db_server = db_params['db-server']
        self.database = db_params['db-databaseName']
        self.host = db_params['db-host']
        self.db_path = ''
        gen = c.get_general_params()
        self.day_start = 0
        self._hero = None
        self._has_lock = False
        self.printdata = False
        self.resetCache()
        self.resetBulkCache()

        if 'day_start' in gen:
            self.day_start = float(gen['day_start'])

        self.sessionTimeout = float(self.import_options['sessionTimeout'])
        self.publicDB = self.import_options['publicDB']

        # where possible avoid creating new SQL instance by using the global one passed in
        if sql is None:
            self.sql = SQL.Sql(db_server = self.db_server)
        else:
            self.sql = sql

        if autoconnect:
            # connect to db
            self.do_connect(c)

            if self.backend == self.PGSQL:
                from psycopg2.extensions import ISOLATION_LEVEL_AUTOCOMMIT, ISOLATION_LEVEL_READ_COMMITTED, ISOLATION_LEVEL_SERIALIZABLE
                #ISOLATION_LEVEL_AUTOCOMMIT     = 0
                #ISOLATION_LEVEL_READ_COMMITTED = 1
                #ISOLATION_LEVEL_SERIALIZABLE   = 2

            # NOTE(review): self.wrongDbVersion is only set by connect(), so this
            # check (and everything below) is kept inside the autoconnect branch.
            if self.backend == self.SQLITE and self.database == ':memory:' and self.wrongDbVersion and self.is_connected():
                log.info("sqlite/:memory: - creating")
                self.recreate_tables()
                self.wrongDbVersion = False

            self.gtcache = None    # GameTypeId cache
            self.tcache = None     # TourneyId cache
            self.pcache = None     # PlayerId cache
            self.tpcache = None    # TourneysPlayersId cache

            # if fastStoreHudCache is true then the hudcache will be build using the limited configuration which ignores date, seats, and position
            self.build_full_hudcache = not self.import_options['fastStoreHudCache']
            self.cacheSessions = self.import_options['cacheSessions']
            self.callHud = self.import_options['callFpdbHud']

            # vars for hand ids or dates fetched according to above config:
            self.hand_1day_ago = 0             # max hand id more than 24 hrs earlier than now
            self.date_ndays_ago = 'd000000'    # date N days ago ('d' + YYMMDD)
            self.h_date_ndays_ago = 'd000000'  # date N days ago ('d' + YYMMDD) for hero
            self.date_nhands_ago = {}          # dates N hands ago per player - not used yet

            self.saveActions = False if self.import_options['saveActions'] == False else True

            if self.is_connected():
                if not self.wrongDbVersion:
                    self.get_sites()
                self.connection.rollback()  # make sure any locks taken so far are released
    #end def __init__
714 def dumpDatabase(self):
715 result="fpdb database dump\nDB version=" + str(DB_VERSION)+"\n\n"
717 tables=self.cursor.execute(self.sql.query['list_tables'])
718 tables=self.cursor.fetchall()
719 for table in (u'Actions', u'Autorates', u'Backings', u'Gametypes', u'Hands', u'Boards', u'HandsActions', u'HandsPlayers', u'HandsStove', u'Files', u'HudCache', u'Sessions', u'SessionsCache', u'TourneysCache',u'Players', u'RawHands', u'RawTourneys', u'Settings', u'Sites', u'TourneyTypes', u'Tourneys', u'TourneysPlayers'):
720 print("table:", table)
721 result+="###################\nTable "+table+"\n###################\n"
722 rows=self.cursor.execute(self.sql.query['get'+table])
723 rows=self.cursor.fetchall()
724 columnNames=self.cursor.description
725 if not rows:
726 result+="empty table\n"
727 else:
728 for row in rows:
729 for columnNumber in range(len(columnNames)):
730 if columnNames[columnNumber][0]=="importTime":
731 result+=(" "+columnNames[columnNumber][0]+"=ignore\n")
732 elif columnNames[columnNumber][0]=="styleKey":
733 result+=(" "+columnNames[columnNumber][0]+"=ignore\n")
734 else:
735 result+=(" "+columnNames[columnNumber][0]+"="+str(row[columnNumber])+"\n")
736 result+="\n"
737 result+="\n"
738 return result
739 #end def dumpDatabase
741 # could be used by hud to change hud style
742 def set_hud_style(self, style):
743 self.hud_style = style
745 def do_connect(self, c):
746 if c is None:
747 raise FpdbError('Configuration not defined')
749 db = c.get_db_parameters()
750 try:
751 self.connect(backend=db['db-backend'],
752 host=db['db-host'],
753 port=db['db-port'],
754 database=db['db-databaseName'],
755 user=db['db-user'],
756 password=db['db-password'])
757 except:
758 # error during connect
759 self.__connected = False
760 raise
762 db_params = c.get_db_parameters()
763 self.import_options = c.get_import_parameters()
764 self.backend = db_params['db-backend']
765 self.db_server = db_params['db-server']
766 self.database = db_params['db-databaseName']
767 self.host = db_params['db-host']
768 self.port = db_params['db-port']
    def connect(self, backend=None, host=None, port=None, database=None,
                user=None, password=None, create=False):
        """Connects a database with the given parameters.

        backend is one of MYSQL_INNODB / PGSQL / SQLITE; the matching DB-API
        module is imported lazily here so only the backend in use is needed.
        Sets self.connection / self.cursor and finishes with a schema
        version check.  Raises FpdbError (or a backend-specific subclass)
        on failure.
        """
        if backend is None:
            raise FpdbError('Database backend not defined')
        self.backend = backend
        self.host = host
        self.port = port
        self.user = user
        self.password = password
        self.database = database
        self.connection = None
        self.cursor = None
        # auto_increment step for hand ids; only != 1 for mysql replication setups
        self.hand_inc = 1

        if backend == Database.MYSQL_INNODB:
            #####not working mysql connector on py3.9####
            import MySQLdb
            if use_pool:
                # wrap the module so connections come from a pool
                MySQLdb = pool.manage(MySQLdb, pool_size=5)
            try:
                self.connection = MySQLdb.connect(host=host
                                                  ,user=user
                                                  ,passwd=password
                                                  ,db=database
                                                  ,charset='utf8'
                                                  ,use_unicode=True)
                self.__connected = True
            #TODO: Add port option
            except MySQLdb.Error as ex:
                if ex.args[0] == 1045:   # mysql error 1045: access denied
                    raise FpdbMySQLAccessDenied(ex.args[0], ex.args[1])
                elif ex.args[0] == 2002 or ex.args[0] == 2003: # 2002 is no unix socket, 2003 is no tcp socket
                    raise FpdbMySQLNoDatabase(ex.args[0], ex.args[1])
                else:
                    print(("*** WARNING UNKNOWN MYSQL ERROR:"), ex)
            c = self.get_cursor()
            c.execute("show variables like 'auto_increment_increment'")
            self.hand_inc = int(c.fetchone()[1])
        elif backend == Database.PGSQL:
            import psycopg2
            import psycopg2.extensions
            if use_pool:
                psycopg2 = pool.manage(psycopg2, pool_size=5)
            psycopg2.extensions.register_type(psycopg2.extensions.UNICODE)
            psycopg2.extensions.register_adapter(Decimal, psycopg2._psycopg.Decimal)

            self.__connected = False
            if self.host == "localhost" or self.host == "127.0.0.1":
                # try a local (unix-socket / trust) connection first
                try:
                    self.connection = psycopg2.connect(database=database)
                    # Force UTF-8 client encoding
                    self.connection.set_client_encoding('UTF8')
                    self.__connected = True
                except:
                    # direct connection failed so try user/pass/... version
                    pass
            if not self.is_connected():
                try:
                    self.connection = psycopg2.connect(host=host,
                                                       port=port,
                                                       user=user,
                                                       password=password,
                                                       database=database)
                    # Force UTF-8 client encoding
                    self.connection.set_client_encoding('UTF8')
                    self.__connected = True
                except Exception as ex:
                    # classify the failure from the error message text
                    if 'Connection refused' in ex.args[0] or ('database "' in ex.args[0] and '" does not exist' in ex.args[0]):
                        raise FpdbPostgresqlNoDatabase(errmsg=ex.args[0])
                    elif 'password authentication' in ex.args[0]:
                        raise FpdbPostgresqlAccessDenied(errmsg=ex.args[0])
                    elif 'role "' in ex.args[0] and '" does not exist' in ex.args[0]: #role "fpdb" does not exist
                        raise FpdbPostgresqlAccessDenied(errmsg=ex.args[0])
                    else:
                        msg = ex.args[0]
                        log.error(msg)
                        raise FpdbError(msg)
        elif backend == Database.SQLITE:
            create = True
            import sqlite3
            if use_pool:
                sqlite3 = pool.manage(sqlite3, pool_size=1)
            #else:
            #    log.warning("SQLite won't work well without 'sqlalchemy' installed.")

            if database != ":memory:":
                if not os.path.isdir(self.config.dir_database) and create:
                    log.info(("Creating directory: '%s'") % (self.config.dir_database))
                    os.makedirs(self.config.dir_database)
                database = os.path.join(self.config.dir_database, database).replace("\\","/")
            self.db_path = database
            log.info(("Connecting to SQLite: %s") % self.db_path)
            if os.path.exists(database) or create:
                self.connection = sqlite3.connect(self.db_path, detect_types=sqlite3.PARSE_DECLTYPES|sqlite3.PARSE_COLNAMES )
                self.__connected = True
                # adapters/converters so bool and Decimal round-trip through sqlite
                sqlite3.register_converter("bool", lambda x: bool(int(x)))
                sqlite3.register_adapter(bool, lambda x: 1 if x else 0)
                sqlite3.register_converter("decimal", convert_decimal)
                sqlite3.register_adapter(Decimal, adapt_decimal)
                # sqlite lacks these SQL functions, so supply Python versions
                self.connection.create_function("floor", 1, math.floor)
                self.connection.create_function("sqrt", 1, math.sqrt)
                tmp = sqlitemath()
                self.connection.create_function("mod", 2, tmp.mod)
                if use_numpy:
                    self.connection.create_aggregate("variance", 1, VARIANCE)
                else:
                    log.warning(("Some database functions will not work without NumPy support"))
                self.cursor = self.connection.cursor()
                self.cursor.execute('PRAGMA temp_store=2')    # use memory for temp tables/indexes
                self.cursor.execute('PRAGMA journal_mode=WAL')
                self.cursor.execute('PRAGMA synchronous=0')   # don't wait for file writes to finish
            else:
                raise FpdbError("sqlite database "+database+" does not exist")
        else:
            raise FpdbError("unrecognised database backend:"+str(backend))

        if self.is_connected():
            self.cursor = self.connection.cursor()
            self.cursor.execute(self.sql.query['set tx level'])
            self.check_version(database=database, create=create)
894 def get_sites(self):
895 self.cursor.execute("SELECT name,id FROM Sites")
896 sites = self.cursor.fetchall()
897 self.config.set_site_ids(sites)
899 def check_version(self, database, create):
900 self.wrongDbVersion = False
901 try:
902 self.cursor.execute("SELECT * FROM Settings")
903 settings = self.cursor.fetchone()
904 if settings[0] != DB_VERSION:
905 log.error((("Outdated or too new database version (%s).") % (settings[0])) + " " + ("Please recreate tables."))
906 self.wrongDbVersion = True
907 except:# _mysql_exceptions.ProgrammingError:
908 if database != ":memory:":
909 if create:
910 #print (("Failed to read settings table.") + " - " + ("Recreating tables."))
911 log.info(("Failed to read settings table.") + " - " + ("Recreating tables."))
912 self.recreate_tables()
913 self.check_version(database=database, create=False)
914 else:
915 #print (("Failed to read settings table.") + " - " + ("Please recreate tables."))
916 log.info(("Failed to read settings table.") + " - " + ("Please recreate tables."))
917 self.wrongDbVersion = True
918 else:
919 self.wrongDbVersion = True
920 #end def connect
922 def commit(self):
923 if self.backend != self.SQLITE:
924 self.connection.commit()
925 else:
926 # sqlite commits can fail because of shared locks on the database (SQLITE_BUSY)
927 # re-try commit if it fails in case this happened
928 maxtimes = 5
929 pause = 1
930 ok = False
931 for i in range(maxtimes):
932 try:
933 ret = self.connection.commit()
934 #log.debug(("commit finished ok, i = ")+str(i))
935 ok = True
936 except:
937 log.debug(("commit %s failed: info=%s value=%s") % (str(i), str(sys.exc_info()), str(sys.exc_info()[1])))
938 sleep(pause)
939 if ok: break
940 if not ok:
941 log.debug(("commit failed"))
942 raise FpdbError('sqlite commit failed')
944 def rollback(self):
945 self.connection.rollback()
947 def connected(self):
948 """ now deprecated, use is_connected() instead """
949 return self.__connected
951 def is_connected(self):
952 return self.__connected
954 def get_cursor(self, connect=False):
955 if self.backend == Database.MYSQL_INNODB and os.name == 'nt':
956 self.connection.ping(True)
957 return self.connection.cursor()
959 def close_connection(self):
960 self.connection.close()
961 self.__connected = False
963 def disconnect(self, due_to_error=False):
964 """Disconnects the DB (rolls back if param is true, otherwise commits"""
965 if due_to_error:
966 self.connection.rollback()
967 else:
968 self.connection.commit()
969 self.cursor.close()
970 self.connection.close()
971 self.__connected = False
973 def reconnect(self, due_to_error=False):
974 """Reconnects the DB"""
975 #print "started reconnect"
976 self.disconnect(due_to_error)
977 self.connect(self.backend, self.host, self.database, self.user, self.password)
979 def get_backend_name(self):
980 """Returns the name of the currently used backend"""
981 if self.backend==2:
982 return "MySQL InnoDB"
983 elif self.backend==3:
984 return "PostgreSQL"
985 elif self.backend==4:
986 return "SQLite"
987 else:
988 raise FpdbError("invalid backend")
990 def get_db_info(self):
991 return (self.host, self.database, self.user, self.password)
993 def get_table_name(self, hand_id):
994 c = self.connection.cursor()
995 c.execute(self.sql.query['get_table_name'], (hand_id, ))
996 row = c.fetchone()
997 return row
999 def get_table_info(self, hand_id):
1000 c = self.connection.cursor()
1001 c.execute(self.sql.query['get_table_name'], (hand_id, ))
1002 row = c.fetchone()
1003 l = list(row)
1004 if row[3] == "ring": # cash game
1005 l.append(None)
1006 l.append(None)
1007 return l
1008 else: # tournament
1009 tour_no, tab_no = re.split(" ", row[0], 1)
1010 l.append(tour_no)
1011 l.append(tab_no)
1012 return l
1014 def get_last_hand(self):
1015 c = self.connection.cursor()
1016 c.execute(self.sql.query['get_last_hand'])
1017 row = c.fetchone()
1018 return row[0]
1020 def get_xml(self, hand_id):
1021 c = self.connection.cursor()
1022 c.execute(self.sql.query['get_xml'], (hand_id))
1023 row = c.fetchone()
1024 return row[0]
1026 def get_recent_hands(self, last_hand):
1027 c = self.connection.cursor()
1028 c.execute(self.sql.query['get_recent_hands'], {'last_hand': last_hand})
1029 return c.fetchall()
1031 def get_gameinfo_from_hid(self, hand_id):
1032 # returns a gameinfo (gametype) dictionary suitable for passing
1033 # to Hand.hand_factory
1034 c = self.connection.cursor()
1035 q = self.sql.query['get_gameinfo_from_hid']
1036 q = q.replace('%s', self.sql.query['placeholder'])
1037 c.execute (q, (hand_id, ))
1038 row = c.fetchone()
1039 gameinfo = {'sitename':row[0],'category':row[1],'base':row[2],'type':row[3],'limitType':row[4],
1040 'hilo':row[5],'sb':row[6],'bb':row[7], 'sbet':row[8],'bbet':row[9], 'currency':row[10], 'gametypeId':row[11], 'split':row[12]}
1041 return gameinfo
# The query 'get_hand_info' does not appear to be defined, so this helper is disabled:
1044# def get_hand_info(self, new_hand_id):
1045# c = self.connection.cursor()
1046# c.execute(self.sql.query['get_hand_info'], new_hand_id)
1047# return c.fetchall()
1049 def getHandCount(self):
1050 c = self.connection.cursor()
1051 c.execute(self.sql.query['getHandCount'])
1052 return c.fetchone()[0]
1053 #end def getHandCount
1055 def getTourneyCount(self):
1056 c = self.connection.cursor()
1057 c.execute(self.sql.query['getTourneyCount'])
1058 return c.fetchone()[0]
1059 #end def getTourneyCount
1061 def getTourneyTypeCount(self):
1062 c = self.connection.cursor()
1063 c.execute(self.sql.query['getTourneyTypeCount'])
1064 return c.fetchone()[0]
1065 #end def getTourneyCount
1067 def getSiteTourneyNos(self, site):
1068 c = self.connection.cursor()
1069 q = self.sql.query['getSiteId']
1070 q = q.replace('%s', self.sql.query['placeholder'])
1071 c.execute(q, (site,))
1072 siteid = c.fetchone()[0]
1073 q = self.sql.query['getSiteTourneyNos']
1074 q = q.replace('%s', self.sql.query['placeholder'])
1075 c.execute(q, (siteid,))
1076 alist = []
1077 for row in c.fetchall():
1078 alist.append(row)
1079 return alist
1081 def get_actual_seat(self, hand_id, name):
1082 c = self.connection.cursor()
1083 c.execute(self.sql.query['get_actual_seat'], (hand_id, name))
1084 row = c.fetchone()
1085 return row[0]
1087 def get_cards(self, hand):
1088 """Get and return the cards for each player in the hand."""
1089 cards = {} # dict of cards, the key is the seat number,
1090 # the value is a tuple of the players cards
1091 # example: {1: (0, 0, 20, 21, 22, 0 , 0)}
1092 c = self.connection.cursor()
1093 c.execute(self.sql.query['get_cards'], [hand])
1094 for row in c.fetchall():
1095 cards[row[0]] = row[1:]
1096 return cards
1098 def get_common_cards(self, hand):
1099 """Get and return the community cards for the specified hand."""
1100 cards = {}
1101 c = self.connection.cursor()
1102 c.execute(self.sql.query['get_common_cards'], [hand])
1103# row = c.fetchone()
1104 cards['common'] = c.fetchone()
1105 return cards
1107 def get_action_from_hand(self, hand_no):
1108 action = [ [], [], [], [], [] ]
1109 c = self.connection.cursor()
1110 c.execute(self.sql.query['get_action_from_hand'], (hand_no,))
1111 for row in c.fetchall():
1112 street = row[0]
1113 act = row[1:]
1114 action[street].append(act)
1115 return action
1117 def get_winners_from_hand(self, hand):
1118 """Returns a hash of winners:amount won, given a hand number."""
1119 winners = {}
1120 c = self.connection.cursor()
1121 c.execute(self.sql.query['get_winners_from_hand'], (hand,))
1122 for row in c.fetchall():
1123 winners[row[0]] = row[1]
1124 return winners
1126 def set_printdata(self, val):
1127 self.printdata = val
    def init_hud_stat_vars(self, hud_days, h_hud_days):
        """Initialise variables used by Hud to fetch stats:
        self.hand_1day_ago    handId of latest hand played more than a day ago
        self.date_ndays_ago   date n days ago
        self.h_date_ndays_ago date n days ago for hero (different n)
        """
        self.hand_1day_ago = 1
        c = self.get_cursor()
        c.execute(self.sql.query['get_hand_1day_ago'])
        row = c.fetchone()
        if row and row[0]:
            self.hand_1day_ago = int(row[0])

        # offset of local time from UTC in whole hours, combined with the
        # configured day_start so the "day" boundary is computed in UTC
        tz = datetime.utcnow() - datetime.today()
        tz_offset = old_div(tz.seconds,3600)
        tz_day_start_offset = self.day_start + tz_offset

        d = timedelta(days=hud_days, hours=tz_day_start_offset)
        now = datetime.utcnow() - d
        # "dYYMMDD" stylekey strings, compared lexically against cache keys
        self.date_ndays_ago = "d%02d%02d%02d" % (now.year - 2000, now.month, now.day)

        d = timedelta(days=h_hud_days, hours=tz_day_start_offset)
        now = datetime.utcnow() - d
        self.h_date_ndays_ago = "d%02d%02d%02d" % (now.year - 2000, now.month, now.day)
1154 # is get_stats_from_hand slow?
1155 # Gimick - yes - reason being that the gametypeid join on hands
1156 # increases exec time on SQLite and postgres by a factor of 6 to 10
1157 # method below changed to lookup hand.gametypeid and pass that as
1158 # a constant to the underlying query.
    def get_stats_from_hand( self, hand, type   # type is "ring" or "tour"
                           , hud_params = {'stat_range':'A', 'agg_bb_mult':1000
                                          ,'seats_style':'A', 'seats_cust_nums_low':1, 'seats_cust_nums_high':10
                                          ,'h_stat_range':'S', 'h_agg_bb_mult':1000
                                          ,'h_seats_style':'A', 'h_seats_cust_nums_low':1, 'h_seats_cust_nums_high':10
                                          }
                           , hero_id = -1
                           , num_seats = 6
                           ):
        """Return a dict of stats, keyed by player id, for the players in 'hand'.

        hud_params selects stat range ('A'll / 'T' recent days / 'S'ession) and
        a seats filter, separately for hero (h_*) and villains.
        NOTE(review): hud_params is a mutable default argument; it is only read
        here, but callers should not mutate the shared default.
        """
        stat_range = hud_params['stat_range']
        agg_bb_mult = hud_params['agg_bb_mult']
        seats_style = hud_params['seats_style']
        seats_cust_nums_low = hud_params['seats_cust_nums_low']
        seats_cust_nums_high = hud_params['seats_cust_nums_high']
        h_stat_range = hud_params['h_stat_range']
        h_agg_bb_mult = hud_params['h_agg_bb_mult']
        h_seats_style = hud_params['h_seats_style']
        h_seats_cust_nums_low = hud_params['h_seats_cust_nums_low']
        h_seats_cust_nums_high = hud_params['h_seats_cust_nums_high']

        stat_dict = {}

        # seats filter: 'A' = any, 'C' = custom range, 'E' = exactly num_seats
        if seats_style == 'A':
            seats_min, seats_max = 0, 10
        elif seats_style == 'C':
            seats_min, seats_max = seats_cust_nums_low, seats_cust_nums_high
        elif seats_style == 'E':
            seats_min, seats_max = num_seats, num_seats
        else:
            seats_min, seats_max = 0, 10
            log.warning("bad seats_style value: " + seats_style)

        if h_seats_style == 'A':
            h_seats_min, h_seats_max = 0, 10
        elif h_seats_style == 'C':
            h_seats_min, h_seats_max = h_seats_cust_nums_low, h_seats_cust_nums_high
        elif h_seats_style == 'E':
            h_seats_min, h_seats_max = num_seats, num_seats
        else:
            h_seats_min, h_seats_max = 0, 10
            log.warning("bad h_seats_style value: " + h_seats_style)

        # 'S' (session) stats come from HandsPlayers, not the HudCache query below
        if stat_range == 'S' or h_stat_range == 'S':
            self.get_stats_from_hand_session(hand, stat_dict, hero_id
                                            ,stat_range, seats_min, seats_max
                                            ,h_stat_range, h_seats_min, h_seats_max)

        if stat_range == 'S' and h_stat_range == 'S':
            return stat_dict

        # stylekey is compared against cached "dYYMMDD" keys:
        # 'T' limits to recent days, '0000000' matches all, 'zzzzzzz' matches none
        if stat_range == 'T':
            stylekey = self.date_ndays_ago
        elif stat_range == 'A':
            stylekey = '0000000'   # all stylekey values should be higher than this
        elif stat_range == 'S':
            stylekey = 'zzzzzzz'   # all stylekey values should be lower than this
        else:
            stylekey = '0000000'
            log.info('stat_range: %s' % stat_range)

        #elif stat_range == 'H':
        #    stylekey = date_nhands_ago  needs array by player here ...

        if h_stat_range == 'T':
            h_stylekey = self.h_date_ndays_ago
        elif h_stat_range == 'A':
            h_stylekey = '0000000'   # all stylekey values should be higher than this
        elif h_stat_range == 'S':
            h_stylekey = 'zzzzzzz'   # all stylekey values should be lower than this
        else:
            h_stylekey = '00000000'
            log.info('h_stat_range: %s' % h_stat_range)

        #elif h_stat_range == 'H':
        #    h_stylekey = date_nhands_ago  needs array by player here ...

        # lookup gametypeId from hand and pass it as a constant - joining on
        # hands.gametypeId made this query 6-10x slower on SQLite/postgres
        handinfo = self.get_gameinfo_from_hid(hand)
        gametypeId = handinfo["gametypeId"]

        query = 'get_stats_from_hand_aggregated'
        subs = (hand
               ,hero_id, stylekey, agg_bb_mult, agg_bb_mult, gametypeId, seats_min, seats_max      # hero params
               ,hero_id, h_stylekey, h_agg_bb_mult, h_agg_bb_mult, gametypeId, h_seats_min, h_seats_max)  # villain params

        stime = time()
        c = self.connection.cursor()

        # Now get the stats
        c.execute(self.sql.query[query], subs)
        ptime = time() - stime
        log.info("HudCache query get_stats_from_hand_aggregated took %.3f seconds" % ptime)
        colnames = [desc[0] for desc in c.description]
        for row in c.fetchall():
            playerid = row[0]
            # skip players whose stats were already filled in by the session query
            if (playerid == hero_id and h_stat_range != 'S') or (playerid != hero_id and stat_range != 'S'):
                t_dict = {}
                for name, val in zip(colnames, row):
                    t_dict[name.lower()] = val
                stat_dict[t_dict['player_id']] = t_dict

        return stat_dict
1263 # uses query on handsplayers instead of hudcache to get stats on just this session
    def get_stats_from_hand_session(self, hand, stat_dict, hero_id
                                   ,stat_range, seats_min, seats_max
                                   ,h_stat_range, h_seats_min, h_seats_max):
        """Get stats for just this session (currently defined as any play in the last 24 hours - to
        be improved at some point ...)
        h_stat_range and stat_range params indicate whether to get stats for hero and/or others
        - only fetch heroes stats if h_stat_range == 'S',
        and only fetch others stats if stat_range == 'S'
        seats_min/max params give seats limits, only include stats if between these values

        Results are merged into the caller-supplied stat_dict in place.
        """

        query = self.sql.query['get_stats_from_hand_session']
        # mysql needs an explicit 'signed' cast in this query; others do not
        if self.db_server == 'mysql':
            query = query.replace("<signed>", 'signed ')
        else:
            query = query.replace("<signed>", '')

        subs = (self.hand_1day_ago, hand, hero_id, seats_min, seats_max
               , hero_id, h_seats_min, h_seats_max)
        c = self.get_cursor()

        # now get the stats
        #print "sess_stats: subs =", subs, "subs[0] =", subs[0]
        c.execute(query, subs)
        colnames = [desc[0] for desc in c.description]
        n = 0

        row = c.fetchone()
        if colnames[0].lower() == 'player_id':

            # Loop through stats adding them to appropriate stat_dict:
            while row:
                playerid = row[0]
                seats = row[1]
                # hero rows only when h_stat_range is 'S'; villains only when stat_range is 'S'
                if (playerid == hero_id and h_stat_range == 'S') or (playerid != hero_id and stat_range == 'S'):
                    for name, val in zip(colnames, row):
                        if not playerid in stat_dict:
                            # first row for this player: start their dict
                            stat_dict[playerid] = {}
                            stat_dict[playerid][name.lower()] = val
                        elif not name.lower() in stat_dict[playerid]:
                            # first occurrence of this stat for this player
                            stat_dict[playerid][name.lower()] = val
                        elif name.lower() not in ('hand_id', 'player_id', 'seat', 'screen_name', 'seats'):
                            # accumulate countable stats across the player's rows
                            #print "DEBUG: stat_dict[%s][%s]: %s" %(playerid, name.lower(), val)
                            stat_dict[playerid][name.lower()] += val
                    n += 1
                    if n >= 10000: break  # todo: don't think this is needed so set nice and high
                                          # prevents infinite loop so leave for now - comment out or remove?
                row = c.fetchone()
        else:
            log.error(("ERROR: query %s result does not have player_id as first column") % (query,))

        #print "  %d rows fetched, len(stat_dict) = %d" % (n, len(stat_dict))

        #print "session stat_dict =", stat_dict
        #return stat_dict
1320 def get_player_id(self, config, siteName, playerName):
1321 c = self.connection.cursor()
1322 siteNameUtf = Charset.to_utf8(siteName)
1323 playerNameUtf = str(playerName)
1324 #print "db.get_player_id siteName",siteName,"playerName",playerName
1325 c.execute(self.sql.query['get_player_id'], (playerNameUtf, siteNameUtf))
1326 row = c.fetchone()
1327 if row:
1328 return row[0]
1329 else:
1330 return None
1332 def get_player_names(self, config, site_id=None, like_player_name="%"):
1333 """Fetch player names from players. Use site_id and like_player_name if provided"""
1335 if site_id is None:
1336 site_id = -1
1337 c = self.get_cursor()
1338 p_name = Charset.to_utf8(like_player_name)
1339 c.execute(self.sql.query['get_player_names'], (p_name, site_id, site_id))
1340 rows = c.fetchall()
1341 return rows
1343 def get_site_id(self, site):
1344 c = self.get_cursor()
1345 c.execute(self.sql.query['getSiteId'], (site,))
1346 result = c.fetchall()
1347 return result
1349 def resetCache(self):
1350 self.ttold = set() # TourneyTypes old
1351 self.ttnew = set() # TourneyTypes new
1352 self.wmold = set() # WeeksMonths old
1353 self.wmnew = set() # WeeksMonths new
1354 self.gtcache = None # GameTypeId cache
1355 self.tcache = None # TourneyId cache
1356 self.pcache = None # PlayerId cache
1357 self.tpcache = None # TourneysPlayersId cache
1359 def get_last_insert_id(self, cursor=None):
1360 ret = None
1361 try:
1362 if self.backend == self.MYSQL_INNODB:
1363 ret = self.connection.insert_id()
1364 if ret < 1 or ret > 999999999:
1365 log.warning(("getLastInsertId(): problem fetching insert_id? ret=%d") % ret)
1366 ret = -1
1367 elif self.backend == self.PGSQL:
1368 # some options:
1369 # currval(hands_id_seq) - use name of implicit seq here
1370 # lastval() - still needs sequences set up?
1371 # insert ... returning is useful syntax (but postgres specific?)
1372 # see rules (fancy trigger type things)
1373 c = self.get_cursor()
1374 ret = c.execute ("SELECT lastval()")
1375 row = c.fetchone()
1376 if not row:
1377 log.warning(("getLastInsertId(%s): problem fetching lastval? row=%d") % (seq, row))
1378 ret = -1
1379 else:
1380 ret = row[0]
1381 elif self.backend == self.SQLITE:
1382 ret = cursor.lastrowid
1383 else:
1384 log.error(("getLastInsertId(): unknown backend: %d") % self.backend)
1385 ret = -1
1386 except:
1387 ret = -1
1388 err = traceback.extract_tb(sys.exc_info()[2])
1389 log.error(("*** Database get_last_insert_id error: ") + str(sys.exc_info()[1]))
1390 log.error("\n".join( [e[0]+':'+str(e[1])+" "+e[2] for e in err] ))
1391 raise
1392 return ret
    def prepareBulkImport(self):
        """Drop some indexes/foreign keys to prepare for bulk import.
        Currently keeping the standalone indexes as needed to import quickly"""
        stime = time()
        c = self.get_cursor()
        # sc: don't think autocommit=0 is needed, should already be in that mode
        if self.backend == self.MYSQL_INNODB:
            # MySQL: just disable FK checks and autocommit, then bail out -
            # the drop loops below never run for MySQL because of this return
            c.execute("SET foreign_key_checks=0")
            c.execute("SET autocommit=0")
            return
        if self.backend == self.PGSQL:
            self.connection.set_isolation_level(0)   # allow table/index operations to work
        for fk in self.foreignKeys[self.backend]:
            if fk['drop'] == 1:
                if self.backend == self.MYSQL_INNODB:
                    # unreachable for MySQL due to the early return above; kept as-is
                    c.execute("SELECT constraint_name " +
                              "FROM information_schema.KEY_COLUMN_USAGE " +
                              #"WHERE REFERENCED_TABLE_SCHEMA = 'fpdb'
                              "WHERE 1=1 " +
                              "AND table_name = %s AND column_name = %s " +
                              "AND referenced_table_name = %s " +
                              "AND referenced_column_name = %s ",
                              (fk['fktab'], fk['fkcol'], fk['rtab'], fk['rcol']) )
                    cons = c.fetchone()
                    #print "preparebulk find fk: cons=", cons
                    if cons:
                        print("dropping mysql fk", cons[0], fk['fktab'], fk['fkcol'])
                        try:
                            c.execute("alter table " + fk['fktab'] + " drop foreign key " + cons[0])
                        except:
                            print("    drop failed: " + str(sys.exc_info()))
                elif self.backend == self.PGSQL:
                    # DON'T FORGET TO RECREATE THEM!!
                    print("dropping pg fk", fk['fktab'], fk['fkcol'])
                    try:
                        # try to lock table to see if index drop will work:
                        # hmmm, tested by commenting out rollback in grapher. lock seems to work but
                        # then drop still hangs :-(  does work in some tests though??
                        # will leave code here for now pending further tests/enhancement ...
                        c.execute("BEGIN TRANSACTION")
                        c.execute( "lock table %s in exclusive mode nowait" % (fk['fktab'],) )
                        #print "after lock, status:", c.statusmessage
                        #print "alter table %s drop constraint %s_%s_fkey" % (fk['fktab'], fk['fktab'], fk['fkcol'])
                        try:
                            # constraint names follow postgres' default <table>_<col>_fkey pattern
                            c.execute("alter table %s drop constraint %s_%s_fkey" % (fk['fktab'], fk['fktab'], fk['fkcol']))
                            print("dropped pg fk pg fk %s_%s_fkey, continuing ..." % (fk['fktab'], fk['fkcol']))
                        except:
                            if "does not exist" not in str(sys.exc_info()[1]):
                                print(("warning: drop pg fk %s_%s_fkey failed: %s, continuing ...") \
                                      % (fk['fktab'], fk['fkcol'], str(sys.exc_info()[1]).rstrip('\n') ))
                        c.execute("END TRANSACTION")
                    except:
                        print(("warning: constraint %s_%s_fkey not dropped: %s, continuing ...") \
                              % (fk['fktab'],fk['fkcol'], str(sys.exc_info()[1]).rstrip('\n')))
                else:
                    return -1

        for idx in self.indexes[self.backend]:
            if idx['drop'] == 1:
                if self.backend == self.MYSQL_INNODB:
                    # unreachable for MySQL due to the early return above; kept as-is
                    print(("dropping mysql index "), idx['tab'], idx['col'])
                    try:
                        # apparently nowait is not implemented in mysql so this just hangs if there are locks
                        # preventing the index drop :-(
                        c.execute( "alter table %s drop index %s;", (idx['tab'],idx['col']) )
                    except:
                        print(("    drop index failed: ") + str(sys.exc_info()))
                            # ALTER TABLE `fpdb`.`handsplayers` DROP INDEX `playerId`;
                            # using: 'HandsPlayers' drop index 'playerId'
                elif self.backend == self.PGSQL:
                    # DON'T FORGET TO RECREATE THEM!!
                    print(("dropping pg index "), idx['tab'], idx['col'])
                    try:
                        # try to lock table to see if index drop will work:
                        c.execute("BEGIN TRANSACTION")
                        c.execute( "lock table %s in exclusive mode nowait" % (idx['tab'],) )
                        #print "after lock, status:", c.statusmessage
                        try:
                            # table locked ok so index drop should work:
                            #print "drop index %s_%s_idx" % (idx['tab'],idx['col'])
                            c.execute( "drop index if exists %s_%s_idx" % (idx['tab'],idx['col']) )
                            #print "dropped  pg index ", idx['tab'], idx['col']
                        except:
                            if "does not exist" not in str(sys.exc_info()[1]):
                                print(("warning: drop index %s_%s_idx failed: %s, continuing ...") \
                                      % (idx['tab'],idx['col'], str(sys.exc_info()[1]).rstrip('\n')))
                        c.execute("END TRANSACTION")
                    except:
                        print(("warning: index %s_%s_idx not dropped %s, continuing ...") \
                              % (idx['tab'],idx['col'], str(sys.exc_info()[1]).rstrip('\n')))
                else:
                    return -1

        if self.backend == self.PGSQL:
            self.connection.set_isolation_level(1)   # go back to normal isolation level
        self.commit()   # seems to clear up errors if there were any in postgres
        ptime = time() - stime
        print (("prepare import took %s seconds") % ptime)
    #end def prepareBulkImport
    def afterBulkImport(self):
        """Re-create any dropped indexes/foreign keys after bulk import"""
        stime = time()

        c = self.get_cursor()
        if self.backend == self.MYSQL_INNODB:
            # MySQL: re-enable the checks disabled by prepareBulkImport and
            # bail out - the recreate loops below never run for MySQL
            c.execute("SET foreign_key_checks=1")
            c.execute("SET autocommit=1")
            return

        if self.backend == self.PGSQL:
            self.connection.set_isolation_level(0)   # allow table/index operations to work
        for fk in self.foreignKeys[self.backend]:
            if fk['drop'] == 1:
                if self.backend == self.MYSQL_INNODB:
                    # unreachable for MySQL due to the early return above; kept as-is
                    c.execute("SELECT constraint_name " +
                              "FROM information_schema.KEY_COLUMN_USAGE " +
                              #"WHERE REFERENCED_TABLE_SCHEMA = 'fpdb'
                              "WHERE 1=1 " +
                              "AND table_name = %s AND column_name = %s " +
                              "AND referenced_table_name = %s " +
                              "AND referenced_column_name = %s ",
                              (fk['fktab'], fk['fkcol'], fk['rtab'], fk['rcol']) )
                    cons = c.fetchone()
                    #print "afterbulk: cons=", cons
                    if cons:
                        # constraint already exists - nothing to do
                        pass
                    else:
                        print(("Creating foreign key "), fk['fktab'], fk['fkcol'], "->", fk['rtab'], fk['rcol'])
                        try:
                            c.execute("alter table " + fk['fktab'] + " add foreign key ("
                                      + fk['fkcol'] + ") references " + fk['rtab'] + "("
                                      + fk['rcol'] + ")")
                        except:
                            print(("Create foreign key failed: ") + str(sys.exc_info()))
                elif self.backend == self.PGSQL:
                    print(("Creating foreign key "), fk['fktab'], fk['fkcol'], "->", fk['rtab'], fk['rcol'])
                    try:
                        # recreate with postgres' default <table>_<col>_fkey name
                        c.execute("alter table " + fk['fktab'] + " add constraint "
                                  + fk['fktab'] + '_' + fk['fkcol'] + '_fkey'
                                  + " foreign key (" + fk['fkcol']
                                  + ") references " + fk['rtab'] + "(" + fk['rcol'] + ")")
                    except:
                        print(("Create foreign key failed: ") + str(sys.exc_info()))
                else:
                    return -1

        for idx in self.indexes[self.backend]:
            if idx['drop'] == 1:
                if self.backend == self.MYSQL_INNODB:
                    # unreachable for MySQL due to the early return above; kept as-is
                    print(("Creating MySQL index %s %s") % (idx['tab'], idx['col']))
                    try:
                        s = "alter table %s add index %s(%s)" % (idx['tab'],idx['col'],idx['col'])
                        c.execute(s)
                    except:
                        print(("Create foreign key failed: ") + str(sys.exc_info()))
                elif self.backend == self.PGSQL:
    #                pass
                    # mod to use tab_col for index name?
                    print(("Creating PostgreSQL index "), idx['tab'], idx['col'])
                    try:
                        s = "create index %s_%s_idx on %s(%s)" % (idx['tab'], idx['col'], idx['tab'], idx['col'])
                        c.execute(s)
                    except:
                        print(("Create index failed: ") + str(sys.exc_info()))
                else:
                    return -1

        if self.backend == self.PGSQL:
            self.connection.set_isolation_level(1)   # go back to normal isolation level
        self.commit()   # seems to clear up errors if there were any in postgres
        atime = time() - stime
        print (("After import took %s seconds") % atime)
    #end def afterBulkImport
1569 def drop_referential_integrity(self):
1570 """Update all tables to remove foreign keys"""
1572 c = self.get_cursor()
1573 c.execute(self.sql.query['list_tables'])
1574 result = c.fetchall()
1576 for i in range(len(result)):
1577 c.execute("SHOW CREATE TABLE " + result[i][0])
1578 inner = c.fetchall()
1580 for j in range(len(inner)):
1581 # result[i][0] - Table name
1582 # result[i][1] - CREATE TABLE parameters
1583 #Searching for CONSTRAINT `tablename_ibfk_1`
1584 for m in re.finditer('(ibfk_[0-9]+)', inner[j][1]):
1585 key = "`" + inner[j][0] + "_" + m.group() + "`"
1586 c.execute("ALTER TABLE " + inner[j][0] + " DROP FOREIGN KEY " + key)
1587 self.commit()
1588 #end drop_referential_inegrity
    def recreate_tables(self):
        """(Re-)creates the tables of the current DB"""
        # order matters: drop everything, clear in-memory caches, rebuild the
        # schema and indexes, commit, then re-register site ids from the fresh data
        self.drop_tables()
        self.resetCache()
        self.resetBulkCache()
        self.create_tables()
        self.createAllIndexes()
        self.commit()
        self.get_sites()
        log.info(("Finished recreating tables"))
    #end def recreate_tables
1603 def create_tables(self):
1604 log.debug(self.sql.query['createSettingsTable'])
1605 c = self.get_cursor()
1606 c.execute(self.sql.query['createSettingsTable'])
1608 log.debug("Creating tables")
1609 c.execute(self.sql.query['createActionsTable'])
1610 c.execute(self.sql.query['createRankTable'])
1611 c.execute(self.sql.query['createStartCardsTable'])
1612 c.execute(self.sql.query['createSitesTable'])
1613 c.execute(self.sql.query['createGametypesTable'])
1614 c.execute(self.sql.query['createFilesTable'])
1615 c.execute(self.sql.query['createPlayersTable'])
1616 c.execute(self.sql.query['createAutoratesTable'])
1617 c.execute(self.sql.query['createWeeksTable'])
1618 c.execute(self.sql.query['createMonthsTable'])
1619 c.execute(self.sql.query['createSessionsTable'])
1620 c.execute(self.sql.query['createTourneyTypesTable'])
1621 c.execute(self.sql.query['createTourneysTable'])
1622 c.execute(self.sql.query['createTourneysPlayersTable'])
1623 c.execute(self.sql.query['createSessionsCacheTable'])
1624 c.execute(self.sql.query['createTourneysCacheTable'])
1625 c.execute(self.sql.query['createHandsTable'])
1626 c.execute(self.sql.query['createHandsPlayersTable'])
1627 c.execute(self.sql.query['createHandsActionsTable'])
1628 c.execute(self.sql.query['createHandsStoveTable'])
1629 c.execute(self.sql.query['createHandsPotsTable'])
1630 c.execute(self.sql.query['createHudCacheTable'])
1631 c.execute(self.sql.query['createCardsCacheTable'])
1632 c.execute(self.sql.query['createPositionsCacheTable'])
1633 c.execute(self.sql.query['createBoardsTable'])
1634 c.execute(self.sql.query['createBackingsTable'])
1635 c.execute(self.sql.query['createRawHands'])
1636 c.execute(self.sql.query['createRawTourneys'])
1638 # Create unique indexes:
1639 log.debug("Creating unique indexes")
1640 c.execute(self.sql.query['addTourneyIndex'])
1641 c.execute(self.sql.query['addHandsIndex'].replace('<heroseat>', ', heroSeat' if self.publicDB else ''))
1642 c.execute(self.sql.query['addPlayersIndex'])
1643 c.execute(self.sql.query['addTPlayersIndex'])
1644 c.execute(self.sql.query['addPlayersSeat'])
1645 c.execute(self.sql.query['addHeroSeat'])
1646 c.execute(self.sql.query['addStartCardsIndex'])
1647 c.execute(self.sql.query['addSeatsIndex'])
1648 c.execute(self.sql.query['addPositionIndex'])
1649 c.execute(self.sql.query['addFilesIndex'])
1650 c.execute(self.sql.query['addTableNameIndex'])
1651 c.execute(self.sql.query['addPlayerNameIndex'])
1652 c.execute(self.sql.query['addPlayerHeroesIndex'])
1653 c.execute(self.sql.query['addStartCashIndex'])
1654 c.execute(self.sql.query['addEffStackIndex'])
1655 c.execute(self.sql.query['addTotalProfitIndex'])
1656 c.execute(self.sql.query['addWinningsIndex'])
1657 c.execute(self.sql.query['addFinalPotIndex'])
1658 c.execute(self.sql.query['addStreetIndex'])
1659 c.execute(self.sql.query['addSessionsCacheCompundIndex'])
1660 c.execute(self.sql.query['addTourneysCacheCompundIndex'])
1661 c.execute(self.sql.query['addHudCacheCompundIndex'])
1662 c.execute(self.sql.query['addCardsCacheCompundIndex'])
1663 c.execute(self.sql.query['addPositionsCacheCompundIndex'])
1665 self.fillDefaultData()
1666 self.commit()
1668 def drop_tables(self):
1669 """Drops the fpdb tables from the current db"""
1670 c = self.get_cursor()
1672 backend = self.get_backend_name()
1673 if backend == 'MySQL InnoDB': # what happens if someone is using MyISAM?
1674 try:
1675 self.drop_referential_integrity() # needed to drop tables with foreign keys
1676 c.execute(self.sql.query['list_tables'])
1677 tables = c.fetchall()
1678 for table in tables:
1679 c.execute(self.sql.query['drop_table'] + table[0])
1680 c.execute('SET FOREIGN_KEY_CHECKS=1')
1681 except:
1682 err = traceback.extract_tb(sys.exc_info()[2])[-1]
1683 print(("***Error dropping tables:"), +err[2]+"("+str(err[1])+"): "+str(sys.exc_info()[1]))
1684 self.rollback()
1685 elif backend == 'PostgreSQL':
1686 try:
1687 self.commit()
1688 c.execute(self.sql.query['list_tables'])
1689 tables = c.fetchall()
1690 for table in tables:
1691 c.execute(self.sql.query['drop_table'] + table[0] + ' cascade')
1692 except:
1693 err = traceback.extract_tb(sys.exc_info()[2])[-1]
1694 print(("***Error dropping tables:"), err[2]+"("+str(err[1])+"): "+str(sys.exc_info()[1]))
1695 self.rollback()
1696 elif backend == 'SQLite':
1697 c.execute(self.sql.query['list_tables'])
1698 for table in c.fetchall():
1699 if table[0] != 'sqlite_stat1':
1700 log.info("%s '%s'" % (self.sql.query['drop_table'], table[0]))
1701 c.execute(self.sql.query['drop_table'] + table[0])
1702 self.commit()
1703 #end def drop_tables
1705 def createAllIndexes(self):
1706 """Create new indexes"""
1708 if self.backend == self.PGSQL:
1709 self.connection.set_isolation_level(0) # allow table/index operations to work
1710 c = self.get_cursor()
1711 for idx in self.indexes[self.backend]:
1712 log.info(("Creating index %s %s") %(idx['tab'], idx['col']))
1713 if self.backend == self.MYSQL_INNODB:
1714 s = "CREATE INDEX %s ON %s(%s)" % (idx['col'],idx['tab'],idx['col'])
1715 c.execute(s)
1716 elif self.backend == self.PGSQL or self.backend == self.SQLITE:
1717 s = "CREATE INDEX %s_%s_idx ON %s(%s)" % (idx['tab'], idx['col'], idx['tab'], idx['col'])
1718 c.execute(s)
1720 if self.backend == self.PGSQL:
1721 self.connection.set_isolation_level(1) # go back to normal isolation level
1722 #end def createAllIndexes
1724 def dropAllIndexes(self):
1725 """Drop all standalone indexes (i.e. not including primary keys or foreign keys)
1726 using list of indexes in indexes data structure"""
1727 # maybe upgrade to use data dictionary?? (but take care to exclude PK and FK)
1728 if self.backend == self.PGSQL:
1729 self.connection.set_isolation_level(0) # allow table/index operations to work
1730 for idx in self.indexes[self.backend]:
1731 if self.backend == self.MYSQL_INNODB:
1732 print((("Dropping index:"), idx['tab'], idx['col']))
1733 try:
1734 self.get_cursor().execute( "alter table %s drop index %s"
1735 , (idx['tab'], idx['col']) )
1736 except:
1737 print(("Drop index failed:"), str(sys.exc_info()))
1738 elif self.backend == self.PGSQL:
1739 print((("Dropping index:"), idx['tab'], idx['col']))
1740 # mod to use tab_col for index name?
1741 try:
1742 self.get_cursor().execute( "drop index %s_%s_idx"
1743 % (idx['tab'],idx['col']) )
1744 except:
1745 print((("Drop index failed:"), str(sys.exc_info())))
1746 elif self.backend == self.SQLITE:
1747 print((("Dropping index:"), idx['tab'], idx['col']))
1748 try:
1749 self.get_cursor().execute( "drop index %s_%s_idx"
1750 % (idx['tab'],idx['col']) )
1751 except:
1752 print(("Drop index failed:"), str(sys.exc_info()))
1753 else:
1754 return -1
1755 if self.backend == self.PGSQL:
1756 self.connection.set_isolation_level(1) # go back to normal isolation level
1757 #end def dropAllIndexes
    def createAllForeignKeys(self):
        """Create foreign keys.

        Recreates the foreign keys listed in self.foreignKeys for the active
        backend.  Only MySQL/InnoDB and PostgreSQL are handled; other backends
        are silently skipped.  All failures are reported but non-fatal.
        """
        try:
            if self.backend == self.PGSQL:
                self.connection.set_isolation_level(0)   # allow table/index operations to work
            c = self.get_cursor()
        except:
            print(("set_isolation_level failed:"), str(sys.exc_info()))

        for fk in self.foreignKeys[self.backend]:
            if self.backend == self.MYSQL_INNODB:
                # Check information_schema first and only add the key when it is missing.
                c.execute("SELECT constraint_name " +
                          "FROM information_schema.KEY_COLUMN_USAGE " +
                          #"WHERE REFERENCED_TABLE_SCHEMA = 'fpdb'
                          "WHERE 1=1 " +
                          "AND table_name = %s AND column_name = %s " +
                          "AND referenced_table_name = %s " +
                          "AND referenced_column_name = %s ",
                          (fk['fktab'], fk['fkcol'], fk['rtab'], fk['rcol']) )
                cons = c.fetchone()
                #print "afterbulk: cons=", cons
                if cons:
                    pass   # constraint already exists - nothing to do
                else:
                    print(("Creating foreign key:"), fk['fktab'], fk['fkcol'], "->", fk['rtab'], fk['rcol'])
                    try:
                        c.execute("alter table " + fk['fktab'] + " add foreign key ("
                                  + fk['fkcol'] + ") references " + fk['rtab'] + "("
                                  + fk['rcol'] + ")")
                    except:
                        print(("Create foreign key failed:"), str(sys.exc_info()))
            elif self.backend == self.PGSQL:
                print(("Creating foreign key:"), fk['fktab'], fk['fkcol'], "->", fk['rtab'], fk['rcol'])
                try:
                    # Name the constraint <fktab>_<fkcol>_fkey explicitly so that
                    # dropAllForeignKeys can locate it again later.
                    c.execute("alter table " + fk['fktab'] + " add constraint "
                              + fk['fktab'] + '_' + fk['fkcol'] + '_fkey'
                              + " foreign key (" + fk['fkcol']
                              + ") references " + fk['rtab'] + "(" + fk['rcol'] + ")")
                except:
                    print(("Create foreign key failed:"), str(sys.exc_info()))
            else:
                pass   # backend not handled here

        try:
            if self.backend == self.PGSQL:
                self.connection.set_isolation_level(1)   # go back to normal isolation level
        except:
            print(("set_isolation_level failed:"), str(sys.exc_info()))
    #end def createAllForeignKeys
    def dropAllForeignKeys(self):
        """Drop the foreign keys listed in self.foreignKeys for the active backend.

        Only MySQL/InnoDB and PostgreSQL are handled.  All failures are reported
        but non-fatal.  DON'T FORGET TO RECREATE THE KEYS afterwards with
        createAllForeignKeys().
        """
        # maybe upgrade to use data dictionary?? (but take care to exclude PK and FK)
        if self.backend == self.PGSQL:
            self.connection.set_isolation_level(0)   # allow table/index operations to work
        c = self.get_cursor()

        for fk in self.foreignKeys[self.backend]:
            if self.backend == self.MYSQL_INNODB:
                # look up the real constraint name in information_schema first
                c.execute("SELECT constraint_name " +
                          "FROM information_schema.KEY_COLUMN_USAGE " +
                          #"WHERE REFERENCED_TABLE_SHEMA = 'fpdb'
                          "WHERE 1=1 " +
                          "AND table_name = %s AND column_name = %s " +
                          "AND referenced_table_name = %s " +
                          "AND referenced_column_name = %s ",
                          (fk['fktab'], fk['fkcol'], fk['rtab'], fk['rcol']) )
                cons = c.fetchone()
                #print "preparebulk find fk: cons=", cons
                if cons:
                    print(("Dropping foreign key:"), cons[0], fk['fktab'], fk['fkcol'])
                    try:
                        c.execute("alter table " + fk['fktab'] + " drop foreign key " + cons[0])
                    except:
                        print(("Warning:"), ("Drop foreign key %s_%s_fkey failed: %s, continuing ...") \
                              % (fk['fktab'], fk['fkcol'], str(sys.exc_info()[1]).rstrip('\n') ))
            elif self.backend == self.PGSQL:
                # DON'T FORGET TO RECREATE THEM!!
                print(("Dropping foreign key:"), fk['fktab'], fk['fkcol'])
                try:
                    # try to lock table to see if index drop will work:
                    # hmmm, tested by commenting out rollback in grapher. lock seems to work but
                    # then drop still hangs :-( does work in some tests though??
                    # will leave code here for now pending further tests/enhancement ...
                    c.execute("BEGIN TRANSACTION")
                    c.execute( "lock table %s in exclusive mode nowait" % (fk['fktab'],) )
                    #print "after lock, status:", c.statusmessage
                    #print "alter table %s drop constraint %s_%s_fkey" % (fk['fktab'], fk['fktab'], fk['fkcol'])
                    try:
                        c.execute("alter table %s drop constraint %s_%s_fkey" % (fk['fktab'], fk['fktab'], fk['fkcol']))
                        print(("dropped foreign key %s_%s_fkey, continuing ...") % (fk['fktab'], fk['fkcol']))
                    except:
                        # a missing constraint is expected and silently ignored
                        if "does not exist" not in str(sys.exc_info()[1]):
                            print(("Warning:"), ("Drop foreign key %s_%s_fkey failed: %s, continuing ...") \
                                  % (fk['fktab'], fk['fkcol'], str(sys.exc_info()[1]).rstrip('\n') ))
                    c.execute("END TRANSACTION")
                except:
                    print(("Warning:"), ("constraint %s_%s_fkey not dropped: %s, continuing ...") \
                          % (fk['fktab'],fk['fkcol'], str(sys.exc_info()[1]).rstrip('\n')))
            else:
                #print ("Only MySQL and Postgres supported so far")
                pass

        if self.backend == self.PGSQL:
            self.connection.set_isolation_level(1)   # go back to normal isolation level
    #end def dropAllForeignKeys
1869 def fillDefaultData(self):
1870 c = self.get_cursor()
1871 c.execute("INSERT INTO Settings (version) VALUES (%s);" % (DB_VERSION))
1872 #Fill Sites
1873 c.execute("INSERT INTO Sites (id,name,code) VALUES ('1', 'Full Tilt Poker', 'FT')")
1874 c.execute("INSERT INTO Sites (id,name,code) VALUES ('2', 'PokerStars', 'PS')")
1875 c.execute("INSERT INTO Sites (id,name,code) VALUES ('3', 'Everleaf', 'EV')")
1876 c.execute("INSERT INTO Sites (id,name,code) VALUES ('4', 'Boss', 'BM')")
1877 c.execute("INSERT INTO Sites (id,name,code) VALUES ('5', 'OnGame', 'OG')")
1878 c.execute("INSERT INTO Sites (id,name,code) VALUES ('6', 'UltimateBet', 'UB')")
1879 c.execute("INSERT INTO Sites (id,name,code) VALUES ('7', 'Betfair', 'BF')")
1880 #c.execute("INSERT INTO Sites (id,name,code) VALUES ('8', 'Absolute', 'AB')")
1881 c.execute("INSERT INTO Sites (id,name,code) VALUES ('9', 'PartyPoker', 'PP')")
1882 c.execute("INSERT INTO Sites (id,name,code) VALUES ('10', 'PacificPoker', 'P8')")
1883 c.execute("INSERT INTO Sites (id,name,code) VALUES ('11', 'Partouche', 'PA')")
1884 c.execute("INSERT INTO Sites (id,name,code) VALUES ('12', 'Merge', 'MN')")
1885 c.execute("INSERT INTO Sites (id,name,code) VALUES ('13', 'PKR', 'PK')")
1886 c.execute("INSERT INTO Sites (id,name,code) VALUES ('14', 'iPoker', 'IP')")
1887 c.execute("INSERT INTO Sites (id,name,code) VALUES ('15', 'Winamax', 'WM')")
1888 c.execute("INSERT INTO Sites (id,name,code) VALUES ('16', 'Everest', 'EP')")
1889 c.execute("INSERT INTO Sites (id,name,code) VALUES ('17', 'Cake', 'CK')")
1890 c.execute("INSERT INTO Sites (id,name,code) VALUES ('18', 'Entraction', 'TR')")
1891 c.execute("INSERT INTO Sites (id,name,code) VALUES ('19', 'BetOnline', 'BO')")
1892 c.execute("INSERT INTO Sites (id,name,code) VALUES ('20', 'Microgaming', 'MG')")
1893 c.execute("INSERT INTO Sites (id,name,code) VALUES ('21', 'Bovada', 'BV')")
1894 c.execute("INSERT INTO Sites (id,name,code) VALUES ('22', 'Enet', 'EN')")
1895 c.execute("INSERT INTO Sites (id,name,code) VALUES ('23', 'SealsWithClubs', 'SW')")
1896 c.execute("INSERT INTO Sites (id,name,code) VALUES ('24', 'WinningPoker', 'WP')")
1897 c.execute("INSERT INTO Sites (id,name,code) VALUES ('25', 'PokerMaster', 'PM')")
1898 c.execute("INSERT INTO Sites (id,name,code) VALUES ('26', 'Run It Once Poker', 'RO')")
1899 c.execute("INSERT INTO Sites (id,name,code) VALUES ('27', 'GGPoker', 'GG')")
1900 c.execute("INSERT INTO Sites (id,name,code) VALUES ('28', 'KingsClub', 'KC')")
1901 c.execute("INSERT INTO Sites (id,name,code) VALUES ('29', 'PokerBros', 'PB')")
1902 c.execute("INSERT INTO Sites (id,name,code) VALUES ('30', 'Unibet', 'UN')")
1903 #c.execute("INSERT INTO Sites (id,name,code) VALUES ('31', 'PMU Poker', 'PM')")
1904 #Fill Actions
1905 c.execute("INSERT INTO Actions (id,name,code) VALUES ('1', 'ante', 'A')")
1906 c.execute("INSERT INTO Actions (id,name,code) VALUES ('2', 'small blind', 'SB')")
1907 c.execute("INSERT INTO Actions (id,name,code) VALUES ('3', 'secondsb', 'SSB')")
1908 c.execute("INSERT INTO Actions (id,name,code) VALUES ('4', 'big blind', 'BB')")
1909 c.execute("INSERT INTO Actions (id,name,code) VALUES ('5', 'both', 'SBBB')")
1910 c.execute("INSERT INTO Actions (id,name,code) VALUES ('6', 'calls', 'C')")
1911 c.execute("INSERT INTO Actions (id,name,code) VALUES ('7', 'raises', 'R')")
1912 c.execute("INSERT INTO Actions (id,name,code) VALUES ('8', 'bets', 'B')")
1913 c.execute("INSERT INTO Actions (id,name,code) VALUES ('9', 'stands pat', 'S')")
1914 c.execute("INSERT INTO Actions (id,name,code) VALUES ('10', 'folds', 'F')")
1915 c.execute("INSERT INTO Actions (id,name,code) VALUES ('11', 'checks', 'K')")
1916 c.execute("INSERT INTO Actions (id,name,code) VALUES ('12', 'discards', 'D')")
1917 c.execute("INSERT INTO Actions (id,name,code) VALUES ('13', 'bringin', 'I')")
1918 c.execute("INSERT INTO Actions (id,name,code) VALUES ('14', 'completes', 'P')")
1919 c.execute("INSERT INTO Actions (id,name,code) VALUES ('15', 'straddle', 'ST')")
1920 c.execute("INSERT INTO Actions (id,name,code) VALUES ('16', 'button blind', 'BUB')")
1921 #Fill Rank
1922 c.execute("INSERT INTO Rank (id,name) VALUES ('1', 'Nothing')")
1923 c.execute("INSERT INTO Rank (id,name) VALUES ('2', 'NoPair')")
1924 c.execute("INSERT INTO Rank (id,name) VALUES ('3', 'OnePair')")
1925 c.execute("INSERT INTO Rank (id,name) VALUES ('4', 'TwoPair')")
1926 c.execute("INSERT INTO Rank (id,name) VALUES ('5', 'Trips')")
1927 c.execute("INSERT INTO Rank (id,name) VALUES ('6', 'Straight')")
1928 c.execute("INSERT INTO Rank (id,name) VALUES ('7', 'Flush')")
1929 c.execute("INSERT INTO Rank (id,name) VALUES ('8', 'FlHouse')")
1930 c.execute("INSERT INTO Rank (id,name) VALUES ('9', 'Quads')")
1931 c.execute("INSERT INTO Rank (id,name) VALUES ('10', 'StFlush')")
1932 #Fill StartCards
1933 sql = "INSERT INTO StartCards (category, name, rank, combinations) VALUES (%s, %s, %s, %s)".replace('%s', self.sql.query['placeholder'])
1934 for i in range(170):
1935 (name, rank, combinations) = Card.StartCardRank(i)
1936 c.execute(sql, ('holdem', name, rank, combinations))
1937 for idx in range(-13,1184):
1938 name = Card.decodeRazzStartHand(idx)
1939 c.execute(sql, ('razz', name, idx, 0))
1941 #end def fillDefaultData
1943 def rebuild_indexes(self, start=None):
1944 self.dropAllIndexes()
1945 self.createAllIndexes()
1946 self.dropAllForeignKeys()
1947 self.createAllForeignKeys()
1948 #end def rebuild_indexes
    def replace_statscache(self, type, table, query):
        """Fill in the placeholder tags of a rebuildCache query for one cache table.

        type:  'ring' or 'tour' - controls the tourney-related clauses.
        type shadows the builtin of the same name; kept for interface compatibility.
        table: 'HudCache', 'CardsCache' or 'PositionsCache' - selects the
               insert/select/group column templates.
        query: raw SQL containing <insert>/<select>/<group>/... tags.
        Returns the query with all tags substituted (unknown tables return the
        query unchanged apart from no substitutions having been applied).
        """
        if table == 'HudCache':
            # column lists for the INSERT target, the SELECT source and GROUP BY
            insert = """HudCache
    (gametypeId
    ,playerId
    ,seats
    ,position
    <tourney_insert_clause>
    ,styleKey"""

            select = """h.gametypeId
    ,hp.playerId
    ,h.seats as seat_num
    <hc_position>
    <tourney_select_clause>
    <styleKey>"""

            group = """h.gametypeId
    ,hp.playerId
    ,seat_num
    ,hc_position
    <tourney_group_clause>
    <styleKeyGroup>"""

            query = query.replace('<insert>', insert)
            query = query.replace('<select>', select)
            query = query.replace('<group>', group)
            query = query.replace('<sessions_join_clause>', "")

            if self.build_full_hudcache:
                # full cache: real position buckets (B/S/D/C/M/E) ...
                query = query.replace('<hc_position>', """,case when hp.position = 'B' then 'B'
                        when hp.position = 'S' then 'S'
                        when hp.position = '0' then 'D'
                        when hp.position = '1' then 'C'
                        when hp.position = '2' then 'M'
                        when hp.position = '3' then 'M'
                        when hp.position = '4' then 'M'
                        when hp.position = '5' then 'E'
                        when hp.position = '6' then 'E'
                        when hp.position = '7' then 'E'
                        when hp.position = '8' then 'E'
                        when hp.position = '9' then 'E'
                        else 'E'
                   end as hc_position""")
                # ... and a per-day 'dYYMMDD' styleKey derived from startTime,
                # spelled differently per backend.
                if self.backend == self.PGSQL:
                    query = query.replace('<styleKey>', ",'d' || to_char(h.startTime, 'YYMMDD')")
                    query = query.replace('<styleKeyGroup>', ",to_char(h.startTime, 'YYMMDD')")
                elif self.backend == self.SQLITE:
                    query = query.replace('<styleKey>', ",'d' || substr(strftime('%Y%m%d', h.startTime),3,7)")
                    query = query.replace('<styleKeyGroup>', ",substr(strftime('%Y%m%d', h.startTime),3,7)")
                elif self.backend == self.MYSQL_INNODB:
                    query = query.replace('<styleKey>', ",date_format(h.startTime, 'd%y%m%d')")
                    query = query.replace('<styleKeyGroup>', ",date_format(h.startTime, 'd%y%m%d')")
            else:
                # reduced cache: constant position and styleKey
                query = query.replace('<hc_position>', ",'0' as hc_position")
                query = query.replace('<styleKey>', ",'A000000' as styleKey")
                query = query.replace('<styleKeyGroup>', ',styleKey')

            if type == 'tour':
                query = query.replace('<tourney_insert_clause>', ",tourneyTypeId")
                query = query.replace('<tourney_select_clause>', ",t.tourneyTypeId")
                query = query.replace('<tourney_group_clause>', ",t.tourneyTypeId")
            else:
                query = query.replace('<tourney_insert_clause>', "")
                query = query.replace('<tourney_select_clause>', "")
                query = query.replace('<tourney_group_clause>', "")

            query = query.replace('<hero_where>', "")
            query = query.replace('<hero_join>', '')

        elif table == 'CardsCache':
            insert = """CardsCache
    (weekId
    ,monthId
    ,gametypeId
    <tourney_insert_clause>
    ,playerId
    ,startCards"""

            select = """s.weekId
    ,s.monthId
    ,h.gametypeId
    <tourney_select_clause>
    ,hp.playerId
    ,hp.startCards"""

            group = """s.weekId
    ,s.monthId
    ,h.gametypeId
    <tourney_group_clause>
    ,hp.playerId
    ,hp.startCards"""

            query = query.replace('<insert>', insert)
            query = query.replace('<select>', select)
            query = query.replace('<group>', group)
            query = query.replace('<hero_join>', '')
            # CardsCache buckets per week/month, so the Sessions join is needed
            query = query.replace('<sessions_join_clause>', """INNER JOIN Sessions s ON (s.id = h.sessionId)
        INNER JOIN Players p ON (hp.playerId = p.id)""")
            query = query.replace('<hero_where>', '')

            if type == 'tour':
                query = query.replace('<tourney_insert_clause>', ",tourneyTypeId")
                query = query.replace('<tourney_select_clause>', ",t.tourneyTypeId")
                query = query.replace('<tourney_group_clause>', ",t.tourneyTypeId")
            else:
                query = query.replace('<tourney_insert_clause>', "")
                query = query.replace('<tourney_select_clause>', "")
                query = query.replace('<tourney_group_clause>', "")

        elif table == 'PositionsCache':
            insert = """PositionsCache
    (weekId
    ,monthId
    ,gametypeId
    <tourney_insert_clause>
    ,playerId
    ,seats
    ,maxPosition
    ,position"""

            select = """s.weekId
    ,s.monthId
    ,h.gametypeId
    <tourney_select_clause>
    ,hp.playerId
    ,h.seats
    ,h.maxPosition
    ,hp.position"""

            group = """s.weekId
    ,s.monthId
    ,h.gametypeId
    <tourney_group_clause>
    ,hp.playerId
    ,h.seats
    ,h.maxPosition
    ,hp.position"""

            query = query.replace('<insert>', insert)
            query = query.replace('<select>', select)
            query = query.replace('<group>', group)
            query = query.replace('<hero_join>', '')
            query = query.replace('<sessions_join_clause>', """INNER JOIN Sessions s ON (s.id = h.sessionId)
        INNER JOIN Players p ON (hp.playerId = p.id)""")
            query = query.replace('<hero_where>', "")

            if type == 'tour':
                query = query.replace('<tourney_insert_clause>', ",tourneyTypeId")
                query = query.replace('<tourney_select_clause>', ",t.tourneyTypeId")
                query = query.replace('<tourney_group_clause>', ",t.tourneyTypeId")
            else:
                query = query.replace('<tourney_insert_clause>', "")
                query = query.replace('<tourney_select_clause>', "")
                query = query.replace('<tourney_group_clause>', "")

        return query
    def rebuild_cache(self, h_start=None, v_start=None, table = 'HudCache', ttid = None, wmid = None):
        """clears hudcache and rebuilds from the individual handsplayers records

        h_start/v_start: optional start dates for hero/villain hands; when both
        are absent the hero id lookup is skipped entirely.
        table: which cache table to rebuild (HudCache/CardsCache/PositionsCache).
        ttid:  restrict the rebuild to one tourneyTypeId (tour hands only).
        wmid:  restrict to one (weekId, monthId) pair.
        """
        stime = time()
        # derive list of program owner's player ids
        self.hero = {}                               # name of program owner indexed by site id
        self.hero_ids = {'dummy':-53, 'dummy2':-52}  # playerid of owner indexed by site id
        # make sure at least two values in list
        # so that tuple generation creates doesn't use
        # () or (1,) style
        if not h_start and not v_start:
            self.hero_ids = None
        else:
            for site in self.config.get_supported_sites():
                result = self.get_site_id(site)
                if result:
                    site_id = result[0][0]
                    self.hero[site_id] = self.config.supported_sites[site].screen_name
                    p_id = self.get_player_id(self.config, site, self.hero[site_id])
                    if p_id:
                        self.hero_ids[site_id] = int(p_id)

        if not h_start:
            h_start = self.hero_hudstart_def
        if not v_start:
            v_start = self.villain_hudstart_def

        # a full (unfiltered) rebuild clears the cache table first
        if not ttid and not wmid:
            self.get_cursor().execute(self.sql.query['clear%s' % table])
            self.commit()

        # --- ring (cash) hands: skipped entirely when rebuilding one tourney type
        if not ttid:
            if self.hero_ids is None:
                if wmid:
                    where = "WHERE g.type = 'ring' AND weekId = %s and monthId = %s<hero_where>" % wmid
                else:
                    where = "WHERE g.type = 'ring'<hero_where>"
            else:
                # different start dates for hero and villain hands
                where = "where ((( hp.playerId not in " + str(tuple(self.hero_ids.values())) \
                        + " and h.startTime > '" + v_start + "')" \
                        + " or ( hp.playerId in " + str(tuple(self.hero_ids.values())) \
                        + " and h.startTime > '" + h_start + "'))" \
                        + " AND hp.tourneysPlayersId IS NULL)"
            rebuild_sql_cash = self.sql.query['rebuildCache'].replace('%s', self.sql.query['placeholder'])
            rebuild_sql_cash = rebuild_sql_cash.replace('<tourney_join_clause>', "")
            rebuild_sql_cash = rebuild_sql_cash.replace('<where_clause>', where)
            rebuild_sql_cash = self.replace_statscache('ring', table, rebuild_sql_cash)
            #print rebuild_sql_cash
            self.get_cursor().execute(rebuild_sql_cash)
            self.commit()
            #print ("Rebuild cache(cash) took %.1f seconds") % (time() - stime,)

        # --- tournament hands
        if ttid:
            where = "WHERE t.tourneyTypeId = %s<hero_where>" % ttid
        elif self.hero_ids is None:
            if wmid:
                where = "WHERE g.type = 'tour' AND weekId = %s and monthId = %s<hero_where>" % wmid
            else:
                where = "WHERE g.type = 'tour'<hero_where>"
        else:
            where = "where ((( hp.playerId not in " + str(tuple(self.hero_ids.values())) \
                    + " and h.startTime > '" + v_start + "')" \
                    + " or ( hp.playerId in " + str(tuple(self.hero_ids.values())) \
                    + " and h.startTime > '" + h_start + "'))" \
                    + " AND hp.tourneysPlayersId >= 0)"
        rebuild_sql_tourney = self.sql.query['rebuildCache'].replace('%s', self.sql.query['placeholder'])
        rebuild_sql_tourney = rebuild_sql_tourney.replace('<tourney_join_clause>', """INNER JOIN Tourneys t ON (t.id = h.tourneyId)""")
        rebuild_sql_tourney = rebuild_sql_tourney.replace('<where_clause>', where)
        rebuild_sql_tourney = self.replace_statscache('tour', table, rebuild_sql_tourney)
        #print rebuild_sql_tourney
        self.get_cursor().execute(rebuild_sql_tourney)
        self.commit()
        #print ("Rebuild hudcache took %.1f seconds") % (time() - stime,)
    #end def rebuild_cache
    def update_timezone(self, tz_name):
        """Re-bucket every Sessions row into weeks/months for timezone tz_name.

        Each session's local week/month start is recomputed from its UTC
        sessionStart; missing Weeks/Months rows are inserted, changed sessions
        are relinked and the now-stale buckets are cleaned up afterwards.
        """
        select_W     = self.sql.query['select_W'].replace('%s', self.sql.query['placeholder'])
        select_M     = self.sql.query['select_M'].replace('%s', self.sql.query['placeholder'])
        insert_W     = self.sql.query['insert_W'].replace('%s', self.sql.query['placeholder'])
        insert_M     = self.sql.query['insert_M'].replace('%s', self.sql.query['placeholder'])
        update_WM_S  = self.sql.query['update_WM_S'].replace('%s', self.sql.query['placeholder'])
        c = self.get_cursor()
        c.execute("SELECT id, sessionStart, weekId wid, monthId mid FROM Sessions")
        sessions = self.fetchallDict(c,['id','sessionStart','wid', 'mid'])
        for s in sessions:
            # sessionStart is stored naive (UTC); derive the local offset from
            # the '%z' string, e.g. '+0530' or '-0330'.
            utc_start = pytz.utc.localize(s['sessionStart'])
            tz = pytz.timezone(tz_name)
            loc_tz = utc_start.astimezone(tz).strftime('%z')
            # hours: everything but the last two chars (sign included);
            # minutes: sign char + last two chars, so '-0330' -> -3h, -30m
            offset = timedelta(hours=int(loc_tz[:-2]), minutes=int(loc_tz[0]+loc_tz[-2:]))
            local = s['sessionStart'] + offset
            monthStart = datetime(local.year, local.month, 1)
            weekdate   = datetime(local.year, local.month, local.day)
            weekStart  = weekdate - timedelta(days=weekdate.weekday())  # Monday of that week
            wid = self.insertOrUpdate('weeks', c, (weekStart,), select_W, insert_W)
            mid = self.insertOrUpdate('months', c, (monthStart,), select_M, insert_M)
            if wid != s['wid'] or mid != s['mid']:
                # session moved bucket: relink and remember old/new for cleanup
                row = [wid, mid, s['id']]
                c.execute(update_WM_S, row)
                self.wmold.add((s['wid'], s['mid']))
                self.wmnew.add((wid, mid))
        self.commit()
        self.cleanUpWeeksMonths()
    def get_hero_hudcache_start(self):
        """fetches earliest stylekey from hudcache for one of hero's player ids

        Returns an ISO-ish date string 'YYYY-MM-DD', or self.hero_hudstart_def
        when the hero has no hudcache rows yet.  On error, prints a message and
        implicitly returns None.
        """
        try:
            # derive list of program owner's player ids
            self.hero = {}                               # name of program owner indexed by site id
            self.hero_ids = {'dummy':-53, 'dummy2':-52}  # playerid of owner indexed by site id
            # make sure at least two values in list
            # so that tuple generation creates doesn't use
            # () or (1,) style
            for site in self.config.get_supported_sites():
                result = self.get_site_id(site)
                if result:
                    site_id = result[0][0]
                    self.hero[site_id] = self.config.supported_sites[site].screen_name
                    p_id = self.get_player_id(self.config, site, self.hero[site_id])
                    if p_id:
                        self.hero_ids[site_id] = int(p_id)

            q = self.sql.query['get_hero_hudcache_start'].replace("<playerid_list>", str(tuple(self.hero_ids.values())))
            c = self.get_cursor()
            c.execute(q)
            tmp = c.fetchone()
            if tmp == (None,):
                # no hudcache rows for the hero yet - use the default start date
                return self.hero_hudstart_def
            else:
                # styleKey is 'dYYMMDD'; convert to '20YY-MM-DD'
                return "20"+tmp[0][1:3] + "-" + tmp[0][3:5] + "-" + tmp[0][5:7]
        except:
            err = traceback.extract_tb(sys.exc_info()[2])[-1]
            print(("Error rebuilding hudcache:"), str(sys.exc_info()[1]))
            print(err)
    #end def get_hero_hudcache_start
2244 def analyzeDB(self):
2245 """Do whatever the DB can offer to update index/table statistics"""
2246 stime = time()
2247 if self.backend == self.MYSQL_INNODB or self.backend == self.SQLITE:
2248 try:
2249 self.get_cursor().execute(self.sql.query['analyze'])
2250 except:
2251 print(("Error during analyze:"), str(sys.exc_info()[1]))
2252 elif self.backend == self.PGSQL:
2253 self.connection.set_isolation_level(0) # allow analyze to work
2254 try:
2255 self.get_cursor().execute(self.sql.query['analyze'])
2256 except:
2257 print(("Error during analyze:"), str(sys.exc_info()[1]))
2258 self.connection.set_isolation_level(1) # go back to normal isolation level
2259 self.commit()
2260 atime = time() - stime
2261 log.info(("Analyze took %.1f seconds") % (atime,))
2262 #end def analyzeDB
2264 def vacuumDB(self):
2265 """Do whatever the DB can offer to update index/table statistics"""
2266 stime = time()
2267 if self.backend == self.MYSQL_INNODB or self.backend == self.SQLITE:
2268 try:
2269 self.get_cursor().execute(self.sql.query['vacuum'])
2270 except:
2271 print(("Error during vacuum:"), str(sys.exc_info()[1]))
2272 elif self.backend == self.PGSQL:
2273 self.connection.set_isolation_level(0) # allow vacuum to work
2274 try:
2275 self.get_cursor().execute(self.sql.query['vacuum'])
2276 except:
2277 print(("Error during vacuum:"), str(sys.exc_info()[1]))
2278 self.connection.set_isolation_level(1) # go back to normal isolation level
2279 self.commit()
2280 atime = time() - stime
2281 print(("Vacuum took %.1f seconds") % (atime,))
2282 #end def analyzeDB
2284# Start of Hand Writing routines. Idea is to provide a mixture of routines to store Hand data
2285# however the calling prog requires. Main aims:
2286# - existing static routines from fpdb_simple just modified
2288 def setThreadId(self, threadid):
2289 self.threadId = threadid
    def acquireLock(self, wait=True, retry_time=.01):
        """Try to take the application-level insert lock for this thread.

        wait: when False, give up immediately if the lock is held elsewhere
              (only detectable on MySQL); otherwise poll every retry_time seconds.
        Returns True once the lock is held, False only when wait=False fails.
        """
        while not self._has_lock:
            cursor = self.get_cursor()
            num = cursor.execute(self.sql.query['switchLockOn'], (True, self.threadId))
            self.commit()
            # On MySQL, execute() returns the affected-row count: 0 means the
            # lock row was already switched on by someone else.
            if (self.backend == self.MYSQL_INNODB and num == 0):
                if not wait:
                    return False
                sleep(retry_time)
            else:
                self._has_lock = True
                return True
2304 def releaseLock(self):
2305 if self._has_lock:
2306 cursor = self.get_cursor()
2307 num = cursor.execute(self.sql.query['switchLockOff'], (False, self.threadId))
2308 self.commit()
2309 self._has_lock = False
2311 def lock_for_insert(self):
2312 """Lock tables in MySQL to try to speed inserts up"""
2313 try:
2314 self.get_cursor().execute(self.sql.query['lockForInsert'])
2315 except:
2316 print(("Error during lock_for_insert:"), str(sys.exc_info()[1]))
2317 #end def lock_for_insert
2319 def resetBulkCache(self, reconnect=False):
2320 self.siteHandNos = [] # cache of siteHandNo
2321 self.hbulk = [] # Hands bulk inserts
2322 self.bbulk = [] # Boards bulk inserts
2323 self.hpbulk = [] # HandsPlayers bulk inserts
2324 self.habulk = [] # HandsActions bulk inserts
2325 self.hcbulk = {} # HudCache bulk inserts
2326 self.dcbulk = {} # CardsCache bulk inserts
2327 self.pcbulk = {} # PositionsCache bulk inserts
2328 self.hsbulk = [] # HandsStove bulk inserts
2329 self.htbulk = [] # HandsPots bulk inserts
2330 self.tbulk = {} # Tourneys bulk updates
2331 self.s = {'bk': []} # Sessions bulk updates
2332 self.sc = {} # SessionsCache bulk updates
2333 self.tc = {} # TourneysCache bulk updates
2334 self.hids = [] # hand ids in order of hand bulk inserts
2335 #self.tids = [] # tourney ids in order of hp bulk inserts
2336 if reconnect: self.do_connect(self.config)
2338 def executemany(self, c, q, values):
2339 if self.backend == self.PGSQL and self.import_options['hhBulkPath'] != "":
2340 # COPY much faster under postgres. Requires superuser privileges
2341 m = re_insert.match(q)
2342 rand = ''.join(random.SystemRandom().choice(string.ascii_uppercase + string.digits) for _ in range(5))
2343 bulk_file = os.path.join(self.import_options['hhBulkPath'], m.group("TABLENAME") + '_' + rand)
2344 with open(bulk_file, 'wb') as csvfile:
2345 writer = csv.writer(csvfile, delimiter='\t', quotechar='"', quoting=csv.QUOTE_MINIMAL)
2346 writer.writerows(w for w in values)
2347 q_insert = "COPY " + m.group("TABLENAME") + m.group("COLUMNS") + " FROM '" + bulk_file + "' DELIMITER '\t' CSV"
2348 c.execute(q_insert)
2349 os.remove(bulk_file)
2350 else:
2351 batch_size=20000 #experiment to find optimal batch_size for your data
2352 while values: # repeat until all records in values have been inserted ''
2353 batch, values = values[:batch_size], values[batch_size:] #split values into the current batch and the remaining records
2354 c.executemany(q, batch ) #insert current batch ''
    def storeHand(self, hdata, doinsert = False, printdata = False):
        """Queue one Hands row built from the hdata dict; flush all queued rows when doinsert is set.

        hdata: per-hand stats dict produced by the importer.
        doinsert: when True, also resolve session/tourney links and bulk-insert.
        printdata: when True, pretty-print hdata for debugging.
        """
        if printdata:
            print ("######## Hands ##########")
            import pprint
            pp = pprint.PrettyPrinter(indent=4)
            pp.pprint(hdata)
            print ("###### End Hands ########")

        # Tablename can have odd charachers
        hdata['tableName'] = Charset.to_db_utf8(hdata['tableName'])[:50]

        self.hids.append(hdata['id'])
        # NOTE: element order must match the column order of the store_hand query.
        self.hbulk.append( [ hdata['tableName'],
                             hdata['siteHandNo'],
                             hdata['tourneyId'],
                             hdata['gametypeId'],
                             hdata['sessionId'],
                             hdata['fileId'],
                             hdata['startTime'].replace(tzinfo=None),   # stored naive
                             datetime.utcnow().strftime('%Y-%m-%d %H:%M:%S'), #importtime
                             hdata['seats'],
                             hdata['heroSeat'],
                             hdata['maxPosition'],
                             hdata['texture'],
                             hdata['playersVpi'],
                             hdata['boardcard1'],
                             hdata['boardcard2'],
                             hdata['boardcard3'],
                             hdata['boardcard4'],
                             hdata['boardcard5'],
                             hdata['runItTwice'],
                             hdata['playersAtStreet1'],
                             hdata['playersAtStreet2'],
                             hdata['playersAtStreet3'],
                             hdata['playersAtStreet4'],
                             hdata['playersAtShowdown'],
                             hdata['street0Raises'],
                             hdata['street1Raises'],
                             hdata['street2Raises'],
                             hdata['street3Raises'],
                             hdata['street4Raises'],
                             hdata['street0Pot'],
                             hdata['street1Pot'],
                             hdata['street2Pot'],
                             hdata['street3Pot'],
                             hdata['street4Pot'],
                             hdata['finalPot']
                           ])

        if doinsert:
            self.appendHandsSessionIds()
            self.updateTourneysSessions()
            q = self.sql.query['store_hand']
            q = q.replace('%s', self.sql.query['placeholder'])
            c = self.get_cursor()
            c.executemany(q, self.hbulk)
            self.commit()
2414 def storeBoards(self, id, boards, doinsert):
2415 if boards:
2416 for b in boards:
2417 self.bbulk += [[id] + b]
2418 if doinsert and self.bbulk:
2419 q = self.sql.query['store_boards']
2420 q = q.replace('%s', self.sql.query['placeholder'])
2421 c = self.get_cursor()
2422 self.executemany(c, q, self.bbulk) #c.executemany(q, self.bbulk)
2424 def updateTourneysSessions(self):
2425 if self.tbulk:
2426 q_update_sessions = self.sql.query['updateTourneysSessions'].replace('%s', self.sql.query['placeholder'])
2427 c = self.get_cursor()
2428 for t, sid in list(self.tbulk.items()):
2429 c.execute(q_update_sessions, (sid, t))
2430 self.commit()
    def storeHandsPlayers(self, hid, pids, pdata, doinsert = False, printdata = False):
        """Queue one HandsPlayers row per player; bulk-insert the buffer when doinsert is set.

        hid:   hand id shared by all rows.
        pids:  dict mapping player name -> playerId.
        pdata: dict mapping player name -> per-player stats dict.
        """
        #print "DEBUG: %s %s %s" %(hid, pids, pdata)
        if printdata:
            import pprint
            pp = pprint.PrettyPrinter(indent=4)
            pp.pprint(pdata)

        hpbulk = self.hpbulk
        for p, pvalue in list(pdata.items()):
            # Add (hid, pids[p]) + all the values in pvalue at the
            # keys in HANDS_PLAYERS_KEYS to hpbulk.
            # NOTE: the final reverse() makes the row [hid, playerId,
            # <stats in reverse HANDS_PLAYERS_KEYS order>] - the keys list is
            # ordered to match the store_hands_players columns after reversal.
            bulk_data = [pvalue[key] for key in HANDS_PLAYERS_KEYS]
            bulk_data.append(pids[p])
            bulk_data.append(hid)
            bulk_data.reverse()
            hpbulk.append(bulk_data)

        if doinsert:
            #self.appendHandsPlayersSessionIds()
            q = self.sql.query['store_hands_players']
            q = q.replace('%s', self.sql.query['placeholder'])
            c = self.get_cursor(True)
            self.executemany(c, q, self.hpbulk) #c.executemany(q, self.hpbulk)
2456 def storeHandsPots(self, tdata, doinsert):
2457 self.htbulk += tdata
2458 if doinsert and self.htbulk:
2459 q = self.sql.query['store_hands_pots']
2460 q = q.replace('%s', self.sql.query['placeholder'])
2461 c = self.get_cursor()
2462 self.executemany(c, q, self.htbulk) #c.executemany(q, self.hsbulk)
def storeHandsActions(self, hid, pids, adata, doinsert=False, printdata=False):
    """Queue one HandsActions row per action; bulk-insert the backlog on doinsert."""
    # Field order must match the store_hands_actions insert statement.
    fields = ('street', 'actionNo', 'streetActionNo', 'actionId', 'amount',
              'raiseTo', 'amountCalled', 'numDiscarded', 'cardsDiscarded',
              'allIn')
    for key in adata:
        action = adata[key]
        row = (hid, pids[action['player']]) + tuple(action[f] for f in fields)
        self.habulk.append(row)

    if doinsert:
        sql = self.sql.query['store_hands_actions'].replace('%s', self.sql.query['placeholder'])
        cursor = self.get_cursor()
        self.executemany(cursor, sql, self.habulk)
def storeHandsStove(self, sdata, doinsert):
    """Accumulate HandsStove rows; bulk-insert the backlog when doinsert is set."""
    self.hsbulk.extend(sdata)
    if doinsert and self.hsbulk:
        sql = self.sql.query['store_hands_stove'].replace('%s', self.sql.query['placeholder'])
        self.executemany(self.get_cursor(), sql, self.hsbulk)
def storeHudCache(self, gid, gametype, pids, starttime, pdata, doinsert=False):
    """Update cached statistics. If update fails because no record exists, do an insert.

    Accumulates one stat line per (gametype, player, seats, position,
    tourneyTypeId, styleKey) key in self.hcbulk; when doinsert is set the
    whole accumulator is flushed to HudCache with update-then-insert.
    """
    if pdata:
        # styleKey is the hand's local "poker day" (dYYMMDD), shifted by the
        # configured day_start plus the host's offset from UTC.
        tz = datetime.utcnow() - datetime.today()
        tz_offset = old_div(tz.seconds,3600)
        tz_day_start_offset = self.day_start + tz_offset

        d = timedelta(hours=tz_day_start_offset)
        starttime_offset = starttime - d
        styleKey = datetime.strftime(starttime_offset, 'd%y%m%d')
        seats = len(pids)

    # Seat index -> position bucket; blinds keep their 'B'/'S' codes.
    pos = {'B':'B', 'S':'S', 0:'D', 1:'C', 2:'M', 3:'M', 4:'M', 5:'E', 6:'E', 7:'E', 8:'E', 9:'E' }

    for p in pdata:
        player_stats = pdata.get(p)
        # During bulk import, skip stats for tourney types being rebuilt.
        garbageTourneyTypes = player_stats['tourneyTypeId'] in self.ttnew or player_stats['tourneyTypeId'] in self.ttold
        if self.import_options['hhBulkPath'] == "" or not garbageTourneyTypes:
            position = pos[player_stats['position']]
            # Without full hudcache, position/day collapse to single buckets.
            k = (gid
                ,pids[p]
                ,seats
                ,position if self.build_full_hudcache else '0'
                ,player_stats['tourneyTypeId']
                ,styleKey if self.build_full_hudcache else 'A000000'
                )
            player_stats['n'] = 1
            # Booleans become 0/1 counters so lines can be summed.
            line = [int(player_stats[s]) if isinstance(player_stats[s],bool) else player_stats[s] for s in CACHE_KEYS]

            hud = self.hcbulk.get(k)
            # Add line to the old line in the hudcache.
            if hud is not None:
                for idx,val in enumerate(line):
                    hud[idx] += val
            else:
                self.hcbulk[k] = line

    if doinsert:
        update_hudcache = self.sql.query['update_hudcache']
        update_hudcache = update_hudcache.replace('%s', self.sql.query['placeholder'])
        insert_hudcache = self.sql.query['insert_hudcache']
        insert_hudcache = insert_hudcache.replace('%s', self.sql.query['placeholder'])

        select_hudcache_ring = self.sql.query['select_hudcache_ring']
        select_hudcache_ring = select_hudcache_ring.replace('%s', self.sql.query['placeholder'])
        select_hudcache_tour = self.sql.query['select_hudcache_tour']
        select_hudcache_tour = select_hudcache_tour.replace('%s', self.sql.query['placeholder'])
        inserts = []
        c = self.get_cursor()
        for k, item in list(self.hcbulk.items()):
            # k[4] is the tourneyTypeId: falsy means this is a ring-game line.
            if not k[4]:
                q = select_hudcache_ring
                row = list(k[:4]) + [k[-1]]
            else:
                q = select_hudcache_tour
                row = list(k)

            c.execute(q, row)
            result = c.fetchone()
            if result:
                # Row exists: add our accumulated line to it.
                id = result[0]
                update = item + [id]
                c.execute(update_hudcache, update)
            else:
                # No row yet: queue for a single bulk insert below.
                inserts.append(list(k) + item)

        if inserts:
            self.executemany(c, insert_hudcache, inserts)
        self.commit()
def storeSessions(self, hid, pids, startTime, tid, heroes, tz_name, doinsert=False):
    """Update cached sessions. If no record exists, do an insert.

    Maintains self.s['bk'], an in-memory list of open session candidates
    (grouped by the sessionTimeout gap rule and by shared tourney ids).
    When doinsert is set, each candidate is reconciled with the Sessions
    table (extend a matching row, merge several overlapping rows, or insert
    a new one) and the resolved session/week/month ids are recorded in
    self.s[hid] for every hand of the candidate.
    """
    THRESHOLD = timedelta(seconds=int(self.sessionTimeout * 60))

    # Convert startTime (stored as UTC) to the hero's local calendar so the
    # week/month boundaries line up with local days.
    if tz_name in pytz.common_timezones:
        naive = startTime.replace(tzinfo=None)
        utc_start = pytz.utc.localize(naive)
        tz = pytz.timezone(tz_name)
        loc_tz = utc_start.astimezone(tz).strftime('%z')
        offset = timedelta(hours=int(loc_tz[:-2]), minutes=int(loc_tz[0]+loc_tz[-2:]))
        local = naive + offset
        monthStart = datetime(local.year, local.month, 1)
        weekdate = datetime(local.year, local.month, local.day)
        weekStart = weekdate - timedelta(days=weekdate.weekday())
    else:
        # Unknown timezone name: fall back to the host's offset from UTC.
        if strftime('%Z') == 'UTC':
            local = startTime
            loc_tz = '0'
        else:
            tz_dt = datetime.today() - datetime.utcnow()
            loc_tz = old_div(tz_dt.seconds, 3600) - 24
            offset = timedelta(hours=int(loc_tz))
            local = startTime + offset
        monthStart = datetime(local.year, local.month, 1)
        weekdate = datetime(local.year, local.month, local.day)
        weekStart = weekdate - timedelta(days=weekdate.weekday())

    # Only hands in which a hero is seated create or extend sessions.
    j, hand = None, {}
    for p, id in list(pids.items()):
        if id in heroes:
            hand['startTime'] = startTime.replace(tzinfo=None)
            hand['weekStart'] = weekStart
            hand['monthStart'] = monthStart
            hand['ids'] = [hid]
            hand['tourneys'] = set()

    matched = []
    if hand:
        lower = hand['startTime'] - THRESHOLD
        upper = hand['startTime'] + THRESHOLD
        for i in range(len(self.s['bk'])):
            # Candidate overlaps the timeout window, or shares this tourney.
            if (((lower <= self.s['bk'][i]['sessionEnd'])
                 and (upper >= self.s['bk'][i]['sessionStart']))
                    or (tid in self.s['bk'][i]['tourneys'])):
                if ((hand['startTime'] <= self.s['bk'][i]['sessionEnd'])
                        and (hand['startTime'] >= self.s['bk'][i]['sessionStart'])):
                    matched.append(i)
                elif hand['startTime'] < self.s['bk'][i]['sessionStart']:
                    self.s['bk'][i]['sessionStart'] = hand['startTime']
                    self.s['bk'][i]['weekStart'] = hand['weekStart']
                    self.s['bk'][i]['monthStart'] = hand['monthStart']
                    matched.append(i)
                elif hand['startTime'] > self.s['bk'][i]['sessionEnd']:
                    self.s['bk'][i]['sessionEnd'] = hand['startTime']
                    matched.append(i)
        if len(matched) == 1:
            # Hand joins exactly one existing candidate.
            j = matched[0]
            self.s['bk'][j]['ids'] += [hid]
            if tid: self.s['bk'][j]['tourneys'].add(tid)
        elif len(matched) > 1:
            # Hand bridges several candidates: merge them all into one.
            merged = {}
            merged['ids'] = [hid]
            merged['tourneys'] = set()
            if tid: merged['tourneys'].add(tid)
            for n in matched:
                h = self.s['bk'][n]
                if not merged.get('sessionStart') or merged.get('sessionStart') > h['sessionStart']:
                    merged['sessionStart'] = h['sessionStart']
                    merged['weekStart'] = h['weekStart']
                    merged['monthStart'] = h['monthStart']
                if not merged.get('sessionEnd') or merged.get('sessionEnd') < h['sessionEnd']:
                    merged['sessionEnd'] = h['sessionEnd']
                merged['ids'] += h['ids']
                # BUG FIX: was merged['tourneys'].union(h['tourneys']) whose
                # return value was discarded, silently dropping the merged
                # candidates' tourney ids; update() mutates in place.
                merged['tourneys'].update(h['tourneys'])
                self.s['bk'][n]['delete'] = True

            self.s['bk'] = [item for item in self.s['bk'] if not item.get('delete')]
            self.s['bk'].append(merged)
        elif len(matched) == 0:
            # No candidate nearby: open a new one seeded with this hand.
            j = len(self.s['bk'])
            hand['id'] = None
            hand['sessionStart'] = hand['startTime']
            hand['sessionEnd'] = hand['startTime']
            if tid: hand['tourneys'].add(tid)
            self.s['bk'].append(hand)

    if doinsert:
        # Prepared statements for the flush phase.
        select_S = self.sql.query['select_S'].replace('%s', self.sql.query['placeholder'])
        select_W = self.sql.query['select_W'].replace('%s', self.sql.query['placeholder'])
        select_M = self.sql.query['select_M'].replace('%s', self.sql.query['placeholder'])
        update_S = self.sql.query['update_S'].replace('%s', self.sql.query['placeholder'])
        insert_W = self.sql.query['insert_W'].replace('%s', self.sql.query['placeholder'])
        insert_M = self.sql.query['insert_M'].replace('%s', self.sql.query['placeholder'])
        insert_S = self.sql.query['insert_S'].replace('%s', self.sql.query['placeholder'])
        update_S_SC = self.sql.query['update_S_SC'].replace('%s', self.sql.query['placeholder'])
        update_S_TC = self.sql.query['update_S_TC'].replace('%s', self.sql.query['placeholder'])
        update_S_T = self.sql.query['update_S_T'].replace('%s', self.sql.query['placeholder'])
        update_S_H = self.sql.query['update_S_H'].replace('%s', self.sql.query['placeholder'])
        delete_S = self.sql.query['delete_S'].replace('%s', self.sql.query['placeholder'])
        c = self.get_cursor()
        for i in range(len(self.s['bk'])):
            lower = self.s['bk'][i]['sessionStart'] - THRESHOLD
            upper = self.s['bk'][i]['sessionEnd'] + THRESHOLD
            tourneys = self.s['bk'][i]['tourneys']
            if (self.s['bk'][i]['tourneys']):
                # Tourney ids come from our own DB (ints), so interpolating
                # them into the IN (...) clause is safe.
                toursql = 'OR SC.id in (SELECT DISTINCT sessionId FROM Tourneys T WHERE T.id in (%s))' % ', '.join(str(t) for t in tourneys)
                q = select_S.replace('<TOURSELECT>', toursql)
            else:
                q = select_S.replace('<TOURSELECT>', '')
            c.execute(q, (lower, upper))
            r = self.fetchallDict(c, ['id', 'sessionStart', 'sessionEnd', 'weekStart', 'monthStart', 'weekId', 'monthId'])
            num = len(r)
            if (num == 1):
                # One overlapping Sessions row: extend it as necessary.
                start, end = r[0]['sessionStart'], r[0]['sessionEnd']
                week, month = r[0]['weekStart'], r[0]['monthStart']
                wid, mid = r[0]['weekId'], r[0]['monthId']
                update, updateW, updateM = False, False, False
                if self.s['bk'][i]['sessionStart'] < start:
                    start, update = self.s['bk'][i]['sessionStart'], True
                    if self.s['bk'][i]['weekStart'] != week:
                        week, updateW = self.s['bk'][i]['weekStart'], True
                    if self.s['bk'][i]['monthStart'] != month:
                        month, updateM = self.s['bk'][i]['monthStart'], True
                    if (updateW or updateM):
                        # Remember the displaced week/month pair for rebuilds.
                        self.wmold.add((wid, mid))
                if self.s['bk'][i]['sessionEnd'] > end:
                    end, update = self.s['bk'][i]['sessionEnd'], True
                if updateW: wid = self.insertOrUpdate('weeks', c, (week,), select_W, insert_W)
                if updateM: mid = self.insertOrUpdate('months', c, (month,), select_M, insert_M)
                if (updateW or updateM):
                    self.wmnew.add((wid, mid))
                if update:
                    c.execute(update_S, [wid, mid, start, end, r[0]['id']])
                for h in self.s['bk'][i]['ids']:
                    self.s[h] = {'id': r[0]['id'], 'wid': wid, 'mid': mid}
            elif (num > 1):
                # Several overlapping rows: replace them with one merged row.
                start, end, wmold, merge = None, None, set(), []
                for n in r: merge.append(n['id'])
                merge.sort()
                r.append(self.s['bk'][i])
                for n in r:
                    if 'weekId' in n:
                        wmold.add((n['weekId'], n['monthId']))
                    if start:
                        if start > n['sessionStart']:
                            start = n['sessionStart']
                            week = n['weekStart']
                            month = n['monthStart']
                    else:
                        start = n['sessionStart']
                        week = n['weekStart']
                        month = n['monthStart']
                    if end:
                        if end < n['sessionEnd']:
                            end = n['sessionEnd']
                    else:
                        end = n['sessionEnd']
                wid = self.insertOrUpdate('weeks', c, (week,), select_W, insert_W)
                mid = self.insertOrUpdate('months', c, (month,), select_M, insert_M)
                wmold.discard((wid, mid))
                if len(wmold) > 0:
                    self.wmold = self.wmold.union(wmold)
                    self.wmnew.add((wid, mid))
                row = [wid, mid, start, end]
                c.execute(insert_S, row)
                sid = self.get_last_insert_id(c)
                for h in self.s['bk'][i]['ids']:
                    self.s[h] = {'id': sid, 'wid': wid, 'mid': mid}
                for m in merge:
                    # Re-point every cached hand and all dependent tables at
                    # the merged session, then drop the old row.
                    for h, n in list(self.s.items()):
                        if h != 'bk' and n['id'] == m:
                            self.s[h] = {'id': sid, 'wid': wid, 'mid': mid}
                    c.execute(update_S_TC, (sid, m))
                    c.execute(update_S_SC, (sid, m))
                    c.execute(update_S_T, (sid, m))
                    c.execute(update_S_H, (sid, m))
                    c.execute(delete_S, (m,))
            elif (num == 0):
                # No overlap in the DB: insert a brand-new Sessions row.
                start = self.s['bk'][i]['sessionStart']
                end = self.s['bk'][i]['sessionEnd']
                week = self.s['bk'][i]['weekStart']
                month = self.s['bk'][i]['monthStart']
                wid = self.insertOrUpdate('weeks', c, (week,), select_W, insert_W)
                mid = self.insertOrUpdate('months', c, (month,), select_M, insert_M)
                row = [wid, mid, start, end]
                c.execute(insert_S, row)
                sid = self.get_last_insert_id(c)
                for h in self.s['bk'][i]['ids']:
                    self.s[h] = {'id': sid, 'wid': wid, 'mid': mid}
        self.commit()
def storeSessionsCache(self, hid, pids, startTime, gametypeId, gametype, pdata, heroes, doinsert = False):
    """Update cached cash sessions. If no record exists, do an insert.

    Groups hands into per-(gametypeId, playerId) session fragments in
    self.sc using the sessionTimeout gap rule; when doinsert is set the
    fragments are reconciled with the SessionsCache table (extend one row,
    merge several rows, or insert a new one).
    """
    THRESHOLD = timedelta(seconds=int(self.sessionTimeout * 60))
    if pdata: #gametype['type']=='ring' and
        for p, pid in list(pids.items()):
            hp = {}
            k = (gametypeId, pid)
            hp['startTime'] = startTime.replace(tzinfo=None)
            hp['hid'] = hid
            hp['ids'] = []
            pdata[p]['n'] = 1
            # Booleans become 0/1 counters so lines can be summed.
            hp['line'] = [int(pdata[p][s]) if isinstance(pdata[p][s],bool) else pdata[p][s] for s in CACHE_KEYS]
            id = []
            sessionplayer = self.sc.get(k)
            if sessionplayer is not None:
                lower = hp['startTime']-THRESHOLD
                upper = hp['startTime']+THRESHOLD
                for i in range(len(sessionplayer)):
                    # Hand falls within (or extends) this fragment's window.
                    if lower <= sessionplayer[i]['endTime'] and upper >= sessionplayer[i]['startTime']:
                        if len(id)==0:
                            # Add the stat line only once even if the hand
                            # bridges two fragments.
                            for idx,val in enumerate(hp['line']):
                                sessionplayer[i]['line'][idx] += val
                        if ((hp['startTime'] <= sessionplayer[i]['endTime'])
                                and (hp['startTime'] >= sessionplayer[i]['startTime'])):
                            id.append(i)
                        elif hp['startTime'] < sessionplayer[i]['startTime']:
                            sessionplayer[i]['startTime'] = hp['startTime']
                            id.append(i)
                        elif hp['startTime'] > sessionplayer[i]['endTime']:
                            sessionplayer[i]['endTime'] = hp['startTime']
                            id.append(i)
            if len(id) == 1:
                i = id[0]
                # Only the hero's hand ids are tracked on a fragment.
                if pids[p]==heroes[0]:
                    self.sc[k][i]['ids'].append(hid)
            elif len(id) == 2:
                # Hand bridges two fragments: fold fragment j into i.
                i, j = id[0], id[1]
                if sessionplayer[i]['startTime'] < sessionplayer[j]['startTime']:
                    sessionplayer[i]['endTime'] = sessionplayer[j]['endTime']
                else: sessionplayer[i]['startTime'] = sessionplayer[j]['startTime']
                for idx,val in enumerate(sessionplayer[j]['line']):
                    sessionplayer[i]['line'][idx] += val
                g = sessionplayer.pop(j)
                if pids[p]==heroes[0]:
                    self.sc[k][i]['ids'].append(hid)
                    self.sc[k][i]['ids'] += g['ids']
            elif len(id) == 0:
                # No nearby fragment: start a new one for this player.
                if sessionplayer is None:
                    self.sc[k] = []
                hp['endTime'] = hp['startTime']
                if pids[p]==heroes[0]: hp['ids'].append(hid)
                self.sc[k].append(hp)

    if doinsert:
        select_SC = self.sql.query['select_SC'].replace('%s', self.sql.query['placeholder'])
        update_SC = self.sql.query['update_SC'].replace('%s', self.sql.query['placeholder'])
        insert_SC = self.sql.query['insert_SC'].replace('%s', self.sql.query['placeholder'])
        delete_SC = self.sql.query['delete_SC'].replace('%s', self.sql.query['placeholder'])
        c = self.get_cursor()
        for k, sessionplayer in list(self.sc.items()):
            for session in sessionplayer:
                hid = session['hid']
                # Session id for this hand was resolved by storeSessions.
                sid = self.s.get(hid)['id']
                lower = session['startTime'] - THRESHOLD
                upper = session['endTime'] + THRESHOLD
                row = [lower, upper] + list(k[:2])
                c.execute(select_SC, row)
                r = self.fetchallDict(c, ['id', 'sessionId', 'startTime', 'endTime'] + CACHE_KEYS)
                num = len(r)
                # d: normalized copies of the fetched rows (bools -> ints).
                d = [0]*num
                for z in range(num):
                    d[z] = {}
                    d[z]['line'] = [int(r[z][s]) if isinstance(r[z][s],bool) else r[z][s] for s in CACHE_KEYS]
                    d[z]['id'] = r[z]['id']
                    d[z]['sessionId'] = r[z]['sessionId']
                    d[z]['startTime'] = r[z]['startTime']
                    d[z]['endTime'] = r[z]['endTime']
                if (num == 1):
                    # Single overlapping row: extend its window, add our line.
                    start, end, id = r[0]['startTime'], r[0]['endTime'], r[0]['id']
                    if session['startTime'] < start:
                        start = session['startTime']
                    if session['endTime'] > end:
                        end = session['endTime']
                    row = [start, end] + session['line'] + [id]
                    c.execute(update_SC, row)
                elif (num > 1):
                    # Multiple overlapping rows: sum them (plus ours) into a
                    # single new row and delete the originals.
                    start, end, merge, line = None, None, [], [0]*len(CACHE_KEYS)
                    for n in r: merge.append(n['id'])
                    merge.sort()
                    r = d
                    r.append(session)
                    for n in r:
                        if start:
                            if start > n['startTime']:
                                start = n['startTime']
                        else: start = n['startTime']
                        if end:
                            if end < n['endTime']:
                                end = n['endTime']
                        else: end = n['endTime']
                        for idx in range(len(CACHE_KEYS)):
                            line[idx] += int(n['line'][idx]) if isinstance(n['line'][idx],bool) else n['line'][idx]
                    row = [sid, start, end] + list(k[:2]) + line
                    c.execute(insert_SC, row)
                    id = self.get_last_insert_id(c)
                    for m in merge:
                        c.execute(delete_SC, (m,))
                    self.commit()
                elif (num == 0):
                    # No overlap: plain insert.
                    start = session['startTime']
                    end = session['endTime']
                    row = [sid, start, end] + list(k[:2]) + session['line']
                    c.execute(insert_SC, row)
                    id = self.get_last_insert_id(c)
        self.commit()
def storeTourneysCache(self, hid, pids, startTime, tid, gametype, pdata, heroes, doinsert = False):
    """Update cached tour sessions. If no record exists, do an insert.

    Accumulates one stat line per (tourneyId, playerId) in self.tc; on
    doinsert each line is flushed to TourneysCache (update when a row
    exists, bulk insert otherwise).
    """
    if gametype['type']=='tour' and pdata:
        for p in pdata:
            k = (tid
                ,pids[p]
                )
            pdata[p]['n'] = 1
            # Booleans become 0/1 counters so lines can be summed.
            line = [int(pdata[p][s]) if isinstance(pdata[p][s], bool) else pdata[p][s] for s in CACHE_KEYS]
            tourplayer = self.tc.get(k)
            # Add line to the old line in the tourcache.
            if tourplayer is not None:
                for idx,val in enumerate(line):
                    tourplayer['line'][idx] += val
                if pids[p]==heroes[0]:
                    tourplayer['ids'].append(hid)
            else:
                self.tc[k] = {'startTime' : None,
                              'endTime' : None,
                              'hid' : hid,
                              'ids' : []}
                self.tc[k]['line'] = line
                if pids[p]==heroes[0]:
                    self.tc[k]['ids'].append(hid)

            # Track the earliest/latest hand seen for this tourney entry;
            # 'hid' keeps the hand that defines the entry's start.
            if not self.tc[k]['startTime'] or startTime < self.tc[k]['startTime']:
                self.tc[k]['startTime'] = startTime
                self.tc[k]['hid'] = hid
            if not self.tc[k]['endTime'] or startTime > self.tc[k]['endTime']:
                self.tc[k]['endTime'] = startTime

    if doinsert:
        update_TC = self.sql.query['update_TC'].replace('%s', self.sql.query['placeholder'])
        insert_TC = self.sql.query['insert_TC'].replace('%s', self.sql.query['placeholder'])
        select_TC = self.sql.query['select_TC'].replace('%s', self.sql.query['placeholder'])

        inserts = []
        c = self.get_cursor()
        for k, tc in list(self.tc.items()):
            # Session info resolved (by storeSessions) for the entry's
            # earliest hand.
            sc = self.s.get(tc['hid'])
            tc['startTime'] = tc['startTime'].replace(tzinfo=None)
            tc['endTime'] = tc['endTime'].replace(tzinfo=None)
            c.execute(select_TC, k)
            r = self.fetchallDict(c, ['id', 'startTime', 'endTime'])
            num = len(r)
            if (num == 1):
                update = not r[0]['startTime'] or not r[0]['endTime']
                # Pick which time columns to refresh via the <UPDATE>
                # placeholder baked into the update query.
                if (update or (tc['startTime']<r[0]['startTime'] and tc['endTime']>r[0]['endTime'])):
                    q = update_TC.replace('<UPDATE>', 'startTime=%s, endTime=%s,')
                    row = [tc['startTime'], tc['endTime']] + tc['line'] + list(k[:2])
                elif tc['startTime']<r[0]['startTime']:
                    q = update_TC.replace('<UPDATE>', 'startTime=%s, ')
                    row = [tc['startTime']] + tc['line'] + list(k[:2])
                elif tc['endTime']>r[0]['endTime']:
                    q = update_TC.replace('<UPDATE>', 'endTime=%s, ')
                    row = [tc['endTime']] + tc['line'] + list(k[:2])
                else:
                    q = update_TC.replace('<UPDATE>', '')
                    row = tc['line'] + list(k[:2])
                c.execute(q, row)
            elif (num == 0):
                row = [sc['id'], tc['startTime'], tc['endTime']] + list(k[:2]) + tc['line']
                #append to the bulk inserts
                inserts.append(row)

        if inserts:
            self.executemany(c, insert_TC, inserts)
        self.commit()
def storeCardsCache(self, hid, pids, startTime, gametypeId, tourneyTypeId, pdata, heroes, tz_name, doinsert):
    """Update cached cards statistics. If update fails because no record exists, do an insert.

    Accumulates one stat line per (hand, gametype, tourneyType, player,
    startCards) key in self.dcbulk; on doinsert each hand key is resolved to
    its week/month ids via self.s and the lines are flushed to CardsCache
    with update-then-insert.
    """
    for p in pdata:
        k = (hid
            ,gametypeId
            ,tourneyTypeId
            ,pids[p]
            ,pdata[p]['startCards']
            )
        pdata[p]['n'] = 1
        # Booleans become 0/1 counters so lines can be summed.
        line = [int(pdata[p][s]) if isinstance(pdata[p][s], bool) else pdata[p][s] for s in CACHE_KEYS]
        self.dcbulk[k] = line

    if doinsert:
        update_cardscache = self.sql.query['update_cardscache']
        update_cardscache = update_cardscache.replace('%s', self.sql.query['placeholder'])
        insert_cardscache = self.sql.query['insert_cardscache']
        insert_cardscache = insert_cardscache.replace('%s', self.sql.query['placeholder'])
        select_cardscache_ring = self.sql.query['select_cardscache_ring']
        select_cardscache_ring = select_cardscache_ring.replace('%s', self.sql.query['placeholder'])
        select_cardscache_tour = self.sql.query['select_cardscache_tour']
        select_cardscache_tour = select_cardscache_tour.replace('%s', self.sql.query['placeholder'])
        # NOTE: the original also prepared select_W/select_M/insert_W/insert_M
        # here but never used them; removed as dead code.

        dccache, inserts = {}, []
        for k, l in list(self.dcbulk.items()):
            sc = self.s.get(k[0])
            if sc != None:
                # During bulk import, skip lines for week/month pairs or
                # tourney types that are being rebuilt.
                garbageWeekMonths = (sc['wid'], sc['mid']) in self.wmnew or (sc['wid'], sc['mid']) in self.wmold
                garbageTourneyTypes = k[2] in self.ttnew or k[2] in self.ttold
                if self.import_options['hhBulkPath'] == "" or (not garbageWeekMonths and not garbageTourneyTypes):
                    # Re-key from hand id to (weekId, monthId, ...) and sum
                    # lines that collapse onto the same cache row.
                    n = (sc['wid'], sc['mid'], k[1], k[2], k[3], k[4])
                    startCards = dccache.get(n)
                    if startCards is not None:
                        for idx, val in enumerate(l):
                            dccache[n][idx] += val
                    else:
                        dccache[n] = l

        c = self.get_cursor()
        for k, item in list(dccache.items()):
            # k[3] is the tourneyTypeId: truthy means a tourney line.
            if k[3]:
                q = select_cardscache_tour
                row = list(k)
            else:
                q = select_cardscache_ring
                row = list(k[:3]) + list(k[-2:])
            c.execute(q, row)
            result = c.fetchone()
            if result:
                id = result[0]
                update = item + [id]
                c.execute(update_cardscache, update)
            else:
                insert = list(k) + item
                inserts.append(insert)

        if inserts:
            self.executemany(c, insert_cardscache, inserts)
        self.commit()
def storePositionsCache(self, hid, pids, startTime, gametypeId, tourneyTypeId, pdata, hdata, heroes, tz_name, doinsert):
    """Update cached position statistics. If update fails because no record exists, do an insert.

    Accumulates one stat line per (hand, gametype, tourneyType, player,
    seats, maxPosition, position) key in self.pcbulk; on doinsert each hand
    key is resolved to its week/month ids via self.s and the lines are
    flushed to PositionsCache with update-then-insert.
    """
    for p in pdata:
        position = str(pdata[p]['position'])
        k = (hid
            ,gametypeId
            ,tourneyTypeId
            ,pids[p]
            ,len(pids)
            ,hdata['maxPosition']
            ,position
            )
        pdata[p]['n'] = 1
        # Booleans become 0/1 counters so lines can be summed.
        line = [int(pdata[p][s]) if isinstance(pdata[p][s],bool) else pdata[p][s] for s in CACHE_KEYS]
        self.pcbulk[k] = line

    if doinsert:
        update_positionscache = self.sql.query['update_positionscache']
        update_positionscache = update_positionscache.replace('%s', self.sql.query['placeholder'])
        insert_positionscache = self.sql.query['insert_positionscache']
        insert_positionscache = insert_positionscache.replace('%s', self.sql.query['placeholder'])

        select_positionscache_ring = self.sql.query['select_positionscache_ring']
        select_positionscache_ring = select_positionscache_ring.replace('%s', self.sql.query['placeholder'])
        select_positionscache_tour = self.sql.query['select_positionscache_tour']
        select_positionscache_tour = select_positionscache_tour.replace('%s', self.sql.query['placeholder'])
        # NOTE: the original also prepared select_W/select_M/insert_W/insert_M
        # here but never used them; removed as dead code.

        pccache, inserts = {}, []
        for k, l in list(self.pcbulk.items()):
            sc = self.s.get(k[0])
            if sc != None:
                # During bulk import, skip lines for week/month pairs or
                # tourney types that are being rebuilt.
                garbageWeekMonths = (sc['wid'], sc['mid']) in self.wmnew or (sc['wid'], sc['mid']) in self.wmold
                garbageTourneyTypes = k[2] in self.ttnew or k[2] in self.ttold
                if self.import_options['hhBulkPath'] == "" or (not garbageWeekMonths and not garbageTourneyTypes):
                    # Re-key from hand id to (weekId, monthId, ...) and sum
                    # lines that collapse onto the same cache row.
                    n = (sc['wid'], sc['mid'], k[1], k[2], k[3], k[4], k[5], k[6])
                    positions = pccache.get(n)
                    if positions is not None:
                        for idx, val in enumerate(l):
                            pccache[n][idx] += val
                    else:
                        pccache[n] = l

        c = self.get_cursor()
        for k, item in list(pccache.items()):
            # k[3] is the tourneyTypeId: truthy means a tourney line.
            if k[3]:
                q = select_positionscache_tour
                row = list(k)
            else:
                q = select_positionscache_ring
                row = list(k[:3]) + list(k[-4:])

            c.execute(q, row)
            result = c.fetchone()
            if result:
                id = result[0]
                update = item + [id]
                c.execute(update_positionscache, update)
            else:
                insert = list(k) + item
                inserts.append(insert)

        if inserts:
            self.executemany(c, insert_positionscache, inserts)
        self.commit()
def appendHandsSessionIds(self):
    """Back-fill resolved session ids onto queued Hands rows.

    Also maps each tourney id seen on a queued hand to its session id in
    self.tbulk.
    """
    for idx, row in enumerate(self.hbulk):
        hand_id = self.hids[idx]
        tourney_id = row[2]
        session = self.s.get(hand_id)
        if session is None:
            continue
        row[4] = session['id']
        if tourney_id:
            self.tbulk[tourney_id] = session['id']
def get_id(self, file):
    """Return the id of the Files row for *file*, or 0 when none exists."""
    sql = self.sql.query['get_id'].replace('%s', self.sql.query['placeholder'])
    cursor = self.get_cursor()
    cursor.execute(sql, (file,))
    row = cursor.fetchone()
    return row[0] if row else 0
def storeFile(self, fdata):
    """Insert a Files row from fdata and return the new row's id."""
    sql = self.sql.query['store_file'].replace('%s', self.sql.query['placeholder'])
    cursor = self.get_cursor()
    cursor.execute(sql, fdata)
    return self.get_last_insert_id(cursor)
def updateFile(self, fdata):
    """Update an existing Files row with the values in fdata."""
    sql = self.sql.query['update_file'].replace('%s', self.sql.query['placeholder'])
    self.get_cursor().execute(sql, fdata)
def getHeroIds(self, pids, sitename):
    """Return the player ids (values of pids) that belong to the hero on sitename.

    Hero screen names come from the per-site config (HUD_Config.xml).
    Best-effort: returns an empty list when no hero matches or when the
    config lookup fails for any reason.
    """
    # Initialized before the try so the except path can never hit a
    # NameError (the original assigned it inside the try and used a bare
    # except, so a very early failure returned an unbound local).
    hero_ids = []
    try:
        for site in self.config.get_supported_sites():
            hero = self.config.supported_sites[site].screen_name
            for name, player_id in list(pids.items()):
                if name == hero and sitename == site:
                    hero_ids.append(player_id)
    except Exception:
        # Keep the original best-effort contract (swallow config errors),
        # but no longer catch SystemExit/KeyboardInterrupt.
        pass
    return hero_ids
def fetchallDict(self, cursor, desc):
    """Fetch every remaining row from cursor as a dict keyed by desc names."""
    rows = cursor.fetchall()
    if not rows:
        return []
    return [{name: row[col] for col, name in enumerate(desc)} for row in rows]
def nextHandId(self):
    """Reserve and return the next Hands.id value this importer should use."""
    cursor = self.get_cursor(True)
    cursor.execute("SELECT max(id) FROM Hands")
    current = cursor.fetchone()[0] or 0   # empty table yields NULL/None
    return current + self.hand_inc
def isDuplicate(self, siteId, siteHandNo, heroSeat, publicDB):
    """True when this hand was already imported (in-memory queue or DB)."""
    sql = self.sql.query['isAlreadyInDB'].replace('%s', self.sql.query['placeholder'])
    if publicDB:
        # Public DBs key hands per observing hero seat, so the same hand
        # number may legitimately exist once per hero.
        key = (siteHandNo, siteId, heroSeat)
        sql = sql.replace('<heroSeat>', ' AND heroSeat=%s').replace('%s', self.sql.query['placeholder'])
    else:
        key = (siteHandNo, siteId)
        sql = sql.replace('<heroSeat>', '')
    if key in self.siteHandNos:
        return True
    cursor = self.get_cursor()
    cursor.execute(sql, key)
    if cursor.fetchall():
        return True
    self.siteHandNos.append(key)
    return False
def getSqlPlayerIDs(self, pnames, siteid, hero):
    """Map each name in pnames to its Players.id, inserting unknown players.

    The cache is a LambdaDict: a miss triggers insertPlayer for that
    (name, siteid, is_hero) key and memoizes the resulting id.
    """
    if self.pcache == None:
        self.pcache = LambdaDict(lambda key: self.insertPlayer(key[0], key[1], key[2]))
    return {name: self.pcache[(name, siteid, name == hero)] for name in pnames}
def insertPlayer(self, name, site_id, hero):
    """Look up (or create) the Players row for name on site_id; return its id."""
    insert_sql = ("INSERT INTO Players (name, siteId, hero, chars) VALUES (%s, %s, %s, %s)"
                  .replace('%s', self.sql.query['placeholder']))
    trimmed = name[:32]
    # 'chars' is a two-character search bucket: names starting with a
    # non-letter map to '123', a single-letter prefix gets '1' appended.
    if re_char.match(trimmed[0]):
        bucket = '123'
    elif len(trimmed) == 1 or re_char.match(trimmed[1]):
        bucket = trimmed[0] + '1'
    else:
        bucket = trimmed[:2]
    key = (trimmed, site_id, hero, bucket.upper())
    select_sql = ("SELECT id, name, hero FROM Players WHERE name=%s and siteid=%s"
                  .replace('%s', self.sql.query['placeholder']))
    cursor = self.get_cursor()
    return self.insertOrUpdate('players', cursor, key, select_sql, insert_sql)
def insertOrUpdate(self, type, cursor, key, select, insert):
    """Return the id matching key via select, inserting key when absent.

    For type 'players' the select uses only (name, siteid), and an existing
    non-hero row is promoted to hero when key marks this player as hero.
    """
    lookup = key[:2] if type == 'players' else key
    cursor.execute(select, lookup)
    found = cursor.fetchone()
    if found is None:
        cursor.execute(insert, key)
        return self.get_last_insert_id(cursor)
    result = found[0]
    if type == 'players' and key[2] and not found[2]:
        q = "UPDATE Players SET hero=%s WHERE name=%s and siteid=%s"
        q = q.replace('%s', self.sql.query['placeholder'])
        cursor.execute(q, (key[2], key[0], key[1]))
    return result
def getSqlGameTypeId(self, siteid, game, printdata=False):
    """Map a parsed gametype description to its Gametypes.id, inserting on a cache miss."""
    if self.gtcache == None:
        # Keyed by the (lookup-tuple, insert-tuple) pair; a miss inserts.
        self.gtcache = LambdaDict(lambda key: self.insertGameTypes(key[0], key[1]))

    self.gtprintdata = printdata
    hilo = Card.games[game['category']][2]
    # Blinds/antes are stored in cents.
    sb = int(Decimal(game['sb']) * 100)
    bb = int(Decimal(game['bb']) * 100)
    ante = int(game['ante'] * 100)

    gtinfo = (siteid, game['type'], game['category'], game['limitType'], game['currency'],
              game['mix'], sb, bb, game['maxSeats'], ante, game['buyinType'],
              game['fast'], game['newToGame'], game['homeGame'], game['split'])

    gtinsert = (siteid, game['currency'], game['type'], game['base'], game['category'],
                game['limitType'], hilo, game['mix'], sb, bb,
                int(Decimal(game['bb'])*100), int(Decimal(game['bb'])*200),
                game['maxSeats'], ante, game['buyinType'], game['fast'],
                game['newToGame'], game['homeGame'], game['split'])

    return self.gtcache[(gtinfo, gtinsert)]
def insertGameTypes(self, gtinfo, gtinsert):
    """Return the Gametypes.id matching gtinfo, inserting gtinsert when absent."""
    cursor = self.get_cursor()
    select_sql = self.sql.query['getGametypeNL'].replace('%s', self.sql.query['placeholder'])
    cursor.execute(select_sql, gtinfo)
    existing = cursor.fetchone()
    if existing is not None:
        return existing[0]

    if self.gtprintdata:
        print ("######## Gametype ##########")
        import pprint
        pp = pprint.PrettyPrinter(indent=4)
        pp.pprint(gtinsert)
        print ("###### End Gametype ########")

    cursor.execute(self.sql.query['insertGameTypes'].replace('%s', self.sql.query['placeholder']), gtinsert)
    return self.get_last_insert_id(cursor)
def getTourneyInfo(self, siteName, tourneyNo):
    """Return (column_names, row) for one tournament on the given site."""
    cursor = self.get_cursor()
    sql = self.sql.query['getTourneyInfo'].replace('%s', self.sql.query['placeholder'])
    cursor.execute(sql, (siteName, tourneyNo))
    # DB-API description entries are 7-tuples; element 0 is the column name.
    names = [column[0] for column in cursor.description]
    return (names, cursor.fetchone())
#end def getTourneyInfo
def getTourneyTypesIds(self):
    """Return every row produced by the getTourneyTypesIds query."""
    cursor = self.connection.cursor()
    cursor.execute(self.sql.query['getTourneyTypesIds'])
    return cursor.fetchall()
#end def getTourneyTypesIds
def getSqlTourneyTypeIDs(self, hand):
    """Return the TourneyTypes.id for this hand's tournament.

    The per-tourNo cache that used to live here is disabled; we delegate
    straight to createOrUpdateTourneyType, which resolves or refreshes the
    row on every call.
    """
    return self.createOrUpdateTourneyType(hand)
def defaultTourneyTypeValue(self, value1, value2, field):
    """Return True when *value1* looks like a default/placeholder for *field*.

    Used by createOrUpdateTourneyType to decide which side (object or DB)
    holds the more specific value: if this returns True for one side and
    the other side is set, the other side wins.
    """
    # Any falsy value (None, 0, '', False) is treated as "unset".
    if not value1:
        return True
    # Field-specific default rules.
    if field == 'maxseats':
        # A larger seat count than the other side suggests a site default.
        return value1 > value2
    if field == 'limitType':
        # A mixed-game limit on the other side always takes precedence.
        return value2 == 'mx'
    if field == 'koBounty':
        # Any set bounty defers to the other side (value1 is truthy here).
        return True
    # Sentinel values the importers use when the real value is unknown.
    return (field, value1) in (('buyinCurrency', 'NA'),
                               ('stack', 'Regular'),
                               ('speed', 'Normal'))
def createOrUpdateTourneyType(self, obj):
    """Resolve (and reconcile) the TourneyTypes row for *obj*.

    *obj* is a hand/summary-like object carrying tourney attributes
    (tourNo, siteId, buyin, fee, gametype dict, etc.).  The method:
      1. finds the tourney type currently linked to (tourNo, siteId);
      2. reconciles it field-by-field with *obj* via
         defaultTourneyTypeValue() -- the side holding the non-default
         value wins;
      3. if *obj* supplied new information (or no row existed), selects
         or inserts the matching TourneyTypes row and, when re-pointing
         an existing tourney, records old/new ids in self.ttold/self.ttnew
         for later cleanUpTourneyTypes().
    Returns the TourneyTypes id.
    """
    ttid, _ttid, updateDb = None, None, False
    # Mirror the gametype's limitType onto the object so the generic
    # field loop below can treat it like any other attribute.
    setattr(obj, 'limitType', obj.gametype['limitType'])
    cursor = self.get_cursor()
    q = self.sql.query['getTourneyTypeIdByTourneyNo'].replace('%s', self.sql.query['placeholder'])
    cursor.execute(q, (obj.tourNo, obj.siteId))
    result=cursor.fetchone()

    if result != None:
        # Map of (object attribute name, lower-cased DB column name);
        # column names are lower-cased above so the pairs work across backends.
        columnNames=[desc[0].lower() for desc in cursor.description]
        expectedValues = (('buyin', 'buyin'), ('fee', 'fee'), ('buyinCurrency', 'currency'), ('limitType', 'limittype'), ('isSng', 'sng'), ('maxseats', 'maxseats')
                         , ('isKO', 'knockout'), ('koBounty', 'kobounty'), ('isProgressive', 'progressive'), ('isRebuy', 'rebuy'), ('rebuyCost', 'rebuycost')
                         , ('isAddOn', 'addon'), ('addOnCost','addoncost'), ('speed', 'speed'), ('isShootout', 'shootout')
                         , ('isMatrix', 'matrix'), ('isFast', 'fast'), ('stack', 'stack'), ('isStep', 'step'), ('stepNo', 'stepno')
                         , ('isChance', 'chance'), ('chanceCount', 'chancecount'), ('isMultiEntry', 'multientry'), ('isReEntry', 'reentry')
                         , ('isHomeGame', 'homegame'), ('isNewToGame', 'newtogame'), ('isSplit', 'split'), ('isFifty50', 'fifty50'), ('isTime', 'time')
                         , ('timeAmt', 'timeamt'), ('isSatellite', 'satellite'), ('isDoubleOrNothing', 'doubleornothing'), ('isCashOut', 'cashout')
                         , ('isOnDemand', 'ondemand'), ('isFlighted', 'flighted'), ('isGuarantee', 'guarantee'), ('guaranteeAmt', 'guaranteeamt'))
        resultDict = dict(list(zip(columnNames, result)))
        ttid = resultDict["id"]
        for ev in expectedValues:
            objField, dbField = ev
            objVal, dbVal = getattr(obj, objField), resultDict[dbField]
            if self.defaultTourneyTypeValue(objVal, dbVal, objField) and dbVal:#DB has this value but object doesnt, so update object
                setattr(obj, objField, dbVal)
            elif self.defaultTourneyTypeValue(dbVal, objVal, objField) and objVal:#object has this value but DB doesnt, so update DB
                updateDb=True
                oldttid = ttid
    if not result or updateDb:
        # Choose category/limitType: a mixed game on the object wins,
        # then an existing 'mx' row in the DB, then the object's gametype.
        if obj.gametype['mix']!='none':
            category, limitType = obj.gametype['mix'], 'mx'
        elif result != None and resultDict['limittype']=='mx':
            category, limitType = resultDict['category'], 'mx'
        else:
            category, limitType = obj.gametype['category'], obj.gametype['limitType']
        # Full attribute tuple identifying a tourney type; order must match
        # the getTourneyTypeId / insertTourneyType queries.
        row = (obj.siteId, obj.buyinCurrency, obj.buyin, obj.fee, category,
               limitType, obj.maxseats, obj.isSng, obj.isKO, obj.koBounty, obj.isProgressive,
               obj.isRebuy, obj.rebuyCost, obj.isAddOn, obj.addOnCost, obj.speed, obj.isShootout,
               obj.isMatrix, obj.isFast, obj.stack, obj.isStep, obj.stepNo, obj.isChance, obj.chanceCount,
               obj.isMultiEntry, obj.isReEntry, obj.isHomeGame, obj.isNewToGame, obj.isSplit, obj.isFifty50, obj.isTime,
               obj.timeAmt, obj.isSatellite, obj.isDoubleOrNothing, obj.isCashOut, obj.isOnDemand, obj.isFlighted,
               obj.isGuarantee, obj.guaranteeAmt)
        cursor.execute (self.sql.query['getTourneyTypeId'].replace('%s', self.sql.query['placeholder']), row)
        tmp=cursor.fetchone()
        try:
            ttid = tmp[0]
        except TypeError: #this means we need to create a new entry
            if self.printdata:
                print ("######## Tourneys ##########")
                import pprint
                pp = pprint.PrettyPrinter(indent=4)
                pp.pprint(row)
                print ("###### End Tourneys ########")
            cursor.execute (self.sql.query['insertTourneyType'].replace('%s', self.sql.query['placeholder']), row)
            ttid = self.get_last_insert_id(cursor)
        if updateDb:
            # Re-point the tourney at the corrected type and remember both
            # ids so cleanUpTourneyTypes() can fix the caches later.
            #print 'DEBUG createOrUpdateTourneyType:', 'old', oldttid, 'new', ttid, row
            q = self.sql.query['updateTourneyTypeId'].replace('%s', self.sql.query['placeholder'])
            cursor.execute(q, (ttid, obj.siteId, obj.tourNo))
            self.ttold.add(oldttid)
            self.ttnew.add(ttid)
    return ttid
def cleanUpTourneyTypes(self):
    """Repair cache tables after tourneys were re-pointed to new tourney types.

    Driven by the id sets maintained by createOrUpdateTourneyType():
    self.ttold (superseded type ids) and self.ttnew (replacement ids).
    For every affected id the per-type rows in the active cache tables
    are cleared; orphaned TourneyTypes rows (no tourney references them
    any more) are deleted; finally the caches are rebuilt for the ids
    the fetchNew* queries report.
    """
    if self.ttold:
        # Which cache tables are in use depends on configuration.
        if self.callHud and self.cacheSessions:
            tables = ('HudCache','CardsCache', 'PositionsCache')
        elif self.callHud:
            tables = ('HudCache',)
        elif self.cacheSessions:
            tables = ('CardsCache', 'PositionsCache')
        else:
            tables = set([])
        select = self.sql.query['selectTourneyWithTypeId'].replace('%s', self.sql.query['placeholder'])
        delete = self.sql.query['deleteTourneyTypeId'].replace('%s', self.sql.query['placeholder'])
        cursor = self.get_cursor()
        for ttid in self.ttold:
            for t in tables:
                # Query names are built per table, e.g. 'clearHudCacheTourneyType'.
                statement = 'clear%sTourneyType' % t
                clear = self.sql.query[statement].replace('%s', self.sql.query['placeholder'])
                cursor.execute(clear, (ttid,))
                self.commit()
            # Delete the old type only if no tourney still references it.
            cursor.execute(select, (ttid,))
            result=cursor.fetchone()
            if not result:
                cursor.execute(delete, (ttid,))
                self.commit()
        for ttid in self.ttnew:
            for t in tables:
                statement = 'clear%sTourneyType' % t
                clear = self.sql.query[statement].replace('%s', self.sql.query['placeholder'])
                cursor.execute(clear, (ttid,))
                self.commit()
        # Rebuild caches for every tourney type the fetch queries flag as new.
        for t in tables:
            statement = 'fetchNew%sTourneyTypeIds' % t
            fetch = self.sql.query[statement].replace('%s', self.sql.query['placeholder'])
            cursor.execute(fetch)
            for id in cursor.fetchall():
                self.rebuild_cache(None, None, t, id[0])
def cleanUpWeeksMonths(self):
    """Repair session caches after hands were re-assigned to other week/month rows.

    Mirrors cleanUpTourneyTypes() for the (weekId, monthId) pairs kept in
    self.wmold / self.wmnew: clears the affected CardsCache/PositionsCache
    rows, deletes Weeks/Months rows no session references any more, and
    rebuilds the caches for the pairs the fetchNew* queries report.
    Only runs when session caching is enabled and there is stale data.
    """
    if self.cacheSessions and self.wmold:
        selectWeekId = self.sql.query['selectSessionWithWeekId'].replace('%s', self.sql.query['placeholder'])
        selectMonthId = self.sql.query['selectSessionWithMonthId'].replace('%s', self.sql.query['placeholder'])
        deleteWeekId = self.sql.query['deleteWeekId'].replace('%s', self.sql.query['placeholder'])
        deleteMonthId = self.sql.query['deleteMonthId'].replace('%s', self.sql.query['placeholder'])
        cursor = self.get_cursor()
        weeks, months, wmids = set(), set(), set()
        # Clear cache rows for every superseded (week, month) pair and
        # collect the individual ids for the orphan checks below.
        for (wid, mid) in self.wmold:
            for t in ('CardsCache', 'PositionsCache'):
                statement = 'clear%sWeeksMonths' % t
                clear = self.sql.query[statement].replace('%s', self.sql.query['placeholder'])
                cursor.execute(clear, (wid, mid))
                self.commit()
            weeks.add(wid)
            months.add(mid)

        # Drop Weeks rows no session points at any more.
        for wid in weeks:
            cursor.execute(selectWeekId, (wid,))
            result=cursor.fetchone()
            if not result:
                cursor.execute(deleteWeekId, (wid,))
                self.commit()

        # Drop Months rows no session points at any more.
        for mid in months:
            cursor.execute(selectMonthId, (mid,))
            result=cursor.fetchone()
            if not result:
                cursor.execute(deleteMonthId, (mid,))
                self.commit()

        # Also clear cache rows for the replacement pairs before rebuilding.
        for (wid, mid) in self.wmnew:
            for t in ('CardsCache', 'PositionsCache'):
                statement = 'clear%sWeeksMonths' % t
                clear = self.sql.query[statement].replace('%s', self.sql.query['placeholder'])
                cursor.execute(clear, (wid, mid))
                self.commit()

        if self.wmold:
            # Collect every (week, month) pair flagged as new and rebuild
            # both session caches for each of them.
            for t in ('CardsCache', 'PositionsCache'):
                statement = 'fetchNew%sWeeksMonths' % t
                fetch = self.sql.query[statement].replace('%s', self.sql.query['placeholder'])
                cursor.execute(fetch)
                for (wid, mid) in cursor.fetchall():
                    wmids.add((wid, mid))
            for wmid in wmids:
                for t in ('CardsCache', 'PositionsCache'):
                    self.rebuild_cache(None, None, t, None, wmid)
        self.commit()
def rebuild_caches(self):
    """Rebuild every derived-stats cache table this configuration maintains.

    HudCache is maintained when the HUD is in use (self.callHud);
    CardsCache and PositionsCache when session caching is enabled
    (self.cacheSessions).
    """
    tables = []
    if self.callHud:
        tables.append('HudCache')
    if self.cacheSessions:
        tables.extend(['CardsCache', 'PositionsCache'])
    for table in tables:
        self.rebuild_cache(None, None, table)
def resetClean(self):
    """Forget all pending tourney-type and week/month cleanup bookkeeping."""
    self.ttold, self.ttnew = set(), set()
    self.wmold, self.wmnew = set(), set()
def cleanRequired(self):
    """Report whether stale tourney-type or week/month ids await cleanup."""
    return bool(self.ttold) or bool(self.wmold)
def getSqlTourneyIDs(self, hand):
    """Return the Tourneys id for *hand*, inserting a stub row if needed.

    For an existing tourney the stored start/end window is widened so it
    always covers this hand's (naive, tz-stripped) start time.
    """
    result = None
    c = self.get_cursor()
    q = self.sql.query['getTourneyByTourneyNo']
    q = q.replace('%s', self.sql.query['placeholder'])
    # Strip tzinfo so comparisons against the naive DB timestamps work.
    t = hand.startTime.replace(tzinfo=None)
    c.execute (q, (hand.siteId, hand.tourNo))

    tmp = c.fetchone()
    if (tmp == None):
        # Unknown tourney: insert a stub row with the hand's start time as
        # both start and end; summary import fills the other columns later.
        c.execute (self.sql.query['insertTourney'].replace('%s', self.sql.query['placeholder']),
                   (hand.tourneyTypeId, None, hand.tourNo, None, None,
                    t, t, hand.tourneyName, None, None, None, None, None, None))
        result = self.get_last_insert_id(c)
    else:
        result = tmp[0]
        columnNames = [desc[0] for desc in c.description]
        resultDict = dict(list(zip(columnNames, tmp)))
        # PostgreSQL folds unquoted column names to lower case, hence the
        # backend-dependent keys here.
        if self.backend == self.PGSQL:
            startTime, endTime = resultDict['starttime'], resultDict['endtime']
        else:
            startTime, endTime = resultDict['startTime'], resultDict['endTime']

        # Widen the tourney's time window to include this hand.
        if (startTime == None or t < startTime):
            q = self.sql.query['updateTourneyStart'].replace('%s', self.sql.query['placeholder'])
            c.execute(q, (t, result))
        elif (endTime == None or t > endTime):
            q = self.sql.query['updateTourneyEnd'].replace('%s', self.sql.query['placeholder'])
            c.execute(q, (t, result))
    return result
def createOrUpdateTourney(self, summary):
    """Insert or reconcile the Tourneys row for a TourneySummary object.

    If a row for (summary.siteId, summary.tourNo) exists, each tracked
    column is merged: a value present only in the DB is copied onto the
    summary; a value present only on the summary triggers an UPDATE of
    the whole row.  Otherwise a new row is inserted.  Returns the
    Tourneys id.
    """
    cursor = self.get_cursor()
    q = self.sql.query['getTourneyByTourneyNo'].replace('%s', self.sql.query['placeholder'])
    cursor.execute(q, (summary.siteId, summary.tourNo))

    columnNames=[desc[0] for desc in cursor.description]
    result=cursor.fetchone()

    if result != None:
        # (summary attribute, DB column) pairs; PostgreSQL folds unquoted
        # column names to lower case, hence the two variants.
        if self.backend == self.PGSQL:
            expectedValues = (('comment','comment'), ('tourneyName','tourneyname')
                             ,('totalRebuyCount','totalrebuycount'), ('totalAddOnCount','totaladdoncount')
                             ,('prizepool','prizepool'), ('startTime','starttime'), ('entries','entries')
                             ,('commentTs','commentts'), ('endTime','endtime'), ('added', 'added'), ('addedCurrency', 'addedcurrency'))
        else:
            expectedValues = (('comment','comment'), ('tourneyName','tourneyName')
                             ,('totalRebuyCount','totalRebuyCount'), ('totalAddOnCount','totalAddOnCount')
                             ,('prizepool','prizepool'), ('startTime','startTime'), ('entries','entries')
                             ,('commentTs','commentTs'), ('endTime','endTime'), ('added', 'added'), ('addedCurrency', 'addedCurrency'))
        updateDb=False
        resultDict = dict(list(zip(columnNames, result)))

        tourneyId = resultDict["id"]
        for ev in expectedValues :
            if getattr(summary, ev[0])==None and resultDict[ev[1]]!=None:#DB has this value but object doesnt, so update object
                setattr(summary, ev[0], resultDict[ev[1]])
            elif getattr(summary, ev[0])!=None and not resultDict[ev[1]]:#object has this value but DB doesnt, so update DB
                updateDb=True
            #elif ev=="startTime":
            #    if (resultDict[ev] < summary.startTime):
            #        summary.startTime=resultDict[ev]
        if updateDb:
            # Rewrite the whole row from the (now merged) summary values;
            # timestamps are stored naive, so tzinfo is stripped first.
            q = self.sql.query['updateTourney'].replace('%s', self.sql.query['placeholder'])
            startTime, endTime = None, None
            if (summary.startTime!=None): startTime = summary.startTime.replace(tzinfo=None)
            if (summary.endTime!=None): endTime = summary.endTime.replace(tzinfo=None)
            row = (summary.entries, summary.prizepool, startTime, endTime, summary.tourneyName,
                   summary.totalRebuyCount, summary.totalAddOnCount, summary.comment, summary.commentTs,
                   summary.added, summary.addedCurrency, tourneyId
                   )
            cursor.execute(q, row)
    else:
        # No row yet: insert one from the summary's values.
        startTime, endTime = None, None
        if (summary.startTime!=None): startTime = summary.startTime.replace(tzinfo=None)
        if (summary.endTime!=None): endTime = summary.endTime.replace(tzinfo=None)
        row = (summary.tourneyTypeId, None, summary.tourNo, summary.entries, summary.prizepool, startTime,
               endTime, summary.tourneyName, summary.totalRebuyCount, summary.totalAddOnCount,
               summary.comment, summary.commentTs, summary.added, summary.addedCurrency)
        if self.printdata:
            print ("######## Tourneys ##########")
            import pprint
            pp = pprint.PrettyPrinter(indent=4)
            pp.pprint(row)
            print ("###### End Tourneys ########")
        cursor.execute (self.sql.query['insertTourney'].replace('%s', self.sql.query['placeholder']), row)
        tourneyId = self.get_last_insert_id(cursor)
    return tourneyId
#end def createOrUpdateTourney
def getTourneyPlayerInfo(self, siteName, tourneyNo, playerName):
    """Fetch the tourney-player row for (siteName, tourneyNo, playerName).

    Returns a (column-name list, row tuple) pair; the row is None when no
    match exists.  NOTE: unlike most queries here, the stored SQL is
    executed without '%s' -> placeholder substitution, so it must already
    use the backend's parameter style.
    """
    cursor = self.get_cursor()
    cursor.execute(self.sql.query['getTourneyPlayerInfo'], (siteName, tourneyNo, playerName))
    names = [col[0] for col in cursor.description]
    return (names, cursor.fetchone())
#end def getTourneyPlayerInfo
def getSqlTourneysPlayersIDs(self, hand):
    """Map each player name in *hand* to its TourneysPlayers row id.

    Uses the lazily-created self.tpcache (a LambdaDict) so each
    (playerId, tourneyId, entryId) triple is inserted at most once.
    """
    # Build the memoising cache on first use; its factory inserts a missing
    # (playerId, tourneyId, entryId) row and remembers the resulting id.
    if self.tpcache == None:
        self.tpcache = LambdaDict(lambda key: self.insertTourneysPlayers(key[0], key[1], key[2]))
    ids = {}
    for seat in hand.players:
        name = seat[1]
        ids[name] = self.tpcache[(hand.dbid_pids[name], hand.tourneyId, hand.entryId)]
    return ids
def insertTourneysPlayers(self, playerId, tourneyId, entryId):
    """Return the TourneysPlayers id for (tourneyId, playerId, entryId).

    Inserts a stub row (rank/winnings/etc. all NULL) when none exists;
    the summary import fills those columns in later.
    """
    cursor = self.get_cursor()
    placeholder = self.sql.query['placeholder']
    lookup = self.sql.query['getTourneysPlayersByIds'].replace('%s', placeholder)
    cursor.execute(lookup, (tourneyId, playerId, entryId))
    row = cursor.fetchone()
    if row is not None:
        return row[0]
    # New player/entry for this tourney: create the stub row.
    insert = self.sql.query['insertTourneysPlayer'].replace('%s', placeholder)
    cursor.execute(insert, (tourneyId, playerId, entryId, None, None, None, None, None, None))
    return self.get_last_insert_id(cursor)
def updateTourneyPlayerBounties(self, hand):
    """Write knockout (bounty) counts from *hand* into TourneysPlayers.

    Only players present in hand.koCounts are touched; a commit is issued
    only when at least one row was updated.
    """
    cursor = self.get_cursor()
    query = self.sql.query['updateTourneysPlayerBounties'].replace('%s', self.sql.query['placeholder'])
    dirty = False
    for player, tpId in list(hand.tourneysPlayersIds.items()):
        if player not in hand.koCounts:
            continue
        count = hand.koCounts[player]
        # The stored UPDATE consumes the count twice (two SET expressions).
        cursor.execute(query, (count, count, tpId))
        dirty = True
    if dirty:
        self.commit()
def createOrUpdateTourneysPlayers(self, summary):
    """Insert or reconcile TourneysPlayers rows for every player/entry in *summary*.

    For each (player, entryId) already present in the DB the tracked
    columns are merged: a value only in the DB is copied onto the summary;
    a value only on the summary triggers an UPDATE.  Pairs not yet in the
    DB are collected and bulk-inserted at the end via executemany.
    """
    tourneysPlayersIds, tplayers, inserts = {}, [], []
    cursor = self.get_cursor()
    # Existing (playerId, entryId) pairs for this tourney.
    cursor.execute (self.sql.query['getTourneysPlayersByTourney'].replace('%s', self.sql.query['placeholder']),
                    (summary.tourneyId,))
    result=cursor.fetchall()
    if result: tplayers += [i for i in result]
    # summary.players maps player name -> list of entry ids (re-entries).
    for player, entries in list(summary.players.items()):
        playerId = summary.dbid_pids[player]
        for entryIdx in range(len(entries)):
            entryId = entries[entryIdx]
            if (playerId,entryId) in tplayers:
                cursor.execute (self.sql.query['getTourneysPlayersByIds'].replace('%s', self.sql.query['placeholder']),
                                (summary.tourneyId, playerId, entryId))
                columnNames=[desc[0] for desc in cursor.description]
                result=cursor.fetchone()
                # (summary attribute stem, DB column) pairs; PostgreSQL folds
                # unquoted column names to lower case, hence the two variants.
                if self.backend == self.PGSQL:
                    expectedValues = (('rank','rank'), ('winnings', 'winnings')
                                     ,('winningsCurrency','winningscurrency'), ('rebuyCount','rebuycount')
                                     ,('addOnCount','addoncount'), ('koCount','kocount'))
                else:
                    expectedValues = (('rank','rank'), ('winnings', 'winnings')
                                     ,('winningsCurrency','winningsCurrency'), ('rebuyCount','rebuyCount')
                                     ,('addOnCount','addOnCount'), ('koCount','koCount'))
                updateDb=False
                resultDict = dict(list(zip(columnNames, result)))
                tourneysPlayersIds[(player,entryId)]=result[0]
                for ev in expectedValues :
                    # Most summary attributes are pluralised dicts
                    # (e.g. 'rank' -> summary.ranks); winnings fields are not.
                    summaryAttribute=ev[0]
                    if ev[0]!="winnings" and ev[0]!="winningsCurrency":
                        summaryAttribute+="s"
                    summaryDict = getattr(summary, summaryAttribute)
                    if summaryDict[player][entryIdx]==None and resultDict[ev[1]]!=None:#DB has this value but object doesnt, so update object
                        summaryDict[player][entryIdx] = resultDict[ev[1]]
                        setattr(summary, summaryAttribute, summaryDict)
                    elif summaryDict[player][entryIdx]!=None and not resultDict[ev[1]]:#object has this value but DB doesnt, so update DB
                        updateDb=True
                if updateDb:
                    # Rewrite the row from the (now merged) summary values.
                    q = self.sql.query['updateTourneysPlayer'].replace('%s', self.sql.query['placeholder'])
                    inputs = (summary.ranks[player][entryIdx],
                              summary.winnings[player][entryIdx],
                              summary.winningsCurrency[player][entryIdx],
                              summary.rebuyCounts[player][entryIdx],
                              summary.addOnCounts[player][entryIdx],
                              summary.koCounts[player][entryIdx],
                              tourneysPlayersIds[(player,entryId)]
                              )
                    #print q
                    #pp = pprint.PrettyPrinter(indent=4)
                    #pp.pprint(inputs)
                    cursor.execute(q, inputs)
            else:
                # New player/entry: queue for a single bulk insert below.
                inserts.append((
                    summary.tourneyId,
                    playerId,
                    entryId,
                    summary.ranks[player][entryIdx],
                    summary.winnings[player][entryIdx],
                    summary.winningsCurrency[player][entryIdx],
                    summary.rebuyCounts[player][entryIdx],
                    summary.addOnCounts[player][entryIdx],
                    summary.koCounts[player][entryIdx]
                ))
    if inserts:
        self.executemany(cursor, self.sql.query['insertTourneysPlayer'].replace('%s', self.sql.query['placeholder']), inserts)
3723#end class Database
if __name__=="__main__":
    # Ad-hoc smoke test: connect to the configured database, rebuild the
    # indexes, then exercise a handful of read queries and time
    # get_stats_from_hand().
    c = Configuration.Config()
    sql = SQL.Sql(db_server = 'sqlite')

    db_connection = Database(c) # mysql fpdb holdem
#    db_connection = Database(c, 'fpdb-p', 'test') # mysql fpdb holdem
#    db_connection = Database(c, 'PTrackSv2', 'razz') # mysql razz
#    db_connection = Database(c, 'ptracks', 'razz') # postgres
    print("database connection object = ", db_connection.connection)
    # db_connection.recreate_tables()
    db_connection.dropAllIndexes()
    db_connection.createAllIndexes()

    h = db_connection.get_last_hand()
    print("last hand = ", h)

    hero = db_connection.get_player_id(c, 'PokerStars', 'nutOmatic')
    if hero:
        print("nutOmatic player_id", hero)

    # example of displaying query plan in sqlite:
    if db_connection.backend == 4:          # 4 == sqlite backend code
        print()
        c = db_connection.get_cursor()
        c.execute('explain query plan '+sql.query['get_table_name'], (h, ))
        for row in c.fetchall():
            print("Query plan:", row)
        print()

    t0 = time()
    stat_dict = db_connection.get_stats_from_hand(h, "ring")
    t1 = time()
    for p in list(stat_dict.keys()):
        print(p, " ", stat_dict[p])

    print(("cards ="), db_connection.get_cards(u'1'))
    # BUG FIX: this was `db_connection.close_connection` -- a bare attribute
    # access that never invoked the method, so the connection was never
    # closed.  Call it properly.
    db_connection.close_connection()

    print(("get_stats took: %4.3f seconds") % (t1-t0))

    print(("Press ENTER to continue."))
    sys.stdin.readline()
3768#Code borrowed from http://push.cx/2008/caching-dictionaries-in-python-vs-ruby
class LambdaDict(dict):
    """Dict that computes missing values on demand with a per-key factory.

    Adapted from http://push.cx/2008/caching-dictionaries-in-python-vs-ruby.
    Unlike collections.defaultdict, the factory callable receives the
    missing key, so the value can depend on it.
    """

    def __init__(self, l):
        super(LambdaDict, self).__init__()
        # Factory callable invoked once per missing key.
        self.l = l

    def __getitem__(self, key):
        # Populate the entry from the factory on first access; every later
        # lookup is served straight from the underlying dict.
        if key not in self:
            self[key] = self.l(key)
        return self.get(key)