Coverage for Database.py: 21%

2292 statements  

coverage.py v7.6.7, created at 2024-11-18 00:10 +0000

1#!/usr/bin/env python 

2# -*- coding: utf-8 -*- 

3# # Copyright 2008-2011, Ray E. Barker 

4# 

5# This program is free software; you can redistribute it and/or modify 

6# it under the terms of the GNU General Public License as published by 

7# the Free Software Foundation; either version 2 of the License, or 

8# (at your option) any later version. 

9# 

10# This program is distributed in the hope that it will be useful, 

11# but WITHOUT ANY WARRANTY; without even the implied warranty of 

12# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 

13# GNU General Public License for more details. 

14# 

15# You should have received a copy of the GNU General Public License 

16# along with this program; if not, write to the Free Software 

17# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA 

18"""Database.py 

19 

20Create and manage the database objects. 

21""" 

22 

23from __future__ import print_function 

24from __future__ import division 

25 

26from past.utils import old_div 

27 

28# #import L10n 

29# #_ = L10n.get_translation() 

30 

31######################################################################## 

32 

33# TODO: - rebuild indexes / vacuum option 

34# - check speed of get_stats_from_hand() - add log info 

35# - check size of db, seems big? (mysql) 

36######### investigate size of mysql db (200K for just 7K hands? 2GB for 140K hands?) 

37 

38# postmaster -D /var/lib/pgsql/data 

39 

40# Standard Library modules 

41import os 

42import sys 

43import traceback 

44from datetime import datetime, timedelta 

45from time import time, strftime, sleep 

46from decimal import Decimal 

47import string 

48import re 

49import math 

50import pytz 

51import csv 

52import logging 

53import random 

54import SQL 

55import Card 

56 

57# # import Charset 

58from Exceptions import ( 

59 FpdbError, 

60 FpdbMySQLAccessDenied, 

61 FpdbMySQLNoDatabase, 

62 FpdbPostgresqlNoDatabase, 

63 FpdbPostgresqlAccessDenied, 

64) 

65import Configuration 

66 

67re_char = re.compile("[^a-zA-Z]") 

68re_insert = re.compile(r"insert\sinto\s(?P<TABLENAME>[A-Za-z]+)\s(?P<COLUMNS>\(.+?\))\s+values", re.DOTALL) 

69 
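# A minimal usage sketch (the statement below is made up for illustration) of the two
# regexes above: re_insert pulls the table name and column list out of an
# "insert into ... values" statement, and re_char strips anything that is not a letter.
# Wrapped in a helper (its name is illustrative only) so nothing runs at import time.
def _demo_insert_regexes():
    sample = "insert into HandsPlayers (handId, playerId, startCash) values (%s, %s, %s)"
    m = re_insert.match(sample)
    table = m.group("TABLENAME")             # "HandsPlayers"
    columns = m.group("COLUMNS")             # "(handId, playerId, startCash)"
    letters_only = re_char.sub("", columns)  # "handIdplayerIdstartCash"
    return table, columns, letters_only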

70# FreePokerTools modules (SQL, Card, Exceptions, Configuration) are imported above

71 

72 

73if __name__ == "__main__": 

74 Configuration.set_logfile("fpdb-log.txt") 

75# logging has been set up in fpdb.py or HUD_main.py, use their settings: 

76log = logging.getLogger("db") 

77 

78# Other library modules 

79try: 

80 import sqlalchemy.pool as pool 

81 

82 use_pool = True 

83except ImportError: 

84 log.info(("Not using sqlalchemy connection pool.")) 

85 use_pool = False 

86 

87try: 

88 from numpy import var 

89 

90 use_numpy = True 

91except ImportError: 

92 log.info(("Not using numpy to define variance in sqlite.")) 

93 use_numpy = False 

94 

95 

96DB_VERSION = 216 

97 

98# Variance created as sqlite has a bunch of undefined aggregate functions. 

99 

100 

101class VARIANCE(object): 

102 def __init__(self): 

103 self.store = [] 

104 

105 def step(self, value): 

106 self.store.append(value) 

107 

108 def finalize(self): 

109 return float(var(self.store)) 

110 

111 

112class sqlitemath(object): 

113 def mod(self, a, b): 

114 return a % b 

115 

116 

117def adapt_decimal(d): 

118 return str(d) 

119 

120 

121def convert_decimal(s): 

122 log.debug("convertvalue") 

123 log.debug(s) 

124 s = s.decode() 

125 return Decimal(s) 

126 

127 

128# These are for appendStats. Insert new stats at the right place, because 

129# SQL needs strict order. 

130# Keys used to index into player data in storeHandsPlayers. 

131HANDS_PLAYERS_KEYS = [ 

132 "startCash", 

133 "effStack", 

134 "startBounty", 

135 "endBounty", 

136 "seatNo", 

137 "sitout", 

138 "card1", 

139 "card2", 

140 "card3", 

141 "card4", 

142 "card5", 

143 "card6", 

144 "card7", 

145 "card8", 

146 "card9", 

147 "card10", 

148 "card11", 

149 "card12", 

150 "card13", 

151 "card14", 

152 "card15", 

153 "card16", 

154 "card17", 

155 "card18", 

156 "card19", 

157 "card20", 

158 "common", 

159 "committed", 

160 "winnings", 

161 "rake", 

162 "rakeDealt", 

163 "rakeContributed", 

164 "rakeWeighted", 

165 "totalProfit", 

166 "allInEV", 

167 "street0VPIChance", 

168 "street0VPI", 

169 "street1Seen", 

170 "street2Seen", 

171 "street3Seen", 

172 "street4Seen", 

173 "sawShowdown", 

174 "showed", 

175 "street0AllIn", 

176 "street1AllIn", 

177 "street2AllIn", 

178 "street3AllIn", 

179 "street4AllIn", 

180 "wentAllIn", 

181 "street0AggrChance", 

182 "street0Aggr", 

183 "street1Aggr", 

184 "street2Aggr", 

185 "street3Aggr", 

186 "street4Aggr", 

187 "street1CBChance", 

188 "street2CBChance", 

189 "street3CBChance", 

190 "street4CBChance", 

191 "street1CBDone", 

192 "street2CBDone", 

193 "street3CBDone", 

194 "street4CBDone", 

195 "wonWhenSeenStreet1", 

196 "wonWhenSeenStreet2", 

197 "wonWhenSeenStreet3", 

198 "wonWhenSeenStreet4", 

199 "wonAtSD", 

200 "position", 

201 "street0InPosition", 

202 "street1InPosition", 

203 "street2InPosition", 

204 "street3InPosition", 

205 "street4InPosition", 

206 "street0FirstToAct", 

207 "street1FirstToAct", 

208 "street2FirstToAct", 

209 "street3FirstToAct", 

210 "street4FirstToAct", 

211 "tourneysPlayersId", 

212 "startCards", 

213 "street0CalledRaiseChance", 

214 "street0CalledRaiseDone", 

215 "street0_2BChance", 

216 "street0_2BDone", 

217 "street0_3BChance", 

218 "street0_3BDone", 

219 "street0_4BChance", 

220 "street0_4BDone", 

221 "street0_C4BChance", 

222 "street0_C4BDone", 

223 "street0_FoldTo2BChance", 

224 "street0_FoldTo2BDone", 

225 "street0_FoldTo3BChance", 

226 "street0_FoldTo3BDone", 

227 "street0_FoldTo4BChance", 

228 "street0_FoldTo4BDone", 

229 "street0_SqueezeChance", 

230 "street0_SqueezeDone", 

231 "raiseToStealChance", 

232 "raiseToStealDone", 

233 "stealChance", 

234 "stealDone", 

235 "success_Steal", 

236 "otherRaisedStreet0", 

237 "otherRaisedStreet1", 

238 "otherRaisedStreet2", 

239 "otherRaisedStreet3", 

240 "otherRaisedStreet4", 

241 "foldToOtherRaisedStreet0", 

242 "foldToOtherRaisedStreet1", 

243 "foldToOtherRaisedStreet2", 

244 "foldToOtherRaisedStreet3", 

245 "foldToOtherRaisedStreet4", 

246 "raiseFirstInChance", 

247 "raisedFirstIn", 

248 "foldBbToStealChance", 

249 "foldedBbToSteal", 

250 "foldSbToStealChance", 

251 "foldedSbToSteal", 

252 "foldToStreet1CBChance", 

253 "foldToStreet1CBDone", 

254 "foldToStreet2CBChance", 

255 "foldToStreet2CBDone", 

256 "foldToStreet3CBChance", 

257 "foldToStreet3CBDone", 

258 "foldToStreet4CBChance", 

259 "foldToStreet4CBDone", 

260 "street1CheckCallRaiseChance", 

261 "street1CheckCallDone", 

262 "street1CheckRaiseDone", 

263 "street2CheckCallRaiseChance", 

264 "street2CheckCallDone", 

265 "street2CheckRaiseDone", 

266 "street3CheckCallRaiseChance", 

267 "street3CheckCallDone", 

268 "street3CheckRaiseDone", 

269 "street4CheckCallRaiseChance", 

270 "street4CheckCallDone", 

271 "street4CheckRaiseDone", 

272 "street0Calls", 

273 "street1Calls", 

274 "street2Calls", 

275 "street3Calls", 

276 "street4Calls", 

277 "street0Bets", 

278 "street1Bets", 

279 "street2Bets", 

280 "street3Bets", 

281 "street4Bets", 

282 "street0Raises", 

283 "street1Raises", 

284 "street2Raises", 

285 "street3Raises", 

286 "street4Raises", 

287 "street1Discards", 

288 "street2Discards", 

289 "street3Discards", 

290 "handString", 

291] 

292 

293# Just like STATS_KEYS, this lets us efficiently add data at the 

294# "beginning" later. 

295HANDS_PLAYERS_KEYS.reverse() 

296 
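# Illustrative sketch of what the reverse() above buys: appending a key to the reversed
# list is the cheap way of putting it at the front of the logical (SQL) column order.
# The key name below is hypothetical, and the helper works on a copy so the module-level
# list is left untouched.
def _demo_prepend_stat_key(new_key="someNewStat"):
    keys = list(HANDS_PLAYERS_KEYS)       # copy of the reversed list
    keys.append(new_key)                  # O(1) append ...
    logical_order = list(reversed(keys))  # ... acts as a prepend in SQL order
    return logical_order[0] == new_key    # True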

297 

298CACHE_KEYS = [ 

299 "n", 

300 "street0VPIChance", 

301 "street0VPI", 

302 "street0AggrChance", 

303 "street0Aggr", 

304 "street0CalledRaiseChance", 

305 "street0CalledRaiseDone", 

306 "street0_2BChance", 

307 "street0_2BDone", 

308 "street0_3BChance", 

309 "street0_3BDone", 

310 "street0_4BChance", 

311 "street0_4BDone", 

312 "street0_C4BChance", 

313 "street0_C4BDone", 

314 "street0_FoldTo2BChance", 

315 "street0_FoldTo2BDone", 

316 "street0_FoldTo3BChance", 

317 "street0_FoldTo3BDone", 

318 "street0_FoldTo4BChance", 

319 "street0_FoldTo4BDone", 

320 "street0_SqueezeChance", 

321 "street0_SqueezeDone", 

322 "raiseToStealChance", 

323 "raiseToStealDone", 

324 "stealChance", 

325 "stealDone", 

326 "success_Steal", 

327 "street1Seen", 

328 "street2Seen", 

329 "street3Seen", 

330 "street4Seen", 

331 "sawShowdown", 

332 "street1Aggr", 

333 "street2Aggr", 

334 "street3Aggr", 

335 "street4Aggr", 

336 "otherRaisedStreet0", 

337 "otherRaisedStreet1", 

338 "otherRaisedStreet2", 

339 "otherRaisedStreet3", 

340 "otherRaisedStreet4", 

341 "foldToOtherRaisedStreet0", 

342 "foldToOtherRaisedStreet1", 

343 "foldToOtherRaisedStreet2", 

344 "foldToOtherRaisedStreet3", 

345 "foldToOtherRaisedStreet4", 

346 "wonWhenSeenStreet1", 

347 "wonWhenSeenStreet2", 

348 "wonWhenSeenStreet3", 

349 "wonWhenSeenStreet4", 

350 "wonAtSD", 

351 "raiseFirstInChance", 

352 "raisedFirstIn", 

353 "foldBbToStealChance", 

354 "foldedBbToSteal", 

355 "foldSbToStealChance", 

356 "foldedSbToSteal", 

357 "street1CBChance", 

358 "street1CBDone", 

359 "street2CBChance", 

360 "street2CBDone", 

361 "street3CBChance", 

362 "street3CBDone", 

363 "street4CBChance", 

364 "street4CBDone", 

365 "foldToStreet1CBChance", 

366 "foldToStreet1CBDone", 

367 "foldToStreet2CBChance", 

368 "foldToStreet2CBDone", 

369 "foldToStreet3CBChance", 

370 "foldToStreet3CBDone", 

371 "foldToStreet4CBChance", 

372 "foldToStreet4CBDone", 

373 "common", 

374 "committed", 

375 "winnings", 

376 "rake", 

377 "rakeDealt", 

378 "rakeContributed", 

379 "rakeWeighted", 

380 "totalProfit", 

381 "allInEV", 

382 "showdownWinnings", 

383 "nonShowdownWinnings", 

384 "street1CheckCallRaiseChance", 

385 "street1CheckCallDone", 

386 "street1CheckRaiseDone", 

387 "street2CheckCallRaiseChance", 

388 "street2CheckCallDone", 

389 "street2CheckRaiseDone", 

390 "street3CheckCallRaiseChance", 

391 "street3CheckCallDone", 

392 "street3CheckRaiseDone", 

393 "street4CheckCallRaiseChance", 

394 "street4CheckCallDone", 

395 "street4CheckRaiseDone", 

396 "street0Calls", 

397 "street1Calls", 

398 "street2Calls", 

399 "street3Calls", 

400 "street4Calls", 

401 "street0Bets", 

402 "street1Bets", 

403 "street2Bets", 

404 "street3Bets", 

405 "street4Bets", 

406 "street0Raises", 

407 "street1Raises", 

408 "street2Raises", 

409 "street3Raises", 

410 "street4Raises", 

411 "street1Discards", 

412 "street2Discards", 

413 "street3Discards", 

414] 

415 

416 

417class Database(object): 

418 MYSQL_INNODB = 2 

419 PGSQL = 3 

420 SQLITE = 4 

421 

422 hero_hudstart_def = "1999-12-31" # default for length of Hero's stats in HUD 

423 villain_hudstart_def = "1999-12-31" # default for length of Villain's stats in HUD 

424 

425 # Data Structures for index and foreign key creation 

426 # drop_code is an int with possible values: 0 - don't drop for bulk import 

427 # 1 - drop during bulk import 

428 # db differences: 

429 # - note that mysql automatically creates indexes on constrained columns when 

430 # foreign keys are created, while postgres does not. Hence the much longer list 

431 # of indexes is required for postgres. 

432 # all primary keys are left on all the time 

433 # 

434 # table column drop_code 

435 

436 indexes = [ 

437 [], # no db with index 0 

438 [], # no db with index 1 

439 [ # indexes for mysql (list index 2) (foreign keys not here, in next data structure) 

440 # {'tab':'Players', 'col':'name', 'drop':0} unique indexes not dropped 

441 # {'tab':'Hands', 'col':'siteHandNo', 'drop':0} unique indexes not dropped 

442 # , {'tab':'Tourneys', 'col':'siteTourneyNo', 'drop':0} unique indexes not dropped 

443 ], 

444 [ # indexes for postgres (list index 3) 

445 {"tab": "Gametypes", "col": "siteId", "drop": 0}, 

446 {"tab": "Hands", "col": "tourneyId", "drop": 0}, # mct 22/3/09 

447 {"tab": "Hands", "col": "gametypeId", "drop": 0}, # mct 22/3/09 

448 {"tab": "Hands", "col": "sessionId", "drop": 0}, # mct 22/3/09 

449 {"tab": "Hands", "col": "fileId", "drop": 0}, # mct 22/3/09 

450 # , {'tab':'Hands', 'col':'siteHandNo', 'drop':0} unique indexes not dropped 

451 {"tab": "HandsActions", "col": "handId", "drop": 1}, 

452 {"tab": "HandsActions", "col": "playerId", "drop": 1}, 

453 {"tab": "HandsActions", "col": "actionId", "drop": 1}, 

454 {"tab": "HandsStove", "col": "handId", "drop": 1}, 

455 {"tab": "HandsStove", "col": "playerId", "drop": 1}, 

456 {"tab": "HandsStove", "col": "hiLo", "drop": 1}, 

457 {"tab": "HandsPots", "col": "handId", "drop": 1}, 

458 {"tab": "HandsPots", "col": "playerId", "drop": 1}, 

459 {"tab": "Boards", "col": "handId", "drop": 1}, 

460 {"tab": "HandsPlayers", "col": "handId", "drop": 1}, 

461 {"tab": "HandsPlayers", "col": "playerId", "drop": 1}, 

462 {"tab": "HandsPlayers", "col": "tourneysPlayersId", "drop": 0}, 

463 {"tab": "HandsPlayers", "col": "startCards", "drop": 1}, 

464 {"tab": "HudCache", "col": "gametypeId", "drop": 1}, 

465 {"tab": "HudCache", "col": "playerId", "drop": 0}, 

466 {"tab": "HudCache", "col": "tourneyTypeId", "drop": 0}, 

467 {"tab": "Sessions", "col": "weekId", "drop": 1}, 

468 {"tab": "Sessions", "col": "monthId", "drop": 1}, 

469 {"tab": "SessionsCache", "col": "sessionId", "drop": 1}, 

470 {"tab": "SessionsCache", "col": "gametypeId", "drop": 1}, 

471 {"tab": "SessionsCache", "col": "playerId", "drop": 0}, 

472 {"tab": "TourneysCache", "col": "sessionId", "drop": 1}, 

473 {"tab": "TourneysCache", "col": "tourneyId", "drop": 1}, 

474 {"tab": "TourneysCache", "col": "playerId", "drop": 0}, 

475 {"tab": "Players", "col": "siteId", "drop": 1}, 

476 # , {'tab':'Players', 'col':'name', 'drop':0} unique indexes not dropped 

477 {"tab": "Tourneys", "col": "tourneyTypeId", "drop": 1}, 

478 {"tab": "Tourneys", "col": "sessionId", "drop": 1}, 

479 # , {'tab':'Tourneys', 'col':'siteTourneyNo', 'drop':0} unique indexes not dropped 

480 {"tab": "TourneysPlayers", "col": "playerId", "drop": 0}, 

481 # , {'tab':'TourneysPlayers', 'col':'tourneyId', 'drop':0} unique indexes not dropped 

482 {"tab": "TourneyTypes", "col": "siteId", "drop": 0}, 

483 {"tab": "Backings", "col": "tourneysPlayersId", "drop": 0}, 

484 {"tab": "Backings", "col": "playerId", "drop": 0}, 

485 {"tab": "RawHands", "col": "id", "drop": 0}, 

486 {"tab": "RawTourneys", "col": "id", "drop": 0}, 

487 ], 

488 [ # indexes for sqlite (list index 4) 

489 {"tab": "Hands", "col": "tourneyId", "drop": 0}, 

490 {"tab": "Hands", "col": "gametypeId", "drop": 0}, 

491 {"tab": "Hands", "col": "sessionId", "drop": 0}, 

492 {"tab": "Hands", "col": "fileId", "drop": 0}, 

493 {"tab": "Boards", "col": "handId", "drop": 0}, 

494 {"tab": "Gametypes", "col": "siteId", "drop": 0}, 

495 {"tab": "HandsPlayers", "col": "handId", "drop": 0}, 

496 {"tab": "HandsPlayers", "col": "playerId", "drop": 0}, 

497 {"tab": "HandsPlayers", "col": "tourneysPlayersId", "drop": 0}, 

498 {"tab": "HandsActions", "col": "handId", "drop": 0}, 

499 {"tab": "HandsActions", "col": "playerId", "drop": 0}, 

500 {"tab": "HandsActions", "col": "actionId", "drop": 1}, 

501 {"tab": "HandsStove", "col": "handId", "drop": 0}, 

502 {"tab": "HandsStove", "col": "playerId", "drop": 0}, 

503 {"tab": "HandsPots", "col": "handId", "drop": 0}, 

504 {"tab": "HandsPots", "col": "playerId", "drop": 0}, 

505 {"tab": "HudCache", "col": "gametypeId", "drop": 1}, 

506 {"tab": "HudCache", "col": "playerId", "drop": 0}, 

507 {"tab": "HudCache", "col": "tourneyTypeId", "drop": 0}, 

508 {"tab": "Sessions", "col": "weekId", "drop": 1}, 

509 {"tab": "Sessions", "col": "monthId", "drop": 1}, 

510 {"tab": "SessionsCache", "col": "sessionId", "drop": 1}, 

511 {"tab": "SessionsCache", "col": "gametypeId", "drop": 1}, 

512 {"tab": "SessionsCache", "col": "playerId", "drop": 0}, 

513 {"tab": "TourneysCache", "col": "sessionId", "drop": 1}, 

514 {"tab": "TourneysCache", "col": "tourneyId", "drop": 1}, 

515 {"tab": "TourneysCache", "col": "playerId", "drop": 0}, 

516 {"tab": "Players", "col": "siteId", "drop": 1}, 

517 {"tab": "Tourneys", "col": "tourneyTypeId", "drop": 1}, 

518 {"tab": "Tourneys", "col": "sessionId", "drop": 1}, 

519 {"tab": "TourneysPlayers", "col": "playerId", "drop": 0}, 

520 {"tab": "TourneyTypes", "col": "siteId", "drop": 0}, 

521 {"tab": "Backings", "col": "tourneysPlayersId", "drop": 0}, 

522 {"tab": "Backings", "col": "playerId", "drop": 0}, 

523 {"tab": "RawHands", "col": "id", "drop": 0}, 

524 {"tab": "RawTourneys", "col": "id", "drop": 0}, 

525 ], 

526 ] 

527 

528 foreignKeys = [ 

529 [], # no db with index 0 

530 [], # no db with index 1 

531 [ # foreign keys for mysql (index 2) 

532 {"fktab": "Hands", "fkcol": "tourneyId", "rtab": "Tourneys", "rcol": "id", "drop": 1}, 

533 {"fktab": "Hands", "fkcol": "gametypeId", "rtab": "Gametypes", "rcol": "id", "drop": 1}, 

534 {"fktab": "Hands", "fkcol": "sessionId", "rtab": "Sessions", "rcol": "id", "drop": 1}, 

535 {"fktab": "Hands", "fkcol": "fileId", "rtab": "Files", "rcol": "id", "drop": 1}, 

536 {"fktab": "Boards", "fkcol": "handId", "rtab": "Hands", "rcol": "id", "drop": 1}, 

537 {"fktab": "HandsPlayers", "fkcol": "handId", "rtab": "Hands", "rcol": "id", "drop": 1}, 

538 {"fktab": "HandsPlayers", "fkcol": "playerId", "rtab": "Players", "rcol": "id", "drop": 1}, 

539 {"fktab": "HandsPlayers", "fkcol": "tourneysPlayersId", "rtab": "TourneysPlayers", "rcol": "id", "drop": 1}, 

540 {"fktab": "HandsPlayers", "fkcol": "startCards", "rtab": "StartCards", "rcol": "id", "drop": 1}, 

541 {"fktab": "HandsActions", "fkcol": "handId", "rtab": "Hands", "rcol": "id", "drop": 1}, 

542 {"fktab": "HandsActions", "fkcol": "playerId", "rtab": "Players", "rcol": "id", "drop": 1}, 

543 {"fktab": "HandsActions", "fkcol": "actionId", "rtab": "Actions", "rcol": "id", "drop": 1}, 

544 {"fktab": "HandsStove", "fkcol": "handId", "rtab": "Hands", "rcol": "id", "drop": 1}, 

545 {"fktab": "HandsStove", "fkcol": "playerId", "rtab": "Players", "rcol": "id", "drop": 1}, 

546 {"fktab": "HandsStove", "fkcol": "rankId", "rtab": "Rank", "rcol": "id", "drop": 1}, 

547 {"fktab": "HandsPots", "fkcol": "handId", "rtab": "Hands", "rcol": "id", "drop": 1}, 

548 {"fktab": "HandsPots", "fkcol": "playerId", "rtab": "Players", "rcol": "id", "drop": 1}, 

549 {"fktab": "HudCache", "fkcol": "gametypeId", "rtab": "Gametypes", "rcol": "id", "drop": 1}, 

550 {"fktab": "HudCache", "fkcol": "playerId", "rtab": "Players", "rcol": "id", "drop": 0}, 

551 {"fktab": "HudCache", "fkcol": "tourneyTypeId", "rtab": "TourneyTypes", "rcol": "id", "drop": 1}, 

552 {"fktab": "Sessions", "fkcol": "weekId", "rtab": "Weeks", "rcol": "id", "drop": 1}, 

553 {"fktab": "Sessions", "fkcol": "monthId", "rtab": "Months", "rcol": "id", "drop": 1}, 

554 {"fktab": "SessionsCache", "fkcol": "sessionId", "rtab": "Sessions", "rcol": "id", "drop": 1}, 

555 {"fktab": "SessionsCache", "fkcol": "gametypeId", "rtab": "Gametypes", "rcol": "id", "drop": 1}, 

556 {"fktab": "SessionsCache", "fkcol": "playerId", "rtab": "Players", "rcol": "id", "drop": 0}, 

557 {"fktab": "TourneysCache", "fkcol": "sessionId", "rtab": "Sessions", "rcol": "id", "drop": 1}, 

558 {"fktab": "TourneysCache", "fkcol": "tourneyId", "rtab": "Tourneys", "rcol": "id", "drop": 1}, 

559 {"fktab": "TourneysCache", "fkcol": "playerId", "rtab": "Players", "rcol": "id", "drop": 0}, 

560 {"fktab": "Tourneys", "fkcol": "tourneyTypeId", "rtab": "TourneyTypes", "rcol": "id", "drop": 1}, 

561 {"fktab": "Tourneys", "fkcol": "sessionId", "rtab": "Sessions", "rcol": "id", "drop": 1}, 

562 ], 

563 [ # foreign keys for postgres (index 3) 

564 {"fktab": "Hands", "fkcol": "tourneyId", "rtab": "Tourneys", "rcol": "id", "drop": 1}, 

565 {"fktab": "Hands", "fkcol": "gametypeId", "rtab": "Gametypes", "rcol": "id", "drop": 1}, 

566 {"fktab": "Hands", "fkcol": "sessionId", "rtab": "Sessions", "rcol": "id", "drop": 1}, 

567 {"fktab": "Hands", "fkcol": "fileId", "rtab": "Files", "rcol": "id", "drop": 1}, 

568 {"fktab": "Boards", "fkcol": "handId", "rtab": "Hands", "rcol": "id", "drop": 1}, 

569 {"fktab": "HandsPlayers", "fkcol": "handId", "rtab": "Hands", "rcol": "id", "drop": 1}, 

570 {"fktab": "HandsPlayers", "fkcol": "playerId", "rtab": "Players", "rcol": "id", "drop": 1}, 

571 {"fktab": "HandsPlayers", "fkcol": "tourneysPlayersId", "rtab": "TourneysPlayers", "rcol": "id", "drop": 1}, 

572 {"fktab": "HandsPlayers", "fkcol": "startCards", "rtab": "StartCards", "rcol": "id", "drop": 1}, 

573 {"fktab": "HandsActions", "fkcol": "handId", "rtab": "Hands", "rcol": "id", "drop": 1}, 

574 {"fktab": "HandsActions", "fkcol": "playerId", "rtab": "Players", "rcol": "id", "drop": 1}, 

575 {"fktab": "HandsActions", "fkcol": "actionId", "rtab": "Actions", "rcol": "id", "drop": 1}, 

576 {"fktab": "HandsStove", "fkcol": "handId", "rtab": "Hands", "rcol": "id", "drop": 1}, 

577 {"fktab": "HandsStove", "fkcol": "playerId", "rtab": "Players", "rcol": "id", "drop": 1}, 

578 {"fktab": "HandsStove", "fkcol": "rankId", "rtab": "Rank", "rcol": "id", "drop": 1}, 

579 {"fktab": "HandsPots", "fkcol": "handId", "rtab": "Hands", "rcol": "id", "drop": 1}, 

580 {"fktab": "HandsPots", "fkcol": "playerId", "rtab": "Players", "rcol": "id", "drop": 1}, 

581 {"fktab": "HudCache", "fkcol": "gametypeId", "rtab": "Gametypes", "rcol": "id", "drop": 1}, 

582 {"fktab": "HudCache", "fkcol": "playerId", "rtab": "Players", "rcol": "id", "drop": 0}, 

583 {"fktab": "HudCache", "fkcol": "tourneyTypeId", "rtab": "TourneyTypes", "rcol": "id", "drop": 1}, 

584 {"fktab": "Sessions", "fkcol": "weekId", "rtab": "Weeks", "rcol": "id", "drop": 1}, 

585 {"fktab": "Sessions", "fkcol": "monthId", "rtab": "Months", "rcol": "id", "drop": 1}, 

586 {"fktab": "SessionsCache", "fkcol": "sessionId", "rtab": "Sessions", "rcol": "id", "drop": 1}, 

587 {"fktab": "SessionsCache", "fkcol": "gametypeId", "rtab": "Gametypes", "rcol": "id", "drop": 1}, 

588 {"fktab": "SessionsCache", "fkcol": "playerId", "rtab": "Players", "rcol": "id", "drop": 0}, 

589 {"fktab": "TourneysCache", "fkcol": "sessionId", "rtab": "Sessions", "rcol": "id", "drop": 1}, 

590 {"fktab": "TourneysCache", "fkcol": "tourneyId", "rtab": "Tourneys", "rcol": "id", "drop": 1}, 

591 {"fktab": "TourneysCache", "fkcol": "playerId", "rtab": "Players", "rcol": "id", "drop": 0}, 

592 {"fktab": "Tourneys", "fkcol": "tourneyTypeId", "rtab": "TourneyTypes", "rcol": "id", "drop": 1}, 

593 {"fktab": "Tourneys", "fkcol": "sessionId", "rtab": "Sessions", "rcol": "id", "drop": 1}, 

594 ], 

595 [ # no foreign keys in sqlite (index 4) 

596 ], 

597 ] 

598 

599 # MySQL Notes: 

600 # "FOREIGN KEY (handId) REFERENCES Hands(id)" - requires index on Hands.id 

601 # - creates index handId on <thistable>.handId 

602 # alter table t drop foreign key fk 

603 # alter table t add foreign key (fkcol) references tab(rcol) 

604 # alter table t add constraint c foreign key (fkcol) references tab(rcol) 

605 # (fkcol is used for the foreign key name) 

606 

607 # mysql to list indexes: (SHOW INDEX FROM tbl_name is a simpler alternative) 

608 # SELECT table_name, index_name, non_unique, column_name 

609 # FROM INFORMATION_SCHEMA.STATISTICS 

610 # WHERE table_name = 'tbl_name' 

611 # AND table_schema = 'db_name' 

612 # ORDER BY table_name, index_name, seq_in_index 

613 # 

614 # ALTER TABLE Tourneys ADD INDEX siteTourneyNo(siteTourneyNo) 

615 # ALTER TABLE tab DROP INDEX idx 

616 

617 # mysql to list fks: 

618 # SELECT constraint_name, table_name, column_name, referenced_table_name, referenced_column_name 

619 # FROM information_schema.KEY_COLUMN_USAGE 

620 # WHERE REFERENCED_TABLE_SCHEMA = (your schema name here) 

621 # AND REFERENCED_TABLE_NAME is not null 

622 # ORDER BY TABLE_NAME, COLUMN_NAME; 

623 

624 # this may indicate missing object 

625 # _mysql_exceptions.OperationalError: (1025, "Error on rename of '.\\fpdb\\hands' to '.\\fpdb\\#sql2-7f0-1b' (errno: 152)") 

626 

627 # PG notes: 

628 

629 # To add a foreign key constraint to a table: 

630 # ALTER TABLE tab ADD CONSTRAINT c FOREIGN KEY (col) REFERENCES t2(col2) MATCH FULL; 

631 # ALTER TABLE tab DROP CONSTRAINT zipchk 

632 # 

633 # Note: index names must be unique across a schema 

634 # CREATE INDEX idx ON tab(col) 

635 # DROP INDEX idx 

636 # SELECT * FROM PG_INDEXES 

637 

638 # SQLite notes: 

639 

640 # To add an index: 

641 # create index indexname on tablename (col); 

642 

643 def __init__(self, c, sql=None, autoconnect=True): 

644 self.config = c 

645 self.__connected = False 

646 self.settings = {} 

647 self.settings["os"] = "linuxmac" if os.name != "nt" else "windows" 

648 db_params = c.get_db_parameters() 

649 self.import_options = c.get_import_parameters() 

650 self.backend = db_params["db-backend"] 

651 self.db_server = db_params["db-server"] 

652 self.database = db_params["db-databaseName"] 

653 self.host = db_params["db-host"] 

654 self.db_path = "" 

655 gen = c.get_general_params() 

656 self.day_start = 0 

657 self._hero = None 

658 self._has_lock = False 

659 self.printdata = False 

660 self.resetCache() 

661 self.resetBulkCache() 

662 

663 if "day_start" in gen: 

664 self.day_start = float(gen["day_start"]) 

665 

666 self.sessionTimeout = float(self.import_options["sessionTimeout"]) 

667 self.publicDB = self.import_options["publicDB"] 

668 

669 # where possible avoid creating new SQL instance by using the global one passed in 

670 if sql is None: 

671 self.sql = SQL.Sql(db_server=self.db_server) 

672 else: 

673 self.sql = sql 

674 

675 if autoconnect: 

676 # connect to db 

677 self.do_connect(c) 

678 

679 if self.backend == self.PGSQL: 

680 pass 

681 # ISOLATION_LEVEL_AUTOCOMMIT = 0 

682 # ISOLATION_LEVEL_READ_COMMITTED = 1 

683 # ISOLATION_LEVEL_SERIALIZABLE = 2 

684 

685 if ( 

686 self.backend == self.SQLITE 

687 and self.database == ":memory:" 

688 and self.wrongDbVersion 

689 and self.is_connected() 

690 ): 

691 log.info("sqlite/:memory: - creating") 

692 self.recreate_tables() 

693 self.wrongDbVersion = False 

694 

695 self.gtcache = None # GameTypeId cache 

696 self.tcache = None # TourneyId cache 

697 self.pcache = None # PlayerId cache 

698 self.tpcache = None # TourneysPlayersId cache 

699 

700 # if fastStoreHudCache is true then the hudcache will be built using the limited configuration, which ignores date, seats, and position 

701 self.build_full_hudcache = not self.import_options["fastStoreHudCache"] 

702 self.cacheSessions = self.import_options["cacheSessions"] 

703 self.callHud = self.import_options["callFpdbHud"] 

704 

705 # self.hud_hero_style = 'T' # Duplicate set of vars just for hero - not used yet. 

706 # self.hud_hero_hands = 2000 # Idea is that you might want all-time stats for others 

707 # self.hud_hero_days = 30 # but last T days or last H hands for yourself 

708 

709 # vars for hand ids or dates fetched according to above config: 

710 self.hand_1day_ago = 0 # max hand id more than 24 hrs earlier than now 

711 self.date_ndays_ago = "d000000" # date N days ago ('d' + YYMMDD) 

712 self.h_date_ndays_ago = "d000000" # date N days ago ('d' + YYMMDD) for hero 

713 self.date_nhands_ago = {} # dates N hands ago per player - not used yet 

714 

715 self.saveActions = self.import_options["saveActions"] is not False 

716 

717 if self.is_connected(): 

718 if not self.wrongDbVersion: 

719 self.get_sites() 

720 self.connection.rollback() # make sure any locks taken so far are released 

721 

722 # end def __init__ 

723 

724 def dumpDatabase(self): 

725 result = "fpdb database dump\nDB version=" + str(DB_VERSION) + "\n\n" 

726 

727 # tables = self.cursor.execute(self.sql.query["list_tables"]) 

728 # tables = self.cursor.fetchall() 

729 for table in ( 

730 "Actions", 

731 "Autorates", 

732 "Backings", 

733 "Gametypes", 

734 "Hands", 

735 "Boards", 

736 "HandsActions", 

737 "HandsPlayers", 

738 "HandsStove", 

739 "Files", 

740 "HudCache", 

741 "Sessions", 

742 "SessionsCache", 

743 "TourneysCache", 

744 "Players", 

745 "RawHands", 

746 "RawTourneys", 

747 "Settings", 

748 "Sites", 

749 "TourneyTypes", 

750 "Tourneys", 

751 "TourneysPlayers", 

752 ): 

753 log.debug("table: %s", table) 

754 result += "###################\nTable " + table + "\n###################\n" 

755 rows = self.cursor.execute(self.sql.query["get" + table]) 

756 rows = self.cursor.fetchall() 

757 columnNames = self.cursor.description 

758 if not rows: 

759 result += "empty table\n" 

760 else: 

761 for row in rows: 

762 for columnNumber in range(len(columnNames)): 

763 if columnNames[columnNumber][0] == "importTime": 

764 result += " " + columnNames[columnNumber][0] + "=ignore\n" 

765 elif columnNames[columnNumber][0] == "styleKey": 

766 result += " " + columnNames[columnNumber][0] + "=ignore\n" 

767 else: 

768 result += " " + columnNames[columnNumber][0] + "=" + str(row[columnNumber]) + "\n" 

769 result += "\n" 

770 result += "\n" 

771 return result 

772 

773 # end def dumpDatabase 

774 

775 # could be used by hud to change hud style 

776 def set_hud_style(self, style): 

777 self.hud_style = style 

778 

779 def do_connect(self, c): 

780 if c is None: 

781 raise FpdbError("Configuration not defined") 

782 

783 db = c.get_db_parameters() 

784 try: 

785 self.connect( 

786 backend=db["db-backend"], 

787 host=db["db-host"], 

788 port=db["db-port"], 

789 database=db["db-databaseName"], 

790 user=db["db-user"], 

791 password=db["db-password"], 

792 ) 

793 except: 

794 # error during connect 

795 self.__connected = False 

796 raise 

797 

798 db_params = c.get_db_parameters() 

799 self.import_options = c.get_import_parameters() 

800 self.backend = db_params["db-backend"] 

801 self.db_server = db_params["db-server"] 

802 self.database = db_params["db-databaseName"] 

803 self.host = db_params["db-host"] 

804 self.port = db_params["db-port"] 

805 

806 def connect(self, backend=None, host=None, port=None, database=None, user=None, password=None, create=False): 

807 """Connects a database with the given parameters""" 

808 if backend is None: 

809 raise FpdbError("Database backend not defined") 

810 self.backend = backend 

811 self.host = host 

812 self.port = port 

813 self.user = user 

814 self.password = password 

815 self.database = database 

816 self.connection = None 

817 self.cursor = None 

818 self.hand_inc = 1 

819 

820 if backend == Database.MYSQL_INNODB: 

821 # NOTE: the mysql connector package does not work on Python 3.9, so MySQLdb is used instead 

822 import MySQLdb 

823 

824 if use_pool: 

825 MySQLdb = pool.manage(MySQLdb, pool_size=5) 

826 try: 

827 self.connection = MySQLdb.connect( 

828 host=host, user=user, passwd=password, db=database, charset="utf8", use_unicode=True 

829 ) 

830 self.__connected = True 

831 # TODO: Add port option 

832 except MySQLdb.Error as ex: 

833 if ex.args[0] == 1045: 

834 raise FpdbMySQLAccessDenied(ex.args[0], ex.args[1]) 

835 elif ex.args[0] == 2002 or ex.args[0] == 2003: # 2002 is no unix socket, 2003 is no tcp socket 

836 raise FpdbMySQLNoDatabase(ex.args[0], ex.args[1]) 

837 else: 

838 log.debug("*** WARNING UNKNOWN MYSQL ERROR: %s", ex) 

839 c = self.get_cursor() 

840 c.execute("show variables like 'auto_increment_increment'") 

841 self.hand_inc = int(c.fetchone()[1]) 

842 elif backend == Database.PGSQL: 

843 import psycopg2 

844 import psycopg2.extensions 

845 

846 if use_pool: 

847 psycopg2 = pool.manage(psycopg2, pool_size=5) 

848 psycopg2.extensions.register_type(psycopg2.extensions.UNICODE) 

849 psycopg2.extensions.register_adapter(Decimal, psycopg2._psycopg.Decimal) 

850 

851 self.__connected = False 

852 if self.host == "localhost" or self.host == "127.0.0.1": 

853 try: 

854 self.connection = psycopg2.connect(database=database) 

855 # Force UTF-8 encoding 

856 self.connection.set_client_encoding("UTF8") 

857 self.__connected = True 

858 except psycopg2.OperationalError: 

859 # direct connection failed so try user/pass/... version 

860 pass 

861 

862 if not self.is_connected(): 

863 try: 

864 self.connection = psycopg2.connect( 

865 host=host, port=port, user=user, password=password, database=database 

866 ) 

867 # Force UTF-8 encoding 

868 self.connection.set_client_encoding("UTF8") 

869 self.__connected = True 

870 except Exception as ex: 

871 if "Connection refused" in ex.args[0] or ( 

872 'database "' in ex.args[0] and '" does not exist' in ex.args[0] 

873 ): 

874 raise FpdbPostgresqlNoDatabase(errmsg=ex.args[0]) 

875 elif "password authentication" in ex.args[0]: 

876 raise FpdbPostgresqlAccessDenied(errmsg=ex.args[0]) 

877 elif 'role "' in ex.args[0] and '" does not exist' in ex.args[0]: # role "fpdb" does not exist 

878 raise FpdbPostgresqlAccessDenied(errmsg=ex.args[0]) 

879 else: 

880 msg = ex.args[0] 

881 log.error(msg) 

882 raise FpdbError(msg) 

883 

884 elif backend == Database.SQLITE: 

885 create = True 

886 import sqlite3 

887 

888 if use_pool: 

889 sqlite3 = pool.manage(sqlite3, pool_size=1) 

890 # else: 

891 # log.warning("SQLite won't work well without 'sqlalchemy' installed.") 

892 

893 if database != ":memory:": 

894 if not os.path.isdir(self.config.dir_database) and create: 

895 log.info(("Creating directory: '%s'") % (self.config.dir_database)) 

896 os.makedirs(self.config.dir_database) 

897 database = os.path.join(self.config.dir_database, database).replace("\\", "/") 

898 self.db_path = database 

899 log.info(("Connecting to SQLite: %s") % self.db_path) 

900 if os.path.exists(database) or create: 

901 self.connection = sqlite3.connect( 

902 self.db_path, detect_types=sqlite3.PARSE_DECLTYPES | sqlite3.PARSE_COLNAMES 

903 ) 

904 self.__connected = True 

905 sqlite3.register_converter("bool", lambda x: bool(int(x))) 

906 sqlite3.register_adapter(bool, lambda x: 1 if x else 0) 

907 sqlite3.register_converter("decimal", convert_decimal) 

908 sqlite3.register_adapter(Decimal, adapt_decimal) 

909 self.connection.create_function("floor", 1, math.floor) 

910 self.connection.create_function("sqrt", 1, math.sqrt) 

911 tmp = sqlitemath() 

912 self.connection.create_function("mod", 2, tmp.mod) 

913 if use_numpy: 

914 self.connection.create_aggregate("variance", 1, VARIANCE) 

915 else: 

916 log.warning(("Some database functions will not work without NumPy support")) 

917 self.cursor = self.connection.cursor() 

918 self.cursor.execute("PRAGMA temp_store=2") # use memory for temp tables/indexes 

919 self.cursor.execute("PRAGMA journal_mode=WAL") # use memory for temp tables/indexes 

920 self.cursor.execute("PRAGMA synchronous=0") # don't wait for file writes to finish 

921 else: 

922 raise FpdbError("sqlite database " + database + " does not exist") 

923 else: 

924 raise FpdbError("unrecognised database backend:" + str(backend)) 

925 

926 if self.is_connected(): 

927 self.cursor = self.connection.cursor() 

928 self.cursor.execute(self.sql.query["set tx level"]) 

929 self.check_version(database=database, create=create) 

930 

931 def get_sites(self): 

932 self.cursor.execute("SELECT name,id FROM Sites") 

933 sites = self.cursor.fetchall() 

934 self.config.set_site_ids(sites) 

935 

936 def check_version(self, database, create): 

937 self.wrongDbVersion = False 

938 try: 

939 self.cursor.execute("SELECT * FROM Settings") 

940 settings = self.cursor.fetchone() 

941 if settings[0] != DB_VERSION: 

942 log.error( 

943 (("Outdated or too new database version (%s).") % (settings[0])) + " " + ("Please recreate tables.") 

944 ) 

945 self.wrongDbVersion = True 

946 except Exception: # _mysql_exceptions.ProgrammingError: 

947 if database != ":memory:": 

948 if create: 

949 # print (("Failed to read settings table.") + " - " + ("Recreating tables.")) 

950 log.info(("Failed to read settings table.") + " - " + ("Recreating tables.")) 

951 self.recreate_tables() 

952 self.check_version(database=database, create=False) 

953 else: 

954 # print (("Failed to read settings table.") + " - " + ("Please recreate tables.")) 

955 log.info(("Failed to read settings table.") + " - " + ("Please recreate tables.")) 

956 self.wrongDbVersion = True 

957 else: 

958 self.wrongDbVersion = True 

959 

960 # end def connect 

961 

962 def commit(self): 

963 if self.backend != self.SQLITE: 

964 self.connection.commit() 

965 else: 

966 # sqlite commits can fail because of shared locks on the database (SQLITE_BUSY) 

967 # re-try commit if it fails in case this happened 

968 maxtimes = 5 

969 pause = 1 

970 ok = False 

971 for i in range(maxtimes): 

972 try: 

973 self.connection.commit() 

974 # log.debug(("commit finished ok, i = ")+str(i)) 

975 ok = True 

976 except Exception as e: 

977 log.debug(("commit %s failed: info=%s value=%s") % (str(i), str(sys.exc_info()), str(e))) 

978 sleep(pause) 

979 if ok: 

980 break 

981 if not ok: 

982 log.debug("commit failed") 

983 raise FpdbError("sqlite commit failed") 

984 

985 def rollback(self): 

986 self.connection.rollback() 

987 

988 def connected(self): 

989 """now deprecated, use is_connected() instead""" 

990 return self.__connected 

991 

992 def is_connected(self): 

993 return self.__connected 

994 

995 def get_cursor(self, connect=False): 

996 if self.backend == Database.MYSQL_INNODB and os.name == "nt": 

997 self.connection.ping(True) 

998 return self.connection.cursor() 

999 

1000 def close_connection(self): 

1001 self.connection.close() 

1002 self.__connected = False 

1003 

1004 def disconnect(self, due_to_error=False): 

1005 """Disconnects the DB (rolls back if param is true, otherwise commits""" 

1006 if due_to_error: 

1007 self.connection.rollback() 

1008 else: 

1009 self.connection.commit() 

1010 self.cursor.close() 

1011 self.connection.close() 

1012 self.__connected = False 

1013 

1014 def reconnect(self, due_to_error=False): 

1015 """Reconnects the DB""" 

1016 # print "started reconnect" 

1017 self.disconnect(due_to_error) 

1018 self.connect(self.backend, self.host, self.port, self.database, self.user, self.password) 

1019 

1020 def get_backend_name(self): 

1021 """Returns the name of the currently used backend""" 

1022 if self.backend == self.MYSQL_INNODB: 

1023 return "MySQL InnoDB" 

1024 elif self.backend == self.PGSQL: 

1025 return "PostgreSQL" 

1026 elif self.backend == self.SQLITE: 

1027 return "SQLite" 

1028 else: 

1029 raise FpdbError("invalid backend") 

1030 

1031 def get_db_info(self): 

1032 return (self.host, self.database, self.user, self.password) 

1033 

1034 def get_table_name(self, hand_id): 

1035 c = self.connection.cursor() 

1036 c.execute(self.sql.query["get_table_name"], (hand_id,)) 

1037 row = c.fetchone() 

1038 return row 

1039 

1040 def get_table_info(self, hand_id): 

1041 c = self.connection.cursor() 

1042 c.execute(self.sql.query["get_table_name"], (hand_id,)) 

1043 row = c.fetchone() 

1044 table_info = list(row) 

1045 if row[3] == "ring": # cash game 

1046 table_info.append(None) 

1047 table_info.append(None) 

1048 return table_info 

1049 else: # tournament 

1050 tour_no, tab_no = re.split(" ", row[0], 1) 

1051 table_info.append(tour_no) 

1052 table_info.append(tab_no) 

1053 return table_info 

1054 

1055 def get_last_hand(self): 

1056 c = self.connection.cursor() 

1057 c.execute(self.sql.query["get_last_hand"]) 

1058 row = c.fetchone() 

1059 return row[0] 

1060 

1061 def get_xml(self, hand_id): 

1062 c = self.connection.cursor() 

1063 c.execute(self.sql.query["get_xml"], (hand_id)) 

1064 row = c.fetchone() 

1065 return row[0] 

1066 

1067 def get_recent_hands(self, last_hand): 

1068 c = self.connection.cursor() 

1069 c.execute(self.sql.query["get_recent_hands"], {"last_hand": last_hand}) 

1070 return c.fetchall() 

1071 

1072 def get_gameinfo_from_hid(self, hand_id): 

1073 # returns a gameinfo (gametype) dictionary suitable for passing 

1074 # to Hand.hand_factory 

1075 c = self.connection.cursor() 

1076 q = self.sql.query["get_gameinfo_from_hid"] 

1077 q = q.replace("%s", self.sql.query["placeholder"]) 

1078 c.execute(q, (hand_id,)) 

1079 row = c.fetchone() 

1080 

1081 if row is None: 

1082 log.error(f"No game info found for hand ID {hand_id}") 

1083 return None 

1084 

1085 gameinfo = { 

1086 "sitename": row[0], 

1087 "category": row[1], 

1088 "base": row[2], 

1089 "type": row[3], 

1090 "limitType": row[4], 

1091 "hilo": row[5], 

1092 "sb": row[6], 

1093 "bb": row[7], 

1094 "sbet": row[8], 

1095 "bbet": row[9], 

1096 "currency": row[10], 

1097 "gametypeId": row[11], 

1098 "split": row[12], 

1099 } 

1100 return gameinfo 

1101 

1102 # The 'get_hand_info' query does not appear to exist, so this is left commented out: 

1103 # def get_hand_info(self, new_hand_id): 

1104 # c = self.connection.cursor() 

1105 # c.execute(self.sql.query['get_hand_info'], new_hand_id) 

1106 # return c.fetchall() 

1107 

1108 def getHandCount(self): 

1109 c = self.connection.cursor() 

1110 c.execute(self.sql.query["getHandCount"]) 

1111 return c.fetchone()[0] 

1112 

1113 # end def getHandCount 

1114 

1115 def getTourneyCount(self): 

1116 c = self.connection.cursor() 

1117 c.execute(self.sql.query["getTourneyCount"]) 

1118 return c.fetchone()[0] 

1119 

1120 # end def getTourneyCount 

1121 

1122 def getTourneyTypeCount(self): 

1123 c = self.connection.cursor() 

1124 c.execute(self.sql.query["getTourneyTypeCount"]) 

1125 return c.fetchone()[0] 

1126 

1127 # end def getTourneyTypeCount 

1128 

1129 def getSiteTourneyNos(self, site): 

1130 c = self.connection.cursor() 

1131 q = self.sql.query["getSiteId"] 

1132 q = q.replace("%s", self.sql.query["placeholder"]) 

1133 c.execute(q, (site,)) 

1134 siteid = c.fetchone()[0] 

1135 q = self.sql.query["getSiteTourneyNos"] 

1136 q = q.replace("%s", self.sql.query["placeholder"]) 

1137 c.execute(q, (siteid,)) 

1138 alist = [] 

1139 for row in c.fetchall(): 

1140 alist.append(row) 

1141 return alist 

1142 

1143 def get_actual_seat(self, hand_id, name): 

1144 c = self.connection.cursor() 

1145 c.execute(self.sql.query["get_actual_seat"], (hand_id, name)) 

1146 row = c.fetchone() 

1147 return row[0] 

1148 

1149 def get_cards(self, hand): 

1150 """Get and return the cards for each player in the hand.""" 

1151 cards = {} # dict of cards, the key is the seat number, 

1152 # the value is a tuple of the players cards 

1153 # example: {1: (0, 0, 20, 21, 22, 0 , 0)} 

1154 c = self.connection.cursor() 

1155 c.execute(self.sql.query["get_cards"], [hand]) 

1156 for row in c.fetchall(): 

1157 cards[row[0]] = row[1:] 

1158 return cards 

1159 

1160 def get_common_cards(self, hand): 

1161 """Get and return the community cards for the specified hand.""" 

1162 cards = {} 

1163 c = self.connection.cursor() 

1164 c.execute(self.sql.query["get_common_cards"], [hand]) 

1165 # row = c.fetchone() 

1166 cards["common"] = c.fetchone() 

1167 return cards 

1168 

1169 def get_action_from_hand(self, hand_no): 

1170 action = [[], [], [], [], []] 

1171 c = self.connection.cursor() 

1172 c.execute(self.sql.query["get_action_from_hand"], (hand_no,)) 

1173 for row in c.fetchall(): 

1174 street = row[0] 

1175 act = row[1:] 

1176 action[street].append(act) 

1177 return action 

1178 

1179 def get_winners_from_hand(self, hand): 

1180 """Returns a hash of winners:amount won, given a hand number.""" 

1181 winners = {} 

1182 c = self.connection.cursor() 

1183 c.execute(self.sql.query["get_winners_from_hand"], (hand,)) 

1184 for row in c.fetchall(): 

1185 winners[row[0]] = row[1] 

1186 return winners 

1187 

1188 def set_printdata(self, val): 

1189 self.printdata = val 

1190 

1191 def init_hud_stat_vars(self, hud_days, h_hud_days): 

1192 """Initialise variables used by Hud to fetch stats: 

1193 self.hand_1day_ago handId of latest hand played more than a day ago 

1194 self.date_ndays_ago date n days ago 

1195 self.h_date_ndays_ago date n days ago for hero (different n) 

1196 """ 

1197 self.hand_1day_ago = 1 

1198 c = self.get_cursor() 

1199 c.execute(self.sql.query["get_hand_1day_ago"]) 

1200 row = c.fetchone() 

1201 if row and row[0]: 

1202 self.hand_1day_ago = int(row[0]) 

1203 

1204 tz = datetime.utcnow() - datetime.today() 

1205 tz_offset = old_div(tz.seconds, 3600) 

1206 tz_day_start_offset = self.day_start + tz_offset 

1207 

1208 d = timedelta(days=hud_days, hours=tz_day_start_offset) 

1209 now = datetime.utcnow() - d 

1210 self.date_ndays_ago = "d%02d%02d%02d" % (now.year - 2000, now.month, now.day) 

1211 

1212 d = timedelta(days=h_hud_days, hours=tz_day_start_offset) 

1213 now = datetime.utcnow() - d 

1214 self.h_date_ndays_ago = "d%02d%02d%02d" % (now.year - 2000, now.month, now.day) 

1215 

1216 # is get_stats_from_hand slow? 

1217 # Gimick - yes - reason being that the gametypeid join on hands 

1218 # increases exec time on SQLite and postgres by a factor of 6 to 10 

1219 # method below changed to lookup hand.gametypeid and pass that as 

1220 # a constant to the underlying query. 

1221 

1222 def get_stats_from_hand( 

1223 self, 

1224 hand, 

1225 type, # type is "ring" or "tour" 

1226 hud_params={ 

1227 "stat_range": "A", 

1228 "agg_bb_mult": 1000, 

1229 "seats_style": "A", 

1230 "seats_cust_nums_low": 1, 

1231 "seats_cust_nums_high": 10, 

1232 "h_stat_range": "S", 

1233 "h_agg_bb_mult": 1000, 

1234 "h_seats_style": "A", 

1235 "h_seats_cust_nums_low": 1, 

1236 "h_seats_cust_nums_high": 10, 

1237 }, 

1238 hero_id=-1, 

1239 num_seats=6, 

1240 ): 

1241 stat_range = hud_params["stat_range"] 

1242 agg_bb_mult = hud_params["agg_bb_mult"] 

1243 seats_style = hud_params["seats_style"] 

1244 seats_cust_nums_low = hud_params["seats_cust_nums_low"] 

1245 seats_cust_nums_high = hud_params["seats_cust_nums_high"] 

1246 h_stat_range = hud_params["h_stat_range"] 

1247 h_agg_bb_mult = hud_params["h_agg_bb_mult"] 

1248 h_seats_style = hud_params["h_seats_style"] 

1249 h_seats_cust_nums_low = hud_params["h_seats_cust_nums_low"] 

1250 h_seats_cust_nums_high = hud_params["h_seats_cust_nums_high"] 

1251 

1252 stat_dict = {} 

1253 

1254 if seats_style == "A": 

1255 seats_min, seats_max = 0, 10 

1256 elif seats_style == "C": 

1257 seats_min, seats_max = seats_cust_nums_low, seats_cust_nums_high 

1258 elif seats_style == "E": 

1259 seats_min, seats_max = num_seats, num_seats 

1260 else: 

1261 seats_min, seats_max = 0, 10 

1262 log.warning("bad seats_style value: " + seats_style) 

1263 

1264 if h_seats_style == "A": 

1265 h_seats_min, h_seats_max = 0, 10 

1266 elif h_seats_style == "C": 

1267 h_seats_min, h_seats_max = h_seats_cust_nums_low, h_seats_cust_nums_high 

1268 elif h_seats_style == "E": 

1269 h_seats_min, h_seats_max = num_seats, num_seats 

1270 else: 

1271 h_seats_min, h_seats_max = 0, 10 

1272 log.warning("bad h_seats_style value: " + h_seats_style) 

1273 

1274 if stat_range == "S" or h_stat_range == "S": 

1275 self.get_stats_from_hand_session( 

1276 hand, stat_dict, hero_id, stat_range, seats_min, seats_max, h_stat_range, h_seats_min, h_seats_max 

1277 ) 

1278 

1279 if stat_range == "S" and h_stat_range == "S": 

1280 return stat_dict 

1281 

1282 if stat_range == "T": 

1283 stylekey = self.date_ndays_ago 

1284 elif stat_range == "A": 

1285 stylekey = "0000000" # all stylekey values should be higher than this 

1286 elif stat_range == "S": 

1287 stylekey = "zzzzzzz" # all stylekey values should be lower than this 

1288 else: 

1289 stylekey = "0000000" 

1290 log.info("stat_range: %s" % stat_range) 

1291 

1292 if h_stat_range == "T": 

1293 h_stylekey = self.h_date_ndays_ago 

1294 elif h_stat_range == "A": 

1295 h_stylekey = "0000000" # all stylekey values should be higher than this 

1296 elif h_stat_range == "S": 

1297 h_stylekey = "zzzzzzz" # all stylekey values should be lower than this 

1298 else: 

1299 h_stylekey = "0000000" 

1300 log.info("h_stat_range: %s" % h_stat_range) 

1301 

1302 # lookup gametypeId from hand 

1303 handinfo = self.get_gameinfo_from_hid(hand) 

1304 if handinfo is None: 

1305 log.error(f"No game info found for hand ID {hand}") 

1306 return stat_dict # Return an empty stat_dict if no game info is found 

1307 

1308 gametypeId = handinfo["gametypeId"] 

1309 

1310 query = "get_stats_from_hand_aggregated" 

1311 subs = ( 

1312 hand, 

1313 hero_id, 

1314 stylekey, 

1315 agg_bb_mult, 

1316 agg_bb_mult, 

1317 gametypeId, 

1318 seats_min, 

1319 seats_max, # hero params 

1320 hero_id, 

1321 h_stylekey, 

1322 h_agg_bb_mult, 

1323 h_agg_bb_mult, 

1324 gametypeId, 

1325 h_seats_min, 

1326 h_seats_max, 

1327 ) # villain params 

1328 

1329 stime = time() 

1330 c = self.connection.cursor() 

1331 

1332 # Now get the stats 

1333 c.execute(self.sql.query[query], subs) 

1334 ptime = time() - stime 

1335 log.info("HudCache query get_stats_from_hand_aggregated took %.3f seconds" % ptime) 

1336 colnames = [desc[0] for desc in c.description] 

1337 for row in c.fetchall(): 

1338 playerid = row[0] 

1339 if (playerid == hero_id and h_stat_range != "S") or (playerid != hero_id and stat_range != "S"): 

1340 t_dict = {} 

1341 for name, val in zip(colnames, row): 

1342 t_dict[name.lower()] = val 

1343 stat_dict[t_dict["player_id"]] = t_dict 

1344 

1345 return stat_dict 

1346 

1347 # uses query on handsplayers instead of hudcache to get stats on just this session 

1348 def get_stats_from_hand_session( 

1349 self, hand, stat_dict, hero_id, stat_range, seats_min, seats_max, h_stat_range, h_seats_min, h_seats_max 

1350 ): 

1351 """Get stats for just this session (currently defined as any play in the last 24 hours - to 

1352 be improved at some point ...) 

1353 h_stat_range and stat_range params indicate whether to get stats for hero and/or others 

1354 - only fetch the hero's stats if h_stat_range == 'S', 

1355 and only fetch other players' stats if stat_range == 'S' 

1356 seats_min/max params give seats limits, only include stats if between these values 

1357 """ 

1358 

1359 query = self.sql.query["get_stats_from_hand_session"] 

1360 if self.db_server == "mysql": 

1361 query = query.replace("<signed>", "signed ") 

1362 else: 

1363 query = query.replace("<signed>", "") 

1364 

1365 subs = (self.hand_1day_ago, hand, hero_id, seats_min, seats_max, hero_id, h_seats_min, h_seats_max) 

1366 c = self.get_cursor() 

1367 

1368 # now get the stats 

1369 # print "sess_stats: subs =", subs, "subs[0] =", subs[0] 

1370 c.execute(query, subs) 

1371 colnames = [desc[0] for desc in c.description] 

1372 n = 0 

1373 

1374 row = c.fetchone() 

1375 if colnames[0].lower() == "player_id": 

1376 # Loop through stats adding them to appropriate stat_dict: 

1377 while row: 

1378 playerid = row[0] 

1379 if (playerid == hero_id and h_stat_range == "S") or (playerid != hero_id and stat_range == "S"): 

1380 for name, val in zip(colnames, row): 

1381 if playerid not in stat_dict: 

1382 stat_dict[playerid] = {} 

1383 stat_dict[playerid][name.lower()] = val 

1384 elif name.lower() not in stat_dict[playerid]: 

1385 stat_dict[playerid][name.lower()] = val 

1386 elif name.lower() not in ("hand_id", "player_id", "seat", "screen_name", "seats"): 

1387 stat_dict[playerid][name.lower()] += val 

1388 n += 1 

1389 if n >= 10000: 

1390 break # todo: don't think this is needed so set nice and high 

1391 # prevents infinite loop so leave for now - comment out or remove? 

1392 row = c.fetchone() 

1393 else: 

1394 log.error(("ERROR: query %s result does not have player_id as first column") % (query,)) 

1395 

1396 # print " %d rows fetched, len(stat_dict) = %d" % (n, len(stat_dict)) 

1397 

1398 # print "session stat_dict =", stat_dict 

1399 # return stat_dict 

1400 

1401 def get_player_id(self, config, siteName, playerName): 

1402 c = self.connection.cursor() 

1403 # conversion to UTF-8 in Python 3 is not needed 

1404 c.execute(self.sql.query["get_player_id"], (playerName, siteName)) 

1405 row = c.fetchone() 

1406 if row: 

1407 return row[0] 

1408 else: 

1409 return None 

1410 

1411 def get_player_names(self, config, site_id=None, like_player_name="%"): 

1412 """Fetch player names from players. Use site_id and like_player_name if provided""" 

1413 if site_id is None: 

1414 site_id = -1 

1415 c = self.get_cursor() 

1416 # conversion to UTF-8 in Python 3 is not needed 

1417 c.execute(self.sql.query["get_player_names"], (like_player_name, site_id, site_id)) 

1418 rows = c.fetchall() 

1419 return rows 

1420 

1421 def get_site_id(self, site): 

1422 c = self.get_cursor() 

1423 c.execute(self.sql.query["getSiteId"], (site,)) 

1424 result = c.fetchall() 

1425 return result 

1426 

1427 def resetCache(self): 

1428 self.ttold = set() # TourneyTypes old 

1429 self.ttnew = set() # TourneyTypes new 

1430 self.wmold = set() # WeeksMonths old 

1431 self.wmnew = set() # WeeksMonths new 

1432 self.gtcache = None # GameTypeId cache 

1433 self.tcache = None # TourneyId cache 

1434 self.pcache = None # PlayerId cache 

1435 self.tpcache = None # TourneysPlayersId cache 

1436 

1437 def get_last_insert_id(self, cursor=None): 

1438 ret = None 

1439 try: 

1440 if self.backend == self.MYSQL_INNODB: 

1441 ret = self.connection.insert_id() 

1442 if ret < 1 or ret > 999999999: 

1443 log.warning(("getLastInsertId(): problem fetching insert_id? ret=%d") % ret) 

1444 ret = -1 

1445 elif self.backend == self.PGSQL: 

1446 # some options: 

1447 # currval(hands_id_seq) - use name of implicit seq here 

1448 # lastval() - still needs sequences set up? 

1449 # insert ... returning is useful syntax (but postgres specific?) 

1450 # see rules (fancy trigger type things) 

1451 c = self.get_cursor() 

1452 ret = c.execute("SELECT lastval()") 

1453 row = c.fetchone() 

1454 if not row: 

1455 log.warning("getLastInsertId(): problem fetching lastval? row=%s" % row) 

1456 ret = -1 

1457 else: 

1458 ret = row[0] 

1459 elif self.backend == self.SQLITE: 

1460 ret = cursor.lastrowid 

1461 else: 

1462 log.error(("getLastInsertId(): unknown backend: %d") % self.backend) 

1463 ret = -1 

1464 except: 

1465 ret = -1 

1466 err = traceback.extract_tb(sys.exc_info()[2]) 

1467 log.error(("*** Database get_last_insert_id error: ") + str(sys.exc_info()[1])) 

1468 log.error("\n".join([e[0] + ":" + str(e[1]) + " " + e[2] for e in err])) 

1469 raise 

1470 return ret 

1471 

1472 def prepareBulkImport(self): 

1473 """Drop some indexes/foreign keys to prepare for bulk import. 

1474 Currently keeping the standalone indexes as needed to import quickly""" 

1475 stime = time() 

1476 c = self.get_cursor() 

1477 # sc: don't think autocommit=0 is needed, should already be in that mode 

1478 if self.backend == self.MYSQL_INNODB: 

1479 c.execute("SET foreign_key_checks=0") 

1480 c.execute("SET autocommit=0") 

1481 return 

1482 if self.backend == self.PGSQL: 

1483 self.connection.set_isolation_level(0) # allow table/index operations to work 

1484 for fk in self.foreignKeys[self.backend]: 

1485 if fk["drop"] == 1: 

1486 if self.backend == self.MYSQL_INNODB: 

1487 c.execute( 

1488 "SELECT constraint_name " 

1489 + "FROM information_schema.KEY_COLUMN_USAGE " 

1490 + 

1491 # "WHERE REFERENCED_TABLE_SCHEMA = 'fpdb' 

1492 "WHERE 1=1 " 

1493 + "AND table_name = %s AND column_name = %s " 

1494 + "AND referenced_table_name = %s " 

1495 + "AND referenced_column_name = %s ", 

1496 (fk["fktab"], fk["fkcol"], fk["rtab"], fk["rcol"]), 

1497 ) 

1498 cons = c.fetchone() 

1499 # print "preparebulk find fk: cons=", cons 

1500 if cons: 

1501 log.debug("dropping mysql fk", cons[0], fk["fktab"], fk["fkcol"]) 

1502 try: 

1503 c.execute("alter table " + fk["fktab"] + " drop foreign key " + cons[0]) 

1504 except Exception: 

1505 log.error(" drop failed: " + str(sys.exc_info())) 

1506 elif self.backend == self.PGSQL: 

1507 # DON'T FORGET TO RECREATE THEM!! 

1508 log.debug("dropping pg fk", fk["fktab"], fk["fkcol"]) 

1509 try: 

1510 # try to lock table to see if index drop will work: 

1511 # hmmm, tested by commenting out rollback in grapher. lock seems to work but 

1512 # then drop still hangs :-( does work in some tests though?? 

1513 # will leave code here for now pending further tests/enhancement ... 

1514 c.execute("BEGIN TRANSACTION") 

1515 c.execute("lock table %s in exclusive mode nowait" % (fk["fktab"],)) 

1516 # print "after lock, status:", c.statusmessage 

1517 # print "alter table %s drop constraint %s_%s_fkey" % (fk['fktab'], fk['fktab'], fk['fkcol']) 

1518 try: 

1519 c.execute( 

1520 "alter table %s drop constraint %s_%s_fkey" % (fk["fktab"], fk["fktab"], fk["fkcol"]) 

1521 ) 

1522 log.debug("dropped pg fk %s_%s_fkey, continuing ..." % (fk["fktab"], fk["fkcol"])) 

1523 except Exception: 

1524 if "does not exist" not in str(sys.exc_info()[1]): 

1525 log.error( 

1526 ("warning: drop pg fk %s_%s_fkey failed: %s, continuing ...") 

1527 % (fk["fktab"], fk["fkcol"], str(sys.exc_info()[1]).rstrip("\n")) 

1528 ) 

1529 c.execute("END TRANSACTION") 

1530 except Exception: 

1531 log.debug( 

1532 ("warning: constraint %s_%s_fkey not dropped: %s, continuing ...") 

1533 % (fk["fktab"], fk["fkcol"], str(sys.exc_info()[1]).rstrip("\n")) 

1534 ) 

1535 else: 

1536 return -1 

1537 

1538 for idx in self.indexes[self.backend]: 

1539 if idx["drop"] == 1: 

1540 if self.backend == self.MYSQL_INNODB: 

1541 log.info("dropping mysql index %s %s", idx["tab"], idx["col"]) 

1542 try: 

1543 # apparently nowait is not implemented in mysql so this just hangs if there are locks 

1544 # preventing the index drop :-( 

1545 c.execute("alter table %s drop index %s;" % (idx["tab"], idx["col"]))  # identifiers cannot be bound as query parameters 

1546 except Exception: 

1547 log.error((" drop index failed: ") + str(sys.exc_info())) 

1548 # ALTER TABLE `fpdb`.`handsplayers` DROP INDEX `playerId`; 

1549 # using: 'HandsPlayers' drop index 'playerId' 

1550 elif self.backend == self.PGSQL: 

1551 # DON'T FORGET TO RECREATE THEM!! 

1552 log.info("dropping pg index %s %s", idx["tab"], idx["col"]) 

1553 try: 

1554 # try to lock table to see if index drop will work: 

1555 c.execute("BEGIN TRANSACTION") 

1556 c.execute("lock table %s in exclusive mode nowait" % (idx["tab"],)) 

1557 # print "after lock, status:", c.statusmessage 

1558 try: 

1559 # table locked ok so index drop should work: 

1560 # print "drop index %s_%s_idx" % (idx['tab'],idx['col']) 

1561 c.execute("drop index if exists %s_%s_idx" % (idx["tab"], idx["col"])) 

1562 # print "dropped pg index ", idx['tab'], idx['col'] 

1563 except Exception: 

1564 if "does not exist" not in str(sys.exc_info()[1]): 

1565 log.error( 

1566 ("warning: drop index %s_%s_idx failed: %s, continuing ...") 

1567 % (idx["tab"], idx["col"], str(sys.exc_info()[1]).rstrip("\n")) 

1568 ) 

1569 c.execute("END TRANSACTION") 

1570 except Exception: 

1571 log.error( 

1572 ("warning: index %s_%s_idx not dropped %s, continuing ...") 

1573 % (idx["tab"], idx["col"], str(sys.exc_info()[1]).rstrip("\n")) 

1574 ) 

1575 else: 

1576 return -1 

1577 

1578 if self.backend == self.PGSQL: 

1579 self.connection.set_isolation_level(1) # go back to normal isolation level 

1580 self.commit() # seems to clear up errors if there were any in postgres 

1581 ptime = time() - stime 

1582 log.debug(("prepare import took %s seconds") % ptime) 

1583 

1584 # end def prepareBulkImport 

1585 

1586 def afterBulkImport(self): 

1587 """Re-create any dropped indexes/foreign keys after bulk import""" 

1588 stime = time() 

1589 

1590 c = self.get_cursor() 

1591 if self.backend == self.MYSQL_INNODB: 

1592 c.execute("SET foreign_key_checks=1") 

1593 c.execute("SET autocommit=1") 

1594 return 

1595 

1596 if self.backend == self.PGSQL: 

1597 self.connection.set_isolation_level(0) # allow table/index operations to work 

1598 for fk in self.foreignKeys[self.backend]: 

1599 if fk["drop"] == 1: 

1600 if self.backend == self.MYSQL_INNODB: 

1601 c.execute( 

1602 "SELECT constraint_name " 

1603 + "FROM information_schema.KEY_COLUMN_USAGE " 

1604 + 

1605 # "WHERE REFERENCED_TABLE_SCHEMA = 'fpdb' 

1606 "WHERE 1=1 " 

1607 + "AND table_name = %s AND column_name = %s " 

1608 + "AND referenced_table_name = %s " 

1609 + "AND referenced_column_name = %s ", 

1610 (fk["fktab"], fk["fkcol"], fk["rtab"], fk["rcol"]), 

1611 ) 

1612 cons = c.fetchone() 

1613 # print "afterbulk: cons=", cons 

1614 if cons: 

1615 pass 

1616 else: 

1617 log.debug("Creating foreign key %s.%s -> %s.%s", fk["fktab"], fk["fkcol"], fk["rtab"], fk["rcol"]) 

1618 try: 

1619 c.execute( 

1620 "alter table " 

1621 + fk["fktab"] 

1622 + " add foreign key (" 

1623 + fk["fkcol"] 

1624 + ") references " 

1625 + fk["rtab"] 

1626 + "(" 

1627 + fk["rcol"] 

1628 + ")" 

1629 ) 

1630 except Exception: 

1631 log.error(("Create foreign key failed: ") + str(sys.exc_info())) 

1632 elif self.backend == self.PGSQL: 

1633 log.debug("Creating foreign key %s.%s -> %s.%s", fk["fktab"], fk["fkcol"], fk["rtab"], fk["rcol"]) 

1634 try: 

1635 c.execute( 

1636 "alter table " 

1637 + fk["fktab"] 

1638 + " add constraint " 

1639 + fk["fktab"] 

1640 + "_" 

1641 + fk["fkcol"] 

1642 + "_fkey" 

1643 + " foreign key (" 

1644 + fk["fkcol"] 

1645 + ") references " 

1646 + fk["rtab"] 

1647 + "(" 

1648 + fk["rcol"] 

1649 + ")" 

1650 ) 

1651 except Exception: 

1652 log.error(("Create foreign key failed: ") + str(sys.exc_info())) 

1653 else: 

1654 return -1 

1655 

1656 for idx in self.indexes[self.backend]: 

1657 if idx["drop"] == 1: 

1658 if self.backend == self.MYSQL_INNODB: 

1659 log.debug(("Creating MySQL index %s %s") % (idx["tab"], idx["col"])) 

1660 try: 

1661 s = "alter table %s add index %s(%s)" % (idx["tab"], idx["col"], idx["col"]) 

1662 c.execute(s) 

1663 except Exception: 

1664 log.error(("Create index failed: ") + str(sys.exc_info())) 

1665 elif self.backend == self.PGSQL: 

1666 # pass 

1667 # mod to use tab_col for index name? 

1668 log.debug("Creating PostgreSQL index %s %s", idx["tab"], idx["col"]) 

1669 try: 

1670 s = "create index %s_%s_idx on %s(%s)" % (idx["tab"], idx["col"], idx["tab"], idx["col"]) 

1671 c.execute(s) 

1672 except Exception: 

1673 log.error(("Create index failed: ") + str(sys.exc_info())) 

1674 else: 

1675 return -1 

1676 

1677 if self.backend == self.PGSQL: 

1678 self.connection.set_isolation_level(1) # go back to normal isolation level 

1679 self.commit() # seems to clear up errors if there were any in postgres 

1680 atime = time() - stime 

1681 log.debug(("After import took %s seconds") % atime) 

1682 

1683 # end def afterBulkImport 

1684 
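# Illustrative sketch (assumes a connected Database instance named db and an iterable of
# already-parsed hands; not part of Database.py): the intended bulk-import pattern is to
# drop the helper indexes/foreign keys, load everything, then rebuild them.
def bulk_import(db, hands):
    db.prepareBulkImport()        # drop FKs/indexes so the inserts are cheap
    try:
        for hand in hands:
            hand.insert(db)       # hypothetical per-hand insert call
    finally:
        db.afterBulkImport()      # recreate FKs/indexes even if the load fails
        db.analyzeDB()            # refresh optimizer statistics after the load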

1685 def drop_referential_integrity(self): 

1686 """Update all tables to remove foreign keys""" 

1687 

1688 c = self.get_cursor() 

1689 c.execute(self.sql.query["list_tables"]) 

1690 result = c.fetchall() 

1691 

1692 for i in range(len(result)): 

1693 c.execute("SHOW CREATE TABLE " + result[i][0]) 

1694 inner = c.fetchall() 

1695 

1696 for j in range(len(inner)): 

1697 # result[i][0] - Table name 

1698 # result[i][1] - CREATE TABLE parameters 

1699 # Searching for CONSTRAINT `tablename_ibfk_1` 

1700 for m in re.finditer("(ibfk_[0-9]+)", inner[j][1]): 

1701 key = "`" + inner[j][0] + "_" + m.group() + "`" 

1702 c.execute("ALTER TABLE " + inner[j][0] + " DROP FOREIGN KEY " + key) 

1703 self.commit() 

1704 

1705 # end def drop_referential_integrity 

1706 

1707 def recreate_tables(self): 

1708 """(Re-)creates the tables of the current DB""" 

1709 

1710 self.drop_tables() 

1711 self.resetCache() 

1712 self.resetBulkCache() 

1713 self.create_tables() 

1714 self.createAllIndexes() 

1715 self.commit() 

1716 self.get_sites() 

1717 log.info(("Finished recreating tables")) 

1718 

1719 # end def recreate_tables 

1720 

1721 def create_tables(self): 

1722 log.debug(self.sql.query["createSettingsTable"]) 

1723 c = self.get_cursor() 

1724 c.execute(self.sql.query["createSettingsTable"]) 

1725 

1726 log.debug("Creating tables") 

1727 c.execute(self.sql.query["createActionsTable"]) 

1728 c.execute(self.sql.query["createRankTable"]) 

1729 c.execute(self.sql.query["createStartCardsTable"]) 

1730 c.execute(self.sql.query["createSitesTable"]) 

1731 c.execute(self.sql.query["createGametypesTable"]) 

1732 c.execute(self.sql.query["createFilesTable"]) 

1733 c.execute(self.sql.query["createPlayersTable"]) 

1734 c.execute(self.sql.query["createAutoratesTable"]) 

1735 c.execute(self.sql.query["createWeeksTable"]) 

1736 c.execute(self.sql.query["createMonthsTable"]) 

1737 c.execute(self.sql.query["createSessionsTable"]) 

1738 c.execute(self.sql.query["createTourneyTypesTable"]) 

1739 c.execute(self.sql.query["createTourneysTable"]) 

1740 c.execute(self.sql.query["createTourneysPlayersTable"]) 

1741 c.execute(self.sql.query["createSessionsCacheTable"]) 

1742 c.execute(self.sql.query["createTourneysCacheTable"]) 

1743 c.execute(self.sql.query["createHandsTable"]) 

1744 c.execute(self.sql.query["createHandsPlayersTable"]) 

1745 c.execute(self.sql.query["createHandsActionsTable"]) 

1746 c.execute(self.sql.query["createHandsStoveTable"]) 

1747 c.execute(self.sql.query["createHandsPotsTable"]) 

1748 c.execute(self.sql.query["createHudCacheTable"]) 

1749 c.execute(self.sql.query["createCardsCacheTable"]) 

1750 c.execute(self.sql.query["createPositionsCacheTable"]) 

1751 c.execute(self.sql.query["createBoardsTable"]) 

1752 c.execute(self.sql.query["createBackingsTable"]) 

1753 c.execute(self.sql.query["createRawHands"]) 

1754 c.execute(self.sql.query["createRawTourneys"]) 

1755 

1756 # Create unique indexes: 

1757 log.debug("Creating unique indexes") 

1758 c.execute(self.sql.query["addTourneyIndex"]) 

1759 c.execute(self.sql.query["addHandsIndex"].replace("<heroseat>", ", heroSeat" if self.publicDB else "")) 

1760 c.execute(self.sql.query["addPlayersIndex"]) 

1761 c.execute(self.sql.query["addTPlayersIndex"]) 

1762 c.execute(self.sql.query["addPlayersSeat"]) 

1763 c.execute(self.sql.query["addHeroSeat"]) 

1764 c.execute(self.sql.query["addStartCardsIndex"]) 

1765 c.execute(self.sql.query["addSeatsIndex"]) 

1766 c.execute(self.sql.query["addPositionIndex"]) 

1767 c.execute(self.sql.query["addFilesIndex"]) 

1768 c.execute(self.sql.query["addTableNameIndex"]) 

1769 c.execute(self.sql.query["addPlayerNameIndex"]) 

1770 c.execute(self.sql.query["addPlayerHeroesIndex"]) 

1771 c.execute(self.sql.query["addStartCashIndex"]) 

1772 c.execute(self.sql.query["addEffStackIndex"]) 

1773 c.execute(self.sql.query["addTotalProfitIndex"]) 

1774 c.execute(self.sql.query["addWinningsIndex"]) 

1775 c.execute(self.sql.query["addFinalPotIndex"]) 

1776 c.execute(self.sql.query["addStreetIndex"]) 

1777 c.execute(self.sql.query["addSessionsCacheCompundIndex"]) 

1778 c.execute(self.sql.query["addTourneysCacheCompundIndex"]) 

1779 c.execute(self.sql.query["addHudCacheCompundIndex"]) 

1780 c.execute(self.sql.query["addCardsCacheCompundIndex"]) 

1781 c.execute(self.sql.query["addPositionsCacheCompundIndex"]) 

1782 

1783 self.fillDefaultData() 

1784 self.commit() 

1785 

1786 def drop_tables(self): 

1787 """Drops the fpdb tables from the current db""" 

1788 c = self.get_cursor() 

1789 

1790 backend = self.get_backend_name() 

1791 if backend == "MySQL InnoDB": # what happens if someone is using MyISAM? 

1792 try: 

1793 self.drop_referential_integrity() # needed to drop tables with foreign keys 

1794 c.execute(self.sql.query["list_tables"]) 

1795 tables = c.fetchall() 

1796 for table in tables: 

1797 c.execute(self.sql.query["drop_table"] + table[0]) 

1798 c.execute("SET FOREIGN_KEY_CHECKS=1") 

1799 except Exception: 

1800 err = traceback.extract_tb(sys.exc_info()[2])[-1] 

1801 log.error(("***Error dropping tables: ") + err[2] + "(" + str(err[1]) + "): " + str(sys.exc_info()[1])) 

1802 self.rollback() 

1803 elif backend == "PostgreSQL": 

1804 try: 

1805 self.commit() 

1806 c.execute(self.sql.query["list_tables"]) 

1807 tables = c.fetchall() 

1808 for table in tables: 

1809 c.execute(self.sql.query["drop_table"] + table[0] + " cascade") 

1810 except Exception: 

1811 err = traceback.extract_tb(sys.exc_info()[2])[-1] 

1812 log.error(("***Error dropping tables: ") + err[2] + "(" + str(err[1]) + "): " + str(sys.exc_info()[1])) 

1813 self.rollback() 

1814 elif backend == "SQLite": 

1815 c.execute(self.sql.query["list_tables"]) 

1816 for table in c.fetchall(): 

1817 if table[0] != "sqlite_stat1": 

1818 log.info("%s '%s'" % (self.sql.query["drop_table"], table[0])) 

1819 c.execute(self.sql.query["drop_table"] + table[0]) 

1820 self.commit() 

1821 

1822 # end def drop_tables 

1823 

1824 def createAllIndexes(self): 

1825 """Create new indexes""" 

1826 

1827 if self.backend == self.PGSQL: 

1828 self.connection.set_isolation_level(0) # allow table/index operations to work 

1829 c = self.get_cursor() 

1830 for idx in self.indexes[self.backend]: 

1831 log.info(("Creating index %s %s") % (idx["tab"], idx["col"])) 

1832 if self.backend == self.MYSQL_INNODB: 

1833 s = "CREATE INDEX %s ON %s(%s)" % (idx["col"], idx["tab"], idx["col"]) 

1834 c.execute(s) 

1835 elif self.backend == self.PGSQL or self.backend == self.SQLITE: 

1836 s = "CREATE INDEX %s_%s_idx ON %s(%s)" % (idx["tab"], idx["col"], idx["tab"], idx["col"]) 

1837 c.execute(s) 

1838 

1839 if self.backend == self.PGSQL: 

1840 self.connection.set_isolation_level(1) # go back to normal isolation level 

1841 

1842 # end def createAllIndexes 

1843 

1844 def dropAllIndexes(self): 

1845 """Drop all standalone indexes (i.e. not including primary keys or foreign keys) 

1846 using list of indexes in indexes data structure""" 

1847 # maybe upgrade to use data dictionary?? (but take care to exclude PK and FK) 

1848 if self.backend == self.PGSQL: 

1849 self.connection.set_isolation_level(0) # allow table/index operations to work 

1850 for idx in self.indexes[self.backend]: 

1851 if self.backend == self.MYSQL_INNODB: 

1852 log.debug("Dropping index: %s %s", idx["tab"], idx["col"]) 

1853 try: 

1854 self.get_cursor().execute("alter table %s drop index %s" % (idx["tab"], idx["col"]))  # identifiers cannot be bound as query parameters 

1855 except Exception: 

1856 log.error(("Drop index failed: ") + str(sys.exc_info())) 

1857 elif self.backend == self.PGSQL: 

1858 log.debug("Dropping index: %s %s", idx["tab"], idx["col"]) 

1859 # mod to use tab_col for index name? 

1860 try: 

1861 self.get_cursor().execute("drop index %s_%s_idx" % (idx["tab"], idx["col"])) 

1862 except Exception: 

1863 log.error(("Drop index failed: ") + str(sys.exc_info())) 

1864 elif self.backend == self.SQLITE: 

1865 log.debug("Dropping index: %s %s", idx["tab"], idx["col"]) 

1866 try: 

1867 self.get_cursor().execute("drop index %s_%s_idx" % (idx["tab"], idx["col"])) 

1868 except Exception: 

1869 log.error(("Drop index failed: ") + str(sys.exc_info())) 

1870 else: 

1871 return -1 

1872 if self.backend == self.PGSQL: 

1873 self.connection.set_isolation_level(1) # go back to normal isolation level 

1874 

1875 # end def dropAllIndexes 

1876 

1877 def createAllForeignKeys(self): 

1878 """Create foreign keys""" 

1879 

1880 try: 

1881 if self.backend == self.PGSQL: 

1882 self.connection.set_isolation_level(0) # allow table/index operations to work 

1883 c = self.get_cursor() 

1884 except Exception: 

1885 log.error(("set_isolation_level failed: ") + str(sys.exc_info())) 

1886 

1887 for fk in self.foreignKeys[self.backend]: 

1888 if self.backend == self.MYSQL_INNODB: 

1889 c.execute( 

1890 "SELECT constraint_name " 

1891 + "FROM information_schema.KEY_COLUMN_USAGE " 

1892 + 

1893 # "WHERE REFERENCED_TABLE_SCHEMA = 'fpdb' 

1894 "WHERE 1=1 " 

1895 + "AND table_name = %s AND column_name = %s " 

1896 + "AND referenced_table_name = %s " 

1897 + "AND referenced_column_name = %s ", 

1898 (fk["fktab"], fk["fkcol"], fk["rtab"], fk["rcol"]), 

1899 ) 

1900 cons = c.fetchone() 

1901 # print "afterbulk: cons=", cons 

1902 if cons: 

1903 pass 

1904 else: 

1905 log.debug("Creating foreign key: %s.%s -> %s.%s", fk["fktab"], fk["fkcol"], fk["rtab"], fk["rcol"]) 

1906 try: 

1907 c.execute( 

1908 "alter table " 

1909 + fk["fktab"] 

1910 + " add foreign key (" 

1911 + fk["fkcol"] 

1912 + ") references " 

1913 + fk["rtab"] 

1914 + "(" 

1915 + fk["rcol"] 

1916 + ")" 

1917 ) 

1918 except Exception: 

1919 log.error(("Create foreign key failed: ") + str(sys.exc_info())) 

1920 elif self.backend == self.PGSQL: 

1921 log.debug("Creating foreign key: %s.%s -> %s.%s", fk["fktab"], fk["fkcol"], fk["rtab"], fk["rcol"]) 

1922 try: 

1923 c.execute( 

1924 "alter table " 

1925 + fk["fktab"] 

1926 + " add constraint " 

1927 + fk["fktab"] 

1928 + "_" 

1929 + fk["fkcol"] 

1930 + "_fkey" 

1931 + " foreign key (" 

1932 + fk["fkcol"] 

1933 + ") references " 

1934 + fk["rtab"] 

1935 + "(" 

1936 + fk["rcol"] 

1937 + ")" 

1938 ) 

1939 except Exception: 

1940 log.error(("Create foreign key failed: ") + str(sys.exc_info())) 

1941 else: 

1942 pass 

1943 

1944 try: 

1945 if self.backend == self.PGSQL: 

1946 self.connection.set_isolation_level(1) # go back to normal isolation level 

1947 except Exception: 

1948 log.error(("set_isolation_level failed: ") + str(sys.exc_info())) 

1949 

1950 # end def createAllForeignKeys 

1951 

1952 def dropAllForeignKeys(self): 

1953 """Drop all foreign keys (i.e. not primary keys or standalone indexes) 

1954 using the list of foreign keys in the foreignKeys data structure""" 

1955 # maybe upgrade to use data dictionary?? (but take care to exclude PK and FK) 

1956 if self.backend == self.PGSQL: 

1957 self.connection.set_isolation_level(0) # allow table/index operations to work 

1958 c = self.get_cursor() 

1959 

1960 for fk in self.foreignKeys[self.backend]: 

1961 if self.backend == self.MYSQL_INNODB: 

1962 c.execute( 

1963 "SELECT constraint_name " 

1964 + "FROM information_schema.KEY_COLUMN_USAGE " 

1965 + 

1966 # "WHERE REFERENCED_TABLE_SCHEMA = 'fpdb' 

1967 "WHERE 1=1 " 

1968 + "AND table_name = %s AND column_name = %s " 

1969 + "AND referenced_table_name = %s " 

1970 + "AND referenced_column_name = %s ", 

1971 (fk["fktab"], fk["fkcol"], fk["rtab"], fk["rcol"]), 

1972 ) 

1973 cons = c.fetchone() 

1974 # print "preparebulk find fk: cons=", cons 

1975 if cons: 

1976 log.debug("Dropping foreign key: %s %s %s", cons[0], fk["fktab"], fk["fkcol"]) 

1977 try: 

1978 c.execute("alter table " + fk["fktab"] + " drop foreign key " + cons[0]) 

1979 except Exception: 

1980 log.error( 

1981 ("Warning: ") 

1982 + ("Drop foreign key %s_%s_fkey failed: %s, continuing ...") 

1983 % (fk["fktab"], fk["fkcol"], str(sys.exc_info()[1]).rstrip("\n")) 

1984 ) 

1985 elif self.backend == self.PGSQL: 

1986 # DON'T FORGET TO RECREATE THEM!! 

1987 log.debug("Dropping foreign key: %s %s", fk["fktab"], fk["fkcol"]) 

1988 try: 

1989 # try to lock table to see if index drop will work: 

1990 # hmmm, tested by commenting out rollback in grapher. lock seems to work but 

1991 # then drop still hangs :-( does work in some tests though?? 

1992 # will leave code here for now pending further tests/enhancement ... 

1993 c.execute("BEGIN TRANSACTION") 

1994 c.execute("lock table %s in exclusive mode nowait" % (fk["fktab"],)) 

1995 # print "after lock, status:", c.statusmessage 

1996 # print "alter table %s drop constraint %s_%s_fkey" % (fk['fktab'], fk['fktab'], fk['fkcol']) 

1997 try: 

1998 c.execute("alter table %s drop constraint %s_%s_fkey" % (fk["fktab"], fk["fktab"], fk["fkcol"])) 

1999 log.debug(("dropped foreign key %s_%s_fkey, continuing ...") % (fk["fktab"], fk["fkcol"])) 

2000 except Exception: 

2001 if "does not exist" not in str(sys.exc_info()[1]): 

2002 log.error( 

2003 ("Warning: ") 

2004 + ("Drop foreign key %s_%s_fkey failed: %s, continuing ...") 

2005 % (fk["fktab"], fk["fkcol"], str(sys.exc_info()[1]).rstrip("\n")) 

2006 ) 

2007 c.execute("END TRANSACTION") 

2008 except Exception: 

2009 log.error( 

2010 ("Warning: ") 

2011 + ("constraint %s_%s_fkey not dropped: %s, continuing ...") 

2012 % (fk["fktab"], fk["fkcol"], str(sys.exc_info()[1]).rstrip("\n")) 

2013 ) 

2014 else: 

2015 # print ("Only MySQL and Postgres supported so far") 

2016 pass 

2017 

2018 if self.backend == self.PGSQL: 

2019 self.connection.set_isolation_level(1) # go back to normal isolation level 

2020 

2021 # end def dropAllForeignKeys 

2022 

2023 def fillDefaultData(self): 

2024 c = self.get_cursor() 

2025 c.execute("INSERT INTO Settings (version) VALUES (%s);" % (DB_VERSION)) 

2026 # Fill Sites 

2027 c.execute("INSERT INTO Sites (id,name,code) VALUES ('1', 'Full Tilt Poker', 'FT')") 

2028 c.execute("INSERT INTO Sites (id,name,code) VALUES ('2', 'PokerStars', 'PS')") 

2029 c.execute("INSERT INTO Sites (id,name,code) VALUES ('3', 'Everleaf', 'EV')") 

2030 c.execute("INSERT INTO Sites (id,name,code) VALUES ('4', 'Boss', 'BM')") 

2031 c.execute("INSERT INTO Sites (id,name,code) VALUES ('5', 'OnGame', 'OG')") 

2032 c.execute("INSERT INTO Sites (id,name,code) VALUES ('6', 'UltimateBet', 'UB')") 

2033 c.execute("INSERT INTO Sites (id,name,code) VALUES ('7', 'Betfair', 'BF')") 

2034 # c.execute("INSERT INTO Sites (id,name,code) VALUES ('8', 'Absolute', 'AB')") 

2035 c.execute("INSERT INTO Sites (id,name,code) VALUES ('9', 'PartyPoker', 'PP')") 

2036 c.execute("INSERT INTO Sites (id,name,code) VALUES ('10', 'PacificPoker', 'P8')") 

2037 c.execute("INSERT INTO Sites (id,name,code) VALUES ('11', 'Partouche', 'PA')") 

2038 c.execute("INSERT INTO Sites (id,name,code) VALUES ('12', 'Merge', 'MN')") 

2039 c.execute("INSERT INTO Sites (id,name,code) VALUES ('13', 'PKR', 'PK')") 

2040 c.execute("INSERT INTO Sites (id,name,code) VALUES ('14', 'iPoker', 'IP')") 

2041 c.execute("INSERT INTO Sites (id,name,code) VALUES ('15', 'Winamax', 'WM')") 

2042 c.execute("INSERT INTO Sites (id,name,code) VALUES ('16', 'Everest', 'EP')") 

2043 c.execute("INSERT INTO Sites (id,name,code) VALUES ('17', 'Cake', 'CK')") 

2044 c.execute("INSERT INTO Sites (id,name,code) VALUES ('18', 'Entraction', 'TR')") 

2045 c.execute("INSERT INTO Sites (id,name,code) VALUES ('19', 'BetOnline', 'BO')") 

2046 c.execute("INSERT INTO Sites (id,name,code) VALUES ('20', 'Microgaming', 'MG')") 

2047 c.execute("INSERT INTO Sites (id,name,code) VALUES ('21', 'Bovada', 'BV')") 

2048 c.execute("INSERT INTO Sites (id,name,code) VALUES ('22', 'Enet', 'EN')") 

2049 c.execute("INSERT INTO Sites (id,name,code) VALUES ('23', 'SealsWithClubs', 'SW')") 

2050 c.execute("INSERT INTO Sites (id,name,code) VALUES ('24', 'WinningPoker', 'WP')") 

2051 c.execute("INSERT INTO Sites (id,name,code) VALUES ('25', 'PokerMaster', 'PM')") 

2052 c.execute("INSERT INTO Sites (id,name,code) VALUES ('26', 'Run It Once Poker', 'RO')") 

2053 c.execute("INSERT INTO Sites (id,name,code) VALUES ('27', 'GGPoker', 'GG')") 

2054 c.execute("INSERT INTO Sites (id,name,code) VALUES ('28', 'KingsClub', 'KC')") 

2055 c.execute("INSERT INTO Sites (id,name,code) VALUES ('29', 'PokerBros', 'PB')") 

2056 c.execute("INSERT INTO Sites (id,name,code) VALUES ('30', 'Unibet', 'UN')") 

2057 # c.execute("INSERT INTO Sites (id,name,code) VALUES ('31', 'PMU Poker', 'PM')") 

2058 # Fill Actions 

2059 c.execute("INSERT INTO Actions (id,name,code) VALUES ('1', 'ante', 'A')") 

2060 c.execute("INSERT INTO Actions (id,name,code) VALUES ('2', 'small blind', 'SB')") 

2061 c.execute("INSERT INTO Actions (id,name,code) VALUES ('3', 'secondsb', 'SSB')") 

2062 c.execute("INSERT INTO Actions (id,name,code) VALUES ('4', 'big blind', 'BB')") 

2063 c.execute("INSERT INTO Actions (id,name,code) VALUES ('5', 'both', 'SBBB')") 

2064 c.execute("INSERT INTO Actions (id,name,code) VALUES ('6', 'calls', 'C')") 

2065 c.execute("INSERT INTO Actions (id,name,code) VALUES ('7', 'raises', 'R')") 

2066 c.execute("INSERT INTO Actions (id,name,code) VALUES ('8', 'bets', 'B')") 

2067 c.execute("INSERT INTO Actions (id,name,code) VALUES ('9', 'stands pat', 'S')") 

2068 c.execute("INSERT INTO Actions (id,name,code) VALUES ('10', 'folds', 'F')") 

2069 c.execute("INSERT INTO Actions (id,name,code) VALUES ('11', 'checks', 'K')") 

2070 c.execute("INSERT INTO Actions (id,name,code) VALUES ('12', 'discards', 'D')") 

2071 c.execute("INSERT INTO Actions (id,name,code) VALUES ('13', 'bringin', 'I')") 

2072 c.execute("INSERT INTO Actions (id,name,code) VALUES ('14', 'completes', 'P')") 

2073 c.execute("INSERT INTO Actions (id,name,code) VALUES ('15', 'straddle', 'ST')") 

2074 c.execute("INSERT INTO Actions (id,name,code) VALUES ('16', 'button blind', 'BUB')") 

2075 # Fill Rank 

2076 c.execute("INSERT INTO Rank (id,name) VALUES ('1', 'Nothing')") 

2077 c.execute("INSERT INTO Rank (id,name) VALUES ('2', 'NoPair')") 

2078 c.execute("INSERT INTO Rank (id,name) VALUES ('3', 'OnePair')") 

2079 c.execute("INSERT INTO Rank (id,name) VALUES ('4', 'TwoPair')") 

2080 c.execute("INSERT INTO Rank (id,name) VALUES ('5', 'Trips')") 

2081 c.execute("INSERT INTO Rank (id,name) VALUES ('6', 'Straight')") 

2082 c.execute("INSERT INTO Rank (id,name) VALUES ('7', 'Flush')") 

2083 c.execute("INSERT INTO Rank (id,name) VALUES ('8', 'FlHouse')") 

2084 c.execute("INSERT INTO Rank (id,name) VALUES ('9', 'Quads')") 

2085 c.execute("INSERT INTO Rank (id,name) VALUES ('10', 'StFlush')") 

2086 # Fill StartCards 

2087 sql = "INSERT INTO StartCards (category, name, rank, combinations) VALUES (%s, %s, %s, %s)".replace( 

2088 "%s", self.sql.query["placeholder"] 

2089 ) 

2090 for i in range(170): 

2091 (name, rank, combinations) = Card.StartCardRank(i) 

2092 c.execute(sql, ("holdem", name, rank, combinations)) 

2093 for idx in range(-13, 1184): 

2094 name = Card.decodeRazzStartHand(idx) 

2095 c.execute(sql, ("razz", name, idx, 0)) 

2096 

2097 # end def fillDefaultData 

2098 

2099 def rebuild_indexes(self, start=None): 

2100 self.dropAllIndexes() 

2101 self.createAllIndexes() 

2102 self.dropAllForeignKeys() 

2103 self.createAllForeignKeys() 

2104 

2105 # end def rebuild_indexes 

2106 

2107 def replace_statscache(self, type, table, query): 

2108 if table == "HudCache": 

2109 insert = """HudCache 

2110 (gametypeId 

2111 ,playerId 

2112 ,seats 

2113 ,position 

2114 <tourney_insert_clause> 

2115 ,styleKey""" 

2116 

2117 select = """h.gametypeId 

2118 ,hp.playerId 

2119 ,h.seats as seat_num 

2120 <hc_position> 

2121 <tourney_select_clause> 

2122 <styleKey>""" 

2123 

2124 group = """h.gametypeId 

2125 ,hp.playerId 

2126 ,seat_num 

2127 ,hc_position 

2128 <tourney_group_clause> 

2129 <styleKeyGroup>""" 

2130 

2131 query = query.replace("<insert>", insert) 

2132 query = query.replace("<select>", select) 

2133 query = query.replace("<group>", group) 

2134 query = query.replace("<sessions_join_clause>", "") 

2135 

2136 if self.build_full_hudcache: 

2137 query = query.replace( 

2138 "<hc_position>", 

2139 """,case when hp.position = 'B' then 'B' 

2140 when hp.position = 'S' then 'S' 

2141 when hp.position = '0' then 'D' 

2142 when hp.position = '1' then 'C' 

2143 when hp.position = '2' then 'M' 

2144 when hp.position = '3' then 'M' 

2145 when hp.position = '4' then 'M' 

2146 when hp.position = '5' then 'E' 

2147 when hp.position = '6' then 'E' 

2148 when hp.position = '7' then 'E' 

2149 when hp.position = '8' then 'E' 

2150 when hp.position = '9' then 'E' 

2151 else 'E' 

2152 end as hc_position""", 

2153 ) 

2154 if self.backend == self.PGSQL: 

2155 query = query.replace("<styleKey>", ",'d' || to_char(h.startTime, 'YYMMDD')") 

2156 query = query.replace("<styleKeyGroup>", ",to_char(h.startTime, 'YYMMDD')") 

2157 elif self.backend == self.SQLITE: 

2158 query = query.replace("<styleKey>", ",'d' || substr(strftime('%Y%m%d', h.startTime),3,7)") 

2159 query = query.replace("<styleKeyGroup>", ",substr(strftime('%Y%m%d', h.startTime),3,7)") 

2160 elif self.backend == self.MYSQL_INNODB: 

2161 query = query.replace("<styleKey>", ",date_format(h.startTime, 'd%y%m%d')") 

2162 query = query.replace("<styleKeyGroup>", ",date_format(h.startTime, 'd%y%m%d')") 

2163 else: 

2164 query = query.replace("<hc_position>", ",'0' as hc_position") 

2165 query = query.replace("<styleKey>", ",'A000000' as styleKey") 

2166 query = query.replace("<styleKeyGroup>", ",styleKey") 

2167 

2168 if type == "tour": 

2169 query = query.replace("<tourney_insert_clause>", ",tourneyTypeId") 

2170 query = query.replace("<tourney_select_clause>", ",t.tourneyTypeId") 

2171 query = query.replace("<tourney_group_clause>", ",t.tourneyTypeId") 

2172 else: 

2173 query = query.replace("<tourney_insert_clause>", "") 

2174 query = query.replace("<tourney_select_clause>", "") 

2175 query = query.replace("<tourney_group_clause>", "") 

2176 

2177 query = query.replace("<hero_where>", "") 

2178 query = query.replace("<hero_join>", "") 

2179 

2180 elif table == "CardsCache": 

2181 insert = """CardsCache 

2182 (weekId 

2183 ,monthId 

2184 ,gametypeId 

2185 <tourney_insert_clause> 

2186 ,playerId 

2187 ,startCards""" 

2188 

2189 select = """s.weekId 

2190 ,s.monthId 

2191 ,h.gametypeId 

2192 <tourney_select_clause> 

2193 ,hp.playerId 

2194 ,hp.startCards""" 

2195 

2196 group = """s.weekId 

2197 ,s.monthId 

2198 ,h.gametypeId 

2199 <tourney_group_clause> 

2200 ,hp.playerId 

2201 ,hp.startCards""" 

2202 

2203 query = query.replace("<insert>", insert) 

2204 query = query.replace("<select>", select) 

2205 query = query.replace("<group>", group) 

2206 query = query.replace("<hero_join>", "") 

2207 query = query.replace( 

2208 "<sessions_join_clause>", 

2209 """INNER JOIN Sessions s ON (s.id = h.sessionId) 

2210 INNER JOIN Players p ON (hp.playerId = p.id)""", 

2211 ) 

2212 query = query.replace("<hero_where>", "") 

2213 

2214 if type == "tour": 

2215 query = query.replace("<tourney_insert_clause>", ",tourneyTypeId") 

2216 query = query.replace("<tourney_select_clause>", ",t.tourneyTypeId") 

2217 query = query.replace("<tourney_group_clause>", ",t.tourneyTypeId") 

2218 else: 

2219 query = query.replace("<tourney_insert_clause>", "") 

2220 query = query.replace("<tourney_select_clause>", "") 

2221 query = query.replace("<tourney_group_clause>", "") 

2222 

2223 elif table == "PositionsCache": 

2224 insert = """PositionsCache 

2225 (weekId 

2226 ,monthId 

2227 ,gametypeId 

2228 <tourney_insert_clause> 

2229 ,playerId 

2230 ,seats 

2231 ,maxPosition 

2232 ,position""" 

2233 

2234 select = """s.weekId 

2235 ,s.monthId 

2236 ,h.gametypeId 

2237 <tourney_select_clause> 

2238 ,hp.playerId 

2239 ,h.seats 

2240 ,h.maxPosition 

2241 ,hp.position""" 

2242 

2243 group = """s.weekId 

2244 ,s.monthId 

2245 ,h.gametypeId 

2246 <tourney_group_clause> 

2247 ,hp.playerId 

2248 ,h.seats 

2249 ,h.maxPosition 

2250 ,hp.position""" 

2251 

2252 query = query.replace("<insert>", insert) 

2253 query = query.replace("<select>", select) 

2254 query = query.replace("<group>", group) 

2255 query = query.replace("<hero_join>", "") 

2256 query = query.replace( 

2257 "<sessions_join_clause>", 

2258 """INNER JOIN Sessions s ON (s.id = h.sessionId) 

2259 INNER JOIN Players p ON (hp.playerId = p.id)""", 

2260 ) 

2261 query = query.replace("<hero_where>", "") 

2262 

2263 if type == "tour": 

2264 query = query.replace("<tourney_insert_clause>", ",tourneyTypeId") 

2265 query = query.replace("<tourney_select_clause>", ",t.tourneyTypeId") 

2266 query = query.replace("<tourney_group_clause>", ",t.tourneyTypeId") 

2267 else: 

2268 query = query.replace("<tourney_insert_clause>", "") 

2269 query = query.replace("<tourney_select_clause>", "") 

2270 query = query.replace("<tourney_group_clause>", "") 

2271 

2272 return query 

2273 
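# Illustrative sketch (not part of Database.py): replace_statscache works by plain string
# substitution of <tag> placeholders in a SQL template, as in this toy example.
template = "INSERT INTO <insert>) SELECT <select> FROM Hands h <tourney_join_clause>"
query = template.replace("<insert>", "HudCache (gametypeId, playerId")
query = query.replace("<select>", "h.gametypeId, hp.playerId")
query = query.replace("<tourney_join_clause>", "")  # ring games need no Tourneys join
print(query)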

2274 def rebuild_cache(self, h_start=None, v_start=None, table="HudCache", ttid=None, wmid=None): 

2275 """clears hudcache and rebuilds from the individual handsplayers records""" 

2276 # stime = time() 

2277 # derive list of program owner's player ids 

2278 self.hero = {} # name of program owner indexed by site id 

2279 self.hero_ids = {"dummy": -53, "dummy2": -52} # playerid of owner indexed by site id 

2280 # make sure at least two values in list 

2281 # so that tuple generation doesn't use 

2282 # () or (1,) style 

2283 if not h_start and not v_start: 

2284 self.hero_ids = None 

2285 else: 

2286 for site in self.config.get_supported_sites(): 

2287 result = self.get_site_id(site) 

2288 if result: 

2289 site_id = result[0][0] 

2290 self.hero[site_id] = self.config.supported_sites[site].screen_name 

2291 p_id = self.get_player_id(self.config, site, self.hero[site_id]) 

2292 if p_id: 

2293 self.hero_ids[site_id] = int(p_id) 

2294 

2295 if not h_start: 

2296 h_start = self.hero_hudstart_def 

2297 if not v_start: 

2298 v_start = self.villain_hudstart_def 

2299 

2300 if not ttid and not wmid: 

2301 self.get_cursor().execute(self.sql.query["clear%s" % table]) 

2302 self.commit() 

2303 

2304 if not ttid: 

2305 if self.hero_ids is None: 

2306 if wmid: 

2307 where = "WHERE g.type = 'ring' AND weekId = %s and monthId = %s<hero_where>" % wmid 

2308 else: 

2309 where = "WHERE g.type = 'ring'<hero_where>" 

2310 else: 

2311 where = ( 

2312 "where ((( hp.playerId not in " 

2313 + str(tuple(self.hero_ids.values())) 

2314 + " and h.startTime > '" 

2315 + v_start 

2316 + "')" 

2317 + " or ( hp.playerId in " 

2318 + str(tuple(self.hero_ids.values())) 

2319 + " and h.startTime > '" 

2320 + h_start 

2321 + "'))" 

2322 + " AND hp.tourneysPlayersId IS NULL)" 

2323 ) 

2324 rebuild_sql_cash = self.sql.query["rebuildCache"].replace("%s", self.sql.query["placeholder"]) 

2325 rebuild_sql_cash = rebuild_sql_cash.replace("<tourney_join_clause>", "") 

2326 rebuild_sql_cash = rebuild_sql_cash.replace("<where_clause>", where) 

2327 rebuild_sql_cash = self.replace_statscache("ring", table, rebuild_sql_cash) 

2328 # print rebuild_sql_cash 

2329 self.get_cursor().execute(rebuild_sql_cash) 

2330 self.commit() 

2331 # print ("Rebuild cache(cash) took %.1f seconds") % (time() - stime,) 

2332 

2333 if ttid: 

2334 where = "WHERE t.tourneyTypeId = %s<hero_where>" % ttid 

2335 elif self.hero_ids is None: 

2336 if wmid: 

2337 where = "WHERE g.type = 'tour' AND weekId = %s and monthId = %s<hero_where>" % wmid 

2338 else: 

2339 where = "WHERE g.type = 'tour'<hero_where>" 

2340 else: 

2341 where = ( 

2342 "where ((( hp.playerId not in " 

2343 + str(tuple(self.hero_ids.values())) 

2344 + " and h.startTime > '" 

2345 + v_start 

2346 + "')" 

2347 + " or ( hp.playerId in " 

2348 + str(tuple(self.hero_ids.values())) 

2349 + " and h.startTime > '" 

2350 + h_start 

2351 + "'))" 

2352 + " AND hp.tourneysPlayersId >= 0)" 

2353 ) 

2354 rebuild_sql_tourney = self.sql.query["rebuildCache"].replace("%s", self.sql.query["placeholder"]) 

2355 rebuild_sql_tourney = rebuild_sql_tourney.replace( 

2356 "<tourney_join_clause>", """INNER JOIN Tourneys t ON (t.id = h.tourneyId)""" 

2357 ) 

2358 rebuild_sql_tourney = rebuild_sql_tourney.replace("<where_clause>", where) 

2359 rebuild_sql_tourney = self.replace_statscache("tour", table, rebuild_sql_tourney) 

2360 # print rebuild_sql_tourney 

2361 self.get_cursor().execute(rebuild_sql_tourney) 

2362 self.commit() 

2363 # print ("Rebuild hudcache took %.1f seconds") % (time() - stime,) 

2364 

2365 # end def rebuild_cache 

2366 

2367 def update_timezone(self, tz_name): 

2368 select_W = self.sql.query["select_W"].replace("%s", self.sql.query["placeholder"]) 

2369 select_M = self.sql.query["select_M"].replace("%s", self.sql.query["placeholder"]) 

2370 insert_W = self.sql.query["insert_W"].replace("%s", self.sql.query["placeholder"]) 

2371 insert_M = self.sql.query["insert_M"].replace("%s", self.sql.query["placeholder"]) 

2372 update_WM_S = self.sql.query["update_WM_S"].replace("%s", self.sql.query["placeholder"]) 

2373 c = self.get_cursor() 

2374 c.execute("SELECT id, sessionStart, weekId wid, monthId mid FROM Sessions") 

2375 sessions = self.fetchallDict(c, ["id", "sessionStart", "wid", "mid"]) 

2376 for s in sessions: 

2377 utc_start = pytz.utc.localize(s["sessionStart"]) 

2378 tz = pytz.timezone(tz_name) 

2379 loc_tz = utc_start.astimezone(tz).strftime("%z") 

2380 offset = timedelta(hours=int(loc_tz[:-2]), minutes=int(loc_tz[0] + loc_tz[-2:])) 

2381 local = s["sessionStart"] + offset 

2382 monthStart = datetime(local.year, local.month, 1) 

2383 weekdate = datetime(local.year, local.month, local.day) 

2384 weekStart = weekdate - timedelta(days=weekdate.weekday()) 

2385 wid = self.insertOrUpdate("weeks", c, (weekStart,), select_W, insert_W) 

2386 mid = self.insertOrUpdate("months", c, (monthStart,), select_M, insert_M) 

2387 if wid != s["wid"] or mid != s["mid"]: 

2388 row = [wid, mid, s["id"]] 

2389 c.execute(update_WM_S, row) 

2390 self.wmold.add((s["wid"], s["mid"])) 

2391 self.wmnew.add((wid, mid)) 

2392 self.commit() 

2393 self.cleanUpWeeksMonths() 

2394 
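# Illustrative sketch (not part of Database.py): how the "%z" string produced by pytz is
# turned into the timedelta that shifts a UTC sessionStart into local time above.
from datetime import datetime, timedelta
import pytz

utc_start = pytz.utc.localize(datetime(2024, 1, 15, 3, 0))
loc_tz = utc_start.astimezone(pytz.timezone("Asia/Kolkata")).strftime("%z")  # "+0530"
offset = timedelta(hours=int(loc_tz[:-2]), minutes=int(loc_tz[0] + loc_tz[-2:]))
print(offset)                                  # 5:30:00
print(datetime(2024, 1, 15, 3, 0) + offset)    # 2024-01-15 08:30:00 local wall-clock time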

2395 def get_hero_hudcache_start(self): 

2396 """fetches earliest stylekey from hudcache for one of hero's player ids""" 

2397 

2398 try: 

2399 # derive list of program owner's player ids 

2400 self.hero = {} # name of program owner indexed by site id 

2401 self.hero_ids = {"dummy": -53, "dummy2": -52} # playerid of owner indexed by site id 

2402 # make sure at least two values in list 

2403 # so that tuple generation doesn't use 

2404 # () or (1,) style 

2405 for site in self.config.get_supported_sites(): 

2406 result = self.get_site_id(site) 

2407 if result: 

2408 site_id = result[0][0] 

2409 self.hero[site_id] = self.config.supported_sites[site].screen_name 

2410 p_id = self.get_player_id(self.config, site, self.hero[site_id]) 

2411 if p_id: 

2412 self.hero_ids[site_id] = int(p_id) 

2413 

2414 q = self.sql.query["get_hero_hudcache_start"].replace("<playerid_list>", str(tuple(self.hero_ids.values()))) 

2415 c = self.get_cursor() 

2416 c.execute(q) 

2417 tmp = c.fetchone() 

2418 if tmp == (None,): 

2419 return self.hero_hudstart_def 

2420 else: 

2421 return "20" + tmp[0][1:3] + "-" + tmp[0][3:5] + "-" + tmp[0][5:7] 

2422 except Exception: 

2423 err = traceback.extract_tb(sys.exc_info()[2])[-1] 

2424 log.error(("Error in get_hero_hudcache_start: ") + str(sys.exc_info()[1])) 

2425 log.error(err) 

2426 

2427 # end def get_hero_hudcache_start 

2428 

2429 def analyzeDB(self): 

2430 """Do whatever the DB can offer to update index/table statistics""" 

2431 stime = time() 

2432 if self.backend == self.MYSQL_INNODB or self.backend == self.SQLITE: 

2433 try: 

2434 self.get_cursor().execute(self.sql.query["analyze"]) 

2435 except Exception: 

2436 log.error(("Error during analyze: ") + str(sys.exc_info()[1])) 

2437 elif self.backend == self.PGSQL: 

2438 self.connection.set_isolation_level(0) # allow analyze to work 

2439 try: 

2440 self.get_cursor().execute(self.sql.query["analyze"]) 

2441 except Exception: 

2442 log.error(("Error during analyze: ") + str(sys.exc_info()[1])) 

2443 self.connection.set_isolation_level(1) # go back to normal isolation level 

2444 self.commit() 

2445 atime = time() - stime 

2446 log.info(("Analyze took %.1f seconds") % (atime,)) 

2447 

2448 # end def analyzeDB 

2449 

2450 def vacuumDB(self): 

2451 """Do whatever the DB can offer to update index/table statistics""" 

2452 stime = time() 

2453 if self.backend == self.MYSQL_INNODB or self.backend == self.SQLITE: 

2454 try: 

2455 self.get_cursor().execute(self.sql.query["vacuum"]) 

2456 except Exception: 

2457 log.error(("Error during vacuum: ") + str(sys.exc_info()[1])) 

2458 elif self.backend == self.PGSQL: 

2459 self.connection.set_isolation_level(0) # allow vacuum to work 

2460 try: 

2461 self.get_cursor().execute(self.sql.query["vacuum"]) 

2462 except Exception: 

2463 log.error(("Error during vacuum: ") + str(sys.exc_info()[1])) 

2464 self.connection.set_isolation_level(1) # go back to normal isolation level 

2465 self.commit() 

2466 atime = time() - stime 

2467 log.debug(("Vacuum took %.1f seconds") % (atime,)) 

2468 

2469 # end def vacuumDB 

2470 

2471 # Start of Hand Writing routines. Idea is to provide a mixture of routines to store Hand data 

2472 # however the calling prog requires. Main aims: 

2473 # - existing static routines from fpdb_simple just modified 

2474 

2475 def setThreadId(self, threadid): 

2476 self.threadId = threadid 

2477 

2478 def acquireLock(self, wait=True, retry_time=0.01): 

2479 while not self._has_lock: 

2480 cursor = self.get_cursor() 

2481 num = cursor.execute(self.sql.query["switchLockOn"], (True, self.threadId)) 

2482 self.commit() 

2483 if self.backend == self.MYSQL_INNODB and num == 0: 

2484 if not wait: 

2485 return False 

2486 sleep(retry_time) 

2487 else: 

2488 self._has_lock = True 

2489 return True 

2490 

2491 def releaseLock(self): 

2492 if self._has_lock: 

2493 cursor = self.get_cursor() 

2494 cursor.execute(self.sql.query["switchLockOff"], (False, self.threadId)) 

2495 self.commit() 

2496 self._has_lock = False 

2497 
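# Illustrative sketch (assumes a connected Database instance named db; not part of
# Database.py): acquireLock/releaseLock guard the insert path, so callers should always
# pair them and release in a finally block.
def locked_insert(db, do_inserts):
    if not db.acquireLock(wait=True):   # blocks until acquired (returns False only if wait=False)
        return
    try:
        do_inserts()                    # caller-supplied insert work
    finally:
        db.releaseLock()                # always hand the lock back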

2498 def lock_for_insert(self): 

2499 """Lock tables in MySQL to try to speed inserts up""" 

2500 try: 

2501 self.get_cursor().execute(self.sql.query["lockForInsert"]) 

2502 except Exception: 

2503 log.debug(("Error during lock_for_insert: ") + str(sys.exc_info()[1])) 

2504 

2505 # end def lock_for_insert 

2506 

2507 def resetBulkCache(self, reconnect=False): 

2508 self.siteHandNos = [] # cache of siteHandNo 

2509 self.hbulk = [] # Hands bulk inserts 

2510 self.bbulk = [] # Boards bulk inserts 

2511 self.hpbulk = [] # HandsPlayers bulk inserts 

2512 self.habulk = [] # HandsActions bulk inserts 

2513 self.hcbulk = {} # HudCache bulk inserts 

2514 self.dcbulk = {} # CardsCache bulk inserts 

2515 self.pcbulk = {} # PositionsCache bulk inserts 

2516 self.hsbulk = [] # HandsStove bulk inserts 

2517 self.htbulk = [] # HandsPots bulk inserts 

2518 self.tbulk = {} # Tourneys bulk updates 

2519 self.s = {"bk": []} # Sessions bulk updates 

2520 self.sc = {} # SessionsCache bulk updates 

2521 self.tc = {} # TourneysCache bulk updates 

2522 self.hids = [] # hand ids in order of hand bulk inserts 

2523 # self.tids = [] # tourney ids in order of hp bulk inserts 

2524 if reconnect: 

2525 self.do_connect(self.config) 

2526 

2527 def executemany(self, c, q, values): 

2528 if self.backend == self.PGSQL and self.import_options["hhBulkPath"] != "": 

2529 # COPY much faster under postgres. Requires superuser privileges 

2530 m = re_insert.match(q) 

2531 rand = "".join(random.SystemRandom().choice(string.ascii_uppercase + string.digits) for _ in range(5)) 

2532 bulk_file = os.path.join(self.import_options["hhBulkPath"], m.group("TABLENAME") + "_" + rand) 

2533 with open(bulk_file, "w", newline="") as csvfile:  # csv.writer needs a text-mode file under Python 3 

2534 writer = csv.writer(csvfile, delimiter="\t", quotechar='"', quoting=csv.QUOTE_MINIMAL) 

2535 writer.writerows(w for w in values) 

2536 q_insert = ( 

2537 "COPY " + m.group("TABLENAME") + m.group("COLUMNS") + " FROM '" + bulk_file + "' DELIMITER '\t' CSV" 

2538 ) 

2539 c.execute(q_insert) 

2540 os.remove(bulk_file) 

2541 else: 

2542 batch_size = 20000 # experiment to find optimal batch_size for your data 

2543 while values: # repeat until all records in values have been inserted 

2544 batch, values = ( 

2545 values[:batch_size], 

2546 values[batch_size:], 

2547 ) # split values into the current batch and the remaining records 

2548 c.executemany(q, batch) # insert the current batch 

2549 
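# Illustrative sketch (not part of Database.py): the batching idiom used in the non-COPY
# branch of executemany, shown on a plain list so it can run standalone.
values = list(range(7))
batch_size = 3
while values:
    batch, values = values[:batch_size], values[batch_size:]
    print(batch)   # [0, 1, 2] then [3, 4, 5] then [6]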

2550 def storeHand(self, hdata, doinsert=False, printdata=False): 

2551 if printdata: 

2552 log.debug("######## Hands ##########") 

2553 import pprint 

2554 

2555 pp = pprint.PrettyPrinter(indent=4) 

2556 pp.pprint(hdata) 

2557 log.debug("###### End Hands ########") 

2558 

2559 # Table name can have odd characters 

2560 # hdata["tableName"] = Charset.to_db_utf8(hdata["tableName"])[:50] 

2561 table_name = hdata.get("tableName", "") 

2562 table_name_safe = table_name.encode("utf-8", "replace").decode("utf-8") 

2563 hdata["tableName"] = table_name_safe[:50] 

2564 

2565 self.hids.append(hdata["id"]) 

2566 self.hbulk.append( 

2567 [ 

2568 hdata["tableName"], 

2569 hdata["siteHandNo"], 

2570 hdata["tourneyId"], 

2571 hdata["gametypeId"], 

2572 hdata["sessionId"], 

2573 hdata["fileId"], 

2574 hdata["startTime"].replace(tzinfo=None), 

2575 datetime.utcnow().strftime("%Y-%m-%d %H:%M:%S"), # importtime 

2576 hdata["seats"], 

2577 hdata["heroSeat"], 

2578 hdata["maxPosition"], 

2579 hdata["texture"], 

2580 hdata["playersVpi"], 

2581 hdata["boardcard1"], 

2582 hdata["boardcard2"], 

2583 hdata["boardcard3"], 

2584 hdata["boardcard4"], 

2585 hdata["boardcard5"], 

2586 hdata["runItTwice"], 

2587 hdata["playersAtStreet1"], 

2588 hdata["playersAtStreet2"], 

2589 hdata["playersAtStreet3"], 

2590 hdata["playersAtStreet4"], 

2591 hdata["playersAtShowdown"], 

2592 hdata["street0Raises"], 

2593 hdata["street1Raises"], 

2594 hdata["street2Raises"], 

2595 hdata["street3Raises"], 

2596 hdata["street4Raises"], 

2597 hdata["street0Pot"], 

2598 hdata["street1Pot"], 

2599 hdata["street2Pot"], 

2600 hdata["street3Pot"], 

2601 hdata["street4Pot"], 

2602 hdata["finalPot"], 

2603 ] 

2604 ) 

2605 

2606 if doinsert: 

2607 self.appendHandsSessionIds() 

2608 self.updateTourneysSessions() 

2609 q = self.sql.query["store_hand"] 

2610 q = q.replace("%s", self.sql.query["placeholder"]) 

2611 c = self.get_cursor() 

2612 c.executemany(q, self.hbulk) 

2613 self.commit() 

2614 

2615 def storeBoards(self, id, boards, doinsert): 

2616 if boards: 

2617 for b in boards: 

2618 self.bbulk += [[id] + b] 

2619 if doinsert and self.bbulk: 

2620 q = self.sql.query["store_boards"] 

2621 q = q.replace("%s", self.sql.query["placeholder"]) 

2622 c = self.get_cursor() 

2623 self.executemany(c, q, self.bbulk) # c.executemany(q, self.bbulk) 

2624 

2625 def updateTourneysSessions(self): 

2626 if self.tbulk: 

2627 q_update_sessions = self.sql.query["updateTourneysSessions"].replace("%s", self.sql.query["placeholder"]) 

2628 c = self.get_cursor() 

2629 for t, sid in list(self.tbulk.items()): 

2630 c.execute(q_update_sessions, (sid, t)) 

2631 self.commit() 

2632 

2633 def storeHandsPlayers(self, hid, pids, pdata, doinsert=False, printdata=False): 

2634 # print "DEBUG: %s %s %s" %(hid, pids, pdata) 

2635 if printdata: 

2636 import pprint 

2637 

2638 pp = pprint.PrettyPrinter(indent=4) 

2639 pp.pprint(pdata) 

2640 

2641 hpbulk = self.hpbulk 

2642 for p, pvalue in list(pdata.items()): 

2643 # Add (hid, pids[p]) + all the values in pvalue at the 

2644 # keys in HANDS_PLAYERS_KEYS to hpbulk. 

2645 bulk_data = [pvalue[key] for key in HANDS_PLAYERS_KEYS] 

2646 bulk_data.append(pids[p]) 

2647 bulk_data.append(hid) 

2648 bulk_data.reverse() 

2649 hpbulk.append(bulk_data) 

2650 

2651 if doinsert: 

2652 # self.appendHandsPlayersSessionIds() 

2653 q = self.sql.query["store_hands_players"] 

2654 q = q.replace("%s", self.sql.query["placeholder"]) 

2655 c = self.get_cursor(True) 

2656 self.executemany(c, q, self.hpbulk) # c.executemany(q, self.hpbulk) 

2657 

2658 def storeHandsPots(self, tdata, doinsert): 

2659 self.htbulk += tdata 

2660 if doinsert and self.htbulk: 

2661 q = self.sql.query["store_hands_pots"] 

2662 q = q.replace("%s", self.sql.query["placeholder"]) 

2663 c = self.get_cursor() 

2664 self.executemany(c, q, self.htbulk) # c.executemany(q, self.htbulk) 

2665 

2666 def storeHandsActions(self, hid, pids, adata, doinsert=False, printdata=False): 

2667 # print "DEBUG: %s %s %s" %(hid, pids, adata) 

2668 

2669 # This can be used to generate test data. Currently unused 

2670 # if printdata: 

2671 # import pprint 

2672 # pp = pprint.PrettyPrinter(indent=4) 

2673 # pp.pprint(adata) 

2674 

2675 for a in adata: 

2676 self.habulk.append( 

2677 ( 

2678 hid, 

2679 pids[adata[a]["player"]], 

2680 adata[a]["street"], 

2681 adata[a]["actionNo"], 

2682 adata[a]["streetActionNo"], 

2683 adata[a]["actionId"], 

2684 adata[a]["amount"], 

2685 adata[a]["raiseTo"], 

2686 adata[a]["amountCalled"], 

2687 adata[a]["numDiscarded"], 

2688 adata[a]["cardsDiscarded"], 

2689 adata[a]["allIn"], 

2690 ) 

2691 ) 

2692 

2693 if doinsert: 

2694 q = self.sql.query["store_hands_actions"] 

2695 q = q.replace("%s", self.sql.query["placeholder"]) 

2696 c = self.get_cursor() 

2697 self.executemany(c, q, self.habulk) # c.executemany(q, self.habulk) 

2698 

2699 def storeHandsStove(self, sdata, doinsert): 

2700 self.hsbulk += sdata 

2701 if doinsert and self.hsbulk: 

2702 q = self.sql.query["store_hands_stove"] 

2703 q = q.replace("%s", self.sql.query["placeholder"]) 

2704 c = self.get_cursor() 

2705 self.executemany(c, q, self.hsbulk) # c.executemany(q, self.hsbulk) 

2706 

2707 def storeHudCache(self, gid, gametype, pids, starttime, pdata, doinsert=False): 

2708 """Update cached statistics. If update fails because no record exists, do an insert.""" 

2709 

2710 if pdata: 

2711 tz = datetime.utcnow() - datetime.today() 

2712 tz_offset = old_div(tz.seconds, 3600) 

2713 tz_day_start_offset = self.day_start + tz_offset 

2714 

2715 d = timedelta(hours=tz_day_start_offset) 

2716 starttime_offset = starttime - d 

2717 styleKey = datetime.strftime(starttime_offset, "d%y%m%d") 

2718 seats = len(pids) 

2719 

2720 pos = {"B": "B", "S": "S", 0: "D", 1: "C", 2: "M", 3: "M", 4: "M", 5: "E", 6: "E", 7: "E", 8: "E", 9: "E"} 

2721 

2722 for p in pdata: 

2723 player_stats = pdata.get(p) 

2724 garbageTourneyTypes = ( 

2725 player_stats["tourneyTypeId"] in self.ttnew or player_stats["tourneyTypeId"] in self.ttold 

2726 ) 

2727 if self.import_options["hhBulkPath"] == "" or not garbageTourneyTypes: 

2728 position = pos[player_stats["position"]] 

2729 k = ( 

2730 gid, 

2731 pids[p], 

2732 seats, 

2733 position if self.build_full_hudcache else "0", 

2734 player_stats["tourneyTypeId"], 

2735 styleKey if self.build_full_hudcache else "A000000", 

2736 ) 

2737 player_stats["n"] = 1 

2738 line = [ 

2739 int(player_stats[s]) if isinstance(player_stats[s], bool) else player_stats[s] for s in CACHE_KEYS 

2740 ] 

2741 

2742 hud = self.hcbulk.get(k) 

2743 # Add line to the old line in the hudcache. 

2744 if hud is not None: 

2745 for idx, val in enumerate(line): 

2746 hud[idx] += val 

2747 else: 

2748 self.hcbulk[k] = line 

2749 

2750 if doinsert: 

2751 update_hudcache = self.sql.query["update_hudcache"] 

2752 update_hudcache = update_hudcache.replace("%s", self.sql.query["placeholder"]) 

2753 insert_hudcache = self.sql.query["insert_hudcache"] 

2754 insert_hudcache = insert_hudcache.replace("%s", self.sql.query["placeholder"]) 

2755 

2756 select_hudcache_ring = self.sql.query["select_hudcache_ring"] 

2757 select_hudcache_ring = select_hudcache_ring.replace("%s", self.sql.query["placeholder"]) 

2758 select_hudcache_tour = self.sql.query["select_hudcache_tour"] 

2759 select_hudcache_tour = select_hudcache_tour.replace("%s", self.sql.query["placeholder"]) 

2760 inserts = [] 

2761 c = self.get_cursor() 

2762 for k, item in list(self.hcbulk.items()): 

2763 if not k[4]: 

2764 q = select_hudcache_ring 

2765 row = list(k[:4]) + [k[-1]] 

2766 else: 

2767 q = select_hudcache_tour 

2768 row = list(k) 

2769 

2770 c.execute(q, row) 

2771 result = c.fetchone() 

2772 if result: 

2773 id = result[0] 

2774 update = item + [id] 

2775 c.execute(update_hudcache, update) 

2776 

2777 else: 

2778 inserts.append(list(k) + item) 

2779 

2780 if inserts: 

2781 self.executemany(c, insert_hudcache, inserts) 

2782 self.commit() 

2783 
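# Illustrative sketch (not part of Database.py): storeHudCache accumulates one summed stat
# line per (gametype, player, seats, position, ...) key, then updates the existing row on
# insert time or inserts a new one. The accumulation step looks like this:
hcbulk = {}
for key, line in [(("NL", 42), [1, 1, 0]), (("NL", 42), [1, 0, 1])]:
    existing = hcbulk.get(key)
    if existing is not None:
        for idx, val in enumerate(line):
            existing[idx] += val      # add the new hand's stats to the cached line
    else:
        hcbulk[key] = list(line)
print(hcbulk)  # {('NL', 42): [2, 1, 1]}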

2784 def storeSessions(self, hid, pids, startTime, tid, heroes, tz_name, doinsert=False): 

2785 """Update cached sessions. If no record exists, do an insert""" 

2786 THRESHOLD = timedelta(seconds=int(self.sessionTimeout * 60)) 

2787 if tz_name in pytz.common_timezones: 

2788 naive = startTime.replace(tzinfo=None) 

2789 utc_start = pytz.utc.localize(naive) 

2790 tz = pytz.timezone(tz_name) 

2791 loc_tz = utc_start.astimezone(tz).strftime("%z") 

2792 offset = timedelta(hours=int(loc_tz[:-2]), minutes=int(loc_tz[0] + loc_tz[-2:])) 

2793 local = naive + offset 

2794 monthStart = datetime(local.year, local.month, 1) 

2795 weekdate = datetime(local.year, local.month, local.day) 

2796 weekStart = weekdate - timedelta(days=weekdate.weekday()) 

2797 else: 

2798 if strftime("%Z") == "UTC": 

2799 local = startTime 

2800 loc_tz = "0" 

2801 else: 

2802 tz_dt = datetime.today() - datetime.utcnow() 

2803 loc_tz = old_div(tz_dt.seconds, 3600) - 24 

2804 offset = timedelta(hours=int(loc_tz)) 

2805 local = startTime + offset 

2806 monthStart = datetime(local.year, local.month, 1) 

2807 weekdate = datetime(local.year, local.month, local.day) 

2808 weekStart = weekdate - timedelta(days=weekdate.weekday()) 

2809 

2810 j, hand = None, {} 

2811 for p, id in list(pids.items()): 

2812 if id in heroes: 

2813 hand["startTime"] = startTime.replace(tzinfo=None) 

2814 hand["weekStart"] = weekStart 

2815 hand["monthStart"] = monthStart 

2816 hand["ids"] = [hid] 

2817 hand["tourneys"] = set() 

2818 

2819 id = [] 

2820 if hand: 

2821 lower = hand["startTime"] - THRESHOLD 

2822 upper = hand["startTime"] + THRESHOLD 

2823 for i in range(len(self.s["bk"])): 

2824 if ((lower <= self.s["bk"][i]["sessionEnd"]) and (upper >= self.s["bk"][i]["sessionStart"])) or ( 

2825 tid in self.s["bk"][i]["tourneys"] 

2826 ): 

2827 if (hand["startTime"] <= self.s["bk"][i]["sessionEnd"]) and ( 

2828 hand["startTime"] >= self.s["bk"][i]["sessionStart"] 

2829 ): 

2830 id.append(i) 

2831 elif hand["startTime"] < self.s["bk"][i]["sessionStart"]: 

2832 self.s["bk"][i]["sessionStart"] = hand["startTime"] 

2833 self.s["bk"][i]["weekStart"] = hand["weekStart"] 

2834 self.s["bk"][i]["monthStart"] = hand["monthStart"] 

2835 id.append(i) 

2836 elif hand["startTime"] > self.s["bk"][i]["sessionEnd"]: 

2837 self.s["bk"][i]["sessionEnd"] = hand["startTime"] 

2838 id.append(i) 

2839 if len(id) == 1: 

2840 j = id[0] 

2841 self.s["bk"][j]["ids"] += [hid] 

2842 if tid: 

2843 self.s["bk"][j]["tourneys"].add(tid) 

2844 elif len(id) > 1: 

2845 merged = {} 

2846 merged["ids"] = [hid] 

2847 merged["tourneys"] = set() 

2848 if tid: 

2849 merged["tourneys"].add(tid) 

2850 for n in id: 

2851 h = self.s["bk"][n] 

2852 if not merged.get("sessionStart") or merged.get("sessionStart") > h["sessionStart"]: 

2853 merged["sessionStart"] = h["sessionStart"] 

2854 merged["weekStart"] = h["weekStart"] 

2855 merged["monthStart"] = h["monthStart"] 

2856 if not merged.get("sessionEnd") or merged.get("sessionEnd") < h["sessionEnd"]: 

2857 merged["sessionEnd"] = h["sessionEnd"] 

2858 merged["ids"] += h["ids"] 

2859 merged["tourneys"] = merged["tourneys"].union(h["tourneys"]) 

2860 self.s["bk"][n]["delete"] = True 

2861 

2862 self.s["bk"] = [item for item in self.s["bk"] if not item.get("delete")] 

2863 self.s["bk"].append(merged) 

2864 elif len(id) == 0: 

2865 j = len(self.s["bk"]) 

2866 hand["id"] = None 

2867 hand["sessionStart"] = hand["startTime"] 

2868 hand["sessionEnd"] = hand["startTime"] 

2869 if tid: 

2870 hand["tourneys"].add(tid) 

2871 self.s["bk"].append(hand) 

2872 

2873 if doinsert: 

2874 select_S = self.sql.query["select_S"].replace("%s", self.sql.query["placeholder"]) 

2875 select_W = self.sql.query["select_W"].replace("%s", self.sql.query["placeholder"]) 

2876 select_M = self.sql.query["select_M"].replace("%s", self.sql.query["placeholder"]) 

2877 update_S = self.sql.query["update_S"].replace("%s", self.sql.query["placeholder"]) 

2878 insert_W = self.sql.query["insert_W"].replace("%s", self.sql.query["placeholder"]) 

2879 insert_M = self.sql.query["insert_M"].replace("%s", self.sql.query["placeholder"]) 

2880 insert_S = self.sql.query["insert_S"].replace("%s", self.sql.query["placeholder"]) 

2881 update_S_SC = self.sql.query["update_S_SC"].replace("%s", self.sql.query["placeholder"]) 

2882 update_S_TC = self.sql.query["update_S_TC"].replace("%s", self.sql.query["placeholder"]) 

2883 update_S_T = self.sql.query["update_S_T"].replace("%s", self.sql.query["placeholder"]) 

2884 update_S_H = self.sql.query["update_S_H"].replace("%s", self.sql.query["placeholder"]) 

2885 delete_S = self.sql.query["delete_S"].replace("%s", self.sql.query["placeholder"]) 

2886 c = self.get_cursor() 

2887 for i in range(len(self.s["bk"])): 

2888 lower = self.s["bk"][i]["sessionStart"] - THRESHOLD 

2889 upper = self.s["bk"][i]["sessionEnd"] + THRESHOLD 

2890 tourneys = self.s["bk"][i]["tourneys"] 

2891 if self.s["bk"][i]["tourneys"]: 

2892 toursql = "OR SC.id in (SELECT DISTINCT sessionId FROM Tourneys T WHERE T.id in (%s))" % ", ".join( 

2893 str(t) for t in tourneys 

2894 ) 

2895 q = select_S.replace("<TOURSELECT>", toursql) 

2896 else: 

2897 q = select_S.replace("<TOURSELECT>", "") 

2898 c.execute(q, (lower, upper)) 

2899 r = self.fetchallDict( 

2900 c, ["id", "sessionStart", "sessionEnd", "weekStart", "monthStart", "weekId", "monthId"] 

2901 ) 

2902 num = len(r) 

2903 if num == 1: 

2904 start, end = r[0]["sessionStart"], r[0]["sessionEnd"] 

2905 week, month = r[0]["weekStart"], r[0]["monthStart"] 

2906 wid, mid = r[0]["weekId"], r[0]["monthId"] 

2907 update, updateW, updateM = False, False, False 

2908 if self.s["bk"][i]["sessionStart"] < start: 

2909 start, update = self.s["bk"][i]["sessionStart"], True 

2910 if self.s["bk"][i]["weekStart"] != week: 

2911 week, updateW = self.s["bk"][i]["weekStart"], True 

2912 if self.s["bk"][i]["monthStart"] != month: 

2913 month, updateM = self.s["bk"][i]["monthStart"], True 

2914 if updateW or updateM: 

2915 self.wmold.add((wid, mid)) 

2916 if self.s["bk"][i]["sessionEnd"] > end: 

2917 end, update = self.s["bk"][i]["sessionEnd"], True 

2918 if updateW: 

2919 wid = self.insertOrUpdate("weeks", c, (week,), select_W, insert_W) 

2920 if updateM: 

2921 mid = self.insertOrUpdate("months", c, (month,), select_M, insert_M) 

2922 if updateW or updateM: 

2923 self.wmnew.add((wid, mid)) 

2924 if update: 

2925 c.execute(update_S, [wid, mid, start, end, r[0]["id"]]) 

2926 for h in self.s["bk"][i]["ids"]: 

2927 self.s[h] = {"id": r[0]["id"], "wid": wid, "mid": mid} 

2928 elif num > 1: 

2929 start, end, wmold, merge = None, None, set(), [] 

2930 for n in r: 

2931 merge.append(n["id"]) 

2932 merge.sort() 

2933 r.append(self.s["bk"][i]) 

2934 for n in r: 

2935 if "weekId" in n: 

2936 wmold.add((n["weekId"], n["monthId"])) 

2937 if start: 

2938 if start > n["sessionStart"]: 

2939 start = n["sessionStart"] 

2940 week = n["weekStart"] 

2941 month = n["monthStart"] 

2942 else: 

2943 start = n["sessionStart"] 

2944 week = n["weekStart"] 

2945 month = n["monthStart"] 

2946 if end: 

2947 if end < n["sessionEnd"]: 

2948 end = n["sessionEnd"] 

2949 else: 

2950 end = n["sessionEnd"] 

2951 wid = self.insertOrUpdate("weeks", c, (week,), select_W, insert_W) 

2952 mid = self.insertOrUpdate("months", c, (month,), select_M, insert_M) 

2953 wmold.discard((wid, mid)) 

2954 if len(wmold) > 0: 

2955 self.wmold = self.wmold.union(wmold) 

2956 self.wmnew.add((wid, mid)) 

2957 row = [wid, mid, start, end] 

2958 c.execute(insert_S, row) 

2959 sid = self.get_last_insert_id(c) 

2960 for h in self.s["bk"][i]["ids"]: 

2961 self.s[h] = {"id": sid, "wid": wid, "mid": mid} 

2962 for m in merge: 

2963 for h, n in list(self.s.items()): 

2964 if h != "bk" and n["id"] == m: 

2965 self.s[h] = {"id": sid, "wid": wid, "mid": mid} 

2966 c.execute(update_S_TC, (sid, m)) 

2967 c.execute(update_S_SC, (sid, m)) 

2968 c.execute(update_S_T, (sid, m)) 

2969 c.execute(update_S_H, (sid, m)) 

2970 c.execute(delete_S, (m,)) 

2971 elif num == 0: 

2972 start = self.s["bk"][i]["sessionStart"] 

2973 end = self.s["bk"][i]["sessionEnd"] 

2974 week = self.s["bk"][i]["weekStart"] 

2975 month = self.s["bk"][i]["monthStart"] 

2976 wid = self.insertOrUpdate("weeks", c, (week,), select_W, insert_W) 

2977 mid = self.insertOrUpdate("months", c, (month,), select_M, insert_M) 

2978 row = [wid, mid, start, end] 

2979 c.execute(insert_S, row) 

2980 sid = self.get_last_insert_id(c) 

2981 for h in self.s["bk"][i]["ids"]: 

2982 self.s[h] = {"id": sid, "wid": wid, "mid": mid} 

2983 self.commit() 

2984 

2985 def storeSessionsCache(self, hid, pids, startTime, gametypeId, gametype, pdata, heroes, doinsert=False): 

2986 """Update cached cash sessions. If no record exists, do an insert""" 

2987 THRESHOLD = timedelta(seconds=int(self.sessionTimeout * 60)) 

2988 if pdata: # gametype['type']=='ring' and 

2989 for p, pid in list(pids.items()): 

2990 hp = {} 

2991 k = (gametypeId, pid) 

2992 hp["startTime"] = startTime.replace(tzinfo=None) 

2993 hp["hid"] = hid 

2994 hp["ids"] = [] 

2995 pdata[p]["n"] = 1 

2996 hp["line"] = [int(pdata[p][s]) if isinstance(pdata[p][s], bool) else pdata[p][s] for s in CACHE_KEYS] 

2997 id = [] 

2998 sessionplayer = self.sc.get(k) 

2999 if sessionplayer is not None: 

3000 lower = hp["startTime"] - THRESHOLD 

3001 upper = hp["startTime"] + THRESHOLD 

3002 for i in range(len(sessionplayer)): 

3003 if lower <= sessionplayer[i]["endTime"] and upper >= sessionplayer[i]["startTime"]: 

3004 if len(id) == 0: 

3005 for idx, val in enumerate(hp["line"]): 

3006 sessionplayer[i]["line"][idx] += val 

3007 if (hp["startTime"] <= sessionplayer[i]["endTime"]) and ( 

3008 hp["startTime"] >= sessionplayer[i]["startTime"] 

3009 ): 

3010 id.append(i) 

3011 elif hp["startTime"] < sessionplayer[i]["startTime"]: 

3012 sessionplayer[i]["startTime"] = hp["startTime"] 

3013 id.append(i) 

3014 elif hp["startTime"] > sessionplayer[i]["endTime"]: 

3015 sessionplayer[i]["endTime"] = hp["startTime"] 

3016 id.append(i) 

3017 if len(id) == 1: 

3018 i = id[0] 

3019 if pids[p] == heroes[0]: 

3020 self.sc[k][i]["ids"].append(hid) 

3021 elif len(id) == 2: 

3022 i, j = id[0], id[1] 

3023 if sessionplayer[i]["startTime"] < sessionplayer[j]["startTime"]: 

3024 sessionplayer[i]["endTime"] = sessionplayer[j]["endTime"] 

3025 else: 

3026 sessionplayer[i]["startTime"] = sessionplayer[j]["startTime"] 

3027 for idx, val in enumerate(sessionplayer[j]["line"]): 

3028 sessionplayer[i]["line"][idx] += val 

3029 g = sessionplayer.pop(j) 

3030 if pids[p] == heroes[0]: 

3031 self.sc[k][i]["ids"].append(hid) 

3032 self.sc[k][i]["ids"] += g["ids"] 

3033 elif len(id) == 0: 

3034 if sessionplayer is None: 

3035 self.sc[k] = [] 

3036 hp["endTime"] = hp["startTime"] 

3037 if pids[p] == heroes[0]: 

3038 hp["ids"].append(hid) 

3039 self.sc[k].append(hp) 

3040 

3041 if doinsert: 

3042 select_SC = self.sql.query["select_SC"].replace("%s", self.sql.query["placeholder"]) 

3043 update_SC = self.sql.query["update_SC"].replace("%s", self.sql.query["placeholder"]) 

3044 insert_SC = self.sql.query["insert_SC"].replace("%s", self.sql.query["placeholder"]) 

3045 delete_SC = self.sql.query["delete_SC"].replace("%s", self.sql.query["placeholder"]) 

3046 c = self.get_cursor() 

3047 for k, sessionplayer in list(self.sc.items()): 

3048 for session in sessionplayer: 

3049 hid = session["hid"] 

3050 sid = self.s.get(hid)["id"] 

3051 lower = session["startTime"] - THRESHOLD 

3052 upper = session["endTime"] + THRESHOLD 

3053 row = [lower, upper] + list(k[:2]) 

3054 c.execute(select_SC, row) 

3055 r = self.fetchallDict(c, ["id", "sessionId", "startTime", "endTime"] + CACHE_KEYS) 

3056 num = len(r) 

3057 d = [0] * num 

3058 for z in range(num): 

3059 d[z] = {} 

3060 d[z]["line"] = [int(r[z][s]) if isinstance(r[z][s], bool) else r[z][s] for s in CACHE_KEYS] 

3061 d[z]["id"] = r[z]["id"] 

3062 d[z]["sessionId"] = r[z]["sessionId"] 

3063 d[z]["startTime"] = r[z]["startTime"] 

3064 d[z]["endTime"] = r[z]["endTime"] 

3065 if num == 1: 

3066 start, end, id = r[0]["startTime"], r[0]["endTime"], r[0]["id"] 

3067 if session["startTime"] < start: 

3068 start = session["startTime"] 

3069 if session["endTime"] > end: 

3070 end = session["endTime"] 

3071 row = [start, end] + session["line"] + [id] 

3072 c.execute(update_SC, row) 

3073 elif num > 1: 

3074 start, end, merge, line = None, None, [], [0] * len(CACHE_KEYS) 

3075 for n in r: 

3076 merge.append(n["id"]) 

3077 merge.sort() 

3078 r = d 

3079 r.append(session) 

3080 for n in r: 

3081 if start: 

3082 if start > n["startTime"]: 

3083 start = n["startTime"] 

3084 else: 

3085 start = n["startTime"] 

3086 if end: 

3087 if end < n["endTime"]: 

3088 end = n["endTime"] 

3089 else: 

3090 end = n["endTime"] 

3091 for idx in range(len(CACHE_KEYS)): 

3092 line[idx] += int(n["line"][idx]) if isinstance(n["line"][idx], bool) else n["line"][idx] 

3093 row = [sid, start, end] + list(k[:2]) + line 

3094 c.execute(insert_SC, row) 

3095 id = self.get_last_insert_id(c) 

3096 for m in merge: 

3097 c.execute(delete_SC, (m,)) 

3098 self.commit() 

3099 elif num == 0: 

3100 start = session["startTime"] 

3101 end = session["endTime"] 

3102 row = [sid, start, end] + list(k[:2]) + session["line"] 

3103 c.execute(insert_SC, row) 

3104 id = self.get_last_insert_id(c) 

3105 self.commit() 

3106 

3107 def storeTourneysCache(self, hid, pids, startTime, tid, gametype, pdata, heroes, doinsert=False): 

3108 """Update cached tour sessions. If no record exists, do an insert""" 

3109 if gametype["type"] == "tour" and pdata: 

3110 for p in pdata: 

3111 k = (tid, pids[p]) 

3112 pdata[p]["n"] = 1 

3113 line = [int(pdata[p][s]) if isinstance(pdata[p][s], bool) else pdata[p][s] for s in CACHE_KEYS] 

3114 tourplayer = self.tc.get(k) 

3115 # Add line to the old line in the tourcache. 

3116 if tourplayer is not None: 

3117 for idx, val in enumerate(line): 

3118 tourplayer["line"][idx] += val 

3119 if pids[p] == heroes[0]: 

3120 tourplayer["ids"].append(hid) 

3121 else: 

3122 self.tc[k] = {"startTime": None, "endTime": None, "hid": hid, "ids": []} 

3123 self.tc[k]["line"] = line 

3124 if pids[p] == heroes[0]: 

3125 self.tc[k]["ids"].append(hid) 

3126 

3127 if not self.tc[k]["startTime"] or startTime < self.tc[k]["startTime"]: 

3128 self.tc[k]["startTime"] = startTime 

3129 self.tc[k]["hid"] = hid 

3130 if not self.tc[k]["endTime"] or startTime > self.tc[k]["endTime"]: 

3131 self.tc[k]["endTime"] = startTime 

3132 

3133 if doinsert: 

3134 update_TC = self.sql.query["update_TC"].replace("%s", self.sql.query["placeholder"]) 

3135 insert_TC = self.sql.query["insert_TC"].replace("%s", self.sql.query["placeholder"]) 

3136 select_TC = self.sql.query["select_TC"].replace("%s", self.sql.query["placeholder"]) 

3137 

3138 inserts = [] 

3139 c = self.get_cursor() 

3140 for k, tc in list(self.tc.items()): 

3141 sc = self.s.get(tc["hid"]) 

3142 tc["startTime"] = tc["startTime"].replace(tzinfo=None) 

3143 tc["endTime"] = tc["endTime"].replace(tzinfo=None) 

3144 c.execute(select_TC, k) 

3145 r = self.fetchallDict(c, ["id", "startTime", "endTime"]) 

3146 num = len(r) 

3147 if num == 1: 

3148 update = not r[0]["startTime"] or not r[0]["endTime"] 

3149 if update or (tc["startTime"] < r[0]["startTime"] and tc["endTime"] > r[0]["endTime"]): 

3150 q = update_TC.replace("<UPDATE>", "startTime=%s, endTime=%s,").replace("%s", self.sql.query["placeholder"])

3151 row = [tc["startTime"], tc["endTime"]] + tc["line"] + list(k[:2]) 

3152 elif tc["startTime"] < r[0]["startTime"]: 

3153 q = update_TC.replace("<UPDATE>", "startTime=%s, ").replace("%s", self.sql.query["placeholder"])

3154 row = [tc["startTime"]] + tc["line"] + list(k[:2]) 

3155 elif tc["endTime"] > r[0]["endTime"]: 

3156 q = update_TC.replace("<UPDATE>", "endTime=%s, ").replace("%s", self.sql.query["placeholder"])

3157 row = [tc["endTime"]] + tc["line"] + list(k[:2]) 

3158 else: 

3159 q = update_TC.replace("<UPDATE>", "") 

3160 row = tc["line"] + list(k[:2]) 

3161 c.execute(q, row) 

3162 elif num == 0: 

3163 row = [sc["id"], tc["startTime"], tc["endTime"]] + list(k[:2]) + tc["line"] 

3164 # append to the bulk inserts 

3165 inserts.append(row) 

3166 

3167 if inserts: 

3168 self.executemany(c, insert_TC, inserts) 

3169 self.commit() 

3170 

3171 def storeCardsCache(self, hid, pids, startTime, gametypeId, tourneyTypeId, pdata, heroes, tz_name, doinsert): 

3172 """Update cached cards statistics. If update fails because no record exists, do an insert.""" 

3173 

3174 for p in pdata: 

3175 k = (hid, gametypeId, tourneyTypeId, pids[p], pdata[p]["startCards"]) 

3176 pdata[p]["n"] = 1 

3177 line = [int(pdata[p][s]) if isinstance(pdata[p][s], bool) else pdata[p][s] for s in CACHE_KEYS] 

3178 self.dcbulk[k] = line 

3179 

3180 if doinsert: 

3181 update_cardscache = self.sql.query["update_cardscache"].replace("%s", self.sql.query["placeholder"]) 

3182 insert_cardscache = self.sql.query["insert_cardscache"].replace("%s", self.sql.query["placeholder"]) 

3183 select_cardscache_ring = self.sql.query["select_cardscache_ring"].replace( 

3184 "%s", self.sql.query["placeholder"] 

3185 ) 

3186 select_cardscache_tour = self.sql.query["select_cardscache_tour"].replace( 

3187 "%s", self.sql.query["placeholder"] 

3188 ) 

3189 

3190 # Removed unused variables 

3191 # select_W = self.sql.query["select_W"].replace("%s", self.sql.query["placeholder"]) 

3192 # select_M = self.sql.query["select_M"].replace("%s", self.sql.query["placeholder"]) 

3193 # insert_W = self.sql.query["insert_W"].replace("%s", self.sql.query["placeholder"]) 

3194 # insert_M = self.sql.query["insert_M"].replace("%s", self.sql.query["placeholder"]) 

3195 

3196 dccache, inserts = {}, [] 

3197 for k, line in list(self.dcbulk.items()): 

3198 sc = self.s.get(k[0]) 

3199 if sc is not None: 

3200 garbageWeekMonths = (sc["wid"], sc["mid"]) in self.wmnew or (sc["wid"], sc["mid"]) in self.wmold 

3201 garbageTourneyTypes = k[2] in self.ttnew or k[2] in self.ttold 

3202 if self.import_options["hhBulkPath"] == "" or (not garbageWeekMonths and not garbageTourneyTypes): 

3203 n = (sc["wid"], sc["mid"], k[1], k[2], k[3], k[4]) 

3204 startCards = dccache.get(n) 

3205 # Add line to the old line in the hudcache. 

3206 if startCards is not None: 

3207 for idx, val in enumerate(line): 

3208 dccache[n][idx] += val 

3209 else: 

3210 dccache[n] = line 

3211 

3212 c = self.get_cursor() 

3213 for k, item in list(dccache.items()): 

3214 if k[3]: 

3215 q = select_cardscache_tour 

3216 row = list(k) 

3217 else: 

3218 q = select_cardscache_ring 

3219 row = list(k[:3]) + list(k[-2:]) 

3220 c.execute(q, row) 

3221 result = c.fetchone() 

3222 if result: 

3223 id = result[0] 

3224 update = item + [id] 

3225 c.execute(update_cardscache, update) 

3226 else: 

3227 insert = list(k) + item 

3228 inserts.append(insert) 

3229 

3230 if inserts: 

3231 self.executemany(c, insert_cardscache, inserts) 

3232 self.commit() 

3233 

3234 def storePositionsCache( 

3235 self, hid, pids, startTime, gametypeId, tourneyTypeId, pdata, hdata, heroes, tz_name, doinsert 

3236 ): 

3237 """Update cached position statistics. If update fails because no record exists, do an insert.""" 

3238 

3239 for p in pdata: 

3240 position = str(pdata[p]["position"]) 

3241 k = (hid, gametypeId, tourneyTypeId, pids[p], len(pids), hdata["maxPosition"], position) 

3242 pdata[p]["n"] = 1 

3243 line = [int(pdata[p][s]) if isinstance(pdata[p][s], bool) else pdata[p][s] for s in CACHE_KEYS] 

3244 self.pcbulk[k] = line 

3245 

3246 if doinsert: 

3247 update_positionscache = self.sql.query["update_positionscache"].replace("%s", self.sql.query["placeholder"]) 

3248 insert_positionscache = self.sql.query["insert_positionscache"].replace("%s", self.sql.query["placeholder"]) 

3249 select_positionscache_ring = self.sql.query["select_positionscache_ring"].replace( 

3250 "%s", self.sql.query["placeholder"] 

3251 ) 

3252 select_positionscache_tour = self.sql.query["select_positionscache_tour"].replace( 

3253 "%s", self.sql.query["placeholder"] 

3254 ) 

3255 

3256 # Removed unused variables: 

3257 # select_W = self.sql.query["select_W"].replace("%s", self.sql.query["placeholder"]) 

3258 # select_M = self.sql.query["select_M"].replace("%s", self.sql.query["placeholder"]) 

3259 # insert_W = self.sql.query["insert_W"].replace("%s", self.sql.query["placeholder"]) 

3260 # insert_M = self.sql.query["insert_M"].replace("%s", self.sql.query["placeholder"]) 

3261 

3262 position_cache, inserts = {}, [] 

3263 for k, line in list(self.pcbulk.items()): 

3264 sc = self.s.get(k[0]) 

3265 if sc is not None: 

3266 garbageWeekMonths = (sc["wid"], sc["mid"]) in self.wmnew or (sc["wid"], sc["mid"]) in self.wmold 

3267 garbageTourneyTypes = k[2] in self.ttnew or k[2] in self.ttold 

3268 if self.import_options["hhBulkPath"] == "" or (not garbageWeekMonths and not garbageTourneyTypes): 

3269 n = (sc["wid"], sc["mid"], k[1], k[2], k[3], k[4], k[5], k[6]) 

3270 positions = position_cache.get(n) 

3271 # Add line to the old line in the hudcache. 

3272 if positions is not None: 

3273 for idx, val in enumerate(line): 

3274 position_cache[n][idx] += val 

3275 else: 

3276 position_cache[n] = line 

3277 

3278 c = self.get_cursor() 

3279 for k, item in list(position_cache.items()): 

3280 if k[3]: # Check if it's a tournament 

3281 q = select_positionscache_tour 

3282 row = list(k) 

3283 else: # It's a ring game 

3284 q = select_positionscache_ring 

3285 row = list(k[:3]) + list(k[-4:]) 

3286 

3287 c.execute(q, row) 

3288 result = c.fetchone() 

3289 if result: 

3290 id = result[0] 

3291 update = item + [id] 

3292 c.execute(update_positionscache, update) 

3293 else: 

3294 insert = list(k) + item 

3295 inserts.append(insert) 

3296 

3297 if inserts: 

3298 self.executemany(c, insert_positionscache, inserts) 

3299 self.commit() 

3300 

3301 def appendHandsSessionIds(self): 

3302 for i in range(len(self.hbulk)): 

3303 hid = self.hids[i] 

3304 tid = self.hbulk[i][2] 

3305 sc = self.s.get(hid) 

3306 if sc is not None: 

3307 self.hbulk[i][4] = sc["id"] 

3308 if tid: 

3309 self.tbulk[tid] = sc["id"] 

3310 

3311 def get_id(self, file): 

3312 q = self.sql.query["get_id"] 

3313 q = q.replace("%s", self.sql.query["placeholder"]) 

3314 c = self.get_cursor() 

3315 c.execute(q, (file,)) 

3316 id = c.fetchone() 

3317 if not id: 

3318 return 0 

3319 return id[0] 

3320 

3321 def storeFile(self, fdata): 

3322 q = self.sql.query["store_file"] 

3323 q = q.replace("%s", self.sql.query["placeholder"]) 

3324 c = self.get_cursor() 

3325 c.execute(q, fdata) 

3326 id = self.get_last_insert_id(c) 

3327 return id 

3328 

3329 def updateFile(self, fdata): 

3330 q = self.sql.query["update_file"] 

3331 q = q.replace("%s", self.sql.query["placeholder"]) 

3332 c = self.get_cursor() 

3333 c.execute(q, fdata) 

3334 

3335 def getHeroIds(self, pids, sitename): 

3336 # Grab playerIds using hero names in HUD_Config.xml 

3337 try: 

3338 # derive list of program owner's player ids 

3339 hero = {} # name of program owner indexed by site id 

3340 hero_ids = [] 

3341 # make sure at least two values in list 

3342 # so that tuple generation creates doesn't use 

3343 # () or (1,) style 

3344 for site in self.config.get_supported_sites(): 

3345 hero = self.config.supported_sites[site].screen_name 

3346 for n, v in list(pids.items()): 

3347 if n == hero and sitename == site: 

3348 hero_ids.append(v) 

3349 

3350 except Exception: 

3351 err = traceback.extract_tb(sys.exc_info()[2])[-1] 

3352 log.error(f"Error acquiring hero ids: {sys.exc_info()[1]}")

3353 log.error(f"traceback: {err}") 

3354 return hero_ids 

3355 

3356 def fetchallDict(self, cursor, desc): 

3357 data = cursor.fetchall() 

3358 if not data: 

3359 return [] 

3360 results = [0] * len(data) 

3361 for i in range(len(data)): 

3362 results[i] = {} 

3363 for n in range(len(desc)): 

3364 results[i][desc[n]] = data[i][n] 

3365 return results 
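# Example (illustrative): with desc=["id", "name"] and fetched rows
# [(1, "foo"), (2, "bar")] this returns
# [{"id": 1, "name": "foo"}, {"id": 2, "name": "bar"}].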

3366 

3367 def nextHandId(self): 

3368 c = self.get_cursor(True) 

3369 c.execute("SELECT max(id) FROM Hands") 

3370 id = c.fetchone()[0] 

3371 if not id: 

3372 id = 0 

3373 id += self.hand_inc 

3374 return id 

3375 

3376 def isDuplicate(self, siteId, siteHandNo, heroSeat, publicDB): 
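# Descriptive note: duplicate detection first consults the in-memory siteHandNos
# list built during this import, then falls back to a DB lookup; heroSeat is only
# part of the key when importing into a shared (public) database.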

3377 q = self.sql.query["isAlreadyInDB"].replace("%s", self.sql.query["placeholder"]) 

3378 if publicDB: 

3379 key = (siteHandNo, siteId, heroSeat) 

3380 q = q.replace("<heroSeat>", " AND heroSeat=%s").replace("%s", self.sql.query["placeholder"]) 

3381 else: 

3382 key = (siteHandNo, siteId) 

3383 q = q.replace("<heroSeat>", "") 

3384 if key in self.siteHandNos: 

3385 return True 

3386 c = self.get_cursor() 

3387 c.execute(q, key) 

3388 result = c.fetchall() 

3389 if len(result) > 0: 

3390 return True 

3391 self.siteHandNos.append(key) 

3392 return False 

3393 

3394 def getSqlPlayerIDs(self, pnames, siteid, hero): 

3395 result = {} 

3396 if self.pcache is None: 

3397 self.pcache = LambdaDict(lambda key: self.insertPlayer(key[0], key[1], key[2])) 

3398 

3399 for player in pnames: 

3400 result[player] = self.pcache[(player, siteid, player == hero)] 

3401 # NOTE: Using the LambdaDict does the same thing as: 

3402 # if player in self.pcache: 

3403 # #print "DEBUG: cachehit" 

3404 # pass 

3405 # else: 

3406 # self.pcache[player] = self.insertPlayer(player, siteid) 

3407 # result[player] = self.pcache[player] 

3408 

3409 return result 

3410 

3411 def insertPlayer(self, name, site_id, hero): 

3412 insert_player = "INSERT INTO Players (name, siteId, hero, chars) VALUES (%s, %s, %s, %s)" 

3413 insert_player = insert_player.replace("%s", self.sql.query["placeholder"]) 

3414 _name = name[:32] 
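# Descriptive note: build the two-character search key stored in Players.chars.
# Names starting with a non-letter are bucketed as "123", a single letter (or a
# letter followed by a symbol) becomes that letter plus "1", otherwise the first
# two letters are used (upper-cased below).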

3415 if re_char.match(_name[0]): 

3416 char = "123" 

3417 elif len(_name) == 1 or re_char.match(_name[1]): 

3418 char = _name[0] + "1" 

3419 else: 

3420 char = _name[:2] 

3421 

3422 key = (_name, site_id, hero, char.upper()) 

3423 

3424 # NOTE/FIXME?: MySQL has ON DUPLICATE KEY UPDATE 

3425 # Usage: 

3426 # INSERT INTO `tags` (`tag`, `count`) 

3427 # VALUES ($tag, 1) 

3428 # ON DUPLICATE KEY UPDATE `count`=`count`+1; 
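# SQLite (3.24+) and PostgreSQL (9.5+) offer a comparable upsert, e.g.
# INSERT ... ON CONFLICT (name, siteId) DO UPDATE SET ..., assuming a unique
# index on those columns; the portable select-then-insert in insertOrUpdate()
# is used here instead.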

3429 

3430 # print "DEBUG: name: %s site: %s" %(name, site_id) 

3431 result = None 

3432 c = self.get_cursor() 

3433 q = "SELECT id, name, hero FROM Players WHERE name=%s and siteid=%s" 

3434 q = q.replace("%s", self.sql.query["placeholder"]) 

3435 result = self.insertOrUpdate("players", c, key, q, insert_player) 

3436 return result 

3437 

3438 def insertOrUpdate(self, type, cursor, key, select, insert): 
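# Descriptive note: generic select-then-insert helper. Returns the existing row id
# when the key is found, otherwise runs the insert and returns the new id. For the
# "players" type only the (name, siteId) part of the key is used in the lookup,
# and the hero flag is backfilled on an existing row when the new key sets it.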

3439 if type == "players": 

3440 cursor.execute(select, key[:2]) 

3441 else: 

3442 cursor.execute(select, key) 

3443 tmp = cursor.fetchone() 

3444 if tmp is None: 

3445 cursor.execute(insert, key) 

3446 result = self.get_last_insert_id(cursor) 

3447 else: 

3448 result = tmp[0] 

3449 if type == "players": 

3450 if not tmp[2] and key[2]: 

3451 q = "UPDATE Players SET hero=%s WHERE name=%s and siteid=%s" 

3452 q = q.replace("%s", self.sql.query["placeholder"]) 

3453 cursor.execute(q, (key[2], key[0], key[1])) 

3454 return result 

3455 

3456 def getSqlGameTypeId(self, siteid, game, printdata=False): 

3457 if self.gtcache is None: 

3458 self.gtcache = LambdaDict(lambda key: self.insertGameTypes(key[0], key[1])) 

3459 

3460 self.gtprintdata = printdata 

3461 hilo = Card.games[game["category"]][2] 

3462 

3463 gtinfo = ( 

3464 siteid, 

3465 game["type"], 

3466 game["category"], 

3467 game["limitType"], 

3468 game["currency"], 

3469 game["mix"], 

3470 int(Decimal(game["sb"]) * 100), 

3471 int(Decimal(game["bb"]) * 100), 

3472 game["maxSeats"], 

3473 int(game["ante"] * 100), 

3474 game["buyinType"], 

3475 game["fast"], 

3476 game["newToGame"], 

3477 game["homeGame"], 

3478 game["split"], 

3479 ) 

3480 

3481 gtinsert = ( 

3482 siteid, 

3483 game["currency"], 

3484 game["type"], 

3485 game["base"], 

3486 game["category"], 

3487 game["limitType"], 

3488 hilo, 

3489 game["mix"], 

3490 int(Decimal(game["sb"]) * 100), 

3491 int(Decimal(game["bb"]) * 100), 

3492 int(Decimal(game["bb"]) * 100), 

3493 int(Decimal(game["bb"]) * 200), 

3494 game["maxSeats"], 

3495 int(game["ante"] * 100), 

3496 game["buyinType"], 

3497 game["fast"], 

3498 game["newToGame"], 

3499 game["homeGame"], 

3500 game["split"], 

3501 ) 

3502 

3503 result = self.gtcache[(gtinfo, gtinsert)] 

3504 # NOTE: Using the LambdaDict does the same thing as: 

3505 # if player in self.pcache: 

3506 # #print "DEBUG: cachehit" 

3507 # pass 

3508 # else: 

3509 # self.pcache[player] = self.insertPlayer(player, siteid) 

3510 # result[player] = self.pcache[player] 

3511 

3512 return result 

3513 

3514 def insertGameTypes(self, gtinfo, gtinsert): 

3515 result = None 

3516 c = self.get_cursor() 

3517 q = self.sql.query["getGametypeNL"] 

3518 q = q.replace("%s", self.sql.query["placeholder"]) 

3519 c.execute(q, gtinfo) 

3520 tmp = c.fetchone() 

3521 if tmp is None: 

3522 if self.gtprintdata: 

3523 log.debug("######## Gametype ##########") 

3524 import pprint 

3525 

3526 pp = pprint.PrettyPrinter(indent=4) 

3527 pp.pprint(gtinsert) 

3528 log.debug("###### End Gametype ########") 

3529 

3530 c.execute(self.sql.query["insertGameTypes"].replace("%s", self.sql.query["placeholder"]), gtinsert) 

3531 result = self.get_last_insert_id(c) 

3532 else: 

3533 result = tmp[0] 

3534 return result 

3535 

3536 def getTourneyInfo(self, siteName, tourneyNo): 

3537 c = self.get_cursor() 

3538 q = self.sql.query["getTourneyInfo"].replace("%s", self.sql.query["placeholder"]) 

3539 c.execute(q, (siteName, tourneyNo)) 

3540 columnNames = c.description 

3541 

3542 names = [] 

3543 for column in columnNames: 

3544 names.append(column[0]) 

3545 

3546 data = c.fetchone() 

3547 return (names, data) 

3548 

3549 # end def getTourneyInfo 

3550 

3551 def getTourneyTypesIds(self): 

3552 c = self.connection.cursor() 

3553 c.execute(self.sql.query["getTourneyTypesIds"]) 

3554 result = c.fetchall() 

3555 return result 

3556 

3557 # end def getTourneyTypesIds 

3558 

3559 def getSqlTourneyTypeIDs(self, hand): 

3560 # if(self.ttcache == None): 

3561 # self.ttcache = LambdaDict(lambda key:self.insertTourneyType(key[0], key[1], key[2])) 

3562 

3563 # tourneydata = (hand.siteId, hand.buyinCurrency, hand.buyin, hand.fee, hand.gametype['category'], 

3564 # hand.gametype['limitType'], hand.maxseats, hand.isSng, hand.isKO, hand.koBounty, hand.isProgressive, 

3565 # hand.isRebuy, hand.rebuyCost, hand.isAddOn, hand.addOnCost, hand.speed, hand.isShootout, hand.isMatrix) 

3566 

3567 result = self.createOrUpdateTourneyType(hand) # self.ttcache[(hand.tourNo, hand.siteId, tourneydata)] 

3568 

3569 return result 

3570 

3571 def defaultTourneyTypeValue(self, value1, value2, field): 
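# Descriptive note: returns True when value1 should be treated as a default and
# yield to value2: value1 is empty, a maxseats larger than value2, buyinCurrency
# "NA", stack "Regular", speed "Normal", any truthy koBounty, or any limitType
# when value2 is the mixed-game marker "mx".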

3572 if ( 

3573 (not value1) 

3574 or (field == "maxseats" and value1 > value2) 

3575 or (field == "limitType" and value2 == "mx") 

3576 or ((field, value1) == ("buyinCurrency", "NA")) 

3577 or ((field, value1) == ("stack", "Regular")) 

3578 or ((field, value1) == ("speed", "Normal")) 

3579 or (field == "koBounty" and value1) 

3580 ): 

3581 return True 

3582 return False 

3583 

3584 def createOrUpdateTourneyType(self, obj): 
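# Descriptive note: reconciles the hand's tourney-type attributes with the row
# already linked to this tourNo. Fields the object is missing are filled from the
# DB; if the object supplies values the DB lacks, a matching (or freshly inserted)
# TourneyTypes row is located, the tourney is re-pointed to it, and the old and
# new ids are recorded in self.ttold / self.ttnew for the later cache cleanup.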

3585 ttid, _ttid, updateDb = None, None, False 

3586 setattr(obj, "limitType", obj.gametype["limitType"]) 

3587 cursor = self.get_cursor() 

3588 q = self.sql.query["getTourneyTypeIdByTourneyNo"].replace("%s", self.sql.query["placeholder"]) 

3589 cursor.execute(q, (obj.tourNo, obj.siteId)) 

3590 result = cursor.fetchone() 

3591 

3592 if result is not None: 

3593 columnNames = [desc[0].lower() for desc in cursor.description] 

3594 expectedValues = ( 

3595 ("buyin", "buyin"), 

3596 ("fee", "fee"), 

3597 ("buyinCurrency", "currency"), 

3598 ("limitType", "limittype"), 

3599 ("isSng", "sng"), 

3600 ("maxseats", "maxseats"), 

3601 ("isKO", "knockout"), 

3602 ("koBounty", "kobounty"), 

3603 ("isProgressive", "progressive"), 

3604 ("isRebuy", "rebuy"), 

3605 ("rebuyCost", "rebuycost"), 

3606 ("isAddOn", "addon"), 

3607 ("addOnCost", "addoncost"), 

3608 ("speed", "speed"), 

3609 ("isShootout", "shootout"), 

3610 ("isMatrix", "matrix"), 

3611 ("isFast", "fast"), 

3612 ("stack", "stack"), 

3613 ("isStep", "step"), 

3614 ("stepNo", "stepno"), 

3615 ("isChance", "chance"), 

3616 ("chanceCount", "chancecount"), 

3617 ("isMultiEntry", "multientry"), 

3618 ("isReEntry", "reentry"), 

3619 ("isHomeGame", "homegame"), 

3620 ("isNewToGame", "newtogame"), 

3621 ("isSplit", "split"), 

3622 ("isFifty50", "fifty50"), 

3623 ("isTime", "time"), 

3624 ("timeAmt", "timeamt"), 

3625 ("isSatellite", "satellite"), 

3626 ("isDoubleOrNothing", "doubleornothing"), 

3627 ("isCashOut", "cashout"), 

3628 ("isOnDemand", "ondemand"), 

3629 ("isFlighted", "flighted"), 

3630 ("isGuarantee", "guarantee"), 

3631 ("guaranteeAmt", "guaranteeamt"), 

3632 ) 

3633 resultDict = dict(list(zip(columnNames, result))) 

3634 ttid = resultDict["id"] 

3635 for ev in expectedValues: 

3636 objField, dbField = ev 

3637 objVal, dbVal = getattr(obj, objField), resultDict[dbField] 

3638 if ( 

3639 self.defaultTourneyTypeValue(objVal, dbVal, objField) and dbVal 

3640 ): # DB has this value but object doesn't, so update object

3641 setattr(obj, objField, dbVal) 

3642 elif ( 

3643 self.defaultTourneyTypeValue(dbVal, objVal, objField) and objVal 

3644 ): # object has this value but DB doesn't, so update DB

3645 updateDb = True 

3646 oldttid = ttid 

3647 if not result or updateDb: 

3648 if obj.gametype["mix"] != "none": 

3649 category, limitType = obj.gametype["mix"], "mx" 

3650 elif result is not None and resultDict["limittype"] == "mx": 

3651 category, limitType = resultDict["category"], "mx" 

3652 else: 

3653 category, limitType = obj.gametype["category"], obj.gametype["limitType"] 

3654 row = ( 

3655 obj.siteId, 

3656 obj.buyinCurrency, 

3657 obj.buyin, 

3658 obj.fee, 

3659 category, 

3660 limitType, 

3661 obj.maxseats, 

3662 obj.isSng, 

3663 obj.isKO, 

3664 obj.koBounty, 

3665 obj.isProgressive, 

3666 obj.isRebuy, 

3667 obj.rebuyCost, 

3668 obj.isAddOn, 

3669 obj.addOnCost, 

3670 obj.speed, 

3671 obj.isShootout, 

3672 obj.isMatrix, 

3673 obj.isFast, 

3674 obj.stack, 

3675 obj.isStep, 

3676 obj.stepNo, 

3677 obj.isChance, 

3678 obj.chanceCount, 

3679 obj.isMultiEntry, 

3680 obj.isReEntry, 

3681 obj.isHomeGame, 

3682 obj.isNewToGame, 

3683 obj.isSplit, 

3684 obj.isFifty50, 

3685 obj.isTime, 

3686 obj.timeAmt, 

3687 obj.isSatellite, 

3688 obj.isDoubleOrNothing, 

3689 obj.isCashOut, 

3690 obj.isOnDemand, 

3691 obj.isFlighted, 

3692 obj.isGuarantee, 

3693 obj.guaranteeAmt, 

3694 ) 

3695 cursor.execute(self.sql.query["getTourneyTypeId"].replace("%s", self.sql.query["placeholder"]), row) 

3696 tmp = cursor.fetchone() 

3697 try: 

3698 ttid = tmp[0] 

3699 except TypeError: # this means we need to create a new entry 

3700 if self.printdata: 

3701 log.debug("######## Tourneys ##########") 

3702 import pprint 

3703 

3704 pp = pprint.PrettyPrinter(indent=4) 

3705 pp.pprint(row) 

3706 log.debug("###### End Tourneys ########") 

3707 cursor.execute(self.sql.query["insertTourneyType"].replace("%s", self.sql.query["placeholder"]), row) 

3708 ttid = self.get_last_insert_id(cursor) 

3709 if updateDb: 

3710 # print 'DEBUG createOrUpdateTourneyType:', 'old', oldttid, 'new', ttid, row 

3711 q = self.sql.query["updateTourneyTypeId"].replace("%s", self.sql.query["placeholder"]) 

3712 cursor.execute(q, (ttid, obj.siteId, obj.tourNo)) 

3713 self.ttold.add(oldttid) 

3714 self.ttnew.add(ttid) 

3715 return ttid 

3716 

3717 def cleanUpTourneyTypes(self): 
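# Descriptive note: for every tourney type replaced above, the relevant cache
# tables (HudCache, CardsCache, PositionsCache, depending on callHud and
# cacheSessions) are cleared of the old id, orphaned TourneyTypes rows are
# deleted, and the caches are rebuilt for the new ids.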

3718 if self.ttold: 

3719 if self.callHud and self.cacheSessions: 

3720 tables = ("HudCache", "CardsCache", "PositionsCache") 

3721 elif self.callHud: 

3722 tables = ("HudCache",) 

3723 elif self.cacheSessions: 

3724 tables = ("CardsCache", "PositionsCache") 

3725 else: 

3726 tables = set([]) 

3727 select = self.sql.query["selectTourneyWithTypeId"].replace("%s", self.sql.query["placeholder"]) 

3728 delete = self.sql.query["deleteTourneyTypeId"].replace("%s", self.sql.query["placeholder"]) 

3729 cursor = self.get_cursor() 

3730 for ttid in self.ttold: 

3731 for t in tables: 

3732 statement = "clear%sTourneyType" % t 

3733 clear = self.sql.query[statement].replace("%s", self.sql.query["placeholder"]) 

3734 cursor.execute(clear, (ttid,)) 

3735 self.commit() 

3736 cursor.execute(select, (ttid,)) 

3737 result = cursor.fetchone() 

3738 if not result: 

3739 cursor.execute(delete, (ttid,)) 

3740 self.commit() 

3741 for ttid in self.ttnew: 

3742 for t in tables: 

3743 statement = "clear%sTourneyType" % t 

3744 clear = self.sql.query[statement].replace("%s", self.sql.query["placeholder"]) 

3745 cursor.execute(clear, (ttid,)) 

3746 self.commit() 

3747 for t in tables: 

3748 statement = "fetchNew%sTourneyTypeIds" % t 

3749 fetch = self.sql.query[statement].replace("%s", self.sql.query["placeholder"]) 

3750 cursor.execute(fetch) 

3751 for id in cursor.fetchall(): 

3752 self.rebuild_cache(None, None, t, id[0]) 

3753 

3754 def cleanUpWeeksMonths(self): 
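# Descriptive note: the week/month analogue of cleanUpTourneyTypes. Stale
# CardsCache/PositionsCache rows are cleared for the old (weekId, monthId) pairs,
# unreferenced Weeks/Months rows are deleted, and the caches are rebuilt for the
# surviving pairs.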

3755 if self.cacheSessions and self.wmold: 

3756 selectWeekId = self.sql.query["selectSessionWithWeekId"].replace("%s", self.sql.query["placeholder"]) 

3757 selectMonthId = self.sql.query["selectSessionWithMonthId"].replace("%s", self.sql.query["placeholder"]) 

3758 deleteWeekId = self.sql.query["deleteWeekId"].replace("%s", self.sql.query["placeholder"]) 

3759 deleteMonthId = self.sql.query["deleteMonthId"].replace("%s", self.sql.query["placeholder"]) 

3760 cursor = self.get_cursor() 

3761 weeks, months, wmids = set(), set(), set() 

3762 for wid, mid in self.wmold: 

3763 for t in ("CardsCache", "PositionsCache"): 

3764 statement = "clear%sWeeksMonths" % t 

3765 clear = self.sql.query[statement].replace("%s", self.sql.query["placeholder"]) 

3766 cursor.execute(clear, (wid, mid)) 

3767 self.commit() 

3768 weeks.add(wid) 

3769 months.add(mid) 

3770 

3771 for wid in weeks: 

3772 cursor.execute(selectWeekId, (wid,)) 

3773 result = cursor.fetchone() 

3774 if not result: 

3775 cursor.execute(deleteWeekId, (wid,)) 

3776 self.commit() 

3777 

3778 for mid in months: 

3779 cursor.execute(selectMonthId, (mid,)) 

3780 result = cursor.fetchone() 

3781 if not result: 

3782 cursor.execute(deleteMonthId, (mid,)) 

3783 self.commit() 

3784 

3785 for wid, mid in self.wmnew: 

3786 for t in ("CardsCache", "PositionsCache"): 

3787 statement = "clear%sWeeksMonths" % t 

3788 clear = self.sql.query[statement].replace("%s", self.sql.query["placeholder"]) 

3789 cursor.execute(clear, (wid, mid)) 

3790 self.commit() 

3791 

3792 if self.wmold: 

3793 for t in ("CardsCache", "PositionsCache"): 

3794 statement = "fetchNew%sWeeksMonths" % t 

3795 fetch = self.sql.query[statement].replace("%s", self.sql.query["placeholder"]) 

3796 cursor.execute(fetch) 

3797 for wid, mid in cursor.fetchall(): 

3798 wmids.add((wid, mid)) 

3799 for wmid in wmids: 

3800 for t in ("CardsCache", "PositionsCache"): 

3801 self.rebuild_cache(None, None, t, None, wmid) 

3802 self.commit() 

3803 

3804 def rebuild_caches(self): 

3805 if self.callHud and self.cacheSessions: 

3806 tables = ("HudCache", "CardsCache", "PositionsCache") 

3807 elif self.callHud: 

3808 tables = ("HudCache",) 

3809 elif self.cacheSessions: 

3810 tables = ("CardsCache", "PositionsCache") 

3811 else: 

3812 tables = set([]) 

3813 for t in tables: 

3814 self.rebuild_cache(None, None, t) 

3815 

3816 def resetClean(self): 

3817 self.ttold = set() 

3818 self.ttnew = set() 

3819 self.wmold = set() 

3820 self.wmnew = set() 

3821 

3822 def cleanRequired(self): 

3823 if self.ttold or self.wmold: 

3824 return True 

3825 return False 

3826 

3827 def getSqlTourneyIDs(self, hand): 

3828 result = None 

3829 c = self.get_cursor() 

3830 q = self.sql.query["getTourneyByTourneyNo"] 

3831 q = q.replace("%s", self.sql.query["placeholder"]) 

3832 t = hand.startTime.replace(tzinfo=None) 

3833 c.execute(q, (hand.siteId, hand.tourNo)) 

3834 

3835 tmp = c.fetchone() 

3836 if tmp is None: 

3837 c.execute( 

3838 self.sql.query["insertTourney"].replace("%s", self.sql.query["placeholder"]), 

3839 ( 

3840 hand.tourneyTypeId, 

3841 None, 

3842 hand.tourNo, 

3843 None, 

3844 None, 

3845 t, 

3846 t, 

3847 hand.tourneyName, 

3848 None, 

3849 None, 

3850 None, 

3851 None, 

3852 None, 

3853 None, 

3854 ), 

3855 ) 

3856 result = self.get_last_insert_id(c) 

3857 else: 

3858 result = tmp[0] 

3859 columnNames = [desc[0] for desc in c.description] 

3860 resultDict = dict(list(zip(columnNames, tmp))) 

3861 if self.backend == self.PGSQL: 

3862 startTime, endTime = resultDict["starttime"], resultDict["endtime"] 

3863 else: 

3864 startTime, endTime = resultDict["startTime"], resultDict["endTime"] 

3865 

3866 if startTime is None or t < startTime: 

3867 q = self.sql.query["updateTourneyStart"].replace("%s", self.sql.query["placeholder"]) 

3868 c.execute(q, (t, result)) 

3869 elif endTime is None or t > endTime: 

3870 q = self.sql.query["updateTourneyEnd"].replace("%s", self.sql.query["placeholder"]) 

3871 c.execute(q, (t, result)) 

3872 return result 

3873 

3874 def createOrUpdateTourney(self, summary): 

3875 cursor = self.get_cursor() 

3876 q = self.sql.query["getTourneyByTourneyNo"].replace("%s", self.sql.query["placeholder"]) 

3877 cursor.execute(q, (summary.siteId, summary.tourNo)) 

3878 

3879 columnNames = [desc[0] for desc in cursor.description] 

3880 result = cursor.fetchone() 

3881 

3882 if result is not None: 

3883 if self.backend == self.PGSQL: 

3884 expectedValues = ( 

3885 ("comment", "comment"), 

3886 ("tourneyName", "tourneyname"), 

3887 ("totalRebuyCount", "totalrebuycount"), 

3888 ("totalAddOnCount", "totaladdoncount"), 

3889 ("prizepool", "prizepool"), 

3890 ("startTime", "starttime"), 

3891 ("entries", "entries"), 

3892 ("commentTs", "commentts"), 

3893 ("endTime", "endtime"), 

3894 ("added", "added"), 

3895 ("addedCurrency", "addedcurrency"), 

3896 ) 

3897 else: 

3898 expectedValues = ( 

3899 ("comment", "comment"), 

3900 ("tourneyName", "tourneyName"), 

3901 ("totalRebuyCount", "totalRebuyCount"), 

3902 ("totalAddOnCount", "totalAddOnCount"), 

3903 ("prizepool", "prizepool"), 

3904 ("startTime", "startTime"), 

3905 ("entries", "entries"), 

3906 ("commentTs", "commentTs"), 

3907 ("endTime", "endTime"), 

3908 ("added", "added"), 

3909 ("addedCurrency", "addedCurrency"), 

3910 ) 

3911 updateDb = False 

3912 resultDict = dict(list(zip(columnNames, result))) 

3913 

3914 tourneyId = resultDict["id"] 

3915 for ev in expectedValues: 

3916 if ( 

3917 getattr(summary, ev[0]) is None and resultDict[ev[1]] is not None 

3918 ): # DB has this value but object doesn't, so update object

3919 setattr(summary, ev[0], resultDict[ev[1]]) 

3920 elif ( 

3921 getattr(summary, ev[0]) is not None and not resultDict[ev[1]] 

3922 ): # object has this value but DB doesn't, so update DB

3923 updateDb = True 

3924 # elif ev=="startTime": 

3925 # if (resultDict[ev] < summary.startTime): 

3926 # summary.startTime=resultDict[ev] 

3927 if updateDb: 

3928 q = self.sql.query["updateTourney"].replace("%s", self.sql.query["placeholder"]) 

3929 startTime, endTime = None, None 

3930 if summary.startTime is not None: 

3931 startTime = summary.startTime.replace(tzinfo=None) 

3932 if summary.endTime is not None: 

3933 endTime = summary.endTime.replace(tzinfo=None) 

3934 row = ( 

3935 summary.entries, 

3936 summary.prizepool, 

3937 startTime, 

3938 endTime, 

3939 summary.tourneyName, 

3940 summary.totalRebuyCount, 

3941 summary.totalAddOnCount, 

3942 summary.comment, 

3943 summary.commentTs, 

3944 summary.added, 

3945 summary.addedCurrency, 

3946 tourneyId, 

3947 ) 

3948 cursor.execute(q, row) 

3949 else: 

3950 startTime, endTime = None, None 

3951 if summary.startTime is not None: 

3952 startTime = summary.startTime.replace(tzinfo=None) 

3953 if summary.endTime is not None: 

3954 endTime = summary.endTime.replace(tzinfo=None) 

3955 row = ( 

3956 summary.tourneyTypeId, 

3957 None, 

3958 summary.tourNo, 

3959 summary.entries, 

3960 summary.prizepool, 

3961 startTime, 

3962 endTime, 

3963 summary.tourneyName, 

3964 summary.totalRebuyCount, 

3965 summary.totalAddOnCount, 

3966 summary.comment, 

3967 summary.commentTs, 

3968 summary.added, 

3969 summary.addedCurrency, 

3970 ) 

3971 if self.printdata: 

3972 log.debug("######## Tourneys ##########") 

3973 import pprint 

3974 

3975 pp = pprint.PrettyPrinter(indent=4) 

3976 pp.pprint(row) 

3977 log.debug("###### End Tourneys ########") 

3978 cursor.execute(self.sql.query["insertTourney"].replace("%s", self.sql.query["placeholder"]), row) 

3979 tourneyId = self.get_last_insert_id(cursor) 

3980 return tourneyId 

3981 

3982 # end def createOrUpdateTourney 

3983 

3984 def getTourneyPlayerInfo(self, siteName, tourneyNo, playerName): 

3985 c = self.get_cursor() 

3986 c.execute(self.sql.query["getTourneyPlayerInfo"], (siteName, tourneyNo, playerName)) 

3987 columnNames = c.description 

3988 

3989 names = [] 

3990 for column in columnNames: 

3991 names.append(column[0]) 

3992 

3993 data = c.fetchone() 

3994 return (names, data) 

3995 

3996 # end def getTourneyPlayerInfo 

3997 

3998 def getSqlTourneysPlayersIDs(self, hand): 

3999 result = {} 

4000 if self.tpcache is None: 

4001 self.tpcache = LambdaDict(lambda key: self.insertTourneysPlayers(key[0], key[1], key[2])) 

4002 

4003 for player in hand.players: 

4004 playerId = hand.dbid_pids[player[1]] 

4005 result[player[1]] = self.tpcache[(playerId, hand.tourneyId, hand.entryId)] 

4006 

4007 return result 

4008 

4009 def insertTourneysPlayers(self, playerId, tourneyId, entryId): 

4010 result = None 

4011 c = self.get_cursor() 

4012 q = self.sql.query["getTourneysPlayersByIds"] 

4013 q = q.replace("%s", self.sql.query["placeholder"]) 

4014 

4015 c.execute(q, (tourneyId, playerId, entryId)) 

4016 

4017 tmp = c.fetchone() 

4018 if tmp is None: # new player 

4019 c.execute( 

4020 self.sql.query["insertTourneysPlayer"].replace("%s", self.sql.query["placeholder"]), 

4021 (tourneyId, playerId, entryId, None, None, None, None, None, None), 

4022 ) 

4023 # Get last id might be faster here. 

4024 # c.execute ("SELECT id FROM Players WHERE name=%s", (name,)) 

4025 result = self.get_last_insert_id(c) 

4026 else: 

4027 result = tmp[0] 

4028 return result 

4029 

4030 def updateTourneyPlayerBounties(self, hand): 

4031 updateDb = False 

4032 cursor = self.get_cursor() 

4033 q = self.sql.query["updateTourneysPlayerBounties"].replace("%s", self.sql.query["placeholder"]) 

4034 for player, tourneysPlayersId in list(hand.tourneysPlayersIds.items()): 

4035 if player in hand.koCounts: 

4036 cursor.execute(q, (hand.koCounts[player], hand.koCounts[player], tourneysPlayersId)) 

4037 updateDb = True 

4038 if updateDb: 

4039 self.commit() 

4040 

4041 def createOrUpdateTourneysPlayers(self, summary): 

4042 tourneysPlayersIds, tplayers, inserts = {}, [], [] 

4043 cursor = self.get_cursor() 

4044 cursor.execute( 

4045 self.sql.query["getTourneysPlayersByTourney"].replace("%s", self.sql.query["placeholder"]), 

4046 (summary.tourneyId,), 

4047 ) 

4048 result = cursor.fetchall() 

4049 if result: 

4050 tplayers += [i for i in result] 

4051 for player, entries in list(summary.players.items()): 

4052 playerId = summary.dbid_pids[player] 

4053 for entryIdx in range(len(entries)): 

4054 entryId = entries[entryIdx] 

4055 if (playerId, entryId) in tplayers: 

4056 cursor.execute( 

4057 self.sql.query["getTourneysPlayersByIds"].replace("%s", self.sql.query["placeholder"]), 

4058 (summary.tourneyId, playerId, entryId), 

4059 ) 

4060 columnNames = [desc[0] for desc in cursor.description] 

4061 result = cursor.fetchone() 

4062 if self.backend == self.PGSQL: 

4063 expectedValues = ( 

4064 ("rank", "rank"), 

4065 ("winnings", "winnings"), 

4066 ("winningsCurrency", "winningscurrency"), 

4067 ("rebuyCount", "rebuycount"), 

4068 ("addOnCount", "addoncount"), 

4069 ("koCount", "kocount"), 

4070 ) 

4071 else: 

4072 expectedValues = ( 

4073 ("rank", "rank"), 

4074 ("winnings", "winnings"), 

4075 ("winningsCurrency", "winningsCurrency"), 

4076 ("rebuyCount", "rebuyCount"), 

4077 ("addOnCount", "addOnCount"), 

4078 ("koCount", "koCount"), 

4079 ) 

4080 updateDb = False 

4081 resultDict = dict(list(zip(columnNames, result))) 

4082 tourneysPlayersIds[(player, entryId)] = result[0] 

4083 for ev in expectedValues: 

4084 summaryAttribute = ev[0] 

4085 if ev[0] != "winnings" and ev[0] != "winningsCurrency": 

4086 summaryAttribute += "s" 

4087 summaryDict = getattr(summary, summaryAttribute) 

4088 if ( 

4089 summaryDict[player][entryIdx] is None and resultDict[ev[1]] is not None 

4090 ): # DB has this value but object doesn't, so update object

4091 summaryDict[player][entryIdx] = resultDict[ev[1]] 

4092 setattr(summary, summaryAttribute, summaryDict) 

4093 elif ( 

4094 summaryDict[player][entryIdx] is not None and not resultDict[ev[1]] 

4095 ): # object has this value but DB doesn't, so update DB

4096 updateDb = True 

4097 if updateDb: 

4098 q = self.sql.query["updateTourneysPlayer"].replace("%s", self.sql.query["placeholder"]) 

4099 inputs = ( 

4100 summary.ranks[player][entryIdx], 

4101 summary.winnings[player][entryIdx], 

4102 summary.winningsCurrency[player][entryIdx], 

4103 summary.rebuyCounts[player][entryIdx], 

4104 summary.addOnCounts[player][entryIdx], 

4105 summary.koCounts[player][entryIdx], 

4106 tourneysPlayersIds[(player, entryId)], 

4107 ) 

4108 # print q 

4109 # pp = pprint.PrettyPrinter(indent=4) 

4110 # pp.pprint(inputs) 

4111 cursor.execute(q, inputs) 

4112 else: 

4113 inserts.append( 

4114 ( 

4115 summary.tourneyId, 

4116 playerId, 

4117 entryId, 

4118 summary.ranks[player][entryIdx], 

4119 summary.winnings[player][entryIdx], 

4120 summary.winningsCurrency[player][entryIdx], 

4121 summary.rebuyCounts[player][entryIdx], 

4122 summary.addOnCounts[player][entryIdx], 

4123 summary.koCounts[player][entryIdx], 

4124 ) 

4125 ) 

4126 if inserts: 

4127 self.executemany( 

4128 cursor, self.sql.query["insertTourneysPlayer"].replace("%s", self.sql.query["placeholder"]), inserts 

4129 ) 

4130 

4131 

4132# end class Database 

4133 

4134if __name__ == "__main__": 

4135 c = Configuration.Config() 

4136 sql = SQL.Sql(db_server="sqlite") 

4137 

4138 db_connection = Database(c) # mysql fpdb holdem 

4139 # db_connection = Database(c, 'fpdb-p', 'test') # mysql fpdb holdem 

4140 # db_connection = Database(c, 'PTrackSv2', 'razz') # mysql razz 

4141 # db_connection = Database(c, 'ptracks', 'razz') # postgres 

4142 log.debug("database connection object = %s", db_connection.connection)

4143 # db_connection.recreate_tables() 

4144 db_connection.dropAllIndexes() 

4145 db_connection.createAllIndexes() 

4146 

4147 h = db_connection.get_last_hand() 

4148 log.debug("last hand = %s", h)

4149 

4150 hero = db_connection.get_player_id(c, "PokerStars", "nutOmatic") 

4151 if hero: 

4152 log.debug("nutOmatic player_id = %s", hero)

4153 

4154 # example of displaying query plan in sqlite: 

4155 if db_connection.backend == 4: 

4156 log.debug("")

4157 c = db_connection.get_cursor() 

4158 c.execute("explain query plan " + sql.query["get_table_name"], (h,)) 

4159 for row in c.fetchall(): 

4160 log.debug("Query plan: %s", row)

4161 log.debug("")

4162 

4163 t0 = time() 

4164 stat_dict = db_connection.get_stats_from_hand(h, "ring") 

4165 t1 = time() 

4166 for p in list(stat_dict.keys()): 

4167 log.debug("%s %s", p, stat_dict[p])

4168 

4169 log.debug("cards = %s", db_connection.get_cards("1"))

4170 db_connection.close_connection()

4171 

4172 log.debug("get_stats took: %4.3f seconds", t1 - t0)

4173 

4174 log.debug(("Press ENTER to continue.")) 

4175 sys.stdin.readline() 

4176 

4177 

4178# Code borrowed from http://push.cx/2008/caching-dictionaries-in-python-vs-ruby 

4179class LambdaDict(dict): 

4180 def __init__(self, value_factory): 

4181 super(LambdaDict, self).__init__() 

4182 self.value_factory = value_factory 

4183 

4184 def __getitem__(self, key): 

4185 if key in self: 

4186 return self.get(key) 

4187 else: 

4188 # Use the value_factory to generate a value for the missing key 

4189 self.__setitem__(key, self.value_factory(key)) 

4190 return self.get(key)
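# Illustrative sketch (not part of the original module): LambdaDict memoizes the
# factory result per key, which is how the player/gametype/tourneys-player caches
# above avoid repeating a SELECT for keys already resolved during an import:
#
#     squares = LambdaDict(lambda k: k * k)
#     squares[3]   # first access calls the factory and caches 9
#     squares[3]   # later accesses return the cached value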