#!/usr/bin/env python
# -*- coding: utf-8 -*-

#Copyright 2008-2011 Steffen Schaumburg
#This program is free software: you can redistribute it and/or modify
#it under the terms of the GNU Affero General Public License as published by
#the Free Software Foundation, version 3 of the License.
#
#This program is distributed in the hope that it will be useful,
#but WITHOUT ANY WARRANTY; without even the implied warranty of
#MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
#GNU General Public License for more details.
#
#You should have received a copy of the GNU Affero General Public License
#along with this program.  If not, see <http://www.gnu.org/licenses/>.
#In the "official" distribution you can find the license in agpl-3.0.txt.
from __future__ import print_function
from __future__ import division

from past.utils import old_div

#import L10n
#_ = L10n.get_translation()

# Standard Library modules
import os  # todo: remove this once import_dir is in fpdb_import
from time import time, sleep, process_time
import datetime
import queue
import shutil
import re
import zmq

import logging
import traceback

from PyQt5.QtWidgets import QProgressBar, QLabel, QDialog, QVBoxLayout
from PyQt5.QtCore import QCoreApplication

# fpdb/FreePokerTools modules
import Database
import Configuration
import IdentifySite

from Exceptions import FpdbParseError, FpdbHandDuplicate, FpdbHandPartial
try:
    import xlrd
except ImportError:
    xlrd = None
if __name__ == "__main__":
    Configuration.set_logfile("fpdb-log.txt")
# logging has been set up in fpdb.py or HUD_main.py, use their settings:
log = logging.getLogger("importer")


class ZMQSender:
    def __init__(self, port="5555"):
        self.context = zmq.Context()
        self.socket = self.context.socket(zmq.PUSH)
        self.socket.bind(f"tcp://127.0.0.1:{port}")
        log.info(f"ZMQ sender initialized on port {port}")

    def send_hand_id(self, hand_id):
        try:
            self.socket.send_string(str(hand_id))
            log.debug(f"Sent hand ID {hand_id} via ZMQ")
        except zmq.ZMQError as e:
            log.error(f"Failed to send hand ID {hand_id}: {e}")

    def close(self):
        self.socket.close()
        self.context.term()
        log.info("ZMQ sender closed")
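

# Illustrative only: ZMQSender binds a PUSH socket, so the HUD side is expected
# to connect a matching PULL socket on the same port and read hand IDs as
# strings. A minimal sketch of such a consumer (not part of this module; the
# real receiver lives in the HUD code) might look like:
#
#     context = zmq.Context()
#     receiver = context.socket(zmq.PULL)
#     receiver.connect("tcp://127.0.0.1:5555")
#     hand_id = receiver.recv_string()   # blocks until the importer sends an ID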


class Importer(object):
    def __init__(self, caller, settings, config, sql=None, parent=None, zmq_port="5555"):
        """Constructor"""
        self.settings = settings
        self.caller = caller
        self.config = config
        self.sql = sql
        self.parent = parent

        self.idsite = IdentifySite.IdentifySite(config)

        self.filelist = {}
        self.dirlist = {}
        self.siteIds = {}
        self.removeFromFileList = {}  # to remove deleted files
        self.monitor = False
        self.updatedsize = {}
        self.updatedtime = {}
        self.lines = None
        self.faobs = None  # File as one big string
        self.mode = None
        self.pos_in_file = {}  # dict to remember how far we have read in the file

        # Default import parameters
        self.callHud = self.config.get_import_parameters().get("callFpdbHud")
        self.settings.setdefault("handCount", 0)
        self.settings.setdefault("writeQSize", 1000)
        self.settings.setdefault("writeQMaxWait", 10)
        self.settings.setdefault("dropIndexes", "don't drop")
        self.settings.setdefault("dropHudCache", "don't drop")
        self.settings.setdefault("starsArchive", False)
        self.settings.setdefault("ftpArchive", False)
        self.settings.setdefault("testData", False)
        self.settings.setdefault("cacheHHC", False)

        self.writeq = None
        self.database = Database.Database(self.config, sql=self.sql)
        self.writerdbs = []
        self.settings.setdefault("threads", 1)
        for i in range(self.settings['threads']):
            self.writerdbs.append(Database.Database(self.config, sql=self.sql))

        # Use the given port for the ZMQ hand-ID channel
        self.zmq_sender = ZMQSender(port=zmq_port)
        process_time()  # init clock in windows
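
    # Illustrative only: a typical bulk-import session driven from code (the GUI
    # normally does this via GuiBulkImport). Configuration.Config() is an
    # assumption about how a config object is usually obtained in fpdb; the path
    # is a placeholder.
    #
    #     config = Configuration.Config()
    #     importer = Importer(caller=None, settings={}, config=config)
    #     importer.setMode('bulk')
    #     importer.addBulkImportImportFileOrDir("/path/to/hand/histories")
    #     (stored, dups, partial, skipped, errs, elapsed) = importer.runImport()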
    # Set functions
    def setMode(self, value):
        self.mode = value

    def setCallHud(self, value):
        self.callHud = value

    def setCacheSessions(self, value):
        self.cacheSessions = value

    def setHandCount(self, value):
        self.settings['handCount'] = int(value)

    def setQuiet(self, value):
        self.settings['quiet'] = value

    def setHandsInDB(self, value):
        self.settings['handsInDB'] = value

    def setThreads(self, value):
        self.settings['threads'] = value
        if self.settings["threads"] > len(self.writerdbs):
            for i in range(self.settings['threads'] - len(self.writerdbs)):
                self.writerdbs.append(Database.Database(self.config, sql=self.sql))

    def setDropIndexes(self, value):
        self.settings['dropIndexes'] = value

    def setDropHudCache(self, value):
        self.settings['dropHudCache'] = value

    def setStarsArchive(self, value):
        self.settings['starsArchive'] = value

    def setFTPArchive(self, value):
        self.settings['ftpArchive'] = value

    def setPrintTestData(self, value):
        self.settings['testData'] = value

    def setFakeCacheHHC(self, value):
        self.settings['cacheHHC'] = value

    def getCachedHHC(self):
        return self.handhistoryconverter
#    def setWatchTime(self):
#        self.updated = time()

    def clearFileList(self):
        self.updatedsize = {}
        self.updatedtime = {}
        self.pos_in_file = {}
        self.filelist = {}
    def logImport(self, type, file, stored, dups, partial, skipped, errs, ttime, id):
        hands = stored + dups + partial + skipped + errs
        now = datetime.datetime.utcnow()
        ttime100 = ttime * 100
        self.database.updateFile([type, now, now, hands, stored, dups, partial, skipped, errs, ttime100, True, id])
        self.database.commit()

    def addFileToList(self, fpdbfile):
        """FPDBFile"""
        file = os.path.splitext(os.path.basename(fpdbfile.path))[0]
        try:  # TODO: this is a dirty hack. GBI needs it, GAI fails with it.
            file = str(file, "utf8", "replace")
        except TypeError:
            pass
        fpdbfile.fileId = self.database.get_id(file)
        if not fpdbfile.fileId:
            now = datetime.datetime.utcnow()
            fpdbfile.fileId = self.database.storeFile([file, fpdbfile.site.name, now, now, 0, 0, 0, 0, 0, 0, 0, False])
            self.database.commit()
    # Add an individual file to filelist
    def addImportFile(self, filename, site="auto"):
        # DEBUG->print("addimportfile: filename is a", filename.__class__, filename)
        # filename is not guaranteed to be unicode
        if self.filelist.get(filename) is not None or not os.path.exists(filename):
            return False

        self.idsite.processFile(filename)
        if self.idsite.get_fobj(filename):
            fpdbfile = self.idsite.filelist[filename]
        else:
            log.error("Importer.addImportFile: siteId failed for: '%s'" % filename)
            return False

        self.addFileToList(fpdbfile)
        self.filelist[filename] = fpdbfile
        if site not in self.siteIds:
            # Get id from Sites table in DB
            result = self.database.get_site_id(fpdbfile.site.name)
            if len(result) == 1:
                self.siteIds[fpdbfile.site.name] = result[0][0]
            elif len(result) == 0:
                log.error("Database ID for %s not found" % fpdbfile.site.name)
            else:
                log.error("More than 1 database ID found for %s" % fpdbfile.site.name)

        return True
    # Called from GuiBulkImport to add a file or directory. Bulk import never monitors
    def addBulkImportImportFileOrDir(self, inputPath, site="auto"):
        """Add a file or directory for bulk import"""
        # For the Windows platform, force the os.walk variable to be unicode
        # (see fpdb-main post, 9th July 2011)
        if not self.config.posix:
            inputPath = str(inputPath)

        # TODO: only add sane files?
        if os.path.isdir(inputPath):
            for subdir in os.walk(inputPath):
                for file in subdir[2]:
                    self.addImportFile(os.path.join(subdir[0], file), site=site)
            return True
        else:
            return self.addImportFile(inputPath, site=site)
    # Add a directory of files to filelist
    # Only one import directory per site is supported.
    # dirlist is a hash of lists:
    #   dirlist{ 'PokerStars' => ["/path/to/import/", "filtername"] }
    def addImportDirectory(self, dir, monitor=False, site=("default", "hh"), filter="passthrough"):
        # Gets called by GuiAutoImport.
        # This should really be using os.walk
        # http://docs.python.org/library/os.html
        if os.path.isdir(dir):
            if monitor == True:
                self.monitor = True
                self.dirlist[site] = [dir] + [filter]

            #print "addImportDirectory: checking files in", dir
            for subdir in os.walk(dir):
                for file in subdir[2]:
                    filename = os.path.join(subdir[0], file)
                    # ignore symbolic links (Linux & Mac)
                    if os.path.islink(filename):
                        log.info(f"Ignoring symlink {filename}")
                        continue
                    if (time() - os.stat(filename).st_mtime) <= 43200:  # look at all files modified in the last 12 hours
                        # need a long window because FTP on Windows does not
                        # update the timestamp on the HH file during the session
                        self.addImportFile(filename, "auto")
        else:
            log.warning("Attempted to add non-directory '%s' as an import directory" % str(dir))
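
    # Illustrative only: GuiAutoImport typically registers a monitored directory
    # and then polls runUpdated() on a timer; something along these lines (the
    # ("PokerStars", "hh") tuple and the path are placeholders):
    #
    #     importer.setMode('auto')
    #     importer.addImportDirectory("/path/to/live/handhistories", monitor=True,
    #                                 site=("PokerStars", "hh"))
    #     importer.runUpdated()   # call periodically to pick up new/changed files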
    def runImport(self):
        """Run a full import on self.filelist. This is called from GuiBulkImport.py"""

        # Initial setup
        start = datetime.datetime.now()
        starttime = time()
        log.info("Started at %s -- %d files to import. indexes: %s" % (start, len(self.filelist), self.settings['dropIndexes']))
        if self.settings['dropIndexes'] == 'auto':
            self.settings['dropIndexes'] = self.calculate_auto2(self.database, 12.0, 500.0)
        if 'dropHudCache' in self.settings and self.settings['dropHudCache'] == 'auto':
            self.settings['dropHudCache'] = self.calculate_auto2(self.database, 25.0, 500.0)  # returns "drop"/"don't drop"

        (totstored, totdups, totpartial, totskipped, toterrors) = self.importFiles(None)

        # Tidying up after import
        #if 'dropHudCache' in self.settings and self.settings['dropHudCache'] == 'drop':
        #    log.info(("rebuild_caches"))
        #    self.database.rebuild_caches()
        #else:
        #    log.info(("runPostImport"))
        self.runPostImport()
        self.database.analyzeDB()
        endtime = time()
        return (totstored, totdups, totpartial, totskipped, toterrors, endtime - starttime)
    # end def runImport
    def runPostImport(self):
        self.database.cleanUpTourneyTypes()
        self.database.cleanUpWeeksMonths()
        self.database.resetClean()
    def importFiles(self, q):
        """Read filenames in self.filelist and pass them to the despatcher."""

        totstored = 0
        totdups = 0
        totpartial = 0
        totskipped = 0
        toterrors = 0
        tottime = 0
        filecount = 0
        fileerrorcount = 0
        moveimportedfiles = False  # TODO: need to wire this into the GUI and make it prettier
        movefailedfiles = False    # TODO: and this too

        # prepare progress popup window
        ProgressDialog = ImportProgressDialog(len(self.filelist), self.parent)
        ProgressDialog.resize(500, 200)
        ProgressDialog.show()

        for f in self.filelist:
            filecount = filecount + 1
            ProgressDialog.progress_update(f, str(self.database.getHandCount()))

            (stored, duplicates, partial, skipped, errors, ttime) = self._import_despatch(self.filelist[f])
            totstored += stored
            totdups += duplicates
            totpartial += partial
            totskipped += skipped
            toterrors += errors

            if moveimportedfiles and movefailedfiles:
                try:
                    if moveimportedfiles:
                        shutil.move(f, "c:\\fpdbimported\\%d-%s" % (filecount, os.path.basename(f[3:])))
                except Exception:
                    fileerrorcount = fileerrorcount + 1
                    if movefailedfiles:
                        shutil.move(f, "c:\\fpdbfailed\\%d-%s" % (fileerrorcount, os.path.basename(f[3:])))

            self.logImport('bulk', f, stored, duplicates, partial, skipped, errors, ttime, self.filelist[f].fileId)

        ProgressDialog.accept()
        del ProgressDialog

        return (totstored, totdups, totpartial, totskipped, toterrors)
    # end def importFiles
    def _import_despatch(self, fpdbfile):
        stored, duplicates, partial, skipped, errors, ttime = 0, 0, 0, 0, 0, 0
        if fpdbfile.ftype in ("hh", "both"):
            (stored, duplicates, partial, skipped, errors, ttime) = self._import_hh_file(fpdbfile)
        if fpdbfile.ftype == "summary":
            (stored, duplicates, partial, skipped, errors, ttime) = self._import_summary_file(fpdbfile)
        if fpdbfile.ftype == "both" and fpdbfile.path not in self.updatedsize:
            self._import_summary_file(fpdbfile)
            #pass
        print("DEBUG: _import_summary_file.ttime: %.3f %s" % (ttime, fpdbfile.ftype))
        return (stored, duplicates, partial, skipped, errors, ttime)
    def calculate_auto2(self, db, scale, increment):
        """A second heuristic to determine a reasonable value of drop/don't drop.
        This one adds up the size of the files to import to guess the number of hands in them.
        Example values of the scale and increment params might be 10 and 500, meaning
        roughly: drop if importing more than 10% (100/scale) of the hands in the db, or if
        there are fewer than 500 hands in the db."""
        size_per_hand = 1300.0  # wag based on a PS 6-up FLHE file. Actual value not hugely important
                                # as values of scale and increment compensate for it anyway.
                                # decimal used to force float arithmetic

        # get number of hands in db
        if 'handsInDB' not in self.settings:
            try:
                tmpcursor = db.get_cursor()
                tmpcursor.execute("Select count(1) from Hands;")
                self.settings['handsInDB'] = tmpcursor.fetchone()[0]
            except Exception:
                pass  # if this fails we're probably doomed anyway

        # add up size of import files
        total_size = 0.0
        for file in self.filelist:
            if os.path.exists(file):
                stat_info = os.stat(file)
                total_size += stat_info.st_size

        # if hands_in_db is zero or very low, we want to drop indexes, otherwise compare
        # import size with db size somehow:
        ret = "don't drop"
        if self.settings['handsInDB'] < scale * (old_div(total_size, size_per_hand)) + increment:
            ret = "drop"
        #print "auto2: handsindb =", self.settings['handsInDB'], "total_size =", total_size, "size_per_hand =", \
        #      size_per_hand, "inc =", increment, "return:", ret
        return ret
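
    # Worked example of the heuristic above (numbers chosen for illustration):
    # with scale=12.0 and increment=500.0 (the values runImport passes for
    # dropIndexes) and 1,300,000 bytes of files to import, the estimated hand
    # count is 1,300,000 / 1300.0 = 1000, so the threshold is
    # 12.0 * 1000 + 500.0 = 12500; indexes are dropped only if the database
    # currently holds fewer than 12,500 hands.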
    # Run import on updated files, then store the latest update time. Called from GuiAutoImport.py
    def runUpdated(self):
        """Check for new files in monitored directories"""
        for (site, type) in self.dirlist:
            self.addImportDirectory(self.dirlist[(site, type)][0], False, (site, type), self.dirlist[(site, type)][1])

        for f in self.filelist:
            if os.path.exists(f):
                stat_info = os.stat(f)
                if f in self.updatedsize:  # we should be able to assume that if we're in size, we're in time as well
                    if stat_info.st_size > self.updatedsize[f] or stat_info.st_mtime > self.updatedtime[f]:
                        try:
                            if not os.path.isdir(f):
                                self.caller.addText("\n" + os.path.basename(f))
                                print("os.path.basename", os.path.basename(f))
                                print("self.caller:", self.caller)
                                print(os.path.basename(f))
                        except KeyError:
                            log.error("File '%s' seems to have disappeared" % f)
                        (stored, duplicates, partial, skipped, errors, ttime) = self._import_despatch(self.filelist[f])
                        self.logImport('auto', f, stored, duplicates, partial, skipped, errors, ttime, self.filelist[f].fileId)
                        self.database.commit()
                        try:
                            if not os.path.isdir(f):  # Note: this assumes that whatever calls us has an "addText" func
                                self.caller.addText(" %d stored, %d duplicates, %d partial, %d skipped, %d errors (time = %f)" % (stored, duplicates, partial, skipped, errors, ttime))
                                print("self.caller2:", self.caller)
                        except KeyError:  # TODO: Again, what error happens here? fix when we find out ..
                            pass
                        self.updatedsize[f] = stat_info.st_size
                        self.updatedtime[f] = time()
                else:
                    if os.path.isdir(f) or (time() - stat_info.st_mtime) < 60:
                        self.updatedsize[f] = 0
                        self.updatedtime[f] = 0
                    else:
                        self.updatedsize[f] = stat_info.st_size
                        self.updatedtime[f] = time()
            else:
                self.removeFromFileList[f] = True

        for file in self.removeFromFileList:
            if file in self.filelist:
                del self.filelist[file]

        self.removeFromFileList = {}
        self.database.rollback()
        self.runPostImport()
    def _import_hh_file(self, fpdbfile):
        """Function for actual import of a hh file.
        This is now an internal function that should not be called directly."""

        (stored, duplicates, partial, skipped, errors, ttime) = (0, 0, 0, 0, 0, time())

        # Load filter, process file, pass returned filename to import_fpdb_file
        log.info("Converting %s" % fpdbfile.path)

        filter_name = fpdbfile.site.filter_name
        mod = __import__(fpdbfile.site.hhc_fname)
        obj = getattr(mod, filter_name, None)
        if callable(obj):

            if fpdbfile.path in self.pos_in_file:
                idx = self.pos_in_file[fpdbfile.path]
            else:
                self.pos_in_file[fpdbfile.path], idx = 0, 0

            hhc = obj(self.config, in_path=fpdbfile.path, index=idx, autostart=False,
                      starsArchive=fpdbfile.archive,
                      ftpArchive=fpdbfile.archive,
                      sitename=fpdbfile.site.name)
            hhc.setAutoPop(self.mode == 'auto')
            hhc.start()

            self.pos_in_file[fpdbfile.path] = hhc.getLastCharacterRead()
            # Tally the results
            partial = getattr(hhc, 'numPartial')
            skipped = getattr(hhc, 'numSkipped')
            errors = getattr(hhc, 'numErrors')
            stored = getattr(hhc, 'numHands')
            stored -= errors
            stored -= partial
            stored -= skipped

            if stored > 0:
                if self.caller:
                    self.progressNotify()
                handlist = hhc.getProcessedHands()
                self.database.resetBulkCache(True)
                self.pos_in_file[fpdbfile.path] = hhc.getLastCharacterRead()
                (phands, ahands, ihands, to_hud) = ([], [], [], [])
                self.database.resetBulkCache()
                ####Lock Placeholder####
                for hand in handlist:
                    hand.prepInsert(self.database, printtest=self.settings['testData'])
                    ahands.append(hand)
                self.database.commit()
                ####Lock Placeholder####

                for hand in ahands:
                    hand.assembleHand()
                    phands.append(hand)

                ####Lock Placeholder####
                backtrack = False
                id = self.database.nextHandId()
                for i in range(len(phands)):
                    doinsert = len(phands) == i + 1
                    hand = phands[i]
                    try:
                        id = hand.getHandId(self.database, id)
                        hand.updateSessionsCache(self.database, None, doinsert)
                        hand.insertHands(self.database, fpdbfile.fileId, doinsert, self.settings['testData'])
                        hand.updateCardsCache(self.database, None, doinsert)
                        hand.updatePositionsCache(self.database, None, doinsert)
                        hand.updateHudCache(self.database, doinsert)
                        hand.updateTourneyResults(self.database)
                        ihands.append(hand)
                        to_hud.append(hand.dbid_hands)
                    except FpdbHandDuplicate:
                        duplicates += 1
                        if doinsert and ihands:
                            backtrack = True
                    except Exception:
                        error_trace = ''
                        formatted_lines = traceback.format_exc().splitlines()
                        for line in formatted_lines:
                            error_trace += line
                        tmp = hand.handText[0:200]
                        log.error("Importer._import_hh_file: '%r' Fatal error: '%r'" % (fpdbfile.path, error_trace))
                        log.error("'%r'" % tmp)
                        if doinsert and ihands:
                            backtrack = True
                if backtrack:  # If the last hand in the file is a duplicate this will backtrack and insert the new hand records
                    hand = ihands[-1]
                    hp, hero = hand.handsplayers, hand.hero
                    hand.hero, self.database.hbulk, hand.handsplayers = 0, self.database.hbulk[:-1], []  # making sure we don't insert data from this hand
                    self.database.bbulk = [b for b in self.database.bbulk if hand.dbid_hands != b[0]]
                    hand.updateSessionsCache(self.database, None, doinsert)
                    hand.insertHands(self.database, fpdbfile.fileId, doinsert, self.settings['testData'])
                    hand.updateCardsCache(self.database, None, doinsert)
                    hand.updatePositionsCache(self.database, None, doinsert)
                    hand.updateHudCache(self.database, doinsert)
                    hand.handsplayers, hand.hero = hp, hero
                #log.debug("DEBUG: hand.updateSessionsCache: %s" % (t5tot))
                #log.debug("DEBUG: hand.insertHands: %s" % (t6tot))
                #log.debug("DEBUG: hand.updateHudCache: %s" % (t7tot))
                self.database.commit()
                ####Lock Placeholder####

                for i in range(len(ihands)):
                    doinsert = len(ihands) == i + 1
                    hand = ihands[i]
                    hand.insertHandsPlayers(self.database, doinsert, self.settings['testData'])
                    hand.insertHandsActions(self.database, doinsert, self.settings['testData'])
                    hand.insertHandsStove(self.database, doinsert)
                self.database.commit()
                # pipe the Hands.id out to the HUD
                if self.callHud:
                    print('self.callHud', self.callHud)
                    print('self.caller', self.caller)
                    for hid in list(to_hud):
                        try:
                            log.debug(f"Sending hand ID {hid} to HUD via socket")
                            self.zmq_sender.send_hand_id(hid)
                        except IOError as e:
                            log.error(f"Failed to send hand ID to HUD via socket: {e}")
            # Really ugly hack to allow testing Hands within the HHC from someone
            # with only an Importer object
            if self.settings['cacheHHC']:
                self.handhistoryconverter = hhc
        elif self.mode == 'auto':
            return (0, 0, partial, skipped, errors, time() - ttime)

        stored -= duplicates

        if stored > 0 and ihands[0].gametype['type'] == 'tour':
            if hhc.summaryInFile:
                fpdbfile.ftype = "both"

        ttime = time() - ttime
        return (stored, duplicates, partial, skipped, errors, ttime)
    def autoSummaryGrab(self, force=False):
        for f, fpdbfile in list(self.filelist.items()):
            stat_info = os.stat(f)
            if ((time() - stat_info.st_mtime) > 300 or force) and fpdbfile.ftype == "both":
                self._import_summary_file(fpdbfile)
                fpdbfile.ftype = "hh"
    def _import_summary_file(self, fpdbfile):
        (stored, duplicates, partial, skipped, errors, ttime) = (0, 0, 0, 0, 0, time())
        mod = __import__(fpdbfile.site.summary)
        obj = getattr(mod, fpdbfile.site.summary, None)
        if callable(obj):
            if self.caller:
                self.progressNotify()
            summaryTexts = self.readFile(obj, fpdbfile.path, fpdbfile.site.name)
            if summaryTexts is None:
                log.error("Found: '%s' with 0 characters... skipping" % fpdbfile.path)
                return (0, 0, 0, 0, 1, time())  # File had 0 characters
            ####Lock Placeholder####
            for j, summaryText in enumerate(summaryTexts, start=1):
                doinsert = len(summaryTexts) == j
                try:
                    conv = obj(db=self.database, config=self.config, siteName=fpdbfile.site.name, summaryText=summaryText, in_path=fpdbfile.path, header=summaryTexts[0])
                    self.database.resetBulkCache(False)
                    conv.insertOrUpdate(printtest=self.settings['testData'])
                except FpdbHandPartial:
                    partial += 1
                except FpdbParseError:
                    log.error("Summary import parse error in file: %s" % fpdbfile.path)
                    errors += 1
                if j != 1:
                    print("Finished importing %s/%s tournament summaries" % (j, len(summaryTexts)))
                stored = j
            ####Lock Placeholder####
        ttime = time() - ttime
        return (stored - errors - partial, duplicates, partial, skipped, errors, ttime)
    def progressNotify(self):
        """A callback to the interface while events are pending"""
        QCoreApplication.processEvents()
    def readFile(self, obj, filename, site):
        if (filename.endswith('.xls') or filename.endswith('.xlsx')) and xlrd:
            obj.hhtype = "xls"
            if site == 'PokerStars':
                tourNoField = 'Tourney'
            else:
                tourNoField = 'tournament key'
            summaryTexts = obj.summaries_from_excel(filename, tourNoField)
        else:
            foabs = obj.readFile(obj, filename)
            if foabs is None:
                return None
            re_Split = obj.getSplitRe(obj, foabs)
            summaryTexts = re.split(re_Split, foabs)
            # Summary identified but not split
            if len(summaryTexts) == 1:
                return summaryTexts
            else:
                # The summary files tend to have a header
                # Remove the first entry if it has < 150 characters
                if len(summaryTexts) > 1 and len(summaryTexts[0]) <= 150:
                    del summaryTexts[0]
                    log.warning("TourneyImport: removing text < 150 characters from start of file")

                # Sometimes the summary files also have a footer
                # Remove the last entry if it has < 100 characters
                if len(summaryTexts) > 1 and len(summaryTexts[-1]) <= 100:
                    summaryTexts.pop()
                    log.warning("TourneyImport: removing text < 100 characters from end of file")
        return summaryTexts
    def __del__(self):
        if hasattr(self, 'zmq_sender'):
            self.zmq_sender.close()


class ImportProgressDialog(QDialog):
    """
    Popup window to show progress.

    The init method sets up the total number of expected iterations.
    If no parent is passed to init, command-line mode is assumed and
    no progress bar is created.
    """
    def progress_update(self, filename, handcount):
        self.fraction += 1
        # update total if fraction exceeds expected total number of iterations
        if self.fraction > self.total:
            self.total = self.fraction
            self.pbar.setRange(0, self.total)

        self.pbar.setValue(self.fraction)

        self.handcount.setText("Database Statistics - Number of Hands: " + handcount)

        now = datetime.datetime.now()
        now_formatted = now.strftime("%H:%M:%S")
        self.progresstext.setText(now_formatted + " - Importing " + filename + "\n")

    def __init__(self, total, parent):
        if parent is None:
            return
        QDialog.__init__(self, parent)

        self.fraction = 0
        self.total = total
        self.setWindowTitle("Importing")

        self.setLayout(QVBoxLayout())

        self.pbar = QProgressBar()
        self.pbar.setRange(0, total)
        self.layout().addWidget(self.pbar)

        self.handcount = QLabel()
        self.handcount.setWordWrap(True)
        self.layout().addWidget(self.handcount)

        self.progresstext = QLabel()
        self.progresstext.setWordWrap(True)
        self.layout().addWidget(self.progresstext)