changelog
version 234
- the action choice workflow in the manage tags dialog now merges decisions for multiple tag entry events
- the button dialog that pops up in the new merged manage tags workflow has button tooltips to better describe the proposed action's tag and file combinations
- the manage tags dialog now requests only one petition reason for a merged multi-tag petition event, as above
- perceptual hashes are now stored in the cache db (moved out of the main db, which is preferably kept lean)
- the database now supports multiple perceptual hashes per file
- fleshed out the perceptual hash vptree generation and maintenance code (a rough sketch of the search idea follows these version notes)
- added an option for the main gui title to options->gui--it even updates live
- added unicode path support when importing serialised pngs
- added unicode path support when exporting serialised pngs
- exporting a serialised png now reports success via the export button, which will temporarily relabel itself
- when a GET network connection fails during the read phase due to an unexpected timeout, the request will be reattempted a couple of times, like failed connection initialisations currently are
- the escape key now closes scrolledpanelframes (e.g. the review services or import status frames)
- the manage tags dialog and frame now close correctly on the escape key
- fixed a size calculation bug that stopped scrollbars being drawn initially on manage options and any other listbook-containing scrolling panel when the screen is too small to show the whole dialog
- the undo menu now works on the new menu system
- cleaned up some bad gui-thread interaction in the file import dialog
- fixed file->restart in the built release, including when the install path includes a space
- the trash service no longer records which files it has physically deleted, as this information is not used and is redundant compared to the existing local files' deleted record (existing records will be deleted on update)
- the subscription daemon will wait 90 seconds after boot before triggering--quitting the client before then will result in subs not being checked
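
The vptree work above is easiest to follow with a small standalone sketch. This is illustrative only and does not use the client's actual schema or API: hamming_distance stands in for HydrusData.GetHammingDistance (and the hydrus_hamming sqlite function), and the nodes are plain in-memory tuples here rather than rows in the new shape_vptree table the ClientDB code below is working towards.

def hamming_distance( phash_a, phash_b ):
    
    # the 64-bit dct phash is stored as an 8-byte string; count the differing bits
    
    distance = 0
    
    for ( byte_a, byte_b ) in zip( bytearray( phash_a ), bytearray( phash_b ) ):
        
        distance += bin( byte_a ^ byte_b ).count( '1' )
        
    
    return distance
    

# a node is ( phash, radius, inner_node, outer_node ); inner holds phashes within
# radius of this node's phash, outer holds the rest; leaves have radius = None

def search_vptree( node, search_phash, search_radius, results ):
    
    if node is None:
        
        return
        
    
    ( node_phash, radius, inner_node, outer_node ) = node
    
    distance = hamming_distance( node_phash, search_phash )
    
    if distance <= search_radius:
        
        results.append( node_phash )
        
    
    if radius is not None:
        
        # triangle inequality pruning: only descend into a side that could still
        # hold a phash within search_radius of the query
        
        if distance <= radius + search_radius:
            
            search_vptree( inner_node, search_phash, search_radius, results )
            
        
        if distance + search_radius > radius:
            
            search_vptree( outer_node, search_phash, search_radius, results )
            
        
    

# e.g. results = []; search_vptree( root_node, some_phash, 4, results )

The same radius/triangle-inequality pruning is what the _CacheSimilarFilesSearch pseudocode in the ClientDB diff below walks over its potentials queue with, just against db rows instead of in-memory tuples.
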
version 233
- made a plan for faster dupe search

diff --git a/help/contact.html b/help/contact.html
index 3a2c89dd..9f74dcdc 100755
--- a/help/contact.html
+++ b/help/contact.html
@@ -21,6 +21,7 @@
- github
- discord
- patreon
If you would like to send me something physical, you can use my PO Box:
diff --git a/include/ClientConstants.py b/include/ClientConstants.py index c8cdd09f..5a0a8a83 100755 --- a/include/ClientConstants.py +++ b/include/ClientConstants.py @@ -461,6 +461,7 @@ class GlobalBMPs( object ): GlobalBMPs.eight_chan = wx.Bitmap( os.path.join( HC.STATIC_DIR, '8chan.png' ) ) GlobalBMPs.twitter = wx.Bitmap( os.path.join( HC.STATIC_DIR, 'twitter.png' ) ) GlobalBMPs.tumblr = wx.Bitmap( os.path.join( HC.STATIC_DIR, 'tumblr.png' ) ) + GlobalBMPs.discord = wx.Bitmap( os.path.join( HC.STATIC_DIR, 'discord.png' ) ) GlobalBMPs.patreon = wx.Bitmap( os.path.join( HC.STATIC_DIR, 'patreon.png' ) ) GlobalBMPs.first = wx.Bitmap( os.path.join( HC.STATIC_DIR, 'first.png' ) ) diff --git a/include/ClientController.py b/include/ClientController.py index 21a2f9eb..eb1567ec 100755 --- a/include/ClientController.py +++ b/include/ClientController.py @@ -616,7 +616,7 @@ class Controller( HydrusController.HydrusController ): self._daemons.append( HydrusThreading.DAEMONWorker( self, 'CheckMouseIdle', ClientDaemons.DAEMONCheckMouseIdle, period = 10 ) ) self._daemons.append( HydrusThreading.DAEMONWorker( self, 'DownloadFiles', ClientDaemons.DAEMONDownloadFiles, ( 'notify_new_downloads', 'notify_new_permissions' ) ) ) self._daemons.append( HydrusThreading.DAEMONWorker( self, 'SynchroniseAccounts', ClientDaemons.DAEMONSynchroniseAccounts, ( 'permissions_are_stale', ) ) ) - self._daemons.append( HydrusThreading.DAEMONWorker( self, 'SynchroniseSubscriptions', ClientDaemons.DAEMONSynchroniseSubscriptions, ( 'notify_restart_subs_sync_daemon', 'notify_new_subscriptions' ) ) ) + self._daemons.append( HydrusThreading.DAEMONWorker( self, 'SynchroniseSubscriptions', ClientDaemons.DAEMONSynchroniseSubscriptions, ( 'notify_restart_subs_sync_daemon', 'notify_new_subscriptions' ), init_wait = 90 ) ) self._daemons.append( HydrusThreading.DAEMONBigJobWorker( self, 'CheckImportFolders', ClientDaemons.DAEMONCheckImportFolders, ( 'notify_restart_import_folders_daemon', 'notify_new_import_folders' ), period = 180 ) ) self._daemons.append( HydrusThreading.DAEMONBigJobWorker( self, 'CheckExportFolders', ClientDaemons.DAEMONCheckExportFolders, ( 'notify_restart_export_folders_daemon', 'notify_new_export_folders' ), period = 180 ) ) diff --git a/include/ClientDB.py b/include/ClientDB.py index cd4b908f..6da2b16c 100755 --- a/include/ClientDB.py +++ b/include/ClientDB.py @@ -1516,72 +1516,169 @@ class DB( HydrusDB.HydrusDB ): self._c.executemany( 'DELETE FROM ' + ac_cache_table_name + ' WHERE namespace_id = ? AND tag_id = ? AND current_count = ? AND pending_count = ?;', ( ( namespace_id, tag_id, 0, 0 ) for ( namespace_id, tag_id, current_delta, pending_delta ) in count_ids ) ) - def _CacheSimilarFilesDropPHashes( self ): + def _CacheSimilarFilesDelete( self, hash_id, phash_ids ): - # drop the two tables + phash_ids = set( phash_ids ) + + self._c.executemany( 'DELETE FROM external_caches.shape_perceptual_hash_map WHERE phash_id = ? 
AND hash_id = ?;', ( ( phash_id, hash_id ) for phash_id in phash_ids ) ) + + useful_phash_ids = { phash for ( phash, ) in self._c.execute( 'SELECT phash_id FROM external_caches.shape_perceptual_hash_map WHERE phash_id IN ' + HydrusData.SplayListForDB( phash_ids ) + ';' ) } + + deletee_phash_ids = phash_ids.difference( useful_phash_ids ) + + # for every deletee_phash_id: + # add to the rebalance maintenance table so they can be cleared out during maintenance + + # it'd be nice to call this on physical file deletion, but w/e + + + def _CacheSimilarFilesGeneratePHashes( self, hash_ids = None ): + + if hash_ids is None: + + # do all phashable hash_ids, selected from file_map X all local files, or something + + pass + + + # insert or ignore all hash_ids into a 'regen these phashes on maintenance' table + + + def _CacheSimilarFilesGenerateBranch( self, job_key, parent_id, phash_id, phash, children ): + + # report to job_key and splash screen + + ghd = HydrusData.GetHammingDistance + + process_queue = [ ( parent_id, phash_id, phash, children ) ] + + while len( process_queue ) > 0: + + ( parent_id, phash_id, phash, children ) = process_queue.pop( 0 ) + + if len( children ) == 0: + + left_id = None + right_id = None + + radius = None + + else: + + children = [ ( ghd( phash, child_phash ), child_id, child_phash ) for ( child_id, child_phash ) in children ] + + children.sort() + + median_index = len( children ) / 2 + + radius = children[ median_index ][0] + + left_children = [ ( child_id, child_phash ) for ( distance, child_id, child_phash ) in children if distance <= radius ] + right_children = [ ( child_id, child_phash ) for ( distance, child_id, child_phash ) in children if distance > radius ] + + ( left_id, left_phash ) = HydrusData.RandomPop( left_children ) + + if len( right_children ) == 0: + + right_id = None + + else: + + ( right_id, right_phash ) = HydrusData.RandomPop( right_children ) + + + + # insert phash_id, phash, radius, left_id, left_count, right_id, right_count, parent_id + + if left_id is not None: + + process_queue.append( ( phash_id, left_id, left_phash, left_children ) ) + + + if right_id is not None: + + process_queue.append( ( phash_id, right_id, right_phash, right_children ) ) + + + pass - def _CacheSimilarFilesDropVPTree( self ): + def _CacheSimilarFilesGetPHashId( self, phash ): - # drop the three tables + result = self._c.execute( 'SELECT phash_id FROM external_caches.shape_perceptual_hashes WHERE phash = ?;', ( sqlite3.Binary( phash ), ) ).fetchone() + + if result is None: + + self._c.execute( 'INSERT INTO external_caches.shape_perceptual_hashes ( phash ) VALUES ( ? );', ( sqlite3.Binary( phash ), ) ) + + phash_id = self._c.lastrowid + + # walk down to bottom of tree and insert it + # if there is no tree yet, create root node + # if bottom is empty on both sides, update left and set radius + # else update right + # update all the left and right counts as you go back up + # if a left/right count is out of whack in either direction, say more than two thirds on one side, schedule that node for rebalancing + # but in this case, only schedule the largest node + # check the left and right counts and rebalance things + + else: + + ( phash_id, ) = result + + + return phash_id + + + def _CacheSimilarFilesGetMaintenanceStatus( self ): + + # count up number of phashes + # count up number of files that still need to be calced + # count up number of nodes to be rebalanced + + # gui will present this as a general 'still 100,000 still to go!' and 'completely ready to go!' 
+ + # I could stick this on local files review services, I guess, although it better belongs on a new 'all local files' service page. + + # can add the arbitrary dupe search cache to this as well pass - def _CacheSimilarFilesGeneratePHashes( self ): + def _CacheSimilarFilesInsert( self, hash_id, phashes ): - # one phash can go to many files - # when I add animations, one file can go to many phashes - - # so I need two tables: - - # phash_id idx | phash - # phash_id | hash_id (joint index, with hash_id non-unique lookup as well) - - # generating all the phashes is too big a job here, so I probably need a progress table that will generate them during maintenance time - # should be a nice way to say how far along it is getting on this - - # fudge this all on db update, but do it clean on init of a fresh db - - pass + for phash in phashes: + + phash_id = self._CacheSimilarFilesGetPHashId( phash ) + + self._c.execute( 'INSERT OR IGNORE INTO external_caches.shape_perceptual_hash_map ( phash_id, hash_id ) VALUES ( ?, ? );', ( phash_id, hash_id ) ) + - def _CacheSimilarFilesGenerateVPTree( self ): - - # the main table - # a way to hold the root node, prob just a tiny table - # a table with nodes that need rebalancing - - # then fill the main table, but don't do it one by one - # check ClientVPTree for info on selecting median node for sample of nodes to pick good radii - - # then select all phash_id | phash pairs - # construct a clientvptree or a simpler object here - # write that to the db straight with branch counts - - pass - - - def _CacheSimilarFilesInsert( self, hash_id, phash ): - - # walk down the tree and insert it - # update all the left and right counts as you go back up - # if a left/right count is out of whack in either direction, say more than two thirds on one side, schedule that node for rebalancing - # check the left and right counts and rebalance things - - pass - - - def _CacheSimilarFilesMaintain( self, job_key ): - - # rebalance the vptree based on the rebalance table - - # populate the phash table + def _CacheSimilarFilesMaintain( self, job_key, stop_time ): # broadcast to job_key on how you are doing, for the maintenance popup + # while hash_ids still in in 'phash recalc during maintenance' table: + # check stop_time + # fetch one + # get lockless file path or whatever + # gen its phashes + # this needs a hydrusfilehandling.getphashes that will deal with it cleverly + # if file doesn't exist or otherwise doesn't work, phashes = [] + # get phash_id, hash_id pairs + # get pairs already in db for hash_id + # delete any no longer needed through the normal call + # insert new ones through the normal call + # remove the hash_id from the maintenance table + + # while there are phashes in the rebalance maintenance table: + # check stop time + # select the one with the largest sum( left, right ) + # rebalance branch + pass @@ -1592,6 +1689,67 @@ class DB( HydrusDB.HydrusDB ): pass + def _CacheSimilarFilesRebalanceBranch( self, job_key, phash_id ): + + # prep, removal of old branch + + # fetch parent_id + parent_id = 'blah' + + # fetch ( phash_id, phash ) pairs of all descendants (can do easy with a recursive call) + rebalance_nodes = 'blah' + + rebalance_phash_ids = { p_id for ( p_id, p_h ) in rebalance_nodes } + + # delete everything from the maintenance rebalance table + # delete row and descendants from vptree + + useful_phash_ids = { p_id for ( p_id, ) in self._c.execute( 'SELECT phash_id FROM external_caches.shape_perceptual_hash_map WHERE phash_id IN ' + 
HydrusData.SplayListForDB( rebalance_phash_ids ) + ';' ) } + + deletee_phash_ids = rebalance_phash_ids.difference( useful_phash_ids ) + + self._c.executemany( 'DELETE FROM external_caches.shape_perceptual_hashes WHERE phash_id = ?;', ( ( p_id, ) for p_id in deletee_phash_ids ) ) + + # now create the new branch + + ( new_phash_id, new_phash ) = HydrusData.RandomPop( rebalance_nodes ) + + if parent_id is not None: + + ( parent_left_id, ) = ( 'blah', ) # fetch the parent's current left_id + + if parent_left_id == phash_id: + + column_name = 'left_id' + + else: + + column_name = 'right_id' + + + # update parent row, set column_name = phash_id + + + self._CacheSimilarFilesGenerateBranch( job_key, parent_id, new_phash_id, new_phash, rebalance_nodes ) + + + def _CacheSimilarFilesRegenerateVPTree( self, job_key ): + + # clear the table + + # the main table -- shape_vptree + # something like: + # phash_id (p idx), phash, radius, left_id, left_count, right_id, right_count, creation_ratio, parent_id (idx) + + # report to job_key and splash screen + + all_nodes = self._c.execute( 'SELECT phash_id, phash FROM external_caches.shape_perceptual_hashes;' ).fetchall() + + ( root_id, root_phash ) = HydrusData.RandomPop( all_nodes ) + + self._CacheSimilarFilesGenerateBranch( job_key, None, root_id, root_phash, all_nodes ) + + def _CacheSimilarFilesSearch( self, hash_id, max_hamming_distance ): search_radius = max_hamming_distance @@ -1617,7 +1775,7 @@ class DB( HydrusDB.HydrusDB ): while len( potentials ) > 0: - ( node_phash_id, search_phashes ) = potentials.pop() + ( node_phash_id, search_phashes ) = potentials.pop( 0 ) ( node_phash, node_radius, inner_phash_id, outer_phash_id ) = ( 'blah', 5, 1, 2 ) # sql here @@ -2227,8 +2385,6 @@ class DB( HydrusDB.HydrusDB ): self._c.execute( 'CREATE TABLE options ( options TEXT_YAML );', ) - self._c.execute( 'CREATE TABLE perceptual_hashes ( hash_id INTEGER PRIMARY KEY, phash BLOB_BYTES );' ) - self._c.execute( 'CREATE TABLE recent_tags ( service_id INTEGER REFERENCES services ON DELETE CASCADE, namespace_id INTEGER, tag_id INTEGER, timestamp INTEGER, PRIMARY KEY ( service_id, namespace_id, tag_id ) );' ) self._c.execute( 'CREATE TABLE remote_ratings ( service_id INTEGER REFERENCES services ON DELETE CASCADE, hash_id INTEGER, count INTEGER, rating REAL, score REAL, PRIMARY KEY( service_id, hash_id ) );' ) @@ -2270,6 +2426,13 @@ class DB( HydrusDB.HydrusDB ): self._c.execute( 'CREATE TABLE yaml_dumps ( dump_type INTEGER, dump_name TEXT, dump TEXT_YAML, PRIMARY KEY ( dump_type, dump_name ) );' ) + # cache + + self._c.execute( 'CREATE TABLE external_caches.shape_perceptual_hashes ( phash_id INTEGER PRIMARY KEY, phash BLOB_BYTES UNIQUE );' ) + + self._c.execute( 'CREATE TABLE external_caches.shape_perceptual_hash_map ( phash_id INTEGER, hash_id INTEGER, PRIMARY KEY ( phash_id, hash_id ) );' ) + self._c.execute( 'CREATE INDEX external_caches.shape_perceptual_hash_map_hash_id_index ON shape_perceptual_hash_map ( hash_id );' ) + # master self._c.execute( 'CREATE TABLE IF NOT EXISTS external_master.hashes ( hash_id INTEGER PRIMARY KEY, hash BLOB_BYTES UNIQUE );' ) @@ -2352,9 +2515,9 @@ class DB( HydrusDB.HydrusDB ): service_info_updates.append( ( -num_files, service_id, HC.SERVICE_INFO_NUM_FILES ) ) service_info_updates.append( ( -num_inbox, service_id, HC.SERVICE_INFO_NUM_INBOX ) ) - if not files_being_undeleted: + if service_id != self._trash_service_id: - # an undelete moves from trash to local, which shouldn't be remembered as a delete from the trash service + # trash 
service doesn't keep track of what is deleted, as this is redundant service_info_updates.append( ( num_files, service_id, HC.SERVICE_INFO_NUM_DELETED_FILES ) ) @@ -3444,21 +3607,19 @@ class DB( HydrusDB.HydrusDB ): hash_id = self._GetHashId( similar_to_hash ) - result = self._c.execute( 'SELECT phash FROM perceptual_hashes WHERE hash_id = ?;', ( hash_id, ) ).fetchone() + phashes = [ phash for ( phash, ) in self._c.execute( 'SELECT phash FROM shape_perceptual_hashes, shape_perceptual_hash_map USING ( phash_id ) WHERE hash_id = ?;', ( hash_id, ) ) ] - if result is None: + similar_hash_ids = set() + + for phash in phashes: - query_hash_ids = set() + some_similar_hash_ids = [ hash_id for ( hash_id, ) in self._c.execute( 'SELECT hash_id FROM shape_perceptual_hashes, shape_perceptual_hash_map USING ( phash_id ) WHERE hydrus_hamming( phash, ? ) <= ?;', ( sqlite3.Binary( phash ), max_hamming ) ) ] - else: - - ( phash, ) = result - - similar_hash_ids = [ hash_id for ( hash_id, ) in self._c.execute( 'SELECT hash_id FROM perceptual_hashes WHERE hydrus_hamming( phash, ? ) <= ?;', ( sqlite3.Binary( phash ), max_hamming ) ) ] - - query_hash_ids.intersection_update( similar_hash_ids ) + similar_hash_ids.update( some_similar_hash_ids ) + query_hash_ids.intersection_update( similar_hash_ids ) + # @@ -5361,9 +5522,9 @@ class DB( HydrusDB.HydrusDB ): if mime in ( HC.IMAGE_JPEG, HC.IMAGE_PNG ): - phash = ClientImageHandling.GeneratePerceptualHash( temp_path ) + phashes = ClientImageHandling.GenerateShapePerceptualHashes( temp_path ) - self._c.execute( 'INSERT OR REPLACE INTO perceptual_hashes ( hash_id, phash ) VALUES ( ?, ? );', ( hash_id, sqlite3.Binary( phash ) ) ) + self._CacheSimilarFilesInsert( hash_id, phashes ) # lockless because this db call is made by the locked client files manager @@ -8043,6 +8204,82 @@ class DB( HydrusDB.HydrusDB ): self._c.executemany( 'INSERT OR IGNORE INTO json_dumps_named VALUES ( ?, ?, ?, ? 
);', [ ( 32, 'iqdb danbooru', 1, '''["http://danbooru.iqdb.org/", 1, 0, 0, "file", {}, [[29, 1, ["link to danbooru", [27, 1, [[["td", {"class": "image"}, 1], ["a", {}, 0]], "href"]], [[30, 1, ["", 0, [27, 1, [[["section", {"id": "tag-list"}, 0], ["li", {"class": "category-1"}, null], ["a", {"class": "search-tag"}, 0]], null]], "creator"]], [30, 1, ["", 0, [27, 1, [[["section", {"id": "tag-list"}, 0], ["li", {"class": "category-3"}, null], ["a", {"class": "search-tag"}, 0]], null]], "series"]], [30, 1, ["", 0, [27, 1, [[["section", {"id": "tag-list"}, 0], ["li", {"class": "category-4"}, null], ["a", {"class": "search-tag"}, 0]], null]], "character"]], [30, 1, ["", 0, [27, 1, [[["section", {"id": "tag-list"}, 0], ["li", {"class": "category-0"}, null], ["a", {"class": "search-tag"}, 0]], null]], ""]]]]], [30, 1, ["no iqdb match found", 8, [27, 1, [[["th", {}, null]], null]], [false, true, "Best match"]]]]]''' ) ] ) + if version == 233: + + self._controller.pub( 'splash_set_status_text', 'moving phashes from main to cache' ) + + self._c.execute( 'CREATE TABLE external_caches.shape_perceptual_hashes ( phash_id INTEGER PRIMARY KEY, phash BLOB_BYTES UNIQUE );' ) + + self._c.execute( 'CREATE TABLE external_caches.shape_perceptual_hash_map ( phash_id INTEGER, hash_id INTEGER, PRIMARY KEY ( phash_id, hash_id ) );' ) + self._c.execute( 'CREATE INDEX external_caches.shape_perceptual_hash_map_hash_id_index ON shape_perceptual_hash_map ( hash_id );' ) + + try: + + def GetPHashId( phash ): + + result = self._c.execute( 'SELECT phash_id FROM external_caches.shape_perceptual_hashes WHERE phash = ?;', ( sqlite3.Binary( phash ), ) ).fetchone() + + if result is None: + + self._c.execute( 'INSERT INTO external_caches.shape_perceptual_hashes ( phash ) VALUES ( ? );', ( sqlite3.Binary( phash ), ) ) + + phash_id = self._c.lastrowid + + else: + + ( phash_id, ) = result + + + return phash_id + + + current_phash_info = self._c.execute( 'SELECT hash_id, phash FROM main.perceptual_hashes;' ).fetchall() + + num_to_do = len( current_phash_info ) + + for ( i, ( hash_id, phash ) ) in enumerate( current_phash_info ): + + if i % 500 == 0: + + self._controller.pub( 'splash_set_status_text', 'moving phashes: ' + HydrusData.ConvertValueRangeToPrettyString( i, num_to_do ) ) + + + phash_id = GetPHashId( phash ) + + self._c.execute( 'INSERT OR IGNORE INTO external_caches.shape_perceptual_hash_map ( phash_id, hash_id ) VALUES ( ?, ? );', ( phash_id, hash_id ) ) + + + except Exception as e: + + HydrusData.PrintException( e ) + + self._controller.pub( 'splash_set_status_text', 'moving phashes failed, error written to log' ) + + time.sleep( 3 ) + + + self._c.execute( 'DROP TABLE main.perceptual_hashes;' ) + + # + + self._controller.pub( 'splash_set_status_text', 'removing redundant trash deletion record' ) + + try: + + trash_service_id = self._GetServiceId( CC.TRASH_SERVICE_KEY ) + + self._c.execute( 'DELETE FROM deleted_files WHERE service_id = ?;', ( trash_service_id, ) ) + + self._c.execute( 'DELETE FROM service_info WHERE service_id = ? 
AND info_type = ?;', ( trash_service_id, HC.SERVICE_INFO_NUM_DELETED_FILES ) ) + + except Exception as e: + + HydrusData.PrintException( e ) + + self._controller.pub( 'splash_set_status_text', 'removing trash deletion record failed, error written to log' ) + + + self._controller.pub( 'splash_set_title_text', 'updated db to v' + str( version + 1 ) ) self._c.execute( 'UPDATE version SET version = ?;', ( version + 1, ) ) diff --git a/include/ClientData.py b/include/ClientData.py index 3417d3c0..08d30ef4 100644 --- a/include/ClientData.py +++ b/include/ClientData.py @@ -579,6 +579,10 @@ class ClientOptions( HydrusSerialisable.SerialisableBase ): self._dictionary[ 'noneable_strings' ][ 'favourite_file_lookup_script' ] = 'gelbooru md5' self._dictionary[ 'noneable_strings' ][ 'suggested_tags_layout' ] = 'notebook' + self._dictionary[ 'strings' ] = {} + + self._dictionary[ 'strings' ][ 'main_gui_title' ] = 'hydrus client' + # client_files_default = os.path.join( db_dir, 'client_files' ) @@ -1016,6 +1020,14 @@ class ClientOptions( HydrusSerialisable.SerialisableBase ): + def GetString( self, name ): + + with self._lock: + + return self._dictionary[ 'strings' ][ name ] + + + def GetSuggestedTagsFavourites( self, service_key ): with self._lock: @@ -1125,6 +1137,17 @@ class ClientOptions( HydrusSerialisable.SerialisableBase ): + def SetString( self, name, value ): + + with self._lock: + + if value is not None and value != '': + + self._dictionary[ 'strings' ][ name ] = value + + + + def SetSuggestedTagsFavourites( self, service_key, tags ): with self._lock: diff --git a/include/ClientGUI.py b/include/ClientGUI.py index 0668afae..e95c7cf0 100755 --- a/include/ClientGUI.py +++ b/include/ClientGUI.py @@ -55,7 +55,12 @@ class FrameGUI( ClientGUITopLevelWindows.FrameThatResizes ): self._controller = controller - title = self._controller.PrepStringForDisplay( 'Hydrus Client' ) + title = self._controller.GetNewOptions().GetString( 'main_gui_title' ) + + if title is None or title == '': + + title = 'hydrus client' + ClientGUITopLevelWindows.FrameThatResizes.__init__( self, None, title, 'main_gui', float_on_parent = False ) @@ -111,6 +116,7 @@ class FrameGUI( ClientGUITopLevelWindows.FrameThatResizes ): self._controller.sub( self, 'RefreshStatusBar', 'refresh_status' ) self._controller.sub( self, 'SetDBLockedStatus', 'db_locked_status' ) self._controller.sub( self, 'SetMediaFocus', 'set_media_focus' ) + self._controller.sub( self, 'SetTitle', 'main_gui_title' ) self._controller.sub( self, 'SyncToTagArchive', 'sync_to_tag_archive' ) self._menus = {} @@ -774,23 +780,26 @@ class FrameGUI( ClientGUITopLevelWindows.FrameThatResizes ): did_undo_stuff = True - menu.Append( ClientCaches.MENU_EVENT_ID_TO_ACTION_CACHE.GetPermanentId( 'undo' ), undo_string ) + ClientGUIMenus.AppendMenuItem( menu, undo_string, 'Undo last operation.', self, self._controller.pub, 'undo' ) if redo_string is not None: did_undo_stuff = True - menu.Append( ClientCaches.MENU_EVENT_ID_TO_ACTION_CACHE.GetPermanentId( 'redo' ), redo_string ) + ClientGUIMenus.AppendMenuItem( menu, redo_string, 'Redo last operation.', self, self._controller.pub, 'redo' ) if have_closed_pages: - if did_undo_stuff: menu.AppendSeparator() + if did_undo_stuff: + + menu.AppendSeparator() + undo_pages = wx.Menu() - undo_pages.Append( ClientCaches.MENU_EVENT_ID_TO_ACTION_CACHE.GetPermanentId( 'delete_all_closed_pages' ), 'clear all' ) + ClientGUIMenus.AppendMenuItem( undo_pages, 'clear all', 'Remove all closed pages from memory.', self, self._DeleteAllClosedPages ) 
undo_pages.AppendSeparator() @@ -800,18 +809,24 @@ class FrameGUI( ClientGUITopLevelWindows.FrameThatResizes ): for ( i, ( time_closed, index, name, page ) ) in enumerate( self._closed_pages ): - args.append( ( ClientCaches.MENU_EVENT_ID_TO_ACTION_CACHE.GetPermanentId( 'unclose_page', i ), name + ' - ' + page.GetPrettyStatus() ) ) + args.append( ( i, name + ' - ' + page.GetPrettyStatus() ) ) args.reverse() # so that recently closed are at the top - for a in args: undo_pages.Append( *a ) + for ( index, name ) in args: + + ClientGUIMenus.AppendMenuItem( undo_pages, name, 'Restore this page.', self, self._UnclosePage, index ) + - menu.AppendMenu( CC.ID_NULL, p( 'Closed Pages' ), undo_pages ) + ClientGUIMenus.AppendMenu( menu, undo_pages, 'closed pages' ) - else: show = False + else: + + show = False + return ( menu, p( '&Undo' ), show ) @@ -1181,12 +1196,15 @@ class FrameGUI( ClientGUITopLevelWindows.FrameThatResizes ): twitter.SetBitmap( CC.GlobalBMPs.twitter ) tumblr = wx.MenuItem( links, ClientCaches.MENU_EVENT_ID_TO_ACTION_CACHE.GetPermanentId( 'tumblr' ), p( 'Tumblr' ) ) tumblr.SetBitmap( CC.GlobalBMPs.tumblr ) + discord = wx.MenuItem( links, ClientCaches.MENU_EVENT_ID_TO_ACTION_CACHE.GetPermanentId( 'discord' ), p( 'Discord' ) ) + discord.SetBitmap( CC.GlobalBMPs.discord ) patreon = wx.MenuItem( links, ClientCaches.MENU_EVENT_ID_TO_ACTION_CACHE.GetPermanentId( 'patreon' ), p( 'Patreon' ) ) patreon.SetBitmap( CC.GlobalBMPs.patreon ) links.AppendItem( site ) links.AppendItem( board ) links.AppendItem( twitter ) links.AppendItem( tumblr ) + links.AppendItem( discord ) links.AppendItem( patreon ) menu.AppendMenu( wx.ID_NONE, p( 'Links' ), links ) @@ -2316,6 +2334,11 @@ The password is cleartext here but obscured in the entry dialog. Enter a blank p self._DestroyPages( deletee_pages ) + def CurrentlyBusy( self ): + + return self._loading_session + + def EventClose( self, event ): if not event.CanVeto(): @@ -2433,7 +2456,6 @@ The password is cleartext here but obscured in the entry dialog. Enter a blank p wx.CallLater( 500 * i, HydrusData.ShowText, 'This is a delayed popup message -- ' + str( i ) ) - elif command == 'delete_all_closed_pages': self._DeleteAllClosedPages() elif command == 'delete_gui_session': self._controller.Write( 'delete_serialisable_named', HydrusSerialisable.SERIALISABLE_TYPE_GUI_SESSION, data ) @@ -2441,6 +2463,7 @@ The password is cleartext here but obscured in the entry dialog. Enter a blank p self._controller.pub( 'notify_new_sessions' ) elif command == 'delete_service_info': self._DeleteServiceInfo() + elif command == 'discord': webbrowser.open( 'https://discord.gg/vy8CUB4' ) elif command == 'fetch_ip': self._FetchIP( data ) elif command == 'force_idle_mode': @@ -2523,7 +2546,6 @@ The password is cleartext here but obscured in the entry dialog. 
Enter a blank p elif command == 'tab_menu_rename_page': self._RenamePage( self._tab_right_click_index ) elif command == 'tumblr': webbrowser.open( 'http://hydrus.tumblr.com/' ) elif command == 'twitter': webbrowser.open( 'https://twitter.com/#!/hydrusnetwork' ) - elif command == 'unclose_page': self._UnclosePage( data ) elif command == 'undo': self._controller.pub( 'undo' ) else: event.Skip() diff --git a/include/ClientGUICommon.py b/include/ClientGUICommon.py index dda878bf..0c5594da 100755 --- a/include/ClientGUICommon.py +++ b/include/ClientGUICommon.py @@ -855,12 +855,12 @@ class ListBook( wx.Panel ): self.Refresh() - # this tells any parent scrolled panel to recalc its scrollbars - event = wx.NotifyEvent( wx.wxEVT_SIZE, -1 ) + # this tells any parent scrolled panel to update its virtualsize and recalc its scrollbars + event = wx.NotifyEvent( wx.wxEVT_SIZE, self.GetId() ) wx.CallAfter( self.ProcessEvent, event ) - # this tells parent resizing frame/dialog that is interested in resizing that now is the time + # now the virtualsize is updated, we now tell any parent resizing frame/dialog that is interested in resizing that now is the time event = CC.SizeChangedEvent( -1 ) wx.CallAfter( self.ProcessEvent, event ) diff --git a/include/ClientGUIDialogs.py b/include/ClientGUIDialogs.py index 0efdec51..2fdefd81 100755 --- a/include/ClientGUIDialogs.py +++ b/include/ClientGUIDialogs.py @@ -604,9 +604,13 @@ class DialogButtonChoice( Dialog ): i = 0 - for ( text, data ) in choices: + for ( text, data, tooltip ) in choices: - self._buttons.append( wx.Button( self, label = text, id = i ) ) + button = wx.Button( self, label = text, id = i ) + + button.SetToolTipString( tooltip ) + + self._buttons.append( button ) self._ids_to_data[ i ] = data @@ -1512,14 +1516,11 @@ class DialogInputLocalFiles( Dialog ): self._gauge_cancel.Bind( wx.EVT_BUTTON, self.EventGaugeCancel ) self._gauge_cancel.Disable() - self._add_files_button = wx.Button( self, label = 'add files' ) - self._add_files_button.Bind( wx.EVT_BUTTON, self.EventAddPaths ) + self._add_files_button = ClientGUICommon.BetterButton( self, 'add files', self.AddPaths ) - self._add_folder_button = wx.Button( self, label = 'add folder' ) - self._add_folder_button.Bind( wx.EVT_BUTTON, self.EventAddFolder ) + self._add_folder_button = ClientGUICommon.BetterButton( self, 'add folder', self.AddFolder ) - self._remove_files_button = wx.Button( self, label = 'remove files' ) - self._remove_files_button.Bind( wx.EVT_BUTTON, self.EventRemovePaths ) + self._remove_files_button = ClientGUICommon.BetterButton( self, 'remove files', self.RemovePaths ) self._import_file_options = ClientGUICollapsible.CollapsibleOptionsImportFiles( self ) @@ -1583,6 +1584,12 @@ class DialogInputLocalFiles( Dialog ): self._job_key = ClientThreading.JobKey() + self._dialog_key = HydrusData.GenerateKey() + + HydrusGlobals.client_controller.sub( self, 'AddParsedPath', 'DialogInputLocalFiles_AddParsedPath' ) + HydrusGlobals.client_controller.sub( self, 'DoneParsing', 'DialogInputLocalFiles_DoneParsing' ) + HydrusGlobals.client_controller.sub( self, 'SetGaugeInfo', 'DialogInputLocalFiles_SetGaugeInfo' ) + if len( paths ) > 0: self._AddPathsToList( paths ) wx.CallAfter( self._add_button.SetFocus ) @@ -1615,8 +1622,6 @@ class DialogInputLocalFiles( Dialog ): paths = self._processing_queue.pop( 0 ) - self.SetGaugeInfo( None, None, '' ) - self._gauge_pause.Enable() self._gauge_cancel.Enable() @@ -1635,28 +1640,37 @@ class DialogInputLocalFiles( Dialog ): self._job_key.Cancel() - def 
AddParsedPath( self, path, mime, size ): + def AddFolder( self, event ): - pretty_mime = HC.mime_string_lookup[ mime ] - pretty_size = HydrusData.ConvertIntToBytes( size ) - - if path not in self._current_paths_set: + with wx.DirDialog( self, 'Select a folder to add.', style = wx.DD_DIR_MUST_EXIST ) as dlg: - self._current_paths_set.add( path ) - self._current_paths.append( path ) - - self._paths_list.Append( ( path, pretty_mime, pretty_size ), ( path, mime, size ) ) + if dlg.ShowModal() == wx.ID_OK: + + path = HydrusData.ToUnicode( dlg.GetPath() ) + + self._AddPathsToList( ( path, ) ) + - def DoneParsing( self ): + def AddParsedPath( self, dialog_key, path, mime, size ): - self._currently_parsing = False - - self._ProcessQueue() + if dialog_key == self._dialog_key: + + pretty_mime = HC.mime_string_lookup[ mime ] + pretty_size = HydrusData.ConvertIntToBytes( size ) + + if path not in self._current_paths_set: + + self._current_paths_set.add( path ) + self._current_paths.append( path ) + + self._paths_list.Append( ( path, pretty_mime, pretty_size ), ( path, mime, size ) ) + + - def EventAddPaths( self, event ): + def AddPaths( self, event ): with wx.FileDialog( self, 'Select the files to add.', style = wx.FD_MULTIPLE ) as dlg: @@ -1669,16 +1683,13 @@ class DialogInputLocalFiles( Dialog ): - def EventAddFolder( self, event ): + def DoneParsing( self, dialog_key ): - with wx.DirDialog( self, 'Select a folder to add.', style = wx.DD_DIR_MUST_EXIST ) as dlg: + if dialog_key == self._dialog_key: - if dlg.ShowModal() == wx.ID_OK: - - path = HydrusData.ToUnicode( dlg.GetPath() ) - - self._AddPathsToList( ( path, ) ) - + self._currently_parsing = False + + self._ProcessQueue() @@ -1738,8 +1749,6 @@ class DialogInputLocalFiles( Dialog ): self.EndModal( wx.ID_OK ) - def EventRemovePaths( self, event ): self.RemovePaths() - def EventTags( self, event ): if len( self._current_paths ) > 0: @@ -1770,21 +1779,24 @@ class DialogInputLocalFiles( Dialog ): self._current_paths_set = set( self._current_paths ) - def SetGaugeInfo( self, gauge_range, gauge_value, text ): + def SetGaugeInfo( self, dialog_key, gauge_range, gauge_value, text ): - if gauge_range is None: self._gauge.Pulse() - else: + if dialog_key == self._dialog_key: - self._gauge.SetRange( gauge_range ) - self._gauge.SetValue( gauge_value ) + if gauge_range is None: self._gauge.Pulse() + else: + + self._gauge.SetRange( gauge_range ) + self._gauge.SetValue( gauge_value ) + + + self._gauge_text.SetLabelText( text ) - - self._gauge_text.SetLabelText( text ) def THREADParseImportablePaths( self, raw_paths, job_key ): - wx.CallAfter( self.SetGaugeInfo, None, None, u'Parsing files and folders.' ) + HydrusGlobals.client_controller.pub( 'DialogInputLocalFiles_SetGaugeInfo', self._dialog_key, None, None, u'Parsing files and folders.' 
) file_paths = ClientFiles.GetAllPaths( raw_paths ) @@ -1805,7 +1817,7 @@ class DialogInputLocalFiles( Dialog ): if i % 500 == 0: gc.collect() - wx.CallAfter( self.SetGaugeInfo, num_file_paths, i, u'Done ' + HydrusData.ConvertValueRangeToPrettyString( i, num_file_paths ) ) + HydrusGlobals.client_controller.pub( 'DialogInputLocalFiles_SetGaugeInfo', self._dialog_key, num_file_paths, i, u'Done ' + HydrusData.ConvertValueRangeToPrettyString( i, num_file_paths ) ) ( i_paused, should_quit ) = job_key.WaitIfNeeded() @@ -1831,7 +1843,7 @@ class DialogInputLocalFiles( Dialog ): num_good_files += 1 - wx.CallAfter( self.AddParsedPath, path, mime, size ) + HydrusGlobals.client_controller.pub( 'DialogInputLocalFiles_AddParsedPath', self._dialog_key, path, mime, size ) else: @@ -1891,9 +1903,8 @@ class DialogInputLocalFiles( Dialog ): HydrusData.Print( message ) - wx.CallAfter( self.SetGaugeInfo, num_file_paths, num_file_paths, message ) - - wx.CallAfter( self.DoneParsing ) + HydrusGlobals.client_controller.pub( 'DialogInputLocalFiles_SetGaugeInfo', self._dialog_key, num_file_paths, num_file_paths, message ) + HydrusGlobals.client_controller.pub( 'DialogInputLocalFiles_DoneParsing', self._dialog_key ) class DialogInputNamespaceRegex( Dialog ): diff --git a/include/ClientGUIMedia.py b/include/ClientGUIMedia.py index c2e73125..903b3965 100755 --- a/include/ClientGUIMedia.py +++ b/include/ClientGUIMedia.py @@ -2175,7 +2175,7 @@ class MediaPanelThumbnails( MediaPanel ): if len( potential_clean_indices_to_steal ) > 0: - index_to_steal = potential_clean_indices_to_steal.pop() + index_to_steal = potential_clean_indices_to_steal.pop( 0 ) self._DirtyPage( index_to_steal ) diff --git a/include/ClientGUIParsing.py b/include/ClientGUIParsing.py index 93f1f459..12d13db0 100644 --- a/include/ClientGUIParsing.py +++ b/include/ClientGUIParsing.py @@ -1792,7 +1792,7 @@ class ManageParsingScriptsPanel( ClientGUIScrolledPanels.ManagePanel ): if dlg.ShowModal() == wx.ID_OK: - path = dlg.GetPath() + path = HydrusData.ToUnicode( dlg.GetPath() ) try: diff --git a/include/ClientGUIScrolledPanels.py b/include/ClientGUIScrolledPanels.py index 39cdca72..4c26dccd 100644 --- a/include/ClientGUIScrolledPanels.py +++ b/include/ClientGUIScrolledPanels.py @@ -13,7 +13,7 @@ class ResizingScrolledPanel( wx.lib.scrolledpanel.ScrolledPanel ): def EventSizeChanged( self, event ): - self.SetVirtualSize( self.DoGetBestSize() ) + self.SetVirtualSize( self.GetBestVirtualSize() ) event.Skip() diff --git a/include/ClientGUIScrolledPanelsManagement.py b/include/ClientGUIScrolledPanelsManagement.py index c1513371..4edb7464 100644 --- a/include/ClientGUIScrolledPanelsManagement.py +++ b/include/ClientGUIScrolledPanelsManagement.py @@ -11,6 +11,7 @@ import ClientGUIScrolledPanelsEdit import ClientGUITagSuggestions import ClientGUITopLevelWindows import ClientMedia +import collections import HydrusConstants as HC import HydrusData import HydrusGlobals @@ -1068,6 +1069,8 @@ class ManageOptionsPanel( ClientGUIScrolledPanels.ManagePanel ): wx.Panel.__init__( self, parent ) + self._main_gui_title = wx.TextCtrl( self ) + self._default_gui_session = wx.Choice( self ) self._confirm_client_exit = wx.CheckBox( self ) @@ -1100,6 +1103,8 @@ class ManageOptionsPanel( ClientGUIScrolledPanels.ManagePanel ): self._new_options = HydrusGlobals.client_controller.GetNewOptions() + self._main_gui_title.SetValue( self._new_options.GetString( 'main_gui_title' ) ) + gui_session_names = HydrusGlobals.client_controller.Read( 'serialisable_names', 
HydrusSerialisable.SERIALISABLE_TYPE_GUI_SESSION ) if 'last session' not in gui_session_names: gui_session_names.insert( 0, 'last session' ) @@ -1147,6 +1152,7 @@ class ManageOptionsPanel( ClientGUIScrolledPanels.ManagePanel ): rows = [] + rows.append( ( 'Main gui title: ', self._main_gui_title ) ) rows.append( ( 'Default session on startup: ', self._default_gui_session ) ) rows.append( ( 'Confirm client exit: ', self._confirm_client_exit ) ) rows.append( ( 'Confirm sending files to trash: ', self._confirm_trash ) ) @@ -1230,6 +1236,12 @@ class ManageOptionsPanel( ClientGUIScrolledPanels.ManagePanel ): HC.options[ 'hide_preview' ] = self._hide_preview.GetValue() + title = self._main_gui_title.GetValue() + + self._new_options.SetString( 'main_gui_title', title ) + + HydrusGlobals.client_controller.pub( 'main_gui_title', title ) + self._new_options.SetBoolean( 'show_thumbnail_title_banner', self._show_thumbnail_title_banner.GetValue() ) self._new_options.SetBoolean( 'show_thumbnail_page', self._show_thumbnail_page.GetValue() ) @@ -2439,7 +2451,14 @@ class ManageTagsPanel( ClientGUIScrolledPanels.ManagePanel ): # If I let this go uncaught, it propagates to the media viewer above, so an Enter or a '+' closes the window or zooms in! # The DoAllowNextEvent tells wx to gen regular key_down/char events so our text box gets them like normal, despite catching the event here - event.DoAllowNextEvent() + if event.KeyCode == wx.WXK_ESCAPE: + + event.Skip() + + else: + + event.DoAllowNextEvent() + else: @@ -2650,23 +2669,23 @@ class ManageTagsPanel( ClientGUIScrolledPanels.ManagePanel ): num_files = len( self._media ) - sets_of_choices = [] + # let's figure out what these tags can mean for the media--add, remove, or what? - potential_num_reasons_needed = 0 + choices = collections.defaultdict( list ) for tag in tags: num_current = len( [ 1 for tag_manager in tag_managers if tag in tag_manager.GetCurrent( self._tag_service_key ) ] ) - choices = [] - if self._i_am_local_tag_service: if not only_remove: if num_current < num_files: - choices.append( ( 'add ' + tag + ' to ' + HydrusData.ConvertIntToPrettyString( num_files - num_current ) + ' files', ( HC.CONTENT_UPDATE_ADD, tag ) ) ) + num_non_current = num_files - num_current + + choices[ HC.CONTENT_UPDATE_ADD ].append( ( tag, num_non_current ) ) @@ -2674,7 +2693,7 @@ class ManageTagsPanel( ClientGUIScrolledPanels.ManagePanel ): if num_current > 0: - choices.append( ( 'delete ' + tag + ' from ' + HydrusData.ConvertIntToPrettyString( num_current ) + ' files', ( HC.CONTENT_UPDATE_DELETE, tag ) ) ) + choices[ HC.CONTENT_UPDATE_DELETE ].append( ( tag, num_current ) ) @@ -2685,21 +2704,26 @@ class ManageTagsPanel( ClientGUIScrolledPanels.ManagePanel ): if not only_remove: - if num_current + num_pending < num_files: choices.append( ( 'pend ' + tag + ' to ' + HydrusData.ConvertIntToPrettyString( num_files - ( num_current + num_pending ) ) + ' files', ( HC.CONTENT_UPDATE_PEND, tag ) ) ) + if num_current + num_pending < num_files: + + num_pendable = num_files - ( num_current + num_pending ) + + choices[ HC.CONTENT_UPDATE_PEND ].append( ( tag, num_pendable ) ) + if not only_add: if num_current > num_petitioned and not only_add: - choices.append( ( 'petition ' + tag + ' from ' + HydrusData.ConvertIntToPrettyString( num_current - num_petitioned ) + ' files', ( HC.CONTENT_UPDATE_PETITION, tag ) ) ) + num_petitionable = num_current - num_petitioned - potential_num_reasons_needed += 1 + choices[ HC.CONTENT_UPDATE_PETITION ].append( ( tag, num_petitionable ) ) if 
num_pending > 0 and not only_add: - choices.append( ( 'rescind pending ' + tag + ' from ' + HydrusData.ConvertIntToPrettyString( num_pending ) + ' files', ( HC.CONTENT_UPDATE_RESCIND_PEND, tag ) ) ) + choices[ HC.CONTENT_UPDATE_RESCIND_PEND ].append( ( tag, num_pending ) ) @@ -2707,197 +2731,173 @@ class ManageTagsPanel( ClientGUIScrolledPanels.ManagePanel ): if num_petitioned > 0: - choices.append( ( 'rescind petitioned ' + tag + ' from ' + HydrusData.ConvertIntToPrettyString( num_petitioned ) + ' files', ( HC.CONTENT_UPDATE_RESCIND_PETITION, tag ) ) ) - - - - - if len( choices ) == 0: - - continue - - - sets_of_choices.append( choices ) - - - if forced_reason is None and potential_num_reasons_needed > 1: - - no_user_choices = True not in ( len( choices ) > 1 for choices in sets_of_choices ) - - if no_user_choices: - - message = 'You are about to petition more than one tag.' - - else: - - message = 'You might be about to petition more than one tag.' - - - message += os.linesep * 2 - message += 'To save you time, would you like to use the same reason for all the petitions?' - - with ClientGUIDialogs.DialogYesNo( self, message, title = 'Many petitions found' ) as yn_dlg: - - if yn_dlg.ShowModal() == wx.ID_YES: - - message = 'Please enter your common petition reason here:' - - with ClientGUIDialogs.DialogTextEntry( self, message ) as text_dlg: - - if text_dlg.ShowModal() == wx.ID_OK: - - forced_reason = text_dlg.GetValue() - + choices[ HC.CONTENT_UPDATE_RESCIND_PETITION ].append( ( tag, num_petitioned ) ) - forced_choice_actions = [] + # now we have options, let's ask the user what they want to do - immediate_content_updates = [] - - for choices in sets_of_choices: + if len( choices ) == 1: - always_do = False + [ ( choice_action, tag_counts ) ] = choices.items() - if len( choices ) == 1: - - [ ( text_gumpf, choice ) ] = choices - - else: - - choice = None - - for forced_choice_action in forced_choice_actions: - - for possible_choice in choices: - - ( text_gumpf, ( choice_action, choice_tag ) ) = possible_choice - - if choice_action == forced_choice_action: - - choice = ( choice_action, choice_tag ) - - break - - - - if choice is not None: - - break - - - - if choice is None: - - intro = 'What would you like to do?' 
- - show_always_checkbox = len( sets_of_choices ) > 1 - - with ClientGUIDialogs.DialogButtonChoice( self, intro, choices, show_always_checkbox = show_always_checkbox ) as dlg: - - result = dlg.ShowModal() - - if result == wx.ID_OK: - - ( always_do, choice ) = dlg.GetData() - - else: - - break - - - - + tags = { tag for ( tag, count ) in tag_counts } - if choice is None: + else: + + bdc_choices = [] + + preferred_order = [ HC.CONTENT_UPDATE_ADD, HC.CONTENT_UPDATE_DELETE, HC.CONTENT_UPDATE_PEND, HC.CONTENT_UPDATE_RESCIND_PEND, HC.CONTENT_UPDATE_PETITION, HC.CONTENT_UPDATE_RESCIND_PETITION ] + + choice_text_lookup = {} + + choice_text_lookup[ HC.CONTENT_UPDATE_ADD ] = 'add' + choice_text_lookup[ HC.CONTENT_UPDATE_DELETE ] = 'delete' + choice_text_lookup[ HC.CONTENT_UPDATE_PEND ] = 'pend' + choice_text_lookup[ HC.CONTENT_UPDATE_PETITION ] = 'petition' + choice_text_lookup[ HC.CONTENT_UPDATE_RESCIND_PEND ] = 'rescind pend' + choice_text_lookup[ HC.CONTENT_UPDATE_RESCIND_PETITION ] = 'rescind petition' + + for choice_action in preferred_order: - continue - - - ( choice_action, choice_tag ) = choice - - if always_do: - - forced_choice_actions.append( choice_action ) - - - if choice_action == HC.CONTENT_UPDATE_ADD: media_to_affect = ( m for m in self._media if choice_tag not in m.GetTagsManager().GetCurrent( self._tag_service_key ) ) - elif choice_action == HC.CONTENT_UPDATE_DELETE: media_to_affect = ( m for m in self._media if choice_tag in m.GetTagsManager().GetCurrent( self._tag_service_key ) ) - elif choice_action == HC.CONTENT_UPDATE_PEND: media_to_affect = ( m for m in self._media if choice_tag not in m.GetTagsManager().GetCurrent( self._tag_service_key ) and choice_tag not in m.GetTagsManager().GetPending( self._tag_service_key ) ) - elif choice_action == HC.CONTENT_UPDATE_PETITION: media_to_affect = ( m for m in self._media if choice_tag in m.GetTagsManager().GetCurrent( self._tag_service_key ) and choice_tag not in m.GetTagsManager().GetPetitioned( self._tag_service_key ) ) - elif choice_action == HC.CONTENT_UPDATE_RESCIND_PEND: media_to_affect = ( m for m in self._media if choice_tag in m.GetTagsManager().GetPending( self._tag_service_key ) ) - elif choice_action == HC.CONTENT_UPDATE_RESCIND_PETITION: media_to_affect = ( m for m in self._media if choice_tag in m.GetTagsManager().GetPetitioned( self._tag_service_key ) ) - - hashes = set( itertools.chain.from_iterable( ( m.GetHashes() for m in media_to_affect ) ) ) - - content_updates = [] - - if choice_action == HC.CONTENT_UPDATE_PETITION: - - if forced_reason is None: + if choice_action not in choices: - message = 'Enter a reason for ' + choice_tag + ' to be removed. A janitor will review your petition.' 
+ continue - with ClientGUIDialogs.DialogTextEntry( self, message ) as dlg: - - if dlg.ShowModal() == wx.ID_OK: - - reason = dlg.GetValue() - - else: - - continue - - + + choice_text_prefix = choice_text_lookup[ choice_action ] + + tag_counts = choices[ choice_action ] + + tags = { tag for ( tag, count ) in tag_counts } + + if len( tags ) == 1: + [ ( tag, count ) ] = tag_counts + + text = choice_text_prefix + ' "' + tag + '" for ' + HydrusData.ConvertIntToPrettyString( count ) + ' files' else: - reason = forced_reason + text = choice_text_prefix + ' ' + HydrusData.ConvertIntToPrettyString( len( tags ) ) + ' tags' - content_updates.append( HydrusData.ContentUpdate( HC.CONTENT_TYPE_MAPPINGS, choice_action, ( choice_tag, hashes, reason ) ) ) + data = ( choice_action, tags ) + + tooltip = os.linesep.join( ( tag + ' - ' + HydrusData.ConvertIntToPrettyString( count ) + ' files' for ( tag, count ) in tag_counts ) ) + + bdc_choices.append( ( text, data, tooltip ) ) + + + intro = 'What would you like to do?' + + with ClientGUIDialogs.DialogButtonChoice( self, intro, bdc_choices ) as dlg: + + result = dlg.ShowModal() + + if result == wx.ID_OK: + + ( always_do, ( choice_action, tags ) ) = dlg.GetData() + + else: + + return + + + + + + if choice_action == HC.CONTENT_UPDATE_PETITION: + + if forced_reason is None: + + # add the easy reason buttons here + + if len( tags ) == 1: + + ( tag, ) = tags + + tag_text = '"' + tag + '"' + + else: + + tag_text = 'the ' + HydrusData.ConvertIntToPrettyString( len( tags ) ) + ' tags' + + + message = 'Enter a reason for ' + tag_text + ' to be removed. A janitor will review your petition.' + + with ClientGUIDialogs.DialogTextEntry( self, message ) as dlg: + + if dlg.ShowModal() == wx.ID_OK: + + reason = dlg.GetValue() + + else: + + return + + else: - content_updates.append( HydrusData.ContentUpdate( HC.CONTENT_TYPE_MAPPINGS, choice_action, ( choice_tag, hashes ) ) ) + reason = forced_reason + + + + # we have an action and tags, so let's effect the content updates + + content_updates = [] + + for tag in tags: + + if choice_action == HC.CONTENT_UPDATE_ADD: media_to_affect = ( m for m in self._media if tag not in m.GetTagsManager().GetCurrent( self._tag_service_key ) ) + elif choice_action == HC.CONTENT_UPDATE_DELETE: media_to_affect = ( m for m in self._media if tag in m.GetTagsManager().GetCurrent( self._tag_service_key ) ) + elif choice_action == HC.CONTENT_UPDATE_PEND: media_to_affect = ( m for m in self._media if tag not in m.GetTagsManager().GetCurrent( self._tag_service_key ) and tag not in m.GetTagsManager().GetPending( self._tag_service_key ) ) + elif choice_action == HC.CONTENT_UPDATE_PETITION: media_to_affect = ( m for m in self._media if tag in m.GetTagsManager().GetCurrent( self._tag_service_key ) and tag not in m.GetTagsManager().GetPetitioned( self._tag_service_key ) ) + elif choice_action == HC.CONTENT_UPDATE_RESCIND_PEND: media_to_affect = ( m for m in self._media if tag in m.GetTagsManager().GetPending( self._tag_service_key ) ) + elif choice_action == HC.CONTENT_UPDATE_RESCIND_PETITION: media_to_affect = ( m for m in self._media if tag in m.GetTagsManager().GetPetitioned( self._tag_service_key ) ) + + hashes = set( itertools.chain.from_iterable( ( m.GetHashes() for m in media_to_affect ) ) ) + + if choice_action == HC.CONTENT_UPDATE_PETITION: + + content_updates.append( HydrusData.ContentUpdate( HC.CONTENT_TYPE_MAPPINGS, choice_action, ( tag, hashes, reason ) ) ) + + else: + + content_updates.append( HydrusData.ContentUpdate( HC.CONTENT_TYPE_MAPPINGS, 
choice_action, ( tag, hashes ) ) ) if choice_action in ( HC.CONTENT_UPDATE_ADD, HC.CONTENT_UPDATE_PEND ) and self._add_parents_checkbox.GetValue(): tag_parents_manager = HydrusGlobals.client_controller.GetManager( 'tag_parents' ) - parents = tag_parents_manager.GetParents( self._tag_service_key, choice_tag ) + parents = tag_parents_manager.GetParents( self._tag_service_key, tag ) content_updates.extend( ( HydrusData.ContentUpdate( HC.CONTENT_TYPE_MAPPINGS, choice_action, ( parent, hashes ) ) for parent in parents ) ) - for m in self._media: - - for content_update in content_updates: - - m.GetMediaResult().ProcessContentUpdate( self._tag_service_key, content_update ) - - + + for m in self._media: - if self._immediate_commit: + for content_update in content_updates: - immediate_content_updates.extend( content_updates ) - - else: - - self._content_updates.extend( content_updates ) + m.GetMediaResult().ProcessContentUpdate( self._tag_service_key, content_update ) - if len( immediate_content_updates ) > 0: + if self._immediate_commit: - service_keys_to_content_updates = { self._tag_service_key : immediate_content_updates } + service_keys_to_content_updates = { self._tag_service_key : content_updates } HydrusGlobals.client_controller.WriteSynchronous( 'content_updates', service_keys_to_content_updates ) + else: + + self._content_updates.extend( content_updates ) + self._tags_box.SetTagsByMedia( self._media, force_reload = True ) diff --git a/include/ClientGUIScrolledPanelsReview.py b/include/ClientGUIScrolledPanelsReview.py index 64ac1ea9..2b94f4c7 100644 --- a/include/ClientGUIScrolledPanelsReview.py +++ b/include/ClientGUIScrolledPanelsReview.py @@ -36,6 +36,7 @@ class ReviewServicesPanel( ClientGUIScrolledPanels.ReviewPanel ): self.Bind( wx.EVT_NOTEBOOK_PAGE_CHANGED, self.EventPageChanged ) vbox = wx.BoxSizer( wx.VERTICAL ) + vbox.AddF( self._notebook, CC.FLAGS_EXPAND_BOTH_WAYS ) vbox.AddF( self._edit, CC.FLAGS_SMALL_INDENT ) @@ -96,6 +97,8 @@ class ReviewServicesPanel( ClientGUIScrolledPanels.ReviewPanel ): def DoGetBestSize( self ): + # this overrides the py stub in ScrolledPanel, which allows for unusual scroll behaviour driven by whatever this returns + # wx.Notebook isn't expanding on page change and hence increasing min/virtual size and so on to the scrollable panel above, nullifying the neat expand-on-change-page event # so, until I write my own or figure out a clever solution, let's just force it @@ -167,7 +170,10 @@ class ReviewServicesPanel( ClientGUIScrolledPanels.ReviewPanel ): self._files_text = wx.StaticText( self._info_panel, style = wx.ALIGN_CENTER | wx.ST_NO_AUTORESIZE ) - self._deleted_files_text = wx.StaticText( self._info_panel, style = wx.ALIGN_CENTER | wx.ST_NO_AUTORESIZE ) + if self._service_key != CC.TRASH_SERVICE_KEY: + + self._deleted_files_text = wx.StaticText( self._info_panel, style = wx.ALIGN_CENTER | wx.ST_NO_AUTORESIZE ) + elif service_type in HC.TAG_SERVICES: @@ -316,7 +322,10 @@ class ReviewServicesPanel( ClientGUIScrolledPanels.ReviewPanel ): self._info_panel.AddF( self._files_text, CC.FLAGS_EXPAND_PERPENDICULAR ) - self._info_panel.AddF( self._deleted_files_text, CC.FLAGS_EXPAND_PERPENDICULAR ) + if self._service_key != CC.TRASH_SERVICE_KEY: + + self._info_panel.AddF( self._deleted_files_text, CC.FLAGS_EXPAND_PERPENDICULAR ) + elif service_type in HC.TAG_SERVICES: @@ -593,9 +602,12 @@ class ReviewServicesPanel( ClientGUIScrolledPanels.ReviewPanel ): self._files_text.SetLabelText( HydrusData.ConvertIntToPrettyString( num_files ) + ' files, totalling ' + 
HydrusData.ConvertIntToBytes( total_size ) ) - num_deleted_files = service_info[ HC.SERVICE_INFO_NUM_DELETED_FILES ] - - self._deleted_files_text.SetLabelText( HydrusData.ConvertIntToPrettyString( num_deleted_files ) + ' deleted files' ) + if self._service_key != CC.TRASH_SERVICE_KEY: + + num_deleted_files = service_info[ HC.SERVICE_INFO_NUM_DELETED_FILES ] + + self._deleted_files_text.SetLabelText( HydrusData.ConvertIntToPrettyString( num_deleted_files ) + ' deleted files' ) + elif service_type in HC.TAG_SERVICES: diff --git a/include/ClientGUISerialisable.py b/include/ClientGUISerialisable.py index f4770861..0a403bf3 100644 --- a/include/ClientGUISerialisable.py +++ b/include/ClientGUISerialisable.py @@ -4,6 +4,7 @@ import ClientGUIScrolledPanels import ClientParsing import ClientSerialisable import HydrusConstants as HC +import HydrusData import wx class PngExportPanel( ClientGUIScrolledPanels.ReviewPanel ): @@ -87,7 +88,7 @@ class PngExportPanel( ClientGUIScrolledPanels.ReviewPanel ): title = self._title.GetValue() text = self._text.GetValue() - path = self._filepicker.GetPath() + path = HydrusData.ToUnicode( self._filepicker.GetPath() ) if not path.endswith( '.png' ): @@ -96,4 +97,8 @@ class PngExportPanel( ClientGUIScrolledPanels.ReviewPanel ): ClientSerialisable.DumpToPng( payload_string, title, payload_type, text, path ) + self._export.SetLabelText( 'done!' ) + + wx.CallLater( 2000, self._export.SetLabelText, 'export' ) + \ No newline at end of file diff --git a/include/ClientGUITopLevelWindows.py b/include/ClientGUITopLevelWindows.py index d9462a22..2767b0dd 100644 --- a/include/ClientGUITopLevelWindows.py +++ b/include/ClientGUITopLevelWindows.py @@ -456,6 +456,16 @@ class FrameThatTakesScrollablePanel( FrameThatResizes ): self.Bind( wx.EVT_MENU, self.EventMenu ) self.Bind( CC.EVT_SIZE_CHANGED, self.EventChildSizeChanged ) + self.Bind( wx.EVT_CHAR_HOOK, self.EventCharHook ) + + + def EventCharHook( self, event ): + + if event.KeyCode == wx.WXK_ESCAPE: + + self.Close() + + def EventMenu( self, event ): diff --git a/include/ClientImageHandling.py b/include/ClientImageHandling.py index 5f9a521f..390e1c08 100644 --- a/include/ClientImageHandling.py +++ b/include/ClientImageHandling.py @@ -109,7 +109,7 @@ def GenerateNumPyImageFromPILImage( pil_image ): return numpy.fromstring( s, dtype = 'uint8' ).reshape( ( h, w, len( s ) // ( w * h ) ) ) -def GeneratePerceptualHash( path ): +def GenerateShapePerceptualHashes( path ): numpy_image = GenerateNumpyImage( path ) @@ -180,11 +180,13 @@ def GeneratePerceptualHash( path ): bytes.append( byte ) - answer = str( bytearray( bytes ) ) + phash = str( bytearray( bytes ) ) + + phashes = [ phash ] # we good - return answer + return phashes def ResizeNumpyImage( mime, numpy_image, ( target_x, target_y ) ): diff --git a/include/ClientNetworking.py b/include/ClientNetworking.py index a6516076..b7a26e20 100644 --- a/include/ClientNetworking.py +++ b/include/ClientNetworking.py @@ -410,13 +410,151 @@ class HTTPConnection( object ): self._RefreshConnection() - def _GetResponse( self, method_string, path_and_query, request_headers, body, attempt_number = 1 ): + def _DealWithResponse( self, method, response, parsed_response, size_of_response ): + + response_headers = { k : v for ( k, v ) in response.getheaders() if k != 'set-cookie' } + + cookies = self._ParseCookies( response.getheader( 'set-cookie' ) ) + + self._last_request_time = HydrusData.GetNow() + + if response.status == 200: + + return ( parsed_response, None, size_of_response, response_headers, 
cookies ) + + elif response.status in ( 301, 302, 303, 307 ): + + location = response.getheader( 'Location' ) + + if location is None: + + raise Exception( 'Received an invalid redirection response.' ) + + else: + + url = location + + if ', ' in url: + + url = url.split( ', ' )[0] + + elif ' ' in url: + + # some booru is giving daft redirect responses + HydrusData.Print( url ) + url = urllib.quote( HydrusData.ToByteString( url ), safe = '/?=&' ) + HydrusData.Print( url ) + + + if not url.startswith( self._scheme ): + + # assume it is like 'index.php' or '/index.php', rather than 'http://blah.com/index.php' + + if url.startswith( '/' ): slash_sep = '' + else: slash_sep = '/' + + url = self._scheme + '://' + self._host + slash_sep + url + + + if response.status in ( 301, 307 ): + + # 301: moved permanently, repeat request + # 307: moved temporarily, repeat request + + redirect_info = ( method, url ) + + elif response.status in ( 302, 303 ): + + # 302: moved temporarily, repeat request (except everyone treats it like 303 for no good fucking reason) + # 303: thanks, now go here with GET + + redirect_info = ( HC.GET, url ) + + + return ( parsed_response, redirect_info, size_of_response, response_headers, cookies ) + + + elif response.status == 304: raise HydrusExceptions.NotModifiedException() + else: + + if response.status == 401: raise HydrusExceptions.PermissionException( parsed_response ) + elif response.status == 403: raise HydrusExceptions.ForbiddenException( parsed_response ) + elif response.status == 404: raise HydrusExceptions.NotFoundException( parsed_response ) + elif response.status == 419: raise HydrusExceptions.SessionException( parsed_response ) + elif response.status == 426: raise HydrusExceptions.NetworkVersionException( parsed_response ) + elif response.status in ( 500, 501, 502, 503 ): + + server_header = response.getheader( 'Server' ) + + if server_header is not None and 'hydrus' in server_header: + + hydrus_service = True + + else: + + hydrus_service = False + + + if response.status == 503 and hydrus_service: + + raise HydrusExceptions.ServerBusyException( 'Server is busy, please try again later.' 
) + + else: + + raise Exception( parsed_response ) + + + else: raise Exception( parsed_response ) + + + + def _SendRequestGetResponse( self, method, path_and_query, request_headers, body, report_hooks = None, temp_path = None, attempt_number = 1 ): + + if report_hooks is None: + + report_hooks = [] + + + if 'User-Agent' not in request_headers: + + request_headers[ 'User-Agent' ] = 'hydrus/' + str( HC.NETWORK_VERSION ) + + + path_and_query = HydrusData.ToByteString( path_and_query ) + + request_headers = { str( k ) : str( v ) for ( k, v ) in request_headers.items() } + + ( response, attempt_number ) = self._GetInitialResponse( method, path_and_query, request_headers, body, attempt_number = attempt_number ) + + try: + + ( parsed_response, size_of_response ) = self._ReadResponse( method, response, report_hooks, temp_path ) + + return ( response, parsed_response, size_of_response ) + + except HydrusExceptions.ShouldReattemptNetworkException: + + if method == HC.GET: + + return self._SendRequestGetResponse( method, path_and_query, request_headers, body, report_hooks = report_hooks, temp_path = temp_path, attempt_number = attempt_number + 1 ) + + else: + + raise + + + + + def _GetInitialResponse( self, method, path_and_query, request_headers, body, attempt_number = 1 ): + + if method == HC.GET: method_string = 'GET' + elif method == HC.POST: method_string = 'POST' try: self._connection.request( method_string, path_and_query, headers = request_headers, body = body ) - return self._connection.getresponse() + return ( self._connection.getresponse(), attempt_number ) except ( httplib.CannotSendRequest, httplib.BadStatusLine ): @@ -428,7 +566,7 @@ class HTTPConnection( object ): self._RefreshConnection() - return self._GetResponse( method_string, path_and_query, request_headers, body, attempt_number = attempt_number + 1 ) + return self._GetInitialResponse( method, path_and_query, request_headers, body, attempt_number = attempt_number + 1 ) else: @@ -458,7 +596,7 @@ class HTTPConnection( object ): self._RefreshConnection() - return self._GetResponse( method_string, path_and_query, request_headers, body, attempt_number = attempt_number + 1 ) + return self._GetInitialResponse( method, path_and_query, request_headers, body, attempt_number = attempt_number + 1 ) else: @@ -480,7 +618,7 @@ class HTTPConnection( object ): self._RefreshConnection() - return self._GetResponse( method_string, path_and_query, request_headers, body, attempt_number = attempt_number + 1 ) + return self._GetInitialResponse( method_string, path_and_query, request_headers, body, attempt_number = attempt_number + 1 ) else: @@ -491,7 +629,17 @@ class HTTPConnection( object ): - def _ReadResponse( self, response, report_hooks, temp_path = None ): + def _ReadResponse( self, method, response, report_hooks, temp_path = None ): + + # in general, don't want to resend POSTs + if method == HC.GET: + + recoverable_exc = HydrusExceptions.ShouldReattemptNetworkException + + else: + + recoverable_exc = HydrusExceptions.NetworkException + try: @@ -508,18 +656,22 @@ class HTTPConnection( object ): except socket.timeout as e: - raise HydrusExceptions.NetworkException( 'Connection timed out during response read.' ) + raise recoverable_exc( 'Connection timed out during response read.' ) except socket.error as e: if e.errno == errno.WSAECONNRESET: - raise HydrusExceptions.NetworkException( 'Connection reset by remote host.' ) + raise recoverable_exc( 'Connection reset by remote host.' 
) + + else: + + raise except ssl.SSLEOFError: - raise HydrusExceptions.NetworkException( 'Secure connection terminated abruptly.' ) + raise recoverable_exc( 'Secure connection terminated abruptly.' ) return ( parsed_response, size_of_response ) @@ -718,118 +870,9 @@ class HTTPConnection( object ): def Request( self, method, path_and_query, request_headers, body, report_hooks = None, temp_path = None ): - if report_hooks is None: report_hooks = [] + ( response, parsed_response, size_of_response ) = self._SendRequestGetResponse( method, path_and_query, request_headers, body, report_hooks = report_hooks, temp_path = temp_path ) - if method == HC.GET: method_string = 'GET' - elif method == HC.POST: method_string = 'POST' - - if 'User-Agent' not in request_headers: - - request_headers[ 'User-Agent' ] = 'hydrus/' + str( HC.NETWORK_VERSION ) - - - path_and_query = HydrusData.ToByteString( path_and_query ) - - request_headers = { str( k ) : str( v ) for ( k, v ) in request_headers.items() } - - response = self._GetResponse( method_string, path_and_query, request_headers, body ) - - ( parsed_response, size_of_response ) = self._ReadResponse( response, report_hooks, temp_path ) - - response_headers = { k : v for ( k, v ) in response.getheaders() if k != 'set-cookie' } - - cookies = self._ParseCookies( response.getheader( 'set-cookie' ) ) - - self._last_request_time = HydrusData.GetNow() - - if response.status == 200: - - return ( parsed_response, None, size_of_response, response_headers, cookies ) - - elif response.status in ( 301, 302, 303, 307 ): - - location = response.getheader( 'Location' ) - - if location is None: - - raise Exception( 'Received an invalid redirection response.' ) - - else: - - url = location - - if ', ' in url: - - url = url.split( ', ' )[0] - - elif ' ' in url: - - # some booru is giving daft redirect responses - HydrusData.Print( url ) - url = urllib.quote( HydrusData.ToByteString( url ), safe = '/?=&' ) - HydrusData.Print( url ) - - - if not url.startswith( self._scheme ): - - # assume it is like 'index.php' or '/index.php', rather than 'http://blah.com/index.php' - - if url.startswith( '/' ): slash_sep = '' - else: slash_sep = '/' - - url = self._scheme + '://' + self._host + slash_sep + url - - - if response.status in ( 301, 307 ): - - # 301: moved permanently, repeat request - # 307: moved temporarily, repeat request - - redirect_info = ( method, url ) - - elif response.status in ( 302, 303 ): - - # 302: moved temporarily, repeat request (except everyone treats it like 303 for no good fucking reason) - # 303: thanks, now go here with GET - - redirect_info = ( HC.GET, url ) - - - return ( parsed_response, redirect_info, size_of_response, response_headers, cookies ) - - - elif response.status == 304: raise HydrusExceptions.NotModifiedException() - else: - - if response.status == 401: raise HydrusExceptions.PermissionException( parsed_response ) - elif response.status == 403: raise HydrusExceptions.ForbiddenException( parsed_response ) - elif response.status == 404: raise HydrusExceptions.NotFoundException( parsed_response ) - elif response.status == 419: raise HydrusExceptions.SessionException( parsed_response ) - elif response.status == 426: raise HydrusExceptions.NetworkVersionException( parsed_response ) - elif response.status in ( 500, 501, 502, 503 ): - - server_header = response.getheader( 'Server' ) - - if server_header is not None and 'hydrus' in server_header: - - hydrus_service = True - - else: - - hydrus_service = False - - - if response.status == 503 and 
hydrus_service: - - raise HydrusExceptions.ServerBusyException( 'Server is busy, please try again later.' ) - - else: - - raise Exception( parsed_response ) - - - else: raise Exception( parsed_response ) - + return self._DealWithResponse( method, response, parsed_response, size_of_response ) \ No newline at end of file diff --git a/include/ClientSerialisable.py b/include/ClientSerialisable.py index b4b7852f..37ecf2fb 100644 --- a/include/ClientSerialisable.py +++ b/include/ClientSerialisable.py @@ -4,9 +4,11 @@ import ClientParsing import cv2 import HydrusConstants as HC import HydrusData +import HydrusPaths import HydrusSerialisable import numpy import os +import shutil import struct import wx @@ -164,7 +166,25 @@ def DumpToPng( payload, title, payload_type, text, path ): finished_image = numpy.concatenate( ( top_image, payload_image ) ) - cv2.imwrite( path, finished_image, [ cv2.IMWRITE_PNG_COMPRESSION, 9 ] ) + # this is to deal with unicode paths, which cv2 can't handle + ( os_file_handle, temp_path ) = HydrusPaths.GetTempPath( suffix = '.png' ) + + try: + + cv2.imwrite( temp_path, finished_image, [ cv2.IMWRITE_PNG_COMPRESSION, 9 ] ) + + shutil.copy2( temp_path, path ) + + except Exception as e: + + HydrusData.ShowException( e ) + + raise Exception( 'Could not save the png!' ) + + finally: + + HydrusPaths.CleanUpTempPath( os_file_handle, temp_path ) + def GetPayloadTypeAndString( payload_obj ): @@ -181,9 +201,14 @@ def GetPayloadTypeAndString( payload_obj ): def LoadFromPng( path ): + # this is to deal with unicode paths, which cv2 can't handle + ( os_file_handle, temp_path ) = HydrusPaths.GetTempPath() + try: - numpy_image = cv2.imread( path, flags = IMREAD_UNCHANGED ) + shutil.copy2( path, temp_path ) + + numpy_image = cv2.imread( temp_path, flags = IMREAD_UNCHANGED ) except Exception as e: @@ -191,6 +216,10 @@ def LoadFromPng( path ): raise Exception( 'That did not appear to be a valid image!' 
) + finally: + + HydrusPaths.CleanUpTempPath( os_file_handle, temp_path ) + try: diff --git a/include/HydrusConstants.py b/include/HydrusConstants.py index c4d97cf3..a14eb62a 100755 --- a/include/HydrusConstants.py +++ b/include/HydrusConstants.py @@ -44,7 +44,7 @@ options = {} # Misc NETWORK_VERSION = 17 -SOFTWARE_VERSION = 233 +SOFTWARE_VERSION = 234 UNSCALED_THUMBNAIL_DIMENSIONS = ( 200, 200 ) diff --git a/include/HydrusData.py b/include/HydrusData.py index bb738317..32b9b39a 100644 --- a/include/HydrusData.py +++ b/include/HydrusData.py @@ -10,6 +10,7 @@ import locale import os import pstats import psutil +import random import shutil import sqlite3 import subprocess @@ -954,6 +955,14 @@ def Profile( code, g, l ): DebugPrint( output.read() ) +def RandomPop( population ): + + random_index = random.randint( 0, len( population ) - 1 ) + + row = population.pop( random_index ) + + return row + def RecordRunningStart( db_path, instance ): path = os.path.join( db_path, instance + '_running' ) @@ -982,7 +991,24 @@ def RestartProcess(): time.sleep( 1 ) # time for ports to unmap - os.execl( sys.executable, sys.executable, *sys.argv ) + exe = sys.executable + me = sys.argv[0] + + if me.endswith( '.py' ) or me.endswith( '.pyw' ): + + # we are running from source--exe is python's exe, me is the script + + args = [ sys.executable ] + sys.argv + + else: + + # we are running a frozen release--both exe and me are the built exe + # wrap it in quotes because pyinstaller passes it on as raw text, breaking any path with spaces :/ + + args = [ '"' + me + '"' ] + sys.argv[1:] + + + os.execv( exe, args ) def SplayListForDB( xs ): return '(' + ','.join( ( str( x ) for x in xs ) ) + ')' diff --git a/include/HydrusExceptions.py b/include/HydrusExceptions.py index ec0ceccc..1ba6390a 100644 --- a/include/HydrusExceptions.py +++ b/include/HydrusExceptions.py @@ -29,4 +29,5 @@ class NotModifiedException( NetworkException ): pass class RedirectionException( NetworkException ): pass class ServerBusyException( NetworkException ): pass class SessionException( NetworkException ): pass -class WrongServiceTypeException( NetworkException ): pass \ No newline at end of file +class WrongServiceTypeException( NetworkException ): pass +class ShouldReattemptNetworkException( NetworkException ): pass diff --git a/include/HydrusPaths.py b/include/HydrusPaths.py index 47f4d853..bc164c22 100644 --- a/include/HydrusPaths.py +++ b/include/HydrusPaths.py @@ -258,8 +258,10 @@ def GetDevice( path ): def GetTempFile(): return tempfile.TemporaryFile() def GetTempFileQuick(): return tempfile.SpooledTemporaryFile( max_size = 1024 * 1024 * 4 ) -def GetTempPath(): return tempfile.mkstemp( prefix = 'hydrus' ) - +def GetTempPath( suffix = '' ): + + return tempfile.mkstemp( suffix = suffix, prefix = 'hydrus' ) + def LaunchDirectory( path ): def do_it(): diff --git a/include/HydrusThreading.py b/include/HydrusThreading.py index 68f2befb..107c8f33 100644 --- a/include/HydrusThreading.py +++ b/include/HydrusThreading.py @@ -129,7 +129,7 @@ class DAEMONQueue( DAEMON ): class DAEMONWorker( DAEMON ): - def __init__( self, controller, name, callable, topics = None, period = 3600 ): + def __init__( self, controller, name, callable, topics = None, period = 3600, init_wait = 3 ): if topics is None: topics = [] @@ -138,6 +138,7 @@ class DAEMONWorker( DAEMON ): self._callable = callable self._topics = topics self._period = period + self._init_wait = init_wait for topic in topics: self._controller.sub( self, 'set', topic ) @@ -146,7 +147,7 @@ class 
DAEMONWorker( DAEMON ): def run( self ): - time.sleep( 3 ) + self._event.wait( self._init_wait ) while True: diff --git a/include/TestClientImageHandling.py b/include/TestClientImageHandling.py index dbf807e5..32060915 100644 --- a/include/TestClientImageHandling.py +++ b/include/TestClientImageHandling.py @@ -9,7 +9,7 @@ class TestImageHandling( unittest.TestCase ): def test_phash( self ): - phash = ClientImageHandling.GeneratePerceptualHash( os.path.join( HC.STATIC_DIR, 'hydrus.png' ) ) + phashes = ClientImageHandling.GenerateShapePerceptualHashes( os.path.join( HC.STATIC_DIR, 'hydrus.png' ) ) - self.assertEqual( phash, '\xb0\x08\x83\xb2\x08\x0b8\x08' ) + self.assertEqual( phashes, [ '\xb0\x08\x83\xb2\x08\x0b8\x08' ] ) diff --git a/static/discord.png b/static/discord.png new file mode 100644 index 00000000..0efee2a4 Binary files /dev/null and b/static/discord.png differ
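
A few illustrative sketches of the new patterns follow. The EVT_CHAR_HOOK handler added to FrameThatTakesScrollablePanel above is the usual wx way to close a frame on escape. This is a minimal standalone sketch, assuming wxPython is installed; EscapableFrame is an illustrative class, not a hydrus one, and unlike the handler above it also calls event.Skip() for other keys so they still reach the focused child control:

import wx

class EscapableFrame( wx.Frame ):
    
    def __init__( self, parent, title ):
        
        wx.Frame.__init__( self, parent, title = title )
        
        self.Bind( wx.EVT_CHAR_HOOK, self.EventCharHook )
    
    def EventCharHook( self, event ):
        
        if event.KeyCode == wx.WXK_ESCAPE:
            
            self.Close() # fires EVT_CLOSE, so normal frame cleanup still runs
            
        else:
            
            event.Skip() # let every other key through to the focused child

if __name__ == '__main__':
    
    app = wx.App( False )
    
    frame = EscapableFrame( None, 'press escape to close me' )
    frame.Show()
    
    app.MainLoop()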
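
The ClientNetworking refactor above splits Request into _SendRequestGetResponse, _GetInitialResponse, _ReadResponse and _DealWithResponse, and the behavioural change is that a timeout or reset during the read phase now raises ShouldReattemptNetworkException for GET requests, which repeats the whole send-and-read; POSTs keep raising a plain NetworkException so they are never resent. Below is a condensed, self-contained sketch of that flow under made-up names (ShouldReattempt, FlakyConnection, MAX_ATTEMPTS and request are all illustrative, not the hydrus API, and the sketch passes the GET/POST constant down throughout, where the last reconnect fallback in the hunk above still passes method_string):

GET = 0
POST = 1
MAX_ATTEMPTS = 3

class ShouldReattempt( Exception ):
    '''A read failure that is safe to retry from the top.'''

class FlakyConnection( object ):
    '''Stand-in for HTTPConnection: the first read it ever does times out.'''
    
    def __init__( self ):
        
        self._reads = 0
    
    def send( self, method, path ):
        
        return ( method, path )
    
    def read( self, response ):
        
        ( method, path ) = response
        
        self._reads += 1
        
        if self._reads == 1:
            
            # a timeout mid-read: recoverable for GET, fatal for POST
            if method == GET:
                
                raise ShouldReattempt( 'Connection timed out during response read.' )
            
            raise IOError( 'Connection timed out during response read.' )
        
        return 'body of ' + path

def request( connection, method, path, attempt_number = 1 ):
    
    response = connection.send( method, path )
    
    try:
        
        return connection.read( response )
        
    except ShouldReattempt:
        
        if method == GET and attempt_number < MAX_ATTEMPTS:
            
            return request( connection, method, path, attempt_number + 1 )
        
        raise

print( request( FlakyConnection(), GET, '/index' ) ) # retries once, then prints the body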
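
The DumpToPng/LoadFromPng changes above work around cv2's inability to handle non-ASCII paths by doing the cv2 write or read against an ASCII temp path and shutil.copy2-ing the file to or from the real location. Here is a minimal sketch of the write direction, assuming cv2 and numpy are available; save_png_to_unicode_path is an illustrative helper, not hydrus's:

import os
import shutil
import tempfile

import cv2
import numpy

def save_png_to_unicode_path( numpy_image, destination_path ):
    
    # cv2.imwrite to an ASCII-safe temp path, then copy the finished file across
    ( os_file_handle, temp_path ) = tempfile.mkstemp( suffix = '.png', prefix = 'hydrus' )
    
    try:
        
        cv2.imwrite( temp_path, numpy_image, [ cv2.IMWRITE_PNG_COMPRESSION, 9 ] )
        
        shutil.copy2( temp_path, destination_path ) # plain byte copy, happy with unicode
        
    finally:
        
        os.close( os_file_handle )
        os.remove( temp_path )

image = numpy.zeros( ( 32, 32, 3 ), dtype = 'uint8' )

save_png_to_unicode_path( image, u'caf\xe9 export.png' )

LoadFromPng does the same dance in reverse: copy the png to the temp path first, then cv2.imread the temp path.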
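
HydrusData.RandomPop above pulls one random row out of a list and removes it in place. A tiny usage sketch with made-up data; random_pop here is a local copy of the new helper so the snippet is self-contained:

import random

def random_pop( population ):
    
    random_index = random.randint( 0, len( population ) - 1 )
    
    return population.pop( random_index )

rows = [ 'row_a', 'row_b', 'row_c', 'row_d' ]

while len( rows ) > 0:
    
    print( random_pop( rows ) ) # every row comes out exactly once, in random order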
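
The RestartProcess change above builds the os.execv argument list differently depending on whether the client is running from source (sys.executable is the python interpreter and sys.argv[0] is the script) or as a frozen build (both are the bundled exe), and in the frozen case it re-quotes argv[0] because pyinstaller passes it on as raw text, which breaks install paths containing spaces. A small illustration of the two shapes; build_restart_args is a hypothetical helper, not part of hydrus:

def build_restart_args( executable, argv ):
    
    me = argv[0]
    
    if me.endswith( '.py' ) or me.endswith( '.pyw' ):
        
        # running from source: exec the interpreter with the original script and args
        return ( executable, [ executable ] + argv )
        
    else:
        
        # frozen build: exec the exe itself, quoting argv[0] so spaces in the path survive
        return ( executable, [ '"' + me + '"' ] + argv[1:] )

print( build_restart_args( 'C:\\Python27\\pythonw.exe', [ 'client.pyw', '-d', 'my db' ] ) )
print( build_restart_args( 'C:\\Program Files\\Hydrus\\client.exe', [ 'C:\\Program Files\\Hydrus\\client.exe' ] ) )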
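
The DAEMONWorker changes above make the delay before the first run configurable (init_wait, defaulting to the old 3 seconds) and replace time.sleep with self._event.wait, so the startup delay can be cut short by whatever sets the worker's event (presumably the same topic/shutdown machinery that wakes the main loop) instead of blocking unconditionally. A stripped-down sketch of that shape using only the standard library; TinyWorker and its methods are illustrative, not the hydrus classes:

import threading

class TinyWorker( threading.Thread ):
    
    def __init__( self, callable, period = 3600, init_wait = 3 ):
        
        threading.Thread.__init__( self )
        
        self.daemon = True
        
        self._callable = callable
        self._period = period
        self._init_wait = init_wait
        
        self._event = threading.Event()
        self._shutdown = False
    
    def run( self ):
        
        self._event.wait( self._init_wait ) # interruptible, unlike time.sleep
        
        while not self._shutdown:
            
            self._event.clear()
            
            self._callable()
            
            self._event.wait( self._period )
    
    def wake( self ):
        
        self._event.set() # e.g. a subscribed topic firing
    
    def shutdown( self ):
        
        self._shutdown = True
        
        self._event.set()

worker = TinyWorker( lambda: None, init_wait = 90 )

worker.start()

worker.shutdown() # returns promptly even though the 90 second init wait has not elapsed

worker.join()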