Version 287

This commit is contained in:
parent b89367b675
commit 565ce7adc6
@@ -8,6 +8,34 @@
<div class="content">
<h3>changelog</h3>
<ul>
+<li><h3>version 287</h3></li>
+<ul>
+<li>thumbnails can now be drag-and-dropped to other pages!</li>
+<li>dragging and dropping thumbs to another page tab will remove them from the source and append them to the destination, maintaining file order!</li>
+<li>DnDing thumbs to a 'page of pages' tab will put the files in the next lowest selected media page</li>
+<li>DnDing thumbs to a blank notebook area (or a page of pages without a selected media page) will create a new page for the thumbs</li>
+<li>holding down ctrl when you drop thumbnails will not remove them from the source</li>
+<li>please forgive the thumbnail DnD cursor, which for now will be in the 'copy' state, despite the internal DnD being move by default</li>
+<li>improved page tab drag-and-drop's drop logic--dropping onto the page area itself will no longer send the page to the right-end of the current notebook</li>
+<li>the 'file import options' object now supports three 'presentation' booleans--for new/already_in_inbox/already_in_archive files--so you can customise whether thumbnails appear based on each state. page imports will by default show everything, while 'quieter' import queues like import folders and subscriptions will continue to show only 'new' files in their files popup buttons. if you have a gui page with 10k+ items in its queue, try reducing the presentation to speed it up!</li>
+<li>all existing import queues will be updated when they are next loaded--but please note that for simplicity and safety they will all initialise to the 'quiet' presentation method, so if you have ongoing download pages in any of your gui sessions (including thread watchers!), they will only add 'new' thumbnails unless you edit them. I apologise for the inconvenience</li>
+<li>the regular hdd import now has a file import options button!</li>
+<li>subscription query 'finished' file popups are now merged up to the subscription level--so, a sub with five queries that each get 20 new files in a run will now ultimately leave one popup with 100 files</li>
+<li>file popups (as produced by subscriptions and a couple of other places) now preserve their import order!</li>
+<li>if a subscription with many queries runs out of bandwidth, it should now only give you one 'no more bandwidth to download files' message, rather than one for every outstanding query to sync</li>
+<li>added a checkbox to turn on/off the new random subscription syncing to the options->downloading panel</li>
+<li>the file import status button's menu now supports import/export of sources to clipboard/png! it _should_ also support unicode. be careful not to paste paths into a url cache, or urls from one gallery site to another, or you'll just get errors--this is a prototype, so please keep like with like for now</li>
+<li>the png import/export system now supports raw string payloads</li>
+<li>the new listctrlpanel can now hang its buttons in multiple rows</li>
+<li>the manage subscriptions panel now has an 'overwrite checker options' button to mass-set checker options</li>
+<li>the manage subscriptions panel now has a 'select subs' button that selects subs based on a basic query text search</li>
+<li>separating merged subscriptions now sets better new subscription names of 'old_sub_name: query_text'</li>
+<li>saving a session from a page of pages with a custom name will no longer suggest a session name prepended by [USER]</li>
+<li>doubled the subscription and downloader instance default bandwidth rules to 400 and 200 rqs/day</li>
+<li>the 'load_images_with_pil' and 'disable_cv_for_gifs' options are now officially BUGFIX in the options--unless you know you need them on, turn them off!</li>
+<li>added some safeguards to the new dialog-panel system's OK handling, which could sometimes receive a duplicate OK event</li>
+<li>shuffled some db update status texts around</li>
+</ul>
<li><h3>version 286</h3></li>
<ul>
<li>simplified how the thread watcher assigns DEAD and 404 status--it should do it more reliably now</li>
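The three 'presentation' booleans described above gate which import results get surfaced. A minimal sketch of the decision they imply, in the codebase's style; the ShouldPresent name and the is_new/in_inbox arguments are assumptions for illustration--only the three booleans, their meanings, and the 'quiet' default come from this commit:

class FileImportOptionsSketch( object ):
    
    def __init__( self, present_new_files = True, present_already_in_inbox_files = False, present_archived_files = False ):
        
        # these defaults mirror the 'quiet' preset existing queues migrate to
        self._present_new_files = present_new_files
        self._present_already_in_inbox_files = present_already_in_inbox_files
        self._present_archived_files = present_archived_files
        
    
    def ShouldPresent( self, is_new, in_inbox ):
        
        if is_new:
            
            return self._present_new_files
            
        elif in_inbox:
            
            return self._present_already_in_inbox_files
            
        else: # already in db and archived
            
            return self._present_archived_files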
@@ -152,6 +152,8 @@ class DB( HydrusDB.HydrusDB ):
        
        HydrusDB.HydrusDB.__init__( self, controller, db_dir, db_name, no_wal = no_wal )
        
+        self._controller.pub( 'splash_set_title_text', u'booting db\u2026' )
+        
    
    def _AddFilesInfo( self, rows, overwrite = False ):
        

@@ -1411,7 +1413,7 @@ class DB( HydrusDB.HydrusDB ):
            job_key.SetVariable( 'popup_text_1', text )
            job_key.SetVariable( 'popup_gauge_1', ( total_done_previously + i, total_num_hash_ids_in_cache ) )
            
-            HG.client_controller.pub( 'splash_set_status_text', text )
+            HG.client_controller.pub( 'splash_set_status_subtext', text )
            
        
        duplicate_hash_ids = [ duplicate_hash_id for duplicate_hash_id in self._CacheSimilarFilesSearch( hash_id, search_distance ) if duplicate_hash_id != hash_id ]

@@ -1486,7 +1488,7 @@ class DB( HydrusDB.HydrusDB ):
            
            text = 'regenerating similar file metadata - ' + HydrusData.ConvertValueRangeToPrettyString( total_done_previously + i, total_num_hash_ids_in_cache )
            
-            HG.client_controller.pub( 'splash_set_status_text', text )
+            HG.client_controller.pub( 'splash_set_status_subtext', text )
            job_key.SetVariable( 'popup_text_1', text )
            job_key.SetVariable( 'popup_gauge_1', ( total_done_previously + i, total_num_hash_ids_in_cache ) )
            

@@ -1603,7 +1605,7 @@ class DB( HydrusDB.HydrusDB ):
            
            text = 'rebalancing similar file metadata - ' + HydrusData.ConvertValueRangeToPrettyString( num_done, num_to_do )
            
-            HG.client_controller.pub( 'splash_set_status_text', text )
+            HG.client_controller.pub( 'splash_set_status_subtext', text )
            job_key.SetVariable( 'popup_text_1', text )
            job_key.SetVariable( 'popup_gauge_1', ( num_done, num_to_do ) )
            

@@ -6434,9 +6436,7 @@ class DB( HydrusDB.HydrusDB ):
        
        file_import_options = file_import_job.GetFileImportOptions()
        
-        ( archive, exclude_deleted_files, min_size, min_resolution ) = file_import_options.ToTuple()
-        
-        if archive:
+        if file_import_options.GetAutomaticArchive():
            
            self._ArchiveFiles( ( hash_id, ) )
            

@@ -6576,6 +6576,13 @@ class DB( HydrusDB.HydrusDB ):
        self._db_filenames[ 'external_master' ] = 'client.master.db'
        
    
+    def _InInbox( self, hash ):
+        
+        hash_id = self._GetHashId( hash )
+        
+        return hash_id in self._inbox_hash_ids
+        
+    
    def _IsAnOrphan( self, test_type, possible_hash ):
        
        if self._HashExists( possible_hash ):

@@ -7999,6 +8006,7 @@ class DB( HydrusDB.HydrusDB ):
        elif action == 'hash_status': result = self._GetHashStatus( *args, **kwargs )
        elif action == 'hydrus_sessions': result = self._GetHydrusSessions( *args, **kwargs )
        elif action == 'imageboards': result = self._GetYAMLDump( YAML_DUMP_ID_IMAGEBOARD, *args, **kwargs )
+        elif action == 'in_inbox': result = self._InInbox( *args, **kwargs )
        elif action == 'is_an_orphan': result = self._IsAnOrphan( *args, **kwargs )
        elif action == 'load_into_disk_cache': result = self._LoadIntoDiskCache( *args, **kwargs )
        elif action == 'local_booru_share_keys': result = self._GetYAMLDumpNames( YAML_DUMP_ID_LOCAL_BOORU )
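With _InInbox wired into the read dispatch above, callers can ask for inbox status by action name. A hedged sketch of a call site--the Read-to-action plumbing is assumed from the similar reads elsewhere in this commit (e.g. controller.Read( 'media_results', hashes )):

# hypothetical caller; 'in_inbox' is the action string registered above
in_inbox = HG.client_controller.Read( 'in_inbox', hash )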
@@ -8552,7 +8560,7 @@ class DB( HydrusDB.HydrusDB ):
    
    def _UpdateDB( self, version ):
        
-        self._controller.pub( 'splash_set_title_text', 'updating db to v' + str( version + 1 ) )
+        self._controller.pub( 'splash_set_status_text', 'updating db to v' + str( version + 1 ) )
        
        if version == 239:
            

@@ -8562,7 +8570,7 @@ class DB( HydrusDB.HydrusDB ):
            
            #
            
-            self._controller.pub( 'splash_set_status_text', 'setting up next step of similar files stuff' )
+            self._controller.pub( 'splash_set_status_subtext', 'setting up next step of similar files stuff' )
            
            self._c.execute( 'CREATE TABLE external_caches.shape_search_cache ( hash_id INTEGER PRIMARY KEY, searched_distance INTEGER );' )
            

@@ -8638,7 +8646,7 @@ class DB( HydrusDB.HydrusDB ):
            
            # first, convert existing tag_id to subtag_id
            
-            self._controller.pub( 'splash_set_status_text', 'converting existing tags to subtags' )
+            self._controller.pub( 'splash_set_status_subtext', 'converting existing tags to subtags' )
            
            self._c.execute( 'CREATE TABLE IF NOT EXISTS external_master.subtags ( subtag_id INTEGER PRIMARY KEY, subtag TEXT UNIQUE );' )
            

@@ -8653,7 +8661,7 @@ class DB( HydrusDB.HydrusDB ):
            
            # now create the new tags table
            
-            self._controller.pub( 'splash_set_status_text', 'creating the new tags table' )
+            self._controller.pub( 'splash_set_status_subtext', 'creating the new tags table' )
            
            self._c.execute( 'CREATE TABLE IF NOT EXISTS external_master.tags ( tag_id INTEGER PRIMARY KEY, namespace_id INTEGER, subtag_id INTEGER );' )
            

@@ -8685,7 +8693,7 @@ class DB( HydrusDB.HydrusDB ):
                return tag_id
                
            
-            self._controller.pub( 'splash_set_status_text', 'compacting smaller tables' )
+            self._controller.pub( 'splash_set_status_subtext', 'compacting smaller tables' )
            
            #
            

@@ -8797,7 +8805,7 @@ class DB( HydrusDB.HydrusDB ):
            
            for cache_table_name in cache_table_names:
                
-                self._controller.pub( 'splash_set_status_text', 'compacting ' + cache_table_name )
+                self._controller.pub( 'splash_set_status_subtext', 'compacting ' + cache_table_name )
                
                if cache_table_name.startswith( 'combined_files_ac_cache_' ) or cache_table_name.startswith( 'specific_ac_cache_' ):
                    

@@ -8827,7 +8835,7 @@ class DB( HydrusDB.HydrusDB ):
            
            for mapping_table_name in mapping_table_names:
                
-                self._controller.pub( 'splash_set_status_text', 'compacting ' + mapping_table_name )
+                self._controller.pub( 'splash_set_status_subtext', 'compacting ' + mapping_table_name )
                
                if mapping_table_name.startswith( 'current_mappings_' ) or mapping_table_name.startswith( 'deleted_mappings_' ) or mapping_table_name.startswith( 'pending_mappings_' ):
                    

@@ -8839,7 +8847,7 @@ class DB( HydrusDB.HydrusDB ):
                    
                    self._c.execute( 'DROP TABLE old_table;' )
                    
-                    self._controller.pub( 'splash_set_status_text', 'indexing ' + mapping_table_name )
+                    self._controller.pub( 'splash_set_status_subtext', 'indexing ' + mapping_table_name )
                    
                    self._c.execute( 'CREATE UNIQUE INDEX external_mappings.' + mapping_table_name + '_hash_id_tag_id_index ON ' + mapping_table_name + ' ( hash_id, tag_id );' )
                    

@@ -8853,13 +8861,13 @@ class DB( HydrusDB.HydrusDB ):
                    
                    self._c.execute( 'DROP TABLE old_table;' )
                    
-                    self._controller.pub( 'splash_set_status_text', 'indexing ' + mapping_table_name )
+                    self._controller.pub( 'splash_set_status_subtext', 'indexing ' + mapping_table_name )
                    
                    self._c.execute( 'CREATE UNIQUE INDEX external_mappings.' + mapping_table_name + '_hash_id_tag_id_index ON ' + mapping_table_name + ' ( hash_id, tag_id );' )
                    
                
            
-            self._controller.pub( 'splash_set_status_text', 'committing to disk' )
+            self._controller.pub( 'splash_set_status_subtext', 'committing to disk' )
            
            self._CloseDBCursor()
            

@@ -8867,7 +8875,7 @@ class DB( HydrusDB.HydrusDB ):
            
            for filename in [ 'client.mappings.db', 'client.master.db', 'client.caches.db', 'client.db' ]:
                
-                self._controller.pub( 'splash_set_status_text', 'vacuuming ' + filename )
+                self._controller.pub( 'splash_set_status_subtext', 'vacuuming ' + filename )
                
                db_path = os.path.join( self._db_dir, filename )
                

@@ -8892,7 +8900,7 @@ class DB( HydrusDB.HydrusDB ):
            
            for schema in [ 'main', 'external_caches', 'external_master', 'external_mappings' ]:
                
-                self._controller.pub( 'splash_set_status_text', 'analyzing ' + schema )
+                self._controller.pub( 'splash_set_status_subtext', 'analyzing ' + schema )
                
                try:
                    

@@ -8918,7 +8926,7 @@ class DB( HydrusDB.HydrusDB ):
            self._BeginImmediate()
            
            
-            self._controller.pub( 'splash_set_status_text', 'updating services' )
+            self._controller.pub( 'splash_set_status_subtext', 'updating services' )
            
            old_service_info = self._c.execute( 'SELECT * FROM services;' ).fetchall()
            

@@ -9016,7 +9024,7 @@ class DB( HydrusDB.HydrusDB ):
            
            #
            
-            self._controller.pub( 'splash_set_status_text', 'deleting misc old cruft' )
+            self._controller.pub( 'splash_set_status_subtext', 'deleting misc old cruft' )
            
            self._c.execute( 'DROP TABLE news;' )
            self._c.execute( 'DROP TABLE contacts;' )

@@ -9032,7 +9040,7 @@ class DB( HydrusDB.HydrusDB ):
            self._c.execute( 'DROP TABLE incoming_message_statuses;' )
            self._c.execute( 'DROP TABLE messages;' )
            
-            self._controller.pub( 'splash_set_status_text', 'deleting old updates dir' )
+            self._controller.pub( 'splash_set_status_subtext', 'deleting old updates dir' )
            
            updates_dir = os.path.join( self._db_dir, 'client_updates' )
            

@@ -9181,7 +9189,7 @@ class DB( HydrusDB.HydrusDB ):
            
            #
            
-            self._controller.pub( 'splash_set_status_text', 'cleaning tags' )
+            self._controller.pub( 'splash_set_status_subtext', 'cleaning tags' )
            
            dirty_namespace_info = []
            dirty_subtag_info = []

@@ -9230,7 +9238,7 @@ class DB( HydrusDB.HydrusDB ):
            
            
-            self._controller.pub( 'splash_set_status_text', HydrusData.ConvertIntToPrettyString( len( dirty_namespace_info ) + len( dirty_subtag_info ) ) + ' dirty tag parts found, now cleaning' )
+            self._controller.pub( 'splash_set_status_subtext', HydrusData.ConvertIntToPrettyString( len( dirty_namespace_info ) + len( dirty_subtag_info ) ) + ' dirty tag parts found, now cleaning' )
            
            dirty_and_clean_tag_ids = []
            

@@ -9313,7 +9321,7 @@ class DB( HydrusDB.HydrusDB ):
            
            
-            self._controller.pub( 'splash_set_status_text', HydrusData.ConvertIntToPrettyString( len( invalid_subtag_ids ) ) + ' invalid tag parts found, now replacing' )
+            self._controller.pub( 'splash_set_status_subtext', HydrusData.ConvertIntToPrettyString( len( invalid_subtag_ids ) ) + ' invalid tag parts found, now replacing' )
            
            for ( i, invalid_subtag_id ) in enumerate( invalid_subtag_ids ):
                

@@ -9340,7 +9348,7 @@ class DB( HydrusDB.HydrusDB ):
            
            self._InitCaches()
            
-            self._controller.pub( 'splash_set_status_text', 'cleaning tags again' )
+            self._controller.pub( 'splash_set_status_subtext', 'cleaning tags again' )
            
            tag_service_ids = [ service_id for ( service_id, ) in self._c.execute( 'SELECT service_id FROM services WHERE service_type IN ( ?, ? );', ( HC.TAG_REPOSITORY, HC.LOCAL_TAG ) ) ]
            

@@ -9609,7 +9617,7 @@ class DB( HydrusDB.HydrusDB ):
            
            #
            
-            self._controller.pub( 'splash_set_status_text', 'regenerating some tag search data' )
+            self._controller.pub( 'splash_set_status_subtext', 'regenerating some tag search data' )
            
            old_subtag_info = self._c.execute( 'SELECT docid, subtag FROM subtags_fts4;' ).fetchall()
            

@@ -9650,7 +9658,7 @@ class DB( HydrusDB.HydrusDB ):
        
        if version == 260:
            
-            self._controller.pub( 'splash_set_status_text', 'generating some new tag search data' )
+            self._controller.pub( 'splash_set_status_subtext', 'generating some new tag search data' )
            
            self._c.execute( 'CREATE TABLE external_caches.integer_subtags ( subtag_id INTEGER PRIMARY KEY, integer_subtag INTEGER );' )
            

@@ -9707,7 +9715,7 @@ class DB( HydrusDB.HydrusDB ):
        
        if version == 262:
            
-            self._controller.pub( 'splash_set_status_text', 'moving some hash data' )
+            self._controller.pub( 'splash_set_status_subtext', 'moving some hash data' )
            
            self._c.execute( 'CREATE TABLE IF NOT EXISTS external_master.hashes ( hash_id INTEGER PRIMARY KEY, hash BLOB_BYTES UNIQUE );' )
            

@@ -9724,7 +9732,7 @@ class DB( HydrusDB.HydrusDB ):
            
            self._CloseDBCursor()
            
-            self._controller.pub( 'splash_set_status_text', 'vacuuming main db ' )
+            self._controller.pub( 'splash_set_status_subtext', 'vacuuming main db ' )
            
            db_path = os.path.join( self._db_dir, 'client.db' )
            

@@ -9757,7 +9765,7 @@ class DB( HydrusDB.HydrusDB ):
            
            #
            
-            self._controller.pub( 'splash_set_status_text', 'generating deleted tag cache' )
+            self._controller.pub( 'splash_set_status_subtext', 'generating deleted tag cache' )
            
            tag_service_ids = self._GetServiceIds( HC.TAG_SERVICES )
            file_service_ids = self._GetServiceIds( HC.AUTOCOMPLETE_CACHE_SPECIFIC_FILE_SERVICES )

@@ -9804,7 +9812,7 @@ class DB( HydrusDB.HydrusDB ):
        
        if version == 263:
            
-            self._controller.pub( 'splash_set_status_text', 'rebuilding urls table' )
+            self._controller.pub( 'splash_set_status_subtext', 'rebuilding urls table' )
            
            self._c.execute( 'ALTER TABLE urls RENAME TO urls_old;' )
            

@@ -9944,7 +9952,7 @@ class DB( HydrusDB.HydrusDB ):
        
        if version == 277:
            
-            self._controller.pub( 'splash_set_status_text', 'updating tumblr urls' )
+            self._controller.pub( 'splash_set_status_subtext', 'updating tumblr urls' )
            
            urls = self._c.execute( 'SELECT hash_id, url FROM urls;' ).fetchall()
            

@@ -10082,6 +10090,13 @@ class DB( HydrusDB.HydrusDB ):
        
        
+        if version == 286:
+            
+            message = '\'File import options\' now support different \'presentation\' options that change which import files\' thumbnails appear in import pages. Although _new_ import pages will continue to show everything by default, all _existing_ file import options will update to a conservative, \'quiet\' default that will only show new files. Please double-check any existing import pages if you want to see thumbnails for files that are \'already in db\'. I apologise for the inconvenience.'
+            
+            self.pub_initial_message( message )
+            
+        
        self._controller.pub( 'splash_set_title_text', 'updated db to v' + str( version + 1 ) )
        
        self._c.execute( 'UPDATE version SET version = ?;', ( version + 1, ) )
@@ -296,7 +296,14 @@ def DAEMONSynchroniseSubscriptions( controller ):
    
    subscription_names = list( controller.Read( 'serialisable_names', HydrusSerialisable.SERIALISABLE_TYPE_SUBSCRIPTION ) )
    
-    random.shuffle( subscription_names )
+    if controller.new_options.GetBoolean( 'process_subs_in_random_order' ):
+        
+        random.shuffle( subscription_names )
+        
+    else:
+        
+        subscription_names.sort()
+        
    
    HG.subscriptions_running = True
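Extracted from the branch above, the ordering policy is small enough to state on its own; the helper function is illustrative, the option name is the real one this commit adds:

import random

def order_subscription_names( subscription_names, in_random_order ):
    
    subscription_names = list( subscription_names )
    
    if in_random_order:
        
        # stops one 'aardvark' sub from always getting first whack at bandwidth
        random.shuffle( subscription_names )
        
    else:
        
        subscription_names.sort()
        
    
    return subscription_names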
@@ -859,6 +859,8 @@ class ClientOptions( HydrusSerialisable.SerialisableBase ):
        
        self._dictionary[ 'booleans' ][ 'thumbnail_fill' ] = False
        
+        self._dictionary[ 'booleans' ][ 'process_subs_in_random_order' ] = True
+        
        #
        
        self._dictionary[ 'colours' ] = HydrusSerialisable.SerialisableDictionary()
@@ -54,7 +54,8 @@ def SetDefaultBandwidthManagerRules( bandwidth_manager ):
    
    rules = HydrusNetworking.BandwidthRules()
    
-    rules.AddRule( HC.BANDWIDTH_TYPE_REQUESTS, 300, 100 ) # after that first sample of small files, take it easy
+    # most gallery downloaders need two rqs per file (page and file), remember
+    rules.AddRule( HC.BANDWIDTH_TYPE_REQUESTS, 300, 200 ) # after that first sample of small files, take it easy
    
    rules.AddRule( HC.BANDWIDTH_TYPE_DATA, 300, 128 * MB ) # after that first sample of big files, take it easy

@@ -64,7 +65,8 @@ def SetDefaultBandwidthManagerRules( bandwidth_manager ):
    
    rules = HydrusNetworking.BandwidthRules()
    
-    rules.AddRule( HC.BANDWIDTH_TYPE_REQUESTS, 86400, 200 ) # catch up on a big sub in little chunks every day
+    # most gallery downloaders need two rqs per file (page and file), remember
+    rules.AddRule( HC.BANDWIDTH_TYPE_REQUESTS, 86400, 400 ) # catch up on a big sub in little chunks every day
    
    rules.AddRule( HC.BANDWIDTH_TYPE_DATA, 86400, 256 * MB ) # catch up on a big sub in little chunks every day
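The comment added in both hunks explains the doubling: a typical gallery download costs two requests, one for the post page and one for the file itself. Note the downloader-instance rule is per 300 seconds while the subscription rule is per day; the changelog's '400 and 200 rqs/day' is shorthand. A quick worked check under the two-requests-per-file assumption:

REQUESTS_PER_FILE = 2 # post page + file, per the comment above

subscription_requests_per_day = 400 # new default, was 200
downloader_requests_per_300s = 200 # new default, was 100

print( subscription_requests_per_day // REQUESTS_PER_FILE ) # -> 200 files/day per sub
print( downloader_requests_per_300s // REQUESTS_PER_FILE ) # -> 100 files per five-minute window per downloader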
@@ -251,14 +253,28 @@ def GetDefaultHentaiFoundryInfo():
    
    return info
    
-def GetDefaultFileImportOptions():
+def GetDefaultFileImportOptions( for_quiet_queue = False ):
    
    automatic_archive = False
    exclude_deleted = HG.client_controller.options[ 'exclude_deleted_files' ]
    
+    if for_quiet_queue:
+        
+        present_new_files = True
+        present_already_in_inbox_files = False
+        present_archived_files = False
+        
+    else:
+        
+        present_new_files = True
+        present_already_in_inbox_files = True
+        present_archived_files = True
+        
+    
    min_size = None
    min_resolution = None
    
-    file_import_options = ClientImporting.FileImportOptions( automatic_archive = automatic_archive, exclude_deleted = exclude_deleted, min_size = min_size, min_resolution = min_resolution )
+    file_import_options = ClientImporting.FileImportOptions( automatic_archive = automatic_archive, exclude_deleted = exclude_deleted, present_new_files = present_new_files, present_already_in_inbox_files = present_already_in_inbox_files, present_archived_files = present_archived_files, min_size = min_size, min_resolution = min_resolution )
    
    return file_import_options
    

@@ -833,9 +849,9 @@ def GetDefaultURLMatches():
    name = '8chan file'
    url_type = HC.URL_TYPE_FILE
    preferred_scheme = 'https'
-    netloc = 'media.8ch.net'
-    allow_subdomains = False
-    keep_subdomains = False
+    netloc = '8ch.net'
+    allow_subdomains = True
+    keep_subdomains = True
    
    path_components = []
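How the new flag plays out for callers; a short sketch, assuming this module is imported as ClientDefaults (the flag and the constructor kwargs are real, the caller is illustrative):

import ClientDefaults

# subscriptions and import folders ask for the quiet preset
quiet_options = ClientDefaults.GetDefaultFileImportOptions( for_quiet_queue = True )

# regular import pages take the loud default and present everything
loud_options = ClientDefaults.GetDefaultFileImportOptions()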
@@ -1,10 +1,11 @@
import ClientGUICommon
import HydrusGlobals as HG
+import json
import wx

class FileDropTarget( wx.PyDropTarget ):
    
-    def __init__( self, parent, filenames_callable = None, url_callable = None, page_callable = None ):
+    def __init__( self, parent, filenames_callable = None, url_callable = None, media_callable = None, page_callable = None ):
        
        wx.PyDropTarget.__init__( self )

@@ -12,6 +13,7 @@ class FileDropTarget( wx.PyDropTarget ):
        
        self._filenames_callable = filenames_callable
        self._url_callable = url_callable
+        self._media_callable = media_callable
        self._page_callable = page_callable
        
        self._receiving_data_object = wx.DataObjectComposite()

@@ -64,16 +66,25 @@ class FileDropTarget( wx.PyDropTarget ):
            format_id = None
            
        
-        if format_id == 'application/hydrus-media':
+        if format_id == 'application/hydrus-media' and self._media_callable is not None:
            
-            result = wx.DragCancel
+            data = self._hydrus_media_data_object.GetData()
+            
+            ( encoded_page_key, encoded_hashes ) = json.loads( data )
+            
+            page_key = encoded_page_key.decode( 'hex' )
+            hashes = [ encoded_hash.decode( 'hex' ) for encoded_hash in encoded_hashes ]
+            
+            wx.CallAfter( self._media_callable, page_key, hashes ) # callafter so we can terminate dnd event now
+            
+            result = wx.DragMove
            
        
        if format_id == 'application/hydrus-page-tab' and self._page_callable is not None:
            
            page_key = self._hydrus_page_tab_data_object.GetData()
            
-            wx.CallAfter( self._page_callable, page_key ) # callafter to terminate dnd event now
+            wx.CallAfter( self._page_callable, page_key ) # callafter so we can terminate dnd event now
            
            result = wx.DragMove
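The 'application/hydrus-media' payload this drop target unpacks is built in ClientGUIMedia later in this commit. The round trip, extracted as a sketch (python 2 hex-codec strings, exactly as the lines above use them):

import json

def pack_media_payload( page_key, hashes ):
    
    # what MediaPanelThumbnails puts into the wx.CustomDataObject
    return json.dumps( ( page_key.encode( 'hex' ), [ hash.encode( 'hex' ) for hash in hashes ] ) )
    

def unpack_media_payload( data ):
    
    # what FileDropTarget recovers before calling the media callable
    ( encoded_page_key, encoded_hashes ) = json.loads( data )
    
    page_key = encoded_page_key.decode( 'hex' )
    hashes = [ encoded_hash.decode( 'hex' ) for encoded_hash in encoded_hashes ]
    
    return ( page_key, hashes )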
@@ -92,7 +92,7 @@ class FrameGUI( ClientGUITopLevelWindows.FrameThatResizes ):
        
        self._notebook = ClientGUIPages.PagesNotebook( self, self._controller, 'top page notebook' )
        
-        self.SetDropTarget( ClientDragDrop.FileDropTarget( self, self.ImportFiles, self.ImportURL, self._notebook.PageDragAndDropDropped ) )
+        self.SetDropTarget( ClientDragDrop.FileDropTarget( self, self.ImportFiles, self.ImportURL, self._notebook.MediaDragAndDropDropped, self._notebook.PageDragAndDropDropped ) )
        
        wx.GetApp().SetTopWindow( self )
@@ -850,6 +850,16 @@ class BetterListCtrl( wx.ListCtrl, ListCtrlAutoWidthMixin ):
        
    
+    def SelectNone( self ):
+        
+        currently_selected = set( self._GetSelected() )
+        
+        for index in currently_selected:
+            
+            self.Select( index, False )
+            
+        
+    
    def SetData( self, datas ):
        
        datas = set( datas )

@@ -1184,6 +1194,13 @@ class BetterListCtrlPanel( wx.Panel ):
        event.Skip()
        
    
+    def NewButtonRow( self ):
+        
+        self._buttonbox = wx.BoxSizer( wx.HORIZONTAL )
+        
+        self._vbox.AddF( self._buttonbox, CC.FLAGS_BUTTON_SIZER )
+        
+    
    def SetListCtrl( self, listctrl ):
        
        self._listctrl = listctrl
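NewButtonRow just swaps in a fresh horizontal sizer, so subsequent AddButton calls start a new line of buttons. A usage sketch, mirroring how the manage subscriptions panel drives it later in this commit; the panel construction and handler names are stand-ins for a real caller:

panel = ClientGUIListCtrl.BetterListCtrlPanel( parent )

panel.AddButton( 'edit', self.Edit, enabled_only_on_selection = True )
panel.AddButton( 'delete', self.Delete, enabled_only_on_selection = True )

panel.NewButtonRow() # everything after this lands on a second row

panel.AddButton( 'merge', self.Merge, enabled_check_func = self._CanMerge )
panel.AddButton( 'separate', self.Separate, enabled_check_func = self._CanSeparate )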
@@ -1635,6 +1635,12 @@ class ManagementPanelImporterHDD( ManagementPanelImporter ):
        self._pause_button = wx.BitmapButton( self._import_queue_panel, bitmap = CC.GlobalBMPs.pause )
        self._pause_button.Bind( wx.EVT_BUTTON, self.EventPause )
        
+        self._hdd_import = self._management_controller.GetVariable( 'hdd_import' )
+        
+        file_import_options = self._hdd_import.GetFileImportOptions()
+        
+        self._file_import_options = ClientGUIImport.FileImportOptionsButton( self._import_queue_panel, file_import_options, self._hdd_import.SetFileImportOptions )
+        
        #
        
        vbox = wx.BoxSizer( wx.VERTICAL )

@@ -1646,6 +1652,7 @@ class ManagementPanelImporterHDD( ManagementPanelImporter ):
        self._import_queue_panel.AddF( self._current_action, CC.FLAGS_EXPAND_PERPENDICULAR )
        self._import_queue_panel.AddF( self._seed_cache_control, CC.FLAGS_EXPAND_PERPENDICULAR )
        self._import_queue_panel.AddF( self._pause_button, CC.FLAGS_LONE_BUTTON )
+        self._import_queue_panel.AddF( self._file_import_options, CC.FLAGS_EXPAND_PERPENDICULAR )
        
        vbox.AddF( self._import_queue_panel, CC.FLAGS_EXPAND_PERPENDICULAR )

@@ -1655,8 +1662,6 @@ class ManagementPanelImporterHDD( ManagementPanelImporter ):
        
        #
        
-        self._hdd_import = self._management_controller.GetVariable( 'hdd_import' )
-        
        seed_cache = self._hdd_import.GetSeedCache()
        
        self._seed_cache_control.SetSeedCache( seed_cache )
@@ -2465,7 +2465,9 @@ class MediaPanelThumbnails( MediaPanel ):
            
            hydrus_media_data_object = wx.CustomDataObject( 'application/hydrus-media' )
            
-            data = json.dumps( [ hash.encode( 'hex' ) for hash in hashes ] )
+            data = ( self._page_key.encode( 'hex' ), [ hash.encode( 'hex' ) for hash in hashes ] )
+            
+            data = json.dumps( data )
            
            hydrus_media_data_object.SetData( data )
@@ -524,6 +524,11 @@ class Page( wx.SplitterWindow ):
        return self._media_panel.GetSortedMedia()
        
    
+    def GetMediaPanel( self ):
+        
+        return self._media_panel
+        
+    
    def GetName( self ):
        
        return self._management_controller.GetPageName()

@@ -989,8 +994,6 @@ class PagesNotebook( wx.Notebook ):
            return self
            
        
-        not_on_my_label = flags & wx.NB_HITTEST_NOWHERE
-        
        if HC.PLATFORM_OSX:
            
            ( x, y ) = screen_position

@@ -1003,7 +1006,7 @@ class PagesNotebook( wx.Notebook ):
            on_child_notebook_somewhere = flags & wx.NB_HITTEST_ONPAGE
            
        
-        if not_on_my_label and on_child_notebook_somewhere:
+        if on_child_notebook_somewhere:
            
            return current_page._GetNotebookFromScreenPosition( screen_position )
            

@@ -1292,7 +1295,7 @@ class PagesNotebook( wx.Notebook ):
            ClientGUIMenus.AppendMenuItem( self, submenu, name, 'Save this page of pages to the session.', page.SaveGUISession, name )
            
        
-        ClientGUIMenus.AppendMenuItem( self, submenu, 'create a new session', 'Save this page of pages to the session.', page.SaveGUISession, suggested_name = page.GetName() )
+        ClientGUIMenus.AppendMenuItem( self, submenu, 'create a new session', 'Save this page of pages to the session.', page.SaveGUISession, suggested_name = page.GetDisplayName() )
        
        ClientGUIMenus.AppendMenu( menu, submenu, 'save this page of pages to a session' )

@@ -1816,6 +1819,91 @@ class PagesNotebook( wx.Notebook ):
        self.AppendGUISession( name, load_in_a_page_of_pages = False )
        
    
+    def MediaDragAndDropDropped( self, source_page_key, hashes ):
+        
+        source_page = self._GetPageFromPageKey( source_page_key )
+        
+        if source_page is None:
+            
+            return
+            
+        
+        screen_position = wx.GetMousePosition()
+        
+        dest_notebook = self._GetNotebookFromScreenPosition( screen_position )
+        
+        ( x, y ) = screen_position
+        
+        ( tab_index, flags ) = ClientGUICommon.NotebookScreenToHitTest( dest_notebook, ( x, y ) )
+        
+        do_add = True
+        # do chase - if we need to chase to an existing dest page on which we dropped files
+        # do return - if we need to return to source page if we created a new one
+        
+        if flags & wx.NB_HITTEST_ONPAGE:
+            
+            dest_page = dest_notebook.GetCurrentPage()
+            
+        elif tab_index == wx.NOT_FOUND:
+            
+            dest_page = dest_notebook.NewPageQuery( CC.LOCAL_FILE_SERVICE_KEY, initial_hashes = hashes )
+            
+            do_add = False
+            
+        else:
+            
+            dest_page = dest_notebook.GetPage( tab_index )
+            
+            if isinstance( dest_page, PagesNotebook ):
+                
+                result = dest_page.GetCurrentMediaPage()
+                
+                if result is None:
+                    
+                    dest_page = dest_page.NewPageQuery( CC.LOCAL_FILE_SERVICE_KEY, initial_hashes = hashes )
+                    
+                    do_add = False
+                    
+                else:
+                    
+                    dest_page = result
+                    
+                
+            
+        
+        if dest_page is None:
+            
+            return # we somehow dropped onto a new notebook that has no pages
+            
+        
+        if dest_page.GetPageKey() == source_page_key:
+            
+            return # we dropped onto the same page we picked up on
+            
+        
+        if do_add:
+            
+            unsorted_media_results = self._controller.Read( 'media_results', hashes )
+            
+            hashes_to_media_results = { media_result.GetHash() : media_result for media_result in unsorted_media_results }
+            
+            sorted_media_results = [ hashes_to_media_results[ hash ] for hash in hashes ]
+            
+            dest_page.GetMediaPanel().AddMediaResults( dest_page.GetPageKey(), sorted_media_results )
+            
+        else:
+            
+            self.ShowPage( source_page )
+            
+        
+        ctrl_down = wx.GetKeyState( wx.WXK_COMMAND ) or wx.GetKeyState( wx.WXK_CONTROL )
+        
+        if not ctrl_down:
+            
+            source_page.GetMediaPanel().RemoveMedia( source_page.GetPageKey(), hashes )
+            
+        
+    
    def NewPage( self, management_controller, initial_hashes = None, forced_insertion_index = None, on_deepest_notebook = False ):
        
        current_page = self.GetCurrentPage()
@@ -2113,22 +2201,12 @@ class PagesNotebook( wx.Notebook ):
        
        ( tab_index, flags ) = ClientGUICommon.NotebookScreenToHitTest( dest_notebook, ( x, y ) )
        
-        EDGE_PADDING = 10
-        
-        ( left_tab_index, gumpf ) = ClientGUICommon.NotebookScreenToHitTest( dest_notebook, ( x - EDGE_PADDING, y ) )
-        ( right_tab_index, gumpf ) = ClientGUICommon.NotebookScreenToHitTest( dest_notebook, ( x + EDGE_PADDING, y ) )
-        
-        landed_near_left_edge = left_tab_index != tab_index
-        landed_near_right_edge = right_tab_index != tab_index
-        
-        landed_on_edge = landed_near_right_edge or landed_near_left_edge
-        landed_in_middle = not landed_on_edge
-        
-        there_is_a_page_to_the_left = tab_index > 0
-        there_is_a_page_to_the_right = tab_index < dest_notebook.GetPageCount() - 1
-        
-        page_on_left_is_source = there_is_a_page_to_the_left and dest_notebook.GetPage( tab_index - 1 ) == page
-        page_on_right_is_source = there_is_a_page_to_the_right and dest_notebook.GetPage( tab_index + 1 ) == page
+        if flags & wx.NB_HITTEST_ONPAGE:
+            
+            # was not dropped on label area, so ditch DnD
+            
+            return
+            
        
        if tab_index == wx.NOT_FOUND:
            

@@ -2146,6 +2224,23 @@ class PagesNotebook( wx.Notebook ):
            
        else:
            
+            EDGE_PADDING = 10
+            
+            ( left_tab_index, gumpf ) = ClientGUICommon.NotebookScreenToHitTest( dest_notebook, ( x - EDGE_PADDING, y ) )
+            ( right_tab_index, gumpf ) = ClientGUICommon.NotebookScreenToHitTest( dest_notebook, ( x + EDGE_PADDING, y ) )
+            
+            landed_near_left_edge = left_tab_index != tab_index
+            landed_near_right_edge = right_tab_index != tab_index
+            
+            landed_on_edge = landed_near_right_edge or landed_near_left_edge
+            landed_in_middle = not landed_on_edge
+            
+            there_is_a_page_to_the_left = tab_index > 0
+            there_is_a_page_to_the_right = tab_index < dest_notebook.GetPageCount() - 1
+            
+            page_on_left_is_source = there_is_a_page_to_the_left and dest_notebook.GetPage( tab_index - 1 ) == page
+            page_on_right_is_source = there_is_a_page_to_the_right and dest_notebook.GetPage( tab_index + 1 ) == page
+            
+            # dropped on source and not on the right edge: do nothing
+            
            landee_page = dest_notebook.GetPage( tab_index )
@@ -701,6 +701,10 @@ class EditFileImportOptions( ClientGUIScrolledPanels.EditPanel ):
        self._exclude_deleted = wx.CheckBox( self, label = 'exclude previously deleted files' )
        self._exclude_deleted.SetToolTipString( 'If this is set and an incoming file has already been seen and deleted before by this client, the import will be abandoned. This is useful to make sure you do not keep importing and deleting the same bad files over and over. Files currently in the trash count as deleted.' )
        
+        self._present_new_files = wx.CheckBox( self, label = 'present new files' )
+        self._present_already_in_inbox_files = wx.CheckBox( self, label = 'present \'already in db\' files in inbox' )
+        self._present_archived_files = wx.CheckBox( self, label = 'present \'already in db\' files in archive' )
+        
        self._min_size = ClientGUICommon.NoneableSpinCtrl( self, 'size', unit = 'KB', multiplier = 1024 )
        self._min_size.SetValue( 5120 )

@@ -709,10 +713,13 @@ class EditFileImportOptions( ClientGUIScrolledPanels.EditPanel ):
        
        #
        
-        ( automatic_archive, exclude_deleted, min_size, min_resolution ) = file_import_options.ToTuple()
+        ( automatic_archive, exclude_deleted, present_new_files, present_already_in_inbox_files, present_archived_files, min_size, min_resolution ) = file_import_options.ToTuple()
        
        self._auto_archive.SetValue( automatic_archive )
        self._exclude_deleted.SetValue( exclude_deleted )
+        self._present_new_files.SetValue( present_new_files )
+        self._present_already_in_inbox_files.SetValue( present_already_in_inbox_files )
+        self._present_archived_files.SetValue( present_archived_files )
        self._min_size.SetValue( min_size )
        self._min_resolution.SetValue( min_resolution )

@@ -722,6 +729,20 @@ class EditFileImportOptions( ClientGUIScrolledPanels.EditPanel ):
        
        vbox.AddF( self._auto_archive, CC.FLAGS_EXPAND_PERPENDICULAR )
        vbox.AddF( self._exclude_deleted, CC.FLAGS_EXPAND_PERPENDICULAR )
        
+        presentation_message = 'For regular import pages, \'presentation\' means if the imported file\'s thumbnail will be added. For quieter queues like subscriptions, it determines if the file will be in any popup message button.'
+        presentation_message += os.linesep * 2
+        presentation_message += 'If you have a very large (10k+ files) file import page, consider hiding some or all of its thumbs to reduce ui lag and increase import speed.'
+        
+        presentation_st = ClientGUICommon.BetterStaticText( self, presentation_message )
+        
+        presentation_st.Wrap( 440 )
+        
+        vbox.AddF( presentation_st, CC.FLAGS_EXPAND_PERPENDICULAR )
+        vbox.AddF( self._present_new_files, CC.FLAGS_EXPAND_PERPENDICULAR )
+        vbox.AddF( self._present_already_in_inbox_files, CC.FLAGS_EXPAND_PERPENDICULAR )
+        vbox.AddF( self._present_archived_files, CC.FLAGS_EXPAND_PERPENDICULAR )
+        
        vbox.AddF( ClientGUICommon.BetterStaticText( self, 'minimum:' ), CC.FLAGS_EXPAND_PERPENDICULAR )
        vbox.AddF( self._min_size, CC.FLAGS_EXPAND_PERPENDICULAR )
        vbox.AddF( self._min_resolution, CC.FLAGS_EXPAND_PERPENDICULAR )

@@ -733,10 +754,13 @@ class EditFileImportOptions( ClientGUIScrolledPanels.EditPanel ):
        
        automatic_archive = self._auto_archive.GetValue()
        exclude_deleted = self._exclude_deleted.GetValue()
+        present_new_files = self._present_new_files.GetValue()
+        present_already_in_inbox_files = self._present_already_in_inbox_files.GetValue()
+        present_archived_files = self._present_archived_files.GetValue()
        min_size = self._min_size.GetValue()
        min_resolution = self._min_resolution.GetValue()
        
-        return ClientImporting.FileImportOptions( automatic_archive = automatic_archive, exclude_deleted = exclude_deleted, min_size = min_size, min_resolution = min_resolution )
+        return ClientImporting.FileImportOptions( automatic_archive = automatic_archive, exclude_deleted = exclude_deleted, present_new_files = present_new_files, present_already_in_inbox_files = present_already_in_inbox_files, present_archived_files = present_archived_files, min_size = min_size, min_resolution = min_resolution )
        
    
class EditFrameLocationPanel( ClientGUIScrolledPanels.EditPanel ):
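The expanded options object round-trips through a seven-element ToTuple in the same order the panel above unpacks. A minimal sketch with illustrative values (the 'quiet' preset this version migrates existing queues to):

file_import_options = ClientImporting.FileImportOptions( automatic_archive = False, exclude_deleted = True, present_new_files = True, present_already_in_inbox_files = False, present_archived_files = False, min_size = None, min_resolution = None )

( automatic_archive, exclude_deleted, present_new_files, present_already_in_inbox_files, present_archived_files, min_size, min_resolution ) = file_import_options.ToTuple()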
@@ -1767,6 +1767,10 @@ class ManageOptionsPanel( ClientGUIScrolledPanels.ManagePanel ):
        
        wx.Panel.__init__( self, parent )
        
+        general = ClientGUICommon.StaticBox( self, 'general' )
+        
+        self._verify_regular_https = wx.CheckBox( general )
+        
        self._external_host = wx.TextCtrl( self )
        self._external_host.SetToolTipString( 'If you have trouble parsing your external ip using UPnP, you can force it to be this.' )

@@ -1785,13 +1789,15 @@ class ManageOptionsPanel( ClientGUIScrolledPanels.ManagePanel ):
        
        #
        
+        self._new_options = HG.client_controller.new_options
+        
+        self._verify_regular_https.SetValue( self._new_options.GetBoolean( 'verify_regular_https' ) )
+        
        if HC.options[ 'external_host' ] is not None:
            
            self._external_host.SetValue( HC.options[ 'external_host' ] )
            
        
-        self._new_options = HG.client_controller.new_options
-        
        self._network_timeout.SetValue( self._new_options.GetInteger( 'network_timeout' ) )
        
        self._proxy_type.Append( 'http', 'http' )

@@ -1824,6 +1830,14 @@ class ManageOptionsPanel( ClientGUIScrolledPanels.ManagePanel ):
        
        #
        
+        rows = []
+        
+        rows.append( ( 'BUGFIX: verify regular https traffic:', self._verify_regular_https ) )
+        
+        gridbox = ClientGUICommon.WrapInGrid( general, rows )
+        
+        general.AddF( gridbox, CC.FLAGS_EXPAND_SIZER_BOTH_WAYS )
+        
        text = 'You have to restart the client for proxy settings to take effect.'
        text += os.linesep
        text += 'This is in a buggy prototype stage right now, pending a rewrite of the networking engine.'

@@ -1857,6 +1871,7 @@ class ManageOptionsPanel( ClientGUIScrolledPanels.ManagePanel ):
        
        vbox = wx.BoxSizer( wx.VERTICAL )
        
+        vbox.AddF( general, CC.FLAGS_EXPAND_PERPENDICULAR )
        vbox.AddF( gridbox, CC.FLAGS_EXPAND_SIZER_PERPENDICULAR )
        vbox.AddF( proxy_panel, CC.FLAGS_EXPAND_PERPENDICULAR )

@@ -1865,6 +1880,8 @@ class ManageOptionsPanel( ClientGUIScrolledPanels.ManagePanel ):
    
    def UpdateOptions( self ):
        
+        self._new_options.SetBoolean( 'verify_regular_https', self._verify_regular_https.GetValue() )
+        
        if self._proxy_address.GetValue() == '':
            
            HC.options[ 'proxy' ] = None

@@ -1904,10 +1921,6 @@ class ManageOptionsPanel( ClientGUIScrolledPanels.ManagePanel ):
        
        self._new_options = new_options
        
-        general = ClientGUICommon.StaticBox( self, 'general' )
-        
-        self._verify_regular_https = wx.CheckBox( general )
-        
        #
        
        gallery_downloader = ClientGUICommon.StaticBox( self, 'gallery downloader' )

@@ -1916,6 +1929,13 @@ class ManageOptionsPanel( ClientGUIScrolledPanels.ManagePanel ):
        
        #
        
+        subscriptions = ClientGUICommon.StaticBox( self, 'subscriptions' )
+        
+        self._process_subs_in_random_order = wx.CheckBox( subscriptions )
+        self._process_subs_in_random_order.SetToolTipString( 'Processing in random order is useful whenever bandwidth is tight, as it stops an \'aardvark\' subscription from always getting first whack at what is available. Otherwise, they will be processed in alphabetical order.' )
+        
+        #
+        
        thread_checker = ClientGUICommon.StaticBox( self, 'thread checker' )
        
        self._permit_watchers_to_name_their_pages = wx.CheckBox( thread_checker )

@@ -1929,7 +1949,7 @@ class ManageOptionsPanel( ClientGUIScrolledPanels.ManagePanel ):
        
        #
        
-        self._verify_regular_https.SetValue( self._new_options.GetBoolean( 'verify_regular_https' ) )
+        self._process_subs_in_random_order.SetValue( self._new_options.GetBoolean( 'process_subs_in_random_order' ) )
        
        self._gallery_file_limit.SetValue( HC.options[ 'gallery_file_limit' ] )

@@ -1940,17 +1960,17 @@ class ManageOptionsPanel( ClientGUIScrolledPanels.ManagePanel ):
        
        #
        
-        rows = []
-        
-        rows.append( ( 'BUGFIX: verify regular https traffic:', self._verify_regular_https ) )
-        
-        gridbox = ClientGUICommon.WrapInGrid( general, rows )
-        
-        general.AddF( gridbox, CC.FLAGS_EXPAND_SIZER_BOTH_WAYS )
+        gallery_downloader.AddF( self._gallery_file_limit, CC.FLAGS_EXPAND_PERPENDICULAR )
        
        #
        
-        gallery_downloader.AddF( self._gallery_file_limit, CC.FLAGS_EXPAND_PERPENDICULAR )
+        rows = []
+        
+        rows.append( ( 'Sync subscriptions in random order:', self._process_subs_in_random_order ) )
+        
+        gridbox = ClientGUICommon.WrapInGrid( subscriptions, rows )
+        
+        subscriptions.AddF( gridbox, CC.FLAGS_EXPAND_SIZER_BOTH_WAYS )
        
        #

@@ -1970,8 +1990,8 @@ class ManageOptionsPanel( ClientGUIScrolledPanels.ManagePanel ):
        
        vbox = wx.BoxSizer( wx.VERTICAL )
        
-        vbox.AddF( general, CC.FLAGS_EXPAND_PERPENDICULAR )
        vbox.AddF( gallery_downloader, CC.FLAGS_EXPAND_PERPENDICULAR )
+        vbox.AddF( subscriptions, CC.FLAGS_EXPAND_PERPENDICULAR )
        vbox.AddF( thread_checker, CC.FLAGS_EXPAND_PERPENDICULAR )
        
        self.SetSizer( vbox )

@@ -1979,9 +1999,10 @@ class ManageOptionsPanel( ClientGUIScrolledPanels.ManagePanel ):
    
    def UpdateOptions( self ):
        
-        self._new_options.SetBoolean( 'verify_regular_https', self._verify_regular_https.GetValue() )
        HC.options[ 'gallery_file_limit' ] = self._gallery_file_limit.GetValue()
        
+        self._new_options.SetBoolean( 'process_subs_in_random_order', self._process_subs_in_random_order.GetValue() )
+        
        self._new_options.SetBoolean( 'permit_watchers_to_name_their_pages', self._permit_watchers_to_name_their_pages.GetValue() )
        
        self._new_options.SetDefaultThreadCheckerOptions( self._thread_checker_options.GetValue() )

@@ -2855,10 +2876,10 @@ class ManageOptionsPanel( ClientGUIScrolledPanels.ManagePanel ):
        self._animation_start_position = wx.SpinCtrl( self, min = 0, max = 100 )
        
        self._disable_cv_for_gifs = wx.CheckBox( self )
-        self._disable_cv_for_gifs.SetToolTipString( 'OpenCV is good at rendering gifs, but if you have problems with it and your graphics card, check this and the less reliable and slower PIL will be used instead.' )
+        self._disable_cv_for_gifs.SetToolTipString( 'OpenCV is good at rendering gifs, but if you have problems with it and your graphics card, check this and the less reliable and slower PIL will be used instead. EDIT: OpenCV is much better these days--this is mostly not needed.' )
        
        self._load_images_with_pil = wx.CheckBox( self )
-        self._load_images_with_pil.SetToolTipString( 'OpenCV is much faster than PIL, but it is sometimes less reliable. Switch this on if you experience crashes or other unusual problems while importing or viewing certain images.' )
+        self._load_images_with_pil.SetToolTipString( 'OpenCV is much faster than PIL, but it is sometimes less reliable. Switch this on if you experience crashes or other unusual problems while importing or viewing certain images. EDIT: OpenCV is much better these days--this is mostly not needed.' )
        
        self._do_not_import_decompression_bombs = wx.CheckBox( self )
        self._do_not_import_decompression_bombs.SetToolTipString( 'Some images, called Decompression Bombs, consume huge amounts of memory and CPU time (typically multiple GB and 30s+) to render. These can be malicious attacks or accidentally inelegant compressions of very large (typically 100MegaPixel+) images. Check this to disallow them before they blat your computer.' )

@@ -2913,12 +2934,12 @@ class ManageOptionsPanel( ClientGUIScrolledPanels.ManagePanel ):
        rows = []
        
        rows.append( ( 'Start animations this % in: ', self._animation_start_position ) )
-        rows.append( ( 'Disable OpenCV for gifs: ', self._disable_cv_for_gifs ) )
-        rows.append( ( 'Load images with PIL: ', self._load_images_with_pil ) )
        rows.append( ( 'Do not import Decompression Bombs: ', self._do_not_import_decompression_bombs ) )
        rows.append( ( 'Prefer system FFMPEG: ', self._use_system_ffmpeg ) )
-        rows.append( ( 'WINDOWS ONLY: Hide and anchor mouse cursor on slow canvas drags: ', self._anchor_and_hide_canvas_drags ) )
        rows.append( ( 'Media zooms: ', self._media_zooms ) )
+        rows.append( ( 'WINDOWS ONLY: Hide and anchor mouse cursor on slow canvas drags: ', self._anchor_and_hide_canvas_drags ) )
+        rows.append( ( 'BUGFIX: Load images with PIL: ', self._load_images_with_pil ) )
+        rows.append( ( 'BUGFIX: Disable OpenCV for gifs: ', self._disable_cv_for_gifs ) )
        
        gridbox = ClientGUICommon.WrapInGrid( self, rows )

@@ -5048,7 +5069,7 @@ class ManageSubscriptionsPanel( ClientGUIScrolledPanels.ManagePanel ):
        subscriptions_panel.AddButton( 'edit', self.Edit, enabled_only_on_selection = True )
        subscriptions_panel.AddButton( 'delete', self.Delete, enabled_only_on_selection = True )
        
-        subscriptions_panel.AddSeparator()
+        subscriptions_panel.NewButtonRow()
        
        subscriptions_panel.AddButton( 'merge', self.Merge, enabled_check_func = self._CanMerge )
        subscriptions_panel.AddButton( 'separate', self.Separate, enabled_check_func = self._CanSeparate )

@@ -5061,6 +5082,11 @@ class ManageSubscriptionsPanel( ClientGUIScrolledPanels.ManagePanel ):
        subscriptions_panel.AddButton( 'check queries now', self.CheckNow, enabled_check_func = self._CanCheckNow )
        subscriptions_panel.AddButton( 'reset', self.Reset, enabled_check_func = self._CanReset )
        
+        subscriptions_panel.NewButtonRow()
+        
+        subscriptions_panel.AddButton( 'select subscriptions', self.SelectSubscriptions )
+        subscriptions_panel.AddButton( 'overwrite checker timings', self.SetCheckerOptions, enabled_only_on_selection = True )
+        
        #
        
        self._subscriptions.AddDatas( subscriptions )

@@ -5610,6 +5636,33 @@ class ManageSubscriptionsPanel( ClientGUIScrolledPanels.ManagePanel ):
        self._subscriptions.UpdateDatas( subscriptions )
        
    
+    def SelectSubscriptions( self ):
+        
+        message = 'This selects subscriptions based on query text. Please enter some search text, and any subscription that has a query that includes that text will be selected.'
+        
+        with ClientGUIDialogs.DialogTextEntry( self, message ) as dlg:
+            
+            if dlg.ShowModal() == wx.ID_OK:
+                
+                search_text = dlg.GetValue()
+                
+                self._subscriptions.SelectNone()
+                
+                selectee_subscriptions = []
+                
+                for subscription in self._subscriptions.GetData():
+                    
+                    if subscription.HasQuerySearchText( search_text ):
+                        
+                        selectee_subscriptions.append( subscription )
+                        
+                    
+                
+                self._subscriptions.SelectDatas( selectee_subscriptions )
+                
+            
+        
+    
    def Separate( self ):
        
        message = 'Are you sure you want to separate the selected subscriptions? This will cause all the subscriptions with multiple queries to be split into duplicates that each only have one query.'
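Subscription.HasQuerySearchText, which SelectSubscriptions calls above, is not part of this diff. A plausible sketch of the substring test it implies; the _queries attribute and GetQueryText accessor are assumptions:

def HasQuerySearchText( self, search_text ):
    
    search_text = search_text.lower()
    
    for query in self._queries: # assumption: a subscription holds its queries here
        
        query_text = query.GetQueryText() # assumption: accessor name
        
        if search_text in query_text.lower():
            
            return True
            
        
    
    return False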
@@ -5639,6 +5692,32 @@ class ManageSubscriptionsPanel( ClientGUIScrolledPanels.ManagePanel ):
        
    
+    def SetCheckerOptions( self ):
+        
+        checker_options = ClientData.CheckerOptions( intended_files_per_check = 5, never_faster_than = 86400, never_slower_than = 90 * 86400, death_file_velocity = ( 1, 90 * 86400 ) )
+        
+        with ClientGUITopLevelWindows.DialogEdit( self, 'edit check timings' ) as dlg:
+            
+            panel = ClientGUITime.EditCheckerOptions( dlg, checker_options )
+            
+            dlg.SetPanel( panel )
+            
+            if dlg.ShowModal() == wx.ID_OK:
+                
+                checker_options = panel.GetValue()
+                
+                subscriptions = self._subscriptions.GetData( only_selected = True )
+                
+                for subscription in subscriptions:
+                    
+                    subscription.SetCheckerOptions( checker_options )
+                    
+                
+                self._subscriptions.UpdateDatas( subscriptions )
+                
+            
+        
+    
class ManageTagsPanel( ClientGUIScrolledPanels.ManagePanel ):
    
    def __init__( self, parent, file_service_key, media, immediate_commit = False, canvas_key = None ):
@@ -3,12 +3,15 @@ import ClientGUICommon
import ClientGUIDialogs
import ClientGUIListCtrl
+import ClientGUIMenus
import ClientGUISerialisable
import ClientGUIScrolledPanels
import ClientGUITopLevelWindows
+import ClientSerialisable
import HydrusConstants as HC
import HydrusData
import HydrusGlobals as HG
import HydrusPaths
+import HydrusText
import os
import webbrowser
import wx

@@ -320,6 +323,105 @@ class SeedCacheButton( ClientGUICommon.BetterBitmapButton ):
        
    
+    def _GetExportableSourcesString( self ):
+        
+        seed_cache = self._seed_cache_get_callable()
+        
+        seeds = seed_cache.GetSeeds()
+        
+        sources = [ seed.seed_data for seed in seeds ]
+        
+        return os.linesep.join( sources )
+        
+    
+    def _GetSourcesFromSourcesString( self, sources_string ):
+        
+        sources_string = HydrusData.ToUnicode( sources_string )
+        
+        sources = HydrusText.DeserialiseNewlinedTexts( sources_string )
+        
+        return sources
+        
+    
+    def _ImportFromClipboard( self ):
+        
+        raw_text = HG.client_controller.GetClipboardText()
+        
+        sources = self._GetSourcesFromSourcesString( raw_text )
+        
+        try:
+            
+            self._ImportSources( sources )
+            
+        except:
+            
+            wx.MessageBox( 'Could not import!' )
+            
+            raise
+            
+        
+    
+    def _ImportFromPng( self ):
+        
+        with wx.FileDialog( self, 'select the png with the sources', wildcard = 'PNG (*.png)|*.png' ) as dlg:
+            
+            if dlg.ShowModal() == wx.ID_OK:
+                
+                path = HydrusData.ToUnicode( dlg.GetPath() )
+                
+                payload = ClientSerialisable.LoadFromPng( path )
+                
+                try:
+                    
+                    sources = self._GetSourcesFromSourcesString( payload )
+                    
+                    self._ImportSources( sources )
+                    
+                except:
+                    
+                    wx.MessageBox( 'Could not import!' )
+                    
+                    raise
+                    
+                
+            
+        
+    
+    def _ImportSources( self, sources ):
+        
+        seed_cache = self._seed_cache_get_callable()
+        
+        if sources[0].startswith( 'http' ):
+            
+            seed_cache.AddURLs( sources )
+            
+        else:
+            
+            seed_cache.AddPaths( sources )
+            
+        
+    
+    def _ExportToPng( self ):
+        
+        payload = self._GetExportableSourcesString()
+        
+        with ClientGUITopLevelWindows.DialogNullipotent( self, 'export to png' ) as dlg:
+            
+            panel = ClientGUISerialisable.PngExportPanel( dlg, payload )
+            
+            dlg.SetPanel( panel )
+            
+            dlg.ShowModal()
+            
+        
+    
+    def _ExportToClipboard( self ):
+        
+        payload = self._GetExportableSourcesString()
+        
+        HG.client_controller.pub( 'clipboard', 'text', payload )
+        
+    
    def _RetryFailures( self ):
        
        message = 'Are you sure you want to retry all the failed files?'

@@ -386,15 +488,15 @@ class SeedCacheButton( ClientGUICommon.BetterBitmapButton ):
    
    def EventShowMenu( self, event ):
        
-        seed_cache = self._seed_cache_get_callable()
+        menu = wx.Menu()
        
-        menu_items = []
+        seed_cache = self._seed_cache_get_callable()
        
        num_failures = seed_cache.GetSeedCount( CC.STATUS_FAILED )
        
        if num_failures > 0:
            
-            menu_items.append( ( 'normal', 'retry ' + HydrusData.ConvertIntToPrettyString( num_failures ) + ' failures', 'Tell this cache to reattempt all its failures.', self._RetryFailures ) )
+            ClientGUIMenus.AppendMenuItem( self, menu, 'retry ' + HydrusData.ConvertIntToPrettyString( num_failures ) + ' failures', 'Tell this cache to reattempt all its failures.', self._RetryFailures )
            
        
        num_unknown = seed_cache.GetSeedCount( CC.STATUS_UNKNOWN )

@@ -403,46 +505,30 @@ class SeedCacheButton( ClientGUICommon.BetterBitmapButton ):
        
        if num_processed > 0:
            
-            menu_items.append( ( 'normal', 'delete ' + HydrusData.ConvertIntToPrettyString( num_processed ) + ' \'processed\' files from the queue', 'Tell this cache to clear out processed files, reducing the size of the queue.', self._ClearProcessed ) )
+            ClientGUIMenus.AppendMenuItem( self, menu, 'delete ' + HydrusData.ConvertIntToPrettyString( num_processed ) + ' \'processed\' files from the queue', 'Tell this cache to clear out processed files, reducing the size of the queue.', self._ClearProcessed )
            
        
-        if len( menu_items ) > 0:
+        ClientGUIMenus.AppendSeparator( menu )
+        
+        if len( seed_cache ) > 0:
            
-            menu = wx.Menu()
+            submenu = wx.Menu()
            
-            for ( item_type, title, description, data ) in menu_items:
-                
-                if item_type == 'normal':
-                    
-                    func = data
-                    
-                    ClientGUIMenus.AppendMenuItem( self, menu, title, description, func )
-                    
-                elif item_type == 'check':
-                    
-                    check_manager = data
-                    
-                    current_value = check_manager.GetCurrentValue()
-                    func = check_manager.Invert
-                    
-                    if current_value is not None:
-                        
-                        ClientGUIMenus.AppendMenuCheckItem( self, menu, title, description, current_value, func )
-                        
-                    
-                elif item_type == 'separator':
-                    
-                    ClientGUIMenus.AppendSeparator( menu )
-                    
-                
+            ClientGUIMenus.AppendMenuItem( self, submenu, 'to clipboard', 'Copy all the sources in this list to the clipboard.', self._ExportToClipboard )
+            ClientGUIMenus.AppendMenuItem( self, submenu, 'to png', 'Export all the sources in this list to a png file.', self._ExportToPng )
            
-            HG.client_controller.PopupMenu( self, menu )
+            ClientGUIMenus.AppendMenu( menu, submenu, 'export all sources' )
            
-        else:
            
-            event.Skip()
+        submenu = wx.Menu()
+        
+        ClientGUIMenus.AppendMenuItem( self, submenu, 'from clipboard', 'Import new urls or paths to this list from the clipboard.', self._ImportFromClipboard )
+        ClientGUIMenus.AppendMenuItem( self, submenu, 'from png', 'Import new urls or paths to this list from a png file.', self._ImportFromPng )
+        
+        ClientGUIMenus.AppendMenu( menu, submenu, 'import new sources' )
+        
+        HG.client_controller.PopupMenu( self, menu )
        
    
class SeedCacheStatusControl( wx.Panel ):
|
@@ -525,6 +525,11 @@ class DialogNullipotent( DialogThatTakesScrollablePanelClose ):
     def DoOK( self ):
         
+        if not self.IsModal():
+            
+            return
+            
+        
         SaveTLWSizeAndPosition( self, self._frame_key )
         
         self.EndModal( wx.ID_OK )
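
The IsModal guards added across these DoOK methods are the replacement for the old per-dialog _ok_done flag (removed in the next two hunks): once EndModal has run, any stray second ok event is simply ignored. A minimal standalone sketch of the pattern, with the wx plumbing stubbed out and all names illustrative:

    class GuardedDialog( object ):
        
        def __init__( self ):
            
            self._modal = True
            
        
        def IsModal( self ):
            
            return self._modal
            
        
        def EndModal( self, code ):
            
            self._modal = False
            
        
        def DoOK( self ):
            
            if not self.IsModal():
                
                return # a double-fired ok event is a no-op
                
            
            self.EndModal( 0 )
            
        
    
    dialog = GuardedDialog()
    
    dialog.DoOK() # ends the modal loop
    dialog.DoOK() # second fire: harmlessly ignored
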
@@ -536,12 +541,10 @@ class DialogNullipotentVetoable( DialogThatTakesScrollablePanelClose ):
         DialogThatTakesScrollablePanelClose.__init__( self, parent, title, style_override = style_override )
         
-        self._ok_done = False
-        
     
     def DoOK( self ):
         
-        if self._ok_done:
+        if not self.IsModal():
             
             return
@@ -566,8 +569,6 @@ class DialogNullipotentVetoable( DialogThatTakesScrollablePanelClose ):
         self.EndModal( wx.ID_OK )
         
-        self._ok_done = True
-        
     
 class DialogThatTakesScrollablePanelApplyCancel( DialogThatTakesScrollablePanel ):
@@ -600,6 +601,11 @@ class DialogEdit( DialogThatTakesScrollablePanelApplyCancel ):
     def DoOK( self ):
         
+        if not self.IsModal():
+            
+            return
+            
+        
         try:
             
             value = self._panel.GetValue()
@@ -625,6 +631,11 @@ class DialogManage( DialogThatTakesScrollablePanelApplyCancel ):
     def DoOK( self ):
         
+        if not self.IsModal():
+            
+            return
+            
+        
         try:
             
             self._panel.CommitChanges()
@@ -97,7 +97,7 @@ def THREADDownloadURL( job_key, url, url_string ):
             job_key.SetVariable( 'popup_text_1', 'was already in the database!' )
             
-        job_key.SetVariable( 'popup_files', ( { hash }, 'download' ) )
+        job_key.SetVariable( 'popup_files', ( [ hash ], 'download' ) )
         
     elif result == CC.STATUS_DELETED:
@@ -116,7 +116,8 @@ def THREADDownloadURLs( job_key, urls, title ):
     num_deleted = 0
     num_failed = 0
     
-    successful_hashes = set()
+    presentation_hashes = []
+    presentation_hashes_fast = set()
     
     for ( i, url ) in enumerate( urls ):
@@ -189,7 +190,12 @@ def THREADDownloadURLs( job_key, urls, title ):
                 num_redundant += 1
                 
-            successful_hashes.add( hash )
+            if hash not in presentation_hashes_fast:
+                
+                presentation_hashes.append( hash )
+                
+                presentation_hashes_fast.add( hash )
+                
             
         elif result == CC.STATUS_DELETED:
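
The presentation_hashes list plus presentation_hashes_fast set pairing that replaces the old successful_hashes set is what lets file popups preserve import order while keeping membership checks O(1). A minimal standalone sketch of the same pattern, with illustrative names:

    # the list keeps first-seen order for presentation; the set makes
    # the duplicate check constant-time
    def dedupe_preserving_order( hashes ):
        
        presentation_hashes = []
        presentation_hashes_fast = set()
        
        for hash in hashes:
            
            if hash not in presentation_hashes_fast:
                
                presentation_hashes.append( hash )
                presentation_hashes_fast.add( hash )
                
            
        
        return presentation_hashes
    
    # dedupe_preserving_order( [ 'a', 'b', 'a', 'c' ] ) -> [ 'a', 'b', 'c' ]
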
@@ -223,9 +229,9 @@ def THREADDownloadURLs( job_key, urls, title ):
     job_key.SetVariable( 'popup_text_1', ', '.join( text_components ) )
     
-    if len( successful_hashes ) > 0:
+    if len( presentation_hashes ) > 0:
         
-        job_key.SetVariable( 'popup_files', ( successful_hashes, 'downloads' ) )
+        job_key.SetVariable( 'popup_files', ( presentation_hashes, 'downloads' ) )
         
     
     job_key.DeleteVariable( 'popup_gauge_1' )
@@ -300,9 +306,7 @@ class FileImportJob( object ):
         if self._pre_import_status == CC.STATUS_REDUNDANT:
             
-            ( automatic_archive, exclude_deleted, min_size, min_resolution ) = self._file_import_options.ToTuple()
-            
-            if automatic_archive:
+            if self._file_import_options.GetAutomaticArchive():
                 
                 service_keys_to_content_updates = { CC.COMBINED_LOCAL_FILE_SERVICE_KEY : [ HydrusData.ContentUpdate( HC.CONTENT_TYPE_FILES, HC.CONTENT_UPDATE_ARCHIVE, set( ( self._hash, ) ) ) ] }
@@ -313,7 +317,7 @@ class FileImportJob( object ):
     def IsGoodToImport( self ):
         
-        ( automatic_archive, exclude_deleted, min_size, min_resolution ) = self._file_import_options.ToTuple()
+        ( automatic_archive, exclude_deleted, present_new_files, present_already_in_inbox_files, present_archived_files, min_size, min_resolution ) = self._file_import_options.ToTuple()
        
         ( size, mime, width, height, duration, num_frames, num_words ) = self._file_info
@@ -350,9 +354,7 @@ class FileImportJob( object ):
         if self._pre_import_status == CC.STATUS_DELETED:
             
-            ( automatic_archive, exclude_deleted, min_size, min_resolution ) = self._file_import_options.ToTuple()
-            
-            if not exclude_deleted:
+            if not self._file_import_options.GetExcludeDeleted():
                 
                 return True
@@ -637,9 +639,14 @@ class GalleryImport( HydrusSerialisable.SerialisableBase ):
                     HG.client_controller.WriteSynchronous( 'content_updates', service_keys_to_content_updates )
                     
-                ( media_result, ) = HG.client_controller.Read( 'media_results', ( hash, ) )
+                in_inbox = HG.client_controller.Read( 'in_inbox', hash )
                 
-                HG.client_controller.pub( 'add_media_results', page_key, ( media_result, ) )
+                if self._file_import_options.ShouldPresent( status, in_inbox ):
+                    
+                    ( media_result, ) = HG.client_controller.Read( 'media_results', ( hash, ) )
+                    
+                    HG.client_controller.pub( 'add_media_results', page_key, ( media_result, ) )
+                    
                 
         except HydrusExceptions.CancelledException:
@@ -1392,28 +1399,71 @@ class FileImportOptions( HydrusSerialisable.SerialisableBase ):
     SERIALISABLE_TYPE = HydrusSerialisable.SERIALISABLE_TYPE_FILE_IMPORT_OPTIONS
     SERIALISABLE_NAME = 'File Import Options'
-    SERIALISABLE_VERSION = 1
+    SERIALISABLE_VERSION = 2
     
-    def __init__( self, automatic_archive = None, exclude_deleted = None, min_size = None, min_resolution = None ):
+    def __init__( self, automatic_archive = None, exclude_deleted = None, present_new_files = None, present_already_in_inbox_files = None, present_archived_files = None, min_size = None, min_resolution = None ):
         
         HydrusSerialisable.SerialisableBase.__init__( self )
         
         if automatic_archive is None:
             
             automatic_archive = False
             
         
         if exclude_deleted is None:
             
             exclude_deleted = True
             
         
+        if present_new_files is None:
+            
+            present_new_files = True
+            
+        
+        if present_already_in_inbox_files is None:
+            
+            present_already_in_inbox_files = True
+            
+        
+        if present_archived_files is None:
+            
+            present_archived_files = True
+            
+        
         self._automatic_archive = automatic_archive
         self._exclude_deleted = exclude_deleted
+        self._present_new_files = present_new_files
+        self._present_already_in_inbox_files = present_already_in_inbox_files
+        self._present_archived_files = present_archived_files
         self._min_size = min_size
         self._min_resolution = min_resolution
         
     
     def _GetSerialisableInfo( self ):
         
-        return ( self._automatic_archive, self._exclude_deleted, self._min_size, self._min_resolution )
+        return ( self._automatic_archive, self._exclude_deleted, self._present_new_files, self._present_already_in_inbox_files, self._present_archived_files, self._min_size, self._min_resolution )
         
     
     def _InitialiseFromSerialisableInfo( self, serialisable_info ):
         
-        ( self._automatic_archive, self._exclude_deleted, self._min_size, self._min_resolution ) = serialisable_info
+        ( self._automatic_archive, self._exclude_deleted, self._present_new_files, self._present_already_in_inbox_files, self._present_archived_files, self._min_size, self._min_resolution ) = serialisable_info
         
     
+    def _UpdateSerialisableInfo( self, version, old_serialisable_info ):
+        
+        if version == 1:
+            
+            ( automatic_archive, exclude_deleted, min_size, min_resolution ) = old_serialisable_info
+            
+            present_new_files = True
+            present_already_in_inbox_files = False
+            present_archived_files = False
+            
+            new_serialisable_info = ( automatic_archive, exclude_deleted, present_new_files, present_already_in_inbox_files, present_archived_files, min_size, min_resolution )
+            
+            return ( 2, new_serialisable_info )
+            
+        
+    
     def FileIsValid( self, size, resolution = None ):
         
         if self._min_size is not None and size < self._min_size:
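
The version bump from 1 to 2 relies on the serialisable update chain: stored version-1 tuples are migrated through _UpdateSerialisableInfo at load time, picking up the 'quiet' ( True, False, False ) presentation defaults the changelog warns about. A compressed sketch of that load path -- not the real hydrus loader, just the shape of it:

    # walk the stored tuple up from its saved version to the current
    # SERIALISABLE_VERSION, then initialise the object from it
    def load_file_import_options( stored_version, stored_info, obj ):
        
        version = stored_version
        info = stored_info
        
        while version < 2: # 2 == FileImportOptions.SERIALISABLE_VERSION
            
            ( version, info ) = obj._UpdateSerialisableInfo( version, info )
            
        
        obj._InitialiseFromSerialisableInfo( info )
    
    # a version 1 tuple gains ( True, False, False ) presentation defaults,
    # i.e. existing queues only present 'new' files until edited
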
@@ -1460,6 +1510,36 @@ class FileImportOptions( HydrusSerialisable.SerialisableBase ):
             statements.append( 'excluding previously deleted' )
             
         
+        presentation_statements = []
+        
+        if self._present_new_files:
+            
+            presentation_statements.append( 'new' )
+            
+        
+        if self._present_already_in_inbox_files:
+            
+            presentation_statements.append( 'already in inbox' )
+            
+        
+        if self._present_archived_files:
+            
+            presentation_statements.append( 'already in archive' )
+            
+        
+        if len( presentation_statements ) == 0:
+            
+            statements.append( 'not presenting any files' )
+            
+        elif len( presentation_statements ) == 3:
+            
+            statements.append( 'presenting all files' )
+            
+        else:
+            
+            statements.append( 'presenting ' + ', '.join( presentation_statements ) + ' files' )
+            
+        
         if self._min_size is not None:
             
             statements.append( 'excluding < ' + HydrusData.ConvertIntToBytes( self._min_size ) )
@@ -1472,19 +1552,35 @@ class FileImportOptions( HydrusSerialisable.SerialisableBase ):
             statements.append( 'excluding < ( ' + HydrusData.ConvertIntToPrettyString( width ) + ' x ' + HydrusData.ConvertIntToPrettyString( height ) + ' )' )
             
         
         if len( statements ) == 0:
             
             statements.append( 'no options set' )
             
         
-        summary = ', '.join( statements )
+        summary = os.linesep.join( statements )
         
         return summary
         
     
+    def ShouldPresent( self, status, inbox ):
+        
+        if status == CC.STATUS_SUCCESSFUL and self._present_new_files:
+            
+            return True
+            
+        elif status == CC.STATUS_REDUNDANT:
+            
+            if inbox and self._present_already_in_inbox_files:
+                
+                return True
+                
+            elif not inbox and self._present_archived_files:
+                
+                return True
+                
+            
+        
+        return False
+        
+    
     def ToTuple( self ):
         
-        return ( self._automatic_archive, self._exclude_deleted, self._min_size, self._min_resolution )
+        return ( self._automatic_archive, self._exclude_deleted, self._present_new_files, self._present_already_in_inbox_files, self._present_archived_files, self._min_size, self._min_resolution )
         
     
 HydrusSerialisable.SERIALISABLE_TYPES_TO_OBJECT_TYPES[ HydrusSerialisable.SERIALISABLE_TYPE_FILE_IMPORT_OPTIONS ] = FileImportOptions
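
ShouldPresent is a small truth table over the import status, the inbox flag, and the three presentation booleans. Assuming the CC status constants as used in the hunks above, the expected behaviour looks like this (illustrative expectations, not a test from the repo, and meant to run in the client's namespace):

    options = FileImportOptions( present_new_files = True, present_already_in_inbox_files = False, present_archived_files = True )
    
    assert options.ShouldPresent( CC.STATUS_SUCCESSFUL, True )     # new file: presented
    assert not options.ShouldPresent( CC.STATUS_REDUNDANT, True )  # already in db, still in inbox: hidden
    assert options.ShouldPresent( CC.STATUS_REDUNDANT, False )     # already in db, archived: presented
    assert not options.ShouldPresent( CC.STATUS_DELETED, False )   # anything else falls through to False
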
@@ -1633,9 +1729,14 @@ class HDDImport( HydrusSerialisable.SerialisableBase ):
                 HG.client_controller.WriteSynchronous( 'content_updates', service_keys_to_content_updates )
                 
-            ( media_result, ) = HG.client_controller.Read( 'media_results', ( hash, ) )
+            in_inbox = HG.client_controller.Read( 'in_inbox', hash )
             
-            HG.client_controller.pub( 'add_media_results', page_key, ( media_result, ) )
+            if self._file_import_options.ShouldPresent( status, in_inbox ):
+                
+                ( media_result, ) = HG.client_controller.Read( 'media_results', ( hash, ) )
+                
+                HG.client_controller.pub( 'add_media_results', page_key, ( media_result, ) )
+                
             
             if self._delete_after_success:
@@ -1734,6 +1835,14 @@ class HDDImport( HydrusSerialisable.SerialisableBase ):
         
     
+    def GetFileImportOptions( self ):
+        
+        with self._lock:
+            
+            return self._file_import_options
+            
+        
+    
     def GetSeedCache( self ):
         
         return self._paths_cache
@@ -1757,6 +1866,14 @@ class HDDImport( HydrusSerialisable.SerialisableBase ):
         
     
+    def SetFileImportOptions( self, file_import_options ):
+        
+        with self._lock:
+            
+            self._file_import_options = file_import_options
+            
+        
+    
     def Start( self, page_key ):
         
         HG.client_controller.CallToThreadLongRunning( self._THREADWork, page_key )
@@ -1779,7 +1896,7 @@ class ImportFolder( HydrusSerialisable.SerialisableBaseNamed ):
         if file_import_options is None:
             
-            file_import_options = ClientDefaults.GetDefaultFileImportOptions()
+            file_import_options = ClientDefaults.GetDefaultFileImportOptions( for_quiet_queue = True )
             
         
         if tag_import_options is None:
@@ -2082,7 +2199,9 @@ class ImportFolder( HydrusSerialisable.SerialisableBaseNamed ):
         self._path_cache.AddPaths( new_paths )
         
-        successful_hashes = set()
+        num_files_imported = 0
+        presentation_hashes = []
+        presentation_hashes_fast = set()
         
         i = 0
@@ -2174,10 +2293,19 @@ class ImportFolder( HydrusSerialisable.SerialisableBaseNamed ):
                     HG.client_controller.WriteSynchronous( 'content_updates', service_keys_to_content_updates )
                     
                 
                 if status == CC.STATUS_SUCCESSFUL:
                     
-                    successful_hashes.add( hash )
+                    num_files_imported += 1
+                    
+                    if hash not in presentation_hashes_fast:
+                        
+                        in_inbox = HG.client_controller.Read( 'in_inbox', hash )
+                        
+                        if self._file_import_options.ShouldPresent( status, in_inbox ):
+                            
+                            presentation_hashes.append( hash )
+                            
+                            presentation_hashes_fast.add( hash )
+                            
+                        
+                    
                 else:
@@ -2202,16 +2330,16 @@ class ImportFolder( HydrusSerialisable.SerialisableBaseNamed ):
         
-        if len( successful_hashes ) > 0:
+        if num_files_imported > 0:
             
-            HydrusData.Print( 'Import folder ' + self._name + ' imported ' + HydrusData.ConvertIntToPrettyString( len( successful_hashes ) ) + ' files.' )
+            HydrusData.Print( 'Import folder ' + self._name + ' imported ' + HydrusData.ConvertIntToPrettyString( num_files_imported ) + ' files.' )
             
-            if self._open_popup:
+            if len( presentation_hashes ) > 0 and self._open_popup:
                 
                 job_key = ClientThreading.JobKey()
                 
                 job_key.SetVariable( 'popup_title', 'import folder - ' + self._name )
-                job_key.SetVariable( 'popup_files', ( successful_hashes, self._name ) )
+                job_key.SetVariable( 'popup_files', ( presentation_hashes, self._name ) )
                 
                 HG.client_controller.pub( 'message', job_key )
@@ -2466,7 +2594,9 @@ class PageOfImagesImport( HydrusSerialisable.SerialisableBase ):
                 seed.SetStatus( status, note = note )
                 
             if status in ( CC.STATUS_SUCCESSFUL, CC.STATUS_REDUNDANT ):
                 
+                in_inbox = HG.client_controller.Read( 'in_inbox', hash )
+                
+                if self._file_import_options.ShouldPresent( status, in_inbox ):
                     
                     ( media_result, ) = HG.client_controller.Read( 'media_results', ( hash, ) )
@@ -3377,7 +3507,7 @@ class SeedCache( HydrusSerialisable.SerialisableBase ):
     def AddPaths( self, paths ):
         
-        seeds = [ Seed( SEED_TYPE_HDD, path ) for path in paths ]
+        seeds = [ Seed( SEED_TYPE_HDD, path ) for path in paths if not self.HasPath( path ) ]
         
         self.AddSeeds( seeds )
@@ -3411,7 +3541,7 @@ class SeedCache( HydrusSerialisable.SerialisableBase ):
     def AddURLs( self, urls ):
         
-        seeds = [ Seed( SEED_TYPE_URL, url ) for url in urls ]
+        seeds = [ Seed( SEED_TYPE_URL, url ) for url in urls if not self.HasURL( url ) ]
         
         self.AddSeeds( seeds )
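
Both AddPaths and AddURLs now filter through HasPath/HasURL, so re-pasting the same sources into a cache is idempotent. A hypothetical mini-cache (a stand-in for the real SeedCache) showing why the duplicate entries disappear:

    class TinySeedCache( object ):
        
        def __init__( self ):
            
            self._urls = []
            
        
        def HasURL( self, url ):
            
            return url in self._urls
            
        
        def AddURLs( self, urls ):
            
            # the generator is consumed as we append, so dupes within one
            # batch are filtered too
            self._urls.extend( url for url in urls if not self.HasURL( url ) )
            
        
    
    cache = TinySeedCache()
    
    cache.AddURLs( [ 'http://a', 'http://b' ] )
    cache.AddURLs( [ 'http://a', 'http://c' ] ) # 'http://a' is skipped this time
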
@@ -3733,7 +3863,7 @@ class Subscription( HydrusSerialisable.SerialisableBaseNamed ):
         self._periodic_file_limit = 50
         self._paused = False
         
-        self._file_import_options = ClientDefaults.GetDefaultFileImportOptions()
+        self._file_import_options = ClientDefaults.GetDefaultFileImportOptions( for_quiet_queue = True )
         
         new_options = HG.client_controller.new_options
@@ -3749,6 +3879,27 @@ class Subscription( HydrusSerialisable.SerialisableBaseNamed ):
         self._no_work_until_reason = reason
         
     
+    def _GetQueriesForProcessing( self ):
+        
+        queries = list( self._queries )
+        
+        if HG.client_controller.new_options.GetBoolean( 'process_subs_in_random_order' ):
+            
+            random.shuffle( queries )
+            
+        else:
+            
+            def key( q ):
+                
+                return q.GetQueryText()
+                
+            
+            queries.sort( key = key )
+            
+        
+        return queries
+        
+    
     def _GetSerialisableInfo( self ):
         
         serialisable_gallery_identifier = self._gallery_identifier.GetSerialisableTuple()
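
_GetQueriesForProcessing centralises the ordering policy behind the new options->downloading checkbox: shuffled when 'process_subs_in_random_order' is on, otherwise a stable alphabetical pass by query text. A standalone sketch of the same policy with a stand-in query type:

    import random
    
    def order_queries( queries, process_subs_in_random_order ):
        
        queries = list( queries ) # never reorder the caller's list in place
        
        if process_subs_in_random_order:
            
            # random order stops the first query from hogging bandwidth
            # run after run
            random.shuffle( queries )
            
        else:
            
            queries.sort( key = lambda q: q.GetQueryText() )
            
        
        return queries
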
@@ -3850,13 +4001,13 @@ class Subscription( HydrusSerialisable.SerialisableBaseNamed ):
         error_count = 0
         
-        # do it randomly so we don't have the first one choking the others for bandwidth over and over
+        all_presentation_hashes = []
         
-        random_queries = list( self._queries )
+        queries = self._GetQueriesForProcessing()
         
-        random.shuffle( random_queries )
-        
-        for query in random_queries:
+        for query in queries:
+            
+            this_query_has_done_work = False
             
             ( query_text, seed_cache ) = query.GetQueryAndSeedCache()
@@ -3884,7 +4035,8 @@ class Subscription( HydrusSerialisable.SerialisableBaseNamed ):
             num_urls = seed_cache.GetSeedCount()
             
-            successful_hashes = set()
+            presentation_hashes = []
+            presentation_hashes_fast = set()
             
             while True:
@@ -3920,7 +4072,7 @@ class Subscription( HydrusSerialisable.SerialisableBaseNamed ):
                 if p1 or p3 or p4:
                     
-                    if p4:
+                    if p4 and this_query_has_done_work:
                         
                         job_key.SetVariable( 'popup_text_2', 'no more bandwidth to download files, so stopping for now' )
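
The extra this_query_has_done_work condition is what collapses the bandwidth warnings: a query that never got started stays silent when bandwidth runs out, so a sub with many starved queries reports once rather than once per query. A toy model of that effect, with all names illustrative:

    def maybe_report_bandwidth_stop( out_of_bandwidth, this_query_has_done_work, report ):
        
        if out_of_bandwidth and this_query_has_done_work:
            
            report( 'no more bandwidth to download files, so stopping for now' )
            
        
    
    messages = []
    
    maybe_report_bandwidth_stop( True, False, messages.append ) # starved query: silent
    maybe_report_bandwidth_stop( True, True, messages.append )  # active query: one message
    
    assert messages == [ 'no more bandwidth to download files, so stopping for now' ]
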
@@ -3990,8 +4142,6 @@ class Subscription( HydrusSerialisable.SerialisableBaseNamed ):
                             job_key.SetVariable( 'popup_text_2', x_out_of_y + 'import successful' )
                             
-                            successful_hashes.add( hash )
-                            
                         elif status == CC.STATUS_DELETED:
                             
                             job_key.SetVariable( 'popup_text_2', x_out_of_y + 'previously deleted' )
@@ -4001,6 +4151,23 @@ class Subscription( HydrusSerialisable.SerialisableBaseNamed ):
                             job_key.SetVariable( 'popup_text_2', x_out_of_y + 'already in db' )
                             
                         
+                        if status in ( CC.STATUS_SUCCESSFUL, CC.STATUS_REDUNDANT ):
+                            
+                            if hash not in presentation_hashes_fast:
+                                
+                                in_inbox = HG.client_controller.Read( 'in_inbox', hash )
+                                
+                                if self._file_import_options.ShouldPresent( status, in_inbox ):
+                                    
+                                    all_presentation_hashes.append( hash )
+                                    
+                                    presentation_hashes.append( hash )
+                                    
+                                    presentation_hashes_fast.add( hash )
+                                    
+                                
+                            
+                        
                     finally:
                         
                         HydrusPaths.CleanUpTempPath( os_file_handle, temp_path )
@@ -4061,9 +4228,11 @@ class Subscription( HydrusSerialisable.SerialisableBaseNamed ):
                 
-                if len( successful_hashes ) > 0:
+                this_query_has_done_work = True
+                
+                if len( presentation_hashes ) > 0:
                     
-                    job_key.SetVariable( 'popup_files', ( set( successful_hashes ), file_popup_text ) )
+                    job_key.SetVariable( 'popup_files', ( list( presentation_hashes ), file_popup_text ) )
                     
                 
                 time.sleep( 0.1 )
@@ -4071,14 +4240,16 @@ class Subscription( HydrusSerialisable.SerialisableBaseNamed ):
                 HG.client_controller.WaitUntilViewFree()
                 
-            if len( successful_hashes ) > 0:
-                
-                files_job_key = ClientThreading.JobKey()
-                
-                files_job_key.SetVariable( 'popup_files', ( set( successful_hashes ), file_popup_text ) )
-                
-                HG.client_controller.pub( 'message', files_job_key )
-                
-            
+        if len( all_presentation_hashes ) > 0:
+            
+            file_popup_text = self._name
+            
+            files_job_key = ClientThreading.JobKey()
+            
+            files_job_key.SetVariable( 'popup_files', ( all_presentation_hashes, file_popup_text ) )
+            
+            HG.client_controller.pub( 'message', files_job_key )
+            
+        
         job_key.DeleteVariable( 'popup_files' )
@@ -4132,7 +4303,9 @@ class Subscription( HydrusSerialisable.SerialisableBaseNamed ):
     def _SyncQuery( self, job_key ):
         
-        for query in self._queries:
+        queries = self._GetQueriesForProcessing()
+        
+        for query in queries:
             
             if not query.CanSync():
@@ -4388,6 +4561,21 @@ class Subscription( HydrusSerialisable.SerialisableBaseNamed ):
         return self._tag_import_options
         
     
+    def HasQuerySearchText( self, search_text ):
+        
+        for query in self._queries:
+            
+            query_text = query.GetQueryText()
+            
+            if search_text in query_text:
+                
+                return True
+                
+            
+        
+        return False
+        
+    
     def Merge( self, potential_mergee_subscriptions ):
         
         unmergable_subscriptions = []
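
HasQuerySearchText is the hook for the new 'select subs' button: selection reduces to a substring filter over the subscription list. A sketch, with the dialog plumbing left out and the function name illustrative:

    def select_matching_subscriptions( subscriptions, search_text ):
        
        # keep every sub where any query text contains the search text
        return [ sub for sub in subscriptions if sub.HasQuerySearchText( search_text ) ]
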
@@ -4443,12 +4631,24 @@ class Subscription( HydrusSerialisable.SerialisableBaseNamed ):
             subscription._queries = [ query.Duplicate() ]
             
+            subscription.SetName( self._name + ': ' + query.GetQueryText() )
+            
             subscriptions.append( subscription )
             
         
         return subscriptions
         
     
+    def SetCheckerOptions( self, checker_options ):
+        
+        self._checker_options = checker_options
+        
+        for query in self._queries:
+            
+            query.UpdateNextCheckTime( self._checker_options )
+            
+        
+    
     def SetTuple( self, gallery_identifier, gallery_stream_identifiers, queries, checker_options, get_tags_if_url_known_and_file_redundant, initial_file_limit, periodic_file_limit, paused, file_import_options, tag_import_options, no_work_until ):
         
         self._gallery_identifier = gallery_identifier
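
SetCheckerOptions likewise makes the manage-subscriptions 'overwrite checker options' button a plain loop, since each subscription re-derives its queries' next check times itself. A sketch of that mass-set action (function name illustrative):

    def overwrite_checker_options( selected_subscriptions, checker_options ):
        
        for subscription in selected_subscriptions:
            
            subscription.SetCheckerOptions( checker_options )
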
@@ -4652,6 +4852,11 @@ class SubscriptionQuery( HydrusSerialisable.SerialisableBase ):
         return ( self._query, self._seed_cache )
         
     
+    def GetQueryText( self ):
+        
+        return self._query
+        
+    
     def GetSeedCache( self ):
         
         return self._seed_cache
@@ -5433,9 +5638,14 @@ class ThreadWatcherImport( HydrusSerialisable.SerialisableBase ):
                 HG.client_controller.WriteSynchronous( 'content_updates', service_keys_to_content_updates )
                 
-            ( media_result, ) = HG.client_controller.Read( 'media_results', ( hash, ) )
+            in_inbox = HG.client_controller.Read( 'in_inbox', hash )
             
-            HG.client_controller.pub( 'add_media_results', page_key, ( media_result, ) )
+            if self._file_import_options.ShouldPresent( status, in_inbox ):
+                
+                ( media_result, ) = HG.client_controller.Read( 'media_results', ( hash, ) )
+                
+                HG.client_controller.pub( 'add_media_results', page_key, ( media_result, ) )
+                
             
         except HydrusExceptions.MimeException as e:
@@ -5894,9 +6104,14 @@ class URLsImport( HydrusSerialisable.SerialisableBase ):
             if status in ( CC.STATUS_SUCCESSFUL, CC.STATUS_REDUNDANT ):
                 
-                ( media_result, ) = HG.client_controller.Read( 'media_results', ( hash, ) )
+                in_inbox = HG.client_controller.Read( 'in_inbox', hash )
                 
-                HG.client_controller.pub( 'add_media_results', page_key, ( media_result, ) )
+                if self._file_import_options.ShouldPresent( status, in_inbox ):
+                    
+                    ( media_result, ) = HG.client_controller.Read( 'media_results', ( hash, ) )
+                    
+                    HG.client_controller.pub( 'add_media_results', page_key, ( media_result, ) )
+                    
                 
         except HydrusExceptions.MimeException as e:
@@ -987,6 +987,7 @@ class NetworkJob( object ):
         self._stream_io = cStringIO.StringIO()
         
+        self._error_exception = Exception( 'Exception not initialised.' ) # PyLint hint, wew
         self._error_exception = None
         self._error_text = None
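
The doubled assignment is a lint-driven idiom: the throwaway Exception tells static analysis what type the attribute will eventually hold, and the second line sets the real initial state. The same trick in isolation, on a purely illustrative class:

    class JobErrorState( object ):
        
        def __init__( self ):
            
            self._error_exception = Exception( 'Exception not initialised.' ) # the linter sees an Exception here
            self._error_exception = None # the actual starting value
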
@@ -182,28 +182,40 @@ def DumpToPng( width, payload, title, payload_description, text, path ):
     HydrusPaths.CleanUpTempPath( os_file_handle, temp_path )
     
 
-def GetPayloadTypeString( payload_obj ):
+def GetPayloadString( payload_obj ):
     
-    if isinstance( payload_obj, HydrusSerialisable.SerialisableList ):
+    if isinstance( payload_obj, ( str, unicode ) ):
         
-        return 'A list of ' + HydrusData.ConvertIntToPrettyString( len( payload_obj ) ) + ' ' + GetPayloadTypeString( payload_obj[0] )
+        return HydrusData.ToByteString( payload_obj )
         
     else:
         
-        if isinstance( payload_obj, HydrusSerialisable.SerialisableBase ):
-            
-            return payload_obj.SERIALISABLE_NAME
-            
-        else:
-            
-            return repr( type( payload_obj ) )
-            
-        
+        return payload_obj.DumpToNetworkString()
+        
+    
+def GetPayloadTypeString( payload_obj ):
+    
+    if isinstance( payload_obj, ( str, unicode ) ):
+        
+        return 'String'
+        
+    elif isinstance( payload_obj, HydrusSerialisable.SerialisableList ):
+        
+        return 'A list of ' + HydrusData.ConvertIntToPrettyString( len( payload_obj ) ) + ' ' + GetPayloadTypeString( payload_obj[0] )
+        
+    elif isinstance( payload_obj, HydrusSerialisable.SerialisableBase ):
+        
+        return payload_obj.SERIALISABLE_NAME
+        
+    else:
+        
+        return repr( type( payload_obj ) )
+        
     
 def GetPayloadDescriptionAndString( payload_obj ):
     
-    payload_string = payload_obj.DumpToNetworkString()
+    payload_string = GetPayloadString( payload_obj )
     
     payload_description = GetPayloadTypeString( payload_obj ) + ' - ' + HydrusData.ConvertIntToBytes( len( payload_string ) )
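
GetPayloadString is the piece that lets the png import/export system carry raw string payloads alongside serialisable objects: strings pass through as bytes, everything else is expected to offer DumpToNetworkString. A standalone sketch with a stand-in serialisable (the py2 original checks ( str, unicode )):

    class FakeSerialisable( object ):
        
        SERIALISABLE_NAME = 'Fake Thing' # illustrative only
        
        def DumpToNetworkString( self ):
            
            return '{"serialised": true}'
            
        
    
    def get_payload_string( payload_obj ):
        
        if isinstance( payload_obj, str ):
            
            return payload_obj # raw string payload: passed through as-is
            
        
        return payload_obj.DumpToNetworkString() # serialisable object path
    
    assert get_payload_string( 'http://example.com/1' ) == 'http://example.com/1'
    assert get_payload_string( FakeSerialisable() ) == '{"serialised": true}'
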
@@ -49,7 +49,7 @@ options = {}
 # Misc
 
 NETWORK_VERSION = 18
-SOFTWARE_VERSION = 286
+SOFTWARE_VERSION = 287
 
 UNSCALED_THUMBNAIL_DIMENSIONS = ( 200, 200 )
@@ -495,7 +495,7 @@ class TestSerialisables( unittest.TestCase ):
         periodic_file_limit = 50
         paused = False
         
-        file_import_options = ClientImporting.FileImportOptions( automatic_archive = False, exclude_deleted = True, min_size = 8 * 1024, min_resolution = [ 25, 25 ] )
+        file_import_options = ClientImporting.FileImportOptions( automatic_archive = False, exclude_deleted = True, present_new_files = True, present_already_in_inbox_files = True, present_archived_files = False, min_size = 8 * 1024, min_resolution = [ 25, 25 ] )
         
         tag_import_options = ClientImporting.TagImportOptions( service_keys_to_namespaces = { HydrusData.GenerateKey() : { 'series', '' } }, service_keys_to_explicit_tags = { HydrusData.GenerateKey() : { 'test explicit tag', 'and another' } } )
         
         no_work_until = HydrusData.GetNow() - 86400 * 20