Version 423

parent ff51cf492e
commit 02760aac68

client.py
@@ -31,6 +31,7 @@ try:
     argparser.add_argument( '-d', '--db_dir', help = 'set an external db location' )
     argparser.add_argument( '--temp_dir', help = 'override the program\'s temporary directory' )
     argparser.add_argument( '--db_journal_mode', default = 'WAL', choices = [ 'WAL', 'TRUNCATE', 'PERSIST', 'MEMORY' ], help = 'change db journal mode (default=WAL)' )
+    argparser.add_argument( '--db_cache_size', type = int, help = 'override SQLite cache_size per db file, in MB (default=200)' )
     argparser.add_argument( '--db_synchronous_override', type = int, choices = range(4), help = 'override SQLite Synchronous PRAGMA (default=2)' )
     argparser.add_argument( '--no_db_temp_files', action='store_true', help = 'run db temp operations entirely in memory' )
     argparser.add_argument( '--boot_debug', action='store_true', help = 'print additional bootup information to the log' )

@@ -88,6 +89,15 @@ try:
     HG.db_journal_mode = 'MEMORY'

+    if result.db_cache_size is not None:
+
+        HG.db_cache_size = result.db_cache_size
+
+    else:
+
+        HG.db_cache_size = 200
+
     if result.db_synchronous_override is not None:

         HG.db_synchronous = int( result.db_synchronous_override )
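Note: the boot logic above is what gives HG.db_cache_size its 200MB-per-file default. For orientation, this is roughly what such an override amounts to at the SQLite level. A minimal sketch using the standard sqlite3 module; the helper and its wiring are illustrative, not hydrus's actual bootstrap code:

import sqlite3

def connect_with_cache_size( db_path, cache_size_mb = 200 ):

    # SQLite's cache_size PRAGMA interprets a negative value as KiB rather than
    # pages, so a 200MB per-file cache becomes -204800
    db = sqlite3.connect( db_path )

    db.execute( 'PRAGMA cache_size = {};'.format( -1024 * cache_size_mb ) )

    return db

db = connect_with_cache_size( ':memory:', cache_size_mb = 512 )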
client.pyw

@@ -31,6 +31,7 @@ try:
     argparser.add_argument( '-d', '--db_dir', help = 'set an external db location' )
     argparser.add_argument( '--temp_dir', help = 'override the program\'s temporary directory' )
     argparser.add_argument( '--db_journal_mode', default = 'WAL', choices = [ 'WAL', 'TRUNCATE', 'PERSIST', 'MEMORY' ], help = 'change db journal mode (default=WAL)' )
+    argparser.add_argument( '--db_cache_size', type = int, help = 'override SQLite cache_size per db file, in MB (default=200)' )
     argparser.add_argument( '--db_synchronous_override', type = int, choices = range(4), help = 'override SQLite Synchronous PRAGMA (default=2)' )
     argparser.add_argument( '--no_db_temp_files', action='store_true', help = 'run db temp operations entirely in memory' )
     argparser.add_argument( '--boot_debug', action='store_true', help = 'print additional bootup information to the log' )

@@ -88,6 +89,15 @@ try:
     HG.db_journal_mode = 'MEMORY'

+    if result.db_cache_size is not None:
+
+        HG.db_cache_size = result.db_cache_size
+
+    else:
+
+        HG.db_cache_size = 200
+
     if result.db_synchronous_override is not None:

         HG.db_synchronous = int( result.db_synchronous_override )
@@ -8,6 +8,30 @@
 <div class="content">
     <h3>changelog</h3>
     <ul>
+        <li><h3>version 423</h3></li>
+        <ul>
+            <li>tag autocomplete searches:</li>
+            <li>the 'fetch results as you type' and 'do-not-autocomplete character threshold' options are moved from _options->speed and memory_ to _tags->manage tag display and search_. they are now service specific!</li>
+            <li>getting the raw '*' autocomplete is now hugely faster when both file and tag domains are specific (i.e. not 'all known xxx')</li>
+            <li>getting the raw '*' autocomplete is now hugely faster in 'all known tags' domain. this is likely still bonkers on any decent sized client that syncs with the PTR, but if you have a small client that once synced with the PTR, this is now snappy</li>
+            <li>the cancelability of 'namespace:*' and 'match namespaces from normal search' searches should be improved</li>
+            <li>'namespace:*' queries are now much faster in some situations, particularly when searching in a specific tag domain (typically this happens in manage tags dialog) or a small-file client, but is still pretty slow for clients with many files, and I think some scenarios are still bananas. I am not happy here and have started a plan to improve my service domain caches to deal with several ongoing problems with slow namespace and subtag lookup in different situations</li>
+            <li>fixed an issue with advanced autocomplete result matching where a previously cached 'character:sam' result could match 'char:sam' search text</li>
+            <li>some misc code cleanup and UI label improvements in autocomplete</li>
+            <li>.</li>
+            <li>the rest:</li>
+            <li>the siblings & parents tag menu, which proved a little tall after all, is now compressed to group siblings, parents, and children by the shared services that hold them. it takes less space, and odd exceptions should be easy to spot</li>
+            <li>this menu also no longer has the 'this is the ideal tag' line</li>
+            <li>added 'sort pages by name a-z/z-a' to page right-click menu and tucked the sorts into a submenu</li>
+            <li>the parsing test panel now shows up to 64KB of what you pulled (previously 1KB)</li>
+            <li>the parsing test panel now shows json in prettier indented form</li>
+            <li>when the parsing test panel is told to fetch a URL that is neither HTML nor JSON, this is now caught more safely and tested against permitted file types. if it was really a jpeg, it will now say 'looks like a jpeg' and disable parse testing. if the data type could not be figured out, it tries to throw the mess into view and permits parse testing, in case this is some weird javascript or something that you'll want to pre-parse convert</li>
+            <li>the dreaded null-character is now eliminated in all cases when text is decoded from a site, even if the site has invalid unicode or no encoding can be found (e.g. if it is truly a jpeg or something and we just end up wanting to throw a preview of that mess into UI)</li>
+            <li>the 'enter example path here' input on import folders' filename tagging options edit panel now uses placeholder text and auto-removes 'file:///' URL prefixes (e.g. if your paste happens to add them)</li>
+            <li>the 'fix invalid tags' routine now updates the tag row in the local tags cache, so users who found some broken tags were not updating should now be sorted</li>
+            <li>added --db_cache_size launch parameter, and added some text to the launch_parameters help about it. by default, hydrus permits 200MB per file, which means a megaclient under persistent heavy load might want 800MB. users with megamemory but slow drives might want to play with this, let me know what you find</li>
+            <li>updated to cloudscraper 1.2.50</li>
+        </ul>
         <li><h3>version 422</h3></li>
         <ul>
             <li>advanced tags:</li>
@@ -43,6 +43,10 @@
 <li>MEMORY - Danger mode. Extremely fast, but you had better guarantee a lot of free ram.</li>
 </ul>
 </li>
+<li>
+<b>--db_cache_size DB_CACHE_SIZE</b>
+<p>Change the cache SQLite will use for each db file. By default this is 200MB, which for the four main client db files could mean 800MB peak use if you run a very heavy client and perform a long period of PTR sync. This does not matter so much (nor should it be fully used) if you have a smaller client.</p>
+</li>
 <li>
 <b>--db_synchronous_override {0,1,2,3}</b>
 <p>Change the rules governing how SQLite writes committed changes to your disk. Full docs <a href="https://sqlite.org/pragma.html#pragma_synchronous">here</a>. The hydrus default is 1 with WAL, 2 otherwise.</p>
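For reference, the values accepted by --db_synchronous_override map directly onto SQLite's documented modes: 0 = OFF, 1 = NORMAL, 2 = FULL, 3 = EXTRA. A minimal sketch of applying the stated defaults, assuming a plain sqlite3 connection rather than hydrus's own db wrapper:

import sqlite3

def apply_synchronous( db, override = None, journal_mode = 'WAL' ):

    # hydrus's stated default: 1 (NORMAL) under WAL, 2 (FULL) otherwise
    synchronous = override if override is not None else ( 1 if journal_mode == 'WAL' else 2 )

    db.execute( 'PRAGMA journal_mode = {};'.format( journal_mode ) )
    db.execute( 'PRAGMA synchronous = {};'.format( synchronous ) )

db = sqlite3.connect( ':memory:' )

apply_synchronous( db, override = 1 )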
@@ -255,6 +255,36 @@ def GenerateCombinedFilesMappingsACCacheTableName( tag_display_type, tag_service_id ):

     return combined_ac_cache_table_name

+def GenerateCombinedFilesTagsTableName( tag_display_type, tag_service_id ):
+
+    if tag_display_type == ClientTags.TAG_DISPLAY_STORAGE:
+
+        name = 'combined_files_tags_cache'
+
+    elif tag_display_type == ClientTags.TAG_DISPLAY_ACTUAL:
+
+        name = 'combined_files_display_tags_cache'
+
+    cache_tags_table_name = 'external_caches.{}_{}'.format( name, tag_service_id )
+
+    return cache_tags_table_name
+
+def GenerateCombinedTagsTagsTableName( tag_display_type, file_service_id ):
+
+    if tag_display_type == ClientTags.TAG_DISPLAY_STORAGE:
+
+        name = 'combined_tags_tags_cache'
+
+    elif tag_display_type == ClientTags.TAG_DISPLAY_ACTUAL:
+
+        name = 'combined_tags_display_tags_cache'
+
+    cache_tags_table_name = 'external_caches.{}_{}'.format( name, file_service_id )
+
+    return cache_tags_table_name
+
 def GenerateMappingsTableNames( service_id ):

     suffix = str( service_id )

@@ -284,14 +314,6 @@ def GenerateRepositoryUpdatesTableName( service_id ):

     return repository_updates_table_name

-def GenerateSpecificFilesTableName( file_service_id, tag_service_id ):
-
-    suffix = '{}_{}'.format( file_service_id, tag_service_id )
-
-    cache_files_table_name = 'external_caches.specific_files_cache_{}'.format( suffix )
-
-    return cache_files_table_name
-
 def GenerateSpecificACCacheTableName( tag_display_type, file_service_id, tag_service_id ):

     if tag_display_type == ClientTags.TAG_DISPLAY_STORAGE:

@@ -331,6 +353,31 @@ def GenerateSpecificMappingsCacheTableNames( file_service_id, tag_service_id ):

     return ( cache_current_mappings_table_name, cache_deleted_mappings_table_name, cache_pending_mappings_table_name )

+def GenerateSpecificFilesTableName( file_service_id, tag_service_id ):
+
+    suffix = '{}_{}'.format( file_service_id, tag_service_id )
+
+    cache_files_table_name = 'external_caches.specific_files_cache_{}'.format( suffix )
+
+    return cache_files_table_name
+
+def GenerateSpecificTagsTableName( tag_display_type, file_service_id, tag_service_id ):
+
+    if tag_display_type == ClientTags.TAG_DISPLAY_STORAGE:
+
+        name = 'specific_tags_cache'
+
+    elif tag_display_type == ClientTags.TAG_DISPLAY_ACTUAL:
+
+        name = 'specific_display_tags_cache'
+
+    suffix = '{}_{}'.format( file_service_id, tag_service_id )
+
+    cache_files_table_name = 'external_caches.{}_{}'.format( name, suffix )
+
+    return cache_files_table_name
+
 def GenerateTagParentsLookupCacheTableName( display_type: int, service_id: int ):

     ( cache_ideal_tag_parents_lookup_table_name, cache_actual_tag_parents_lookup_table_name ) = GenerateTagParentsLookupCacheTableNames( service_id )
@@ -3585,6 +3632,88 @@ class DB( HydrusDB.HydrusDB ):

         self._CacheTagParentsRegenChains( interested_tag_service_ids, tag_ids_that_changed )

+    def _CacheTagsAddTags( self, tag_display_type, file_service_id, tag_service_id, tag_ids ):
+
+        # ideally we call this lad only with what was added, so we aren't wasting time
+        # this is hard-synced with the ac_cache for this domain
+
+        # cache_tags_table_name = self._CacheTagsGetTable( tag_display_type, file_service_id, tag_service_id )
+
+        # self._c.executemany( 'INSERT OR IGNORE INTO {} SELECT tag_id, namespace_id, subtag_id FROM tags WHERE tag_id = ?;'.format( the_table ), ( ( tag_id, ) for tag_id in tag_ids ) )
+
+        pass
+
+    def _CacheTagsDeleteTags( self, tag_display_type, file_service_id, tag_service_id, tag_ids ):
+
+        # we can only call this lad with what was deleted
+        # this is hard-synced with the ac_cache for this domain
+
+        # cache_tags_table_name = self._CacheTagsGetTable( tag_display_type, file_service_id, tag_service_id )
+
+        # self._c.executemany( 'DELETE FROM {} WHERE tag_id = ?;'.format( the_table ), ( ( tag_id, ) for tag_id in tag_ids ) )
+
+        pass
+
+    def _CacheTagsDrop( self, tag_display_type, file_service_id, tag_service_id ):
+
+        cache_tags_table_name = self._CacheTagsGetTable( tag_display_type, file_service_id, tag_service_id )
+
+        self._c.execute( 'DROP TABLE IF EXISTS {};'.format( cache_tags_table_name ) )
+
+    def _CacheTagsGenerate( self, tag_display_type, file_service_id, tag_service_id ):
+
+        cache_tags_table_name = self._CacheTagsGetTable( tag_display_type, file_service_id, tag_service_id )
+
+        self._c.execute( 'CREATE TABLE IF NOT EXISTS {} ( tag_id INTEGER PRIMARY KEY, namespace_id INTEGER, subtag_id INTEGER );'.format( cache_tags_table_name ) )
+        self._CreateIndex( cache_tags_table_name, [ 'namespace_id', 'subtag_id' ], unique = True )
+        self._CreateIndex( cache_tags_table_name, [ 'subtag_id' ] )
+
+        # I could potentially store subtags_fts4 here, for each domain, instead of in master, too, if it isn't that huge IRL, but I think it is huge
+        # with that could come subtags searchable
+
+    def _CacheTagsGetTable( self, tag_display_type, file_service_id, tag_service_id ):
+
+        # OK, rethink this a bit:
+        # if instead of tag_display_type we just merge storage and display with getchainmembers and 'sync to ac cache' instead of delete, then we use half the storage
+        # we are no longer hard-synced to ac cache
+
+        # one new thought: if I have all this and no longer query tags for namespace_id or subtag_id, I can drop those indices mate
+
+        # also lmao this assumes availability of combined tags cache, so may ultimately want to go for that first
+
+        if file_service_id == self._combined_file_service_id:
+
+            cache_tags_table_name = GenerateCombinedFilesTagsTableName( tag_display_type, tag_service_id )
+
+        elif tag_service_id == self._combined_tag_service_id:
+
+            cache_tags_table_name = GenerateCombinedTagsTagsTableName( tag_display_type, file_service_id )
+
+        else:
+
+            cache_tags_table_name = GenerateSpecificTagsTableName( tag_display_type, file_service_id, tag_service_id )
+
+        return cache_tags_table_name
+
+    def _CacheTagsPopulate( self, tag_display_type, file_service_id, tag_service_id ):
+
+        # if I decide to sync to display, this would be a union of storage and display ac caches
+
+        # ac_cache_table_name = getthatlad
+
+        # cache_tags_table_name = self._CacheTagsGetTable( tag_display_type, file_service_id, tag_service_id )
+
+        # self._c.execute( 'INSERT OR IGNORE INTO {} SELECT tag_id, namespace_id, subtag_id FROM {} CROSS JOIN tags USING ( tag_id );'.format( cache_tags_table_name, ac_cache_table_name ) )
+
+        pass
+
     def _CacheTagSiblingsDrop( self, tag_service_id ):

         ( cache_ideal_tag_siblings_lookup_table_name, cache_actual_tag_siblings_lookup_table_name ) = GenerateTagSiblingsLookupCacheTableNames( tag_service_id )
@@ -7269,7 +7398,7 @@ class DB( HydrusDB.HydrusDB ):

         return ( current_tag_ids, current_tag_weight, pending_tag_ids, pending_tag_weight )

-    def _GetAutocompleteTagIds( self, tag_display_type, tag_service_key, search_text, exact_match, job_key = None ):
+    def _GetAutocompleteTagIds( self, tag_display_type, tag_service_id, file_service_id, search_text, exact_match, job_key = None ):

         if search_text == '':

@@ -7288,8 +7417,6 @@ class DB( HydrusDB.HydrusDB ):

             namespace = ''

-        tag_service_id = self._GetServiceId( tag_service_key )
-
         if exact_match:

             table_join = 'subtags_searchable_map NATURAL JOIN tags'

@@ -7346,13 +7473,24 @@ class DB( HydrusDB.HydrusDB ):

             if tag_service_id == self._combined_tag_service_id:

-                cursor = self._c.execute( 'SELECT tag_id FROM tags;' )
+                search_tag_service_ids = self._GetServiceIds( HC.REAL_TAG_SERVICES )
+
+                query = ' UNION '.join( 'SELECT tag_id FROM {}'.format( GenerateSpecificACCacheTableName( tag_display_type, file_service_id, search_tag_service_id ) ) for search_tag_service_id in search_tag_service_ids )
+
+                cursor = self._c.execute( '{};'.format( query ) )

             else:

-                combined_ac_cache_table_name = GenerateCombinedFilesMappingsACCacheTableName( tag_display_type, tag_service_id )
+                if file_service_id == self._combined_file_service_id:
+
+                    ac_cache_table_name = GenerateCombinedFilesMappingsACCacheTableName( tag_display_type, tag_service_id )
+
+                else:
+
+                    ac_cache_table_name = GenerateSpecificACCacheTableName( tag_display_type, file_service_id, tag_service_id )

-                cursor = self._c.execute( 'SELECT tag_id FROM {};'.format( combined_ac_cache_table_name ) )
+                cursor = self._c.execute( 'SELECT tag_id FROM {};'.format( ac_cache_table_name ) )

             cancelled_hook = None
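The "all known tags" speedup in the hunk above comes from replacing a full scan of the master tags table with a UNION over the much smaller per-service autocomplete cache tables. A toy illustration of the same trick, with table names invented for the sketch:

import sqlite3

db = sqlite3.connect( ':memory:' )

for name in ( 'ac_cache_service_1', 'ac_cache_service_2' ):

    db.execute( 'CREATE TABLE {} ( tag_id INTEGER PRIMARY KEY );'.format( name ) )

db.executemany( 'INSERT INTO ac_cache_service_1 VALUES ( ? );', [ ( 1, ), ( 2, ) ] )
db.executemany( 'INSERT INTO ac_cache_service_2 VALUES ( ? );', [ ( 2, ), ( 3, ) ] )

# UNION deduplicates across the per-service caches, so the combined domain is
# answered without touching the master tags table at all
query = ' UNION '.join( 'SELECT tag_id FROM {}'.format( name ) for name in ( 'ac_cache_service_1', 'ac_cache_service_2' ) )

print( sorted( tag_id for ( tag_id, ) in db.execute( '{};'.format( query ) ) ) ) # [1, 2, 3]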
@@ -7366,7 +7504,7 @@ class DB( HydrusDB.HydrusDB ):

             else:

-                tag_ids = self._GetTagIdsFromNamespaceIds( namespace_ids, job_key = job_key )
+                tag_ids = self._GetTagIdsFromNamespaceIds( tag_display_type, tag_service_id, file_service_id, namespace_ids, job_key = job_key )

         else:

@@ -7386,7 +7524,10 @@ class DB( HydrusDB.HydrusDB ):

         # now fetch siblings, add to set

-        final_tag_ids = set( tag_ids )
+        if not isinstance( tag_ids, set ):
+
+            tag_ids = set( tag_ids )

         if tag_service_id == self._combined_tag_service_id:

@@ -7397,11 +7538,13 @@ class DB( HydrusDB.HydrusDB ):

             sibling_tag_service_ids = ( tag_service_id, )

+        tag_ids_without_siblings = list( tag_ids )
+
         for sibling_tag_service_id in sibling_tag_service_ids:

             seen_ideal_tag_ids = set()

-            for batch_of_tag_ids in HydrusData.SplitIteratorIntoChunks( tag_ids, 10240 ):
+            for batch_of_tag_ids in HydrusData.SplitListIntoChunks( tag_ids_without_siblings, 10240 ):

                 if job_key is not None and job_key.IsCancelled():
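Note the switch from chunking the live tag_ids set to chunking a frozen snapshot: the loop below adds sibling ids into tag_ids while iterating, so it must slice a stable list instead. A plausible shape for the list-chunking helper, inferred from the call site rather than copied from HydrusData:

def SplitListIntoChunks( xs, chunk_size ):

    # yields successive slices; slicing needs a real list, which is why the
    # caller snapshots the set first with list( tag_ids )
    for i in range( 0, len( xs ), chunk_size ):

        yield xs[ i : i + chunk_size ]

for batch in SplitListIntoChunks( list( range( 25000 ) ), 10240 ):

    print( len( batch ) ) # 10240, 10240, 4520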
@@ -7413,11 +7556,11 @@ class DB( HydrusDB.HydrusDB ):

                 ideal_tag_ids.difference_update( seen_ideal_tag_ids )
                 seen_ideal_tag_ids.update( ideal_tag_ids )

-                final_tag_ids.update( self._CacheTagSiblingsGetChainsMembersFromIdeals( ClientTags.TAG_DISPLAY_ACTUAL, sibling_tag_service_id, ideal_tag_ids ) )
+                tag_ids.update( self._CacheTagSiblingsGetChainsMembersFromIdeals( ClientTags.TAG_DISPLAY_ACTUAL, sibling_tag_service_id, ideal_tag_ids ) )

-        return final_tag_ids
+        return tag_ids

     def _GetAutocompletePredicates(

@@ -7438,13 +7581,16 @@ class DB( HydrusDB.HydrusDB ):

         include_current = tag_search_context.include_current_tags
         include_pending = tag_search_context.include_pending_tags

-        tag_ids = self._GetAutocompleteTagIds( tag_display_type, tag_service_key, search_text, exact_match, job_key = job_key )
+        tag_service_id = self._GetServiceId( tag_service_key )
+        file_service_id = self._GetServiceId( file_service_key )
+
+        tag_ids = self._GetAutocompleteTagIds( tag_display_type, tag_service_id, file_service_id, search_text, exact_match, job_key = job_key )

         if ':' not in search_text and search_namespaces_into_full_tags and not exact_match:

             special_search_text = '{}*:*'.format( search_text )

-            tag_ids.update( self._GetAutocompleteTagIds( tag_display_type, tag_service_key, special_search_text, exact_match, job_key = job_key ) )
+            tag_ids.update( self._GetAutocompleteTagIds( tag_display_type, tag_service_id, file_service_id, special_search_text, exact_match, job_key = job_key ) )

         if job_key is not None and job_key.IsCancelled():

@@ -7452,9 +7598,6 @@ class DB( HydrusDB.HydrusDB ):

             return []

-        tag_service_id = self._GetServiceId( tag_service_key )
-        file_service_id = self._GetServiceId( file_service_key )
-
         if tag_service_id == self._combined_tag_service_id and file_service_id == self._combined_file_service_id:

             return []

@@ -9436,7 +9579,7 @@ class DB( HydrusDB.HydrusDB ):

         else:

             # temp tags to mappings
-            queries = [ 'SELECT DISTINCT hash_id FROM {} CROSS JOIN {} USING ( tag_id );'.format( temp_tag_ids_table_name, table_name ) for table_name in table_names ]
+            queries = [ 'SELECT hash_id FROM {} CROSS JOIN {} USING ( tag_id );'.format( temp_tag_ids_table_name, table_name ) for table_name in table_names ]

         for query in queries:
@@ -11444,12 +11587,85 @@ class DB( HydrusDB.HydrusDB ):

         return tag_id

-    def _GetTagIdsFromNamespaceIds( self, namespace_ids: typing.Collection[ int ], job_key = None ):
+    def _GetTagIdsFromNamespaceIds( self, tag_display_type: int, tag_service_id: int, file_service_id: int, namespace_ids: typing.Collection[ int ], job_key = None ):
+
+        # YO, this will be obviated when we get the master definition cache sorted
+
+        # ok, so this lad can get bonkers. if a user syncs (or once synced) with the PTR, then their master tables are huge
+        # when they are just searching a little domain, or any specific file domain, we don't really want to effectively SCAN that gubbins only to later cut it down to 20 results, we want to cross-reference
+        # HOWEVER, namespace_id is only indexed in 'tags' table atm, so we do need to hit that, and if we go ac_cache table first, then this is a SCAN
+        # the question is whether a cache scan is faster than a tags search
+        # specific caches are so small, we can iterate them real quick and do CROSS JOIN
+
+        if len( namespace_ids ) == 0:
+
+            return set()

         with HydrusDB.TemporaryIntegerTable( self._c, namespace_ids, 'namespace_id' ) as temp_namespace_ids_table_name:

-            # temp namespaces to tags
-            cursor = self._c.execute( 'SELECT DISTINCT tag_id FROM {} CROSS JOIN tags USING ( namespace_id );'.format( temp_namespace_ids_table_name ) )
+            self._AnalyzeTempTable( temp_namespace_ids_table_name )
+
+            if file_service_id == self._combined_file_service_id:
+
+                ac_cache_table_name = GenerateCombinedFilesMappingsACCacheTableName( tag_display_type, tag_service_id )
+
+                # it is possible this is a 98%-of-master-size cache, so we will do NATURAL here
+                # in some experimental testing, it looks like sqlite is always doing tags first anyway, even on tiny ac tables, so bleh, but at least this cuts down final answer size
+
+                do_natural = True
+
+            else:
+
+                if tag_service_id == self._combined_tag_service_id:
+
+                    search_tag_service_ids = self._GetServiceIds( HC.REAL_TAG_SERVICES )
+
+                else:
+
+                    search_tag_service_ids = ( tag_service_id, )
+
+                union_select = ' UNION '.join( 'SELECT tag_id FROM {}'.format( GenerateSpecificACCacheTableName( tag_display_type, file_service_id, search_tag_service_id ) ) for search_tag_service_id in search_tag_service_ids )
+
+                ac_cache_table_name = '( {} )'.format( union_select )
+
+                # typical scenarios here are that these are tiny, no problem to SCAN
+                # ugly situation is if this is a million+ file service and the namespace is small
+                # in the ugly situation, the user must expect a little slowdown, so there we are for now
+
+                # essentially, I think this adds overhead to all queries, particularly for large file clients, but it reduces the disaster scenario
+
+                do_natural = False
+
+            if do_natural:
+
+                tag_table_join = '{} NATURAL JOIN tags'.format( ac_cache_table_name )
+
+            else:
+
+                tag_table_join = '{} CROSS JOIN tags USING ( tag_id )'.format( ac_cache_table_name )
+
+            if len( namespace_ids ) == 1:
+
+                ( namespace_id, ) = namespace_ids
+
+                cursor = self._c.execute( 'SELECT tag_id FROM {} WHERE namespace_id = ?;'.format( tag_table_join ), ( namespace_id, ) )
+
+            else:
+
+                if do_natural:
+
+                    cursor = self._c.execute( 'SELECT tag_id FROM {} NATURAL JOIN {};'.format( tag_table_join, temp_namespace_ids_table_name ) )
+
+                else:
+
+                    # temp namespaces to tags
+                    cursor = self._c.execute( 'SELECT tag_id FROM {} CROSS JOIN {} USING ( namespace_id );'.format( tag_table_join, temp_namespace_ids_table_name ) )

             cancelled_hook = None
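The comments above weigh NATURAL JOIN against CROSS JOIN. The lever being pulled is a documented SQLite behaviour: the query planner treats CROSS JOIN as an instruction not to reorder the join, so the left table is always the outer loop. Iterating a tiny cache table first and probing the huge master table by its primary key avoids a full scan. A standalone sketch with illustrative table names:

import sqlite3

db = sqlite3.connect( ':memory:' )

db.execute( 'CREATE TABLE small_cache ( tag_id INTEGER PRIMARY KEY );' )
db.execute( 'CREATE TABLE tags ( tag_id INTEGER PRIMARY KEY, namespace_id INTEGER, subtag_id INTEGER );' )

# CROSS JOIN pins small_cache as the outer loop; a plain or NATURAL join would
# let the planner pick its own order, which may mean scanning tags first
for row in db.execute( 'EXPLAIN QUERY PLAN SELECT tag_id FROM small_cache CROSS JOIN tags USING ( tag_id ) WHERE namespace_id = 1;' ):

    print( row )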
@@ -11458,10 +11674,10 @@ class DB( HydrusDB.HydrusDB ):

                 cancelled_hook = job_key.IsCancelled

-            tag_ids = self._STS( HydrusDB.ReadFromCancellableCursor( cursor, 128, cancelled_hook = cancelled_hook ) )
+            result_tag_ids = self._STS( HydrusDB.ReadFromCancellableCursor( cursor, 128, cancelled_hook = cancelled_hook ) )

-        return tag_ids
+        return result_tag_ids

     def _GetTagIdsFromNamespaceIdsSubtagIds( self, namespace_ids: typing.Collection[ int ], subtag_ids: typing.Collection[ int ], job_key = None ):

@@ -15637,6 +15853,7 @@ class DB( HydrusDB.HydrusDB ):

         subtag_id = self._GetSubtagId( subtag )

         self._c.execute( 'UPDATE tags SET namespace_id = ?, subtag_id = ? WHERE tag_id = ?;', ( namespace_id, subtag_id, tag_id ) )
+        self._c.execute( 'UPDATE local_tags_cache SET tag = ? WHERE tag_id = ?;', ( tag, tag_id ) )

         try:
@@ -209,8 +209,6 @@ class ClientOptions( HydrusSerialisable.SerialisableBase ):

         self._dictionary[ 'booleans' ][ 'watch_clipboard_for_watcher_urls' ] = False
         self._dictionary[ 'booleans' ][ 'watch_clipboard_for_other_recognised_urls' ] = False

-        self._dictionary[ 'booleans' ][ 'autocomplete_results_fetch_automatically' ] = True
-
         self._dictionary[ 'booleans' ][ 'autocomplete_float_main_gui' ] = True
         self._dictionary[ 'booleans' ][ 'autocomplete_float_frames' ] = False

@@ -401,8 +399,6 @@ class ClientOptions( HydrusSerialisable.SerialisableBase ):

         self._dictionary[ 'noneable_integers' ][ 'file_viewing_statistics_preview_min_time' ] = 5
         self._dictionary[ 'noneable_integers' ][ 'file_viewing_statistics_preview_max_time' ] = 60

-        self._dictionary[ 'noneable_integers' ][ 'autocomplete_exact_match_threshold' ] = 2
-
         self._dictionary[ 'noneable_integers' ][ 'subscription_file_error_cancel_threshold' ] = 5

         self._dictionary[ 'noneable_integers' ][ 'media_viewer_cursor_autohide_time_ms' ] = 700
@@ -2505,7 +2505,9 @@ def FilterPredicatesBySearchText( service_key, search_text, predicates: typing.Collection[ 'Predicate' ] ):

         beginning = r'\A'

-        s = s.replace( r':', r'(:|.*\s)', 1 )
+        ( namespace, subtag ) = s.split( ':', 1 )
+
+        s = r'{}:(.*\s)?{}'.format( namespace, subtag )

     elif s.startswith( '.*' ):
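The rebuilt pattern keeps the namespace as a literal, anchored prefix and only allows extra leading words inside the subtag, which is what stops a cached 'character:sam' result from matching 'char:sam' search text (per the changelog entry above). A small check of the new pattern's behaviour, a sketch only:

import re

def build_namespace_pattern( s ):

    # mirrors the new logic: split once on ':', anchor the namespace literally,
    # and permit optional leading words only in the subtag part
    ( namespace, subtag ) = s.split( ':', 1 )

    return r'\A' + r'{}:(.*\s)?{}'.format( namespace, subtag )

pattern = build_namespace_pattern( 'char:sam' )

print( re.search( pattern, 'char:sam' ) is not None ) # True
print( re.search( pattern, 'char:big sam' ) is not None ) # True
print( re.search( pattern, 'character:sam' ) is not None ) # False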
@@ -523,6 +523,7 @@ class FrameGUI( ClientGUITopLevelWindows.MainFrameThatResizes ):

         library_versions.append( ( 'db dir', HG.client_controller.db_dir ) )
         library_versions.append( ( 'temp dir', HydrusPaths.GetCurrentTempDir() ) )
         library_versions.append( ( 'db journal mode', HG.db_journal_mode ) )
+        library_versions.append( ( 'db cache size per file', '{}MB'.format( HG.db_cache_size ) ) )
         library_versions.append( ( 'db synchronous value', str( HG.db_synchronous ) ) )
         library_versions.append( ( 'db using memory for temp?', str( HG.no_db_temp_files ) ) )
@@ -523,8 +523,8 @@ class NetworkJobControl( QW.QFrame ):

         if self._network_job is None or self._network_job.NoEngineYet():

-            self._left_text.setText( '' )
-            self._right_text.setText( '' )
+            self._left_text.clear()
+            self._right_text.clear()
             self._gauge.SetRange( 1 )
             self._gauge.SetValue( 0 )
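This setText( '' ) to clear() swap recurs through the rest of the commit. Both QLabel and QLineEdit expose clear(), which resets the widget's content in a single, intention-revealing call. A trivial sketch, assuming qtpy as used elsewhere in the codebase:

from qtpy import QtWidgets as QW

app = QW.QApplication( [] )

label = QW.QLabel( 'status: working' )
line_edit = QW.QLineEdit( 'some text' )

# for resetting content, clear() is equivalent to setText( '' )
label.clear()
line_edit.clear()

print( label.text() == '' and line_edit.text() == '' ) # True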
@@ -731,7 +731,7 @@ class TextAndPasteCtrl( QW.QWidget ):

         self._add_callable( ( text, ) )

-        self._text_input.setText( '' )
+        self._text_input.clear()

     def GetValue( self ):
@@ -547,7 +547,7 @@ class DialogManageUPnP( ClientGUIDialogs.Dialog ):

         self._mappings_list.SetData( mappings )

-        self._status_st.setText( '' )
+        self._status_st.clear()

         if self._external_ip is not None:
@@ -365,7 +365,7 @@ class EditGUGPanel( ClientGUIScrolledPanels.EditPanel ):

         if example_url is None:

-            self._matched_url_class.setText( '' )
+            self._matched_url_class.clear()

         else:

@@ -1706,8 +1706,8 @@ class EditURLClassPanel( ClientGUIScrolledPanels.EditPanel ):

             self._example_url_classes.setText( 'Example does not match - '+reason )
             self._example_url_classes.setObjectName( 'HydrusInvalid' )

-            self._normalised_url.setText( '' )
-            self._api_url.setText( '' )
+            self._normalised_url.clear()
+            self._api_url.clear()

         self._example_url_classes.style().polish( self._example_url_classes )
@@ -747,8 +747,8 @@ class FileSeedCacheStatusControl( QW.QFrame ):

         if self._file_seed_cache is None:

-            self._import_summary_st.setText( '' )
-            self._progress_st.setText( '' )
+            self._import_summary_st.clear()
+            self._progress_st.clear()
             self._progress_gauge.SetRange( 1 )
             self._progress_gauge.SetValue( 0 )

@@ -767,7 +767,7 @@ class FileSeedCacheStatusControl( QW.QFrame ):

             if num_to_do == 0:

-                self._progress_st.setText( '' )
+                self._progress_st.clear()

             else:

@@ -623,7 +623,7 @@ class GallerySeedLogStatusControl( QW.QFrame ):

         if self._gallery_seed_log is None:

-            self._log_summary_st.setText( '' )
+            self._log_summary_st.clear()

             if self._gallery_seed_log_button.isEnabled():
@@ -411,7 +411,7 @@ class FilenameTaggingOptionsPanel( QW.QWidget ):

         self._regexes.addItem( regex )

-        self._regex_box.setText( '' )
+        self._regex_box.clear()

         self._refresh_callable()

@@ -1597,7 +1597,7 @@ class EditFilenameTaggingOptionPanel( ClientGUIScrolledPanels.EditPanel ):

         #

-        self._example_path_input.setText( 'enter example path here' )
+        self._example_path_input.setPlaceholderText( 'enter example path here' )
         self._example_output.setEnabled( False )

         #

@@ -1640,6 +1640,17 @@ class EditFilenameTaggingOptionPanel( ClientGUIScrolledPanels.EditPanel ):

     def ScheduleRefreshTags( self ):

+        path = self._example_path_input.text()
+
+        if path.startswith( 'file:///' ):
+
+            path = path.replace( 'file:///', '', 1 )
+
+            self._example_path_input.setText( path )
+
+            return
+
         if self._schedule_refresh_tags_job is not None:

             self._schedule_refresh_tags_job.Cancel()

@@ -1760,11 +1771,11 @@ class GalleryImportPanel( ClientGUICommon.StaticBox ):

             self._file_import_options.setEnabled( False )
             self._tag_import_options.setEnabled( False )

-            self._query_text.setText( '' )
+            self._query_text.clear()

-            self._file_status.setText( '' )
+            self._file_status.clear()

-            self._gallery_status.setText( '' )
+            self._gallery_status.clear()

             self._file_seed_cache_control.SetFileSeedCache( None )

@@ -2339,15 +2350,15 @@ class WatcherReviewPanel( ClientGUICommon.StaticBox ):

             self._file_import_options.setEnabled( False )
             self._tag_import_options.setEnabled( False )

-            self._watcher_subject.setText( '' )
+            self._watcher_subject.clear()

-            self._watcher_url.setText( '' )
+            self._watcher_url.clear()

-            self._file_status.setText( '' )
+            self._file_status.clear()

-            self._file_velocity_status.setText( '' )
+            self._file_velocity_status.clear()

-            self._watcher_status.setText( '' )
+            self._watcher_status.clear()

             self._file_seed_cache_control.SetFileSeedCache( None )

@@ -2371,7 +2382,7 @@ class WatcherReviewPanel( ClientGUICommon.StaticBox ):

         else:

-            self._watcher_url.setText( '' )
+            self._watcher_url.clear()

         checker_options = self._watcher.GetCheckerOptions()

@@ -3980,10 +3980,10 @@ class ManagementPanelPetitions( ManagementPanel ):

         if self._current_petition is None:

-            self._action_text.setText( '' )
+            self._action_text.clear()
             self._action_text.setProperty( 'hydrus_text', 'default' )

-            self._reason_text.setPlainText( '' )
+            self._reason_text.clear()
             self._reason_text.setProperty( 'hydrus_text', 'default' )

             self._contents.clear()
@@ -1619,6 +1619,7 @@ class PagesNotebook( QP.TabWidgetWithDnD ):

         can_end = tab_index < end_index - 1

         if can_home:

             ClientGUIMenus.AppendMenuItem( menu, 'move to left end', 'Move this page all the way to the left.', self._ShiftPage, tab_index, new_index=0 )

@@ -1633,14 +1634,20 @@ class PagesNotebook( QP.TabWidgetWithDnD ):

         if can_end:

             ClientGUIMenus.AppendMenuItem( menu, 'move to right end', 'Move this page all the way to the right.', self._ShiftPage, tab_index, new_index=end_index )

         ClientGUIMenus.AppendSeparator( menu )

-        ClientGUIMenus.AppendMenuItem( menu, 'sort pages by most files first', 'Sort these pages according to how many files they appear to have.', self._SortPagesByFileCount, 'desc' )
-        ClientGUIMenus.AppendMenuItem( menu, 'sort pages by fewest files first', 'Sort these pages according to how few files they appear to have.', self._SortPagesByFileCount, 'asc' )
+        submenu = QW.QMenu( menu )
+
+        ClientGUIMenus.AppendMenuItem( submenu, 'by most files first', 'Sort these pages according to how many files they appear to have.', self._SortPagesByFileCount, 'desc' )
+        ClientGUIMenus.AppendMenuItem( submenu, 'by fewest files first', 'Sort these pages according to how few files they appear to have.', self._SortPagesByFileCount, 'asc' )
+        ClientGUIMenus.AppendMenuItem( submenu, 'by name a-z', 'Sort these pages alphabetically by name.', self._SortPagesByName, 'asc' )
+        ClientGUIMenus.AppendMenuItem( submenu, 'by name z-a', 'Sort these pages reverse-alphabetically by name.', self._SortPagesByName, 'desc' )
+
+        ClientGUIMenus.AppendMenu( menu, submenu, 'sort pages' )

         ClientGUIMenus.AppendSeparator( menu )

@@ -1703,8 +1710,6 @@ class PagesNotebook( QP.TabWidgetWithDnD ):

     def _SortPagesByFileCount( self, order ):

-        ordered_pages = list( self.GetPages() )
-
         def key( page ):

             ( total_num_files, ( total_num_value, total_num_range ) ) = page.GetNumFileSummary()

@@ -1712,12 +1717,28 @@ class PagesNotebook( QP.TabWidgetWithDnD ):

             return ( total_num_files, total_num_range, total_num_value )

-        ordered_pages.sort( key = key )
-
-        if order == 'desc':
-
-            ordered_pages.reverse()
+        ordered_pages = sorted( self.GetPages(), key = key, reverse = order == 'desc' )
+
+        self._SortPagesSetPages( ordered_pages )
+
+    def _SortPagesByName( self, order ):
+
+        def file_count_secondary( page ):
+
+            ( total_num_files, ( total_num_value, total_num_range ) ) = page.GetNumFileSummary()
+
+            return ( total_num_files, total_num_range, total_num_value )
+
+        ordered_pages = sorted( self.GetPages(), key = file_count_secondary, reverse = True )
+
+        ordered_pages = sorted( ordered_pages, key = lambda page: page.GetName(), reverse = order == 'desc' )
+
+        self._SortPagesSetPages( ordered_pages )

     def _SortPagesSetPages( self, ordered_pages ):

         selected_page = self.currentWidget()

@@ -1738,12 +1759,14 @@ class PagesNotebook( QP.TabWidgetWithDnD ):

         for page in ordered_pages:

-            is_selected = page == selected_page
-
             name = pages_to_names[ page ]

             self.addTab( page, name )
-            if is_selected: self.setCurrentIndex( self.count() - 1 )

+            if page == selected_page:
+
+                self.setCurrentIndex( self.count() - 1 )
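_SortPagesByName above sorts twice: first by the file-count key, then by name. Because Python's sort is stable, the second pass preserves the first pass's ordering among pages whose names compare equal, giving a name-primary, file-count-secondary result. A standalone sketch of the idiom:

# stable two-pass sort: the last sort applied wins, earlier sorts act as tie-breakers
pages = [ ( 'downloads', 3 ), ( 'inbox', 10 ), ( 'downloads', 7 ) ]

# secondary key first: most files first
pages.sort( key = lambda p: p[1], reverse = True )

# primary key last: name a-z
pages.sort( key = lambda p: p[0] )

print( pages ) # [('downloads', 7), ('downloads', 3), ('inbox', 10)]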
@@ -1,4 +1,5 @@
 import itertools
+import json
 import os
 import sys
 import threading

@@ -12,7 +13,9 @@ from qtpy import QtGui as QG

 from hydrus.core import HydrusConstants as HC
 from hydrus.core import HydrusData
 from hydrus.core import HydrusExceptions
+from hydrus.core import HydrusFileHandling
 from hydrus.core import HydrusGlobals as HG
+from hydrus.core import HydrusPaths
 from hydrus.core import HydrusSerialisable
 from hydrus.core import HydrusText

@@ -956,7 +959,7 @@ class EditFormulaPanel( ClientGUIScrolledPanels.EditPanel ):

         if self._current_formula is None:

-            self._formula_description.setPlainText( '' )
+            self._formula_description.clear()

             self._edit_formula.setEnabled( False )
             self._change_formula_type.setEnabled( False )

@@ -3183,7 +3186,7 @@ class EditPageParserPanel( ClientGUIScrolledPanels.EditPanel ):

         def wait_and_do_it( network_job ):

-            def qt_tidy_up( example_data ):
+            def qt_tidy_up( example_data, example_bytes ):

                 if not self or not QP.isValid( self ):

@@ -3197,17 +3200,21 @@ class EditPageParserPanel( ClientGUIScrolledPanels.EditPanel ):

                 self._test_panel.SetExampleParsingContext( example_parsing_context )

-                self._test_panel.SetExampleData( example_data )
+                self._test_panel.SetExampleData( example_data, example_bytes = example_bytes )

                 self._test_network_job_control.ClearNetworkJob()

+            example_bytes = None
+
             try:

                 network_job.WaitUntilDone()

                 example_data = network_job.GetContentText()

+                example_bytes = network_job.GetContentBytes()
+
             except HydrusExceptions.CancelledException:

                 example_data = 'fetch cancelled'

@@ -3219,7 +3226,7 @@ class EditPageParserPanel( ClientGUIScrolledPanels.EditPanel ):

                 HydrusData.ShowException( e )

-            QP.CallAfter( qt_tidy_up, example_data )
+            QP.CallAfter( qt_tidy_up, example_data, example_bytes )

         url = self._test_url.text()

@@ -4100,7 +4107,7 @@ class ScriptManagementControl( QW.QWidget ):

     def _Reset( self ):

-        self._status.setText( '' )
+        self._status.clear()
         self._gauge.SetRange( 1 )
         self._gauge.SetValue( 0 )

@@ -4328,7 +4335,7 @@ class TestPanel( QW.QWidget ):

     def _FetchFromURL( self ):

-        def qt_code( example_data ):
+        def qt_code( example_data, example_bytes ):

             if not self or not QP.isValid( self ):

@@ -4342,7 +4349,7 @@ class TestPanel( QW.QWidget ):

             self._example_parsing_context.SetValue( example_parsing_context )

-            self._SetExampleData( example_data )
+            self._SetExampleData( example_data, example_bytes = example_bytes )

         def do_it( url ):

@@ -4353,12 +4360,16 @@ class TestPanel( QW.QWidget ):

             HG.client_controller.network_engine.AddJob( network_job )

+            example_bytes = None
+
             try:

                 network_job.WaitUntilDone()

                 example_data = network_job.GetContentText()

+                example_bytes = network_job.GetContentBytes()
+
             except HydrusExceptions.CancelledException:

                 example_data = 'fetch cancelled'

@@ -4370,7 +4381,7 @@ class TestPanel( QW.QWidget ):

                 HydrusData.ShowException( e )

-            QP.CallAfter( qt_code, example_data )
+            QP.CallAfter( qt_code, example_data, example_bytes )

         message = 'Enter URL to fetch data for.'

@@ -4392,6 +4403,15 @@ class TestPanel( QW.QWidget ):

             raw_text = HG.client_controller.GetClipboardText()

+            try:
+
+                raw_bytes = raw_text.encode( 'utf-8' )
+
+            except:
+
+                raw_bytes = None
+
         except HydrusExceptions.DataMissing as e:

             QW.QMessageBox.critical( self, 'Error', str(e) )
@@ -4399,50 +4419,132 @@ class TestPanel( QW.QWidget ):

             return

-        self._SetExampleData( raw_text )
+        self._SetExampleData( raw_text, example_bytes = raw_bytes )

-    def _SetExampleData( self, example_data ):
+    def _SetExampleData( self, example_data, example_bytes = None ):

         self._example_data_raw = example_data

+        test_parse_ok = True
+        looked_like_json = False
+
+        MAX_CHARS_IN_PREVIEW = 1024 * 64
+
         if len( example_data ) > 0:

-            parse_phrase = 'uncertain data type'
-
-            # can't just throw this at bs4 to see if it 'works', as it'll just wrap any unparsable string in some bare <html><body><p> tags
-            if HydrusText.LooksLikeHTML( example_data ):
-
-                parse_phrase = 'looks like HTML'
-
-            # put this second, so if the JSON contains some HTML, it'll overwrite here. decent compromise
-            if HydrusText.LooksLikeJSON( example_data ):
-
-                parse_phrase = 'looks like JSON'
-
-            description = HydrusData.ToHumanBytes( len( example_data ) ) + ' total, ' + parse_phrase
-
-            if len( example_data ) > 1024:
-
-                preview = 'PREVIEW:' + os.linesep + str( example_data[:1024] )
-
-            else:
-
-                preview = example_data
-
-            self._test_parse.setEnabled( True )
+            good_type_found = True
+
+            if HydrusText.LooksLikeJSON( example_data ):
+
+                # prioritise this, so if the JSON contains some HTML, it'll overwrite here. decent compromise
+
+                looked_like_json = True
+
+                parse_phrase = 'looks like JSON'
+
+            elif HydrusText.LooksLikeHTML( example_data ):
+
+                # can't just throw this at bs4 to see if it 'works', as it'll just wrap any unparsable string in some bare <html><body><p> tags
+
+                parse_phrase = 'looks like HTML'
+
+            else:
+
+                good_type_found = False
+
+                if example_bytes is not None:
+
+                    ( os_file_handle, temp_path ) = HydrusPaths.GetTempPath()
+
+                    try:
+
+                        with open( temp_path, 'wb' ) as f:
+
+                            f.write( example_bytes )
+
+                        mime = HydrusFileHandling.GetMime( temp_path )
+
+                    except:
+
+                        mime = HC.APPLICATION_UNKNOWN
+
+                    finally:
+
+                        HydrusPaths.CleanUpTempPath( os_file_handle, temp_path )
+
+                else:
+
+                    mime = HC.APPLICATION_UNKNOWN
+
+            if good_type_found:
+
+                description = HydrusData.ToHumanBytes( len( example_data ) ) + ' total, ' + parse_phrase
+
+                example_data_to_show = example_data
+
+                if looked_like_json:
+
+                    try:
+
+                        j = HG.client_controller.parsing_cache.GetJSON( example_data )
+
+                        example_data_to_show = json.dumps( j, indent = 4 )
+
+                    except:
+
+                        pass
+
+                if len( example_data_to_show ) > MAX_CHARS_IN_PREVIEW:
+
+                    preview = 'PREVIEW:' + os.linesep + str( example_data_to_show[:MAX_CHARS_IN_PREVIEW] )
+
+                else:
+
+                    preview = example_data_to_show
+
+            else:
+
+                if mime in HC.ALLOWED_MIMES:
+
+                    description = 'that looked like a {}!'.format( HC.mime_string_lookup[ mime ] )
+
+                    preview = 'no preview'
+
+                    test_parse_ok = False
+
+                else:
+
+                    description = 'that did not look like HTML or JSON, but will try to show it anyway'
+
+                    if len( example_data ) > MAX_CHARS_IN_PREVIEW:
+
+                        preview = 'PREVIEW:' + os.linesep + repr( example_data[:MAX_CHARS_IN_PREVIEW] )
+
+                    else:
+
+                        preview = repr( example_data )

         else:

             description = 'no example data set yet'
             preview = ''

-            self._test_parse.setEnabled( False )
+            test_parse_ok = False

+        self._test_parse.setEnabled( test_parse_ok )
+
         self._example_data_raw_description.setText( description )
         self._example_data_raw_preview.setPlainText( preview )
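The panel's new preview path re-dumps recognised JSON with indentation and truncates everything to 64KB, per the changelog. A self-contained sketch of the same idea, using json.loads in place of hydrus's parsing_cache.GetJSON:

import json

MAX_CHARS_IN_PREVIEW = 1024 * 64

def make_preview( example_data ):

    # if the data parses as JSON, re-dump it indented for display; otherwise
    # show it as-is, truncated to the 64KB preview cap
    try:

        example_data = json.dumps( json.loads( example_data ), indent = 4 )

    except ValueError:

        pass

    if len( example_data ) > MAX_CHARS_IN_PREVIEW:

        return 'PREVIEW:\n' + example_data[ : MAX_CHARS_IN_PREVIEW ]

    return example_data

print( make_preview( '{"posts": [{"id": 123}]}' ) )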
@@ -4464,9 +4566,9 @@ class TestPanel( QW.QWidget ):

         return self.GetTestData()

-    def SetExampleData( self, example_data ):
+    def SetExampleData( self, example_data, example_bytes = None ):

-        self._SetExampleData( example_data )
+        self._SetExampleData( example_data, example_bytes = example_bytes )

     def SetExampleParsingContext( self, example_parsing_context ):

@@ -4590,9 +4692,9 @@ class TestPanelPageParser( TestPanel ):

         self._SetExampleData( self._example_data_raw )

-    def _SetExampleData( self, example_data ):
+    def _SetExampleData( self, example_data, example_bytes = None ):

-        TestPanel._SetExampleData( self, example_data )
+        TestPanel._SetExampleData( self, example_data, example_bytes = example_bytes )

         pre_parsing_converter = self._pre_parsing_converter_callable()

@@ -4717,9 +4819,9 @@ class TestPanelPageParserSubsidiary( TestPanelPageParser ):

         HG.client_controller.pub( 'clipboard', 'text', joiner.join( self._example_data_post_separation ) )

-    def _SetExampleData( self, example_data ):
+    def _SetExampleData( self, example_data, example_bytes = None ):

-        TestPanelPageParser._SetExampleData( self, example_data )
+        TestPanelPageParser._SetExampleData( self, example_data, example_bytes = example_bytes )

         formula = self._formula_callable()
@@ -2513,15 +2513,6 @@ class ManageOptionsPanel( ClientGUIScrolledPanels.ManagePanel ):

         #

-        ac_panel = ClientGUICommon.StaticBox( self, 'tag autocomplete' )
-
-        self._autocomplete_results_fetch_automatically = QW.QCheckBox( ac_panel )
-
-        self._autocomplete_exact_match_threshold = ClientGUICommon.NoneableSpinCtrl( ac_panel, none_phrase = 'always do full search', min = 1, max = 1024 )
-        self._autocomplete_exact_match_threshold.setToolTip( 'If the search input has this many characters or fewer, it will fetch exact results rather than full autocomplete results.' )
-
-        #
-
         misc_panel = ClientGUICommon.StaticBox( self, 'misc' )

         self._forced_search_limit = ClientGUICommon.NoneableSpinCtrl( misc_panel, '', min = 1, max = 100000 )

@@ -2537,10 +2528,6 @@ class ManageOptionsPanel( ClientGUIScrolledPanels.ManagePanel ):

         self._video_buffer_size_mb.setValue( self._new_options.GetInteger( 'video_buffer_size_mb' ) )

-        self._autocomplete_results_fetch_automatically.setChecked( self._new_options.GetBoolean( 'autocomplete_results_fetch_automatically' ) )
-
-        self._autocomplete_exact_match_threshold.SetValue( self._new_options.GetNoneableInteger( 'autocomplete_exact_match_threshold' ) )
-
         self._forced_search_limit.SetValue( self._new_options.GetNoneableInteger( 'forced_search_limit' ) )

         #

@@ -2601,23 +2588,6 @@ class ManageOptionsPanel( ClientGUIScrolledPanels.ManagePanel ):

         #

-        text = 'If you disable automatic autocomplete results fetching, use Ctrl+Space to fetch results manually.'
-
-        ac_panel.Add( QW.QLabel( text, ac_panel ), CC.FLAGS_EXPAND_PERPENDICULAR )
-
-        rows = []
-
-        rows.append( ( 'Automatically fetch autocomplete results: ', self._autocomplete_results_fetch_automatically ) )
-        rows.append( ( 'Fetch exact match results if input has <= this many characters: ', self._autocomplete_exact_match_threshold ) )
-
-        gridbox = ClientGUICommon.WrapInGrid( ac_panel, rows )
-
-        ac_panel.Add( gridbox, CC.FLAGS_EXPAND_SIZER_PERPENDICULAR )
-
-        QP.AddToLayout( vbox, ac_panel, CC.FLAGS_EXPAND_PERPENDICULAR )
-
-        #
-
         rows = []

         rows.append( ( 'Forced system:limit for all searches: ', self._forced_search_limit ) )

@@ -2684,9 +2654,6 @@ class ManageOptionsPanel( ClientGUIScrolledPanels.ManagePanel ):

         self._new_options.SetNoneableInteger( 'forced_search_limit', self._forced_search_limit.GetValue() )

-        self._new_options.SetBoolean( 'autocomplete_results_fetch_automatically', self._autocomplete_results_fetch_automatically.isChecked() )
-        self._new_options.SetNoneableInteger( 'autocomplete_exact_match_threshold', self._autocomplete_exact_match_threshold.GetValue() )
-
 class _StylePanel( QW.QWidget ):

@@ -3120,7 +3087,7 @@ class ManageOptionsPanel( ClientGUIScrolledPanels.ManagePanel ):

             self._namespace_colours.SetNamespaceColour( namespace, QG.QColor( random.randint(0,255), random.randint(0,255), random.randint(0,255) ) )

-            self._new_namespace_colour.setText( '' )
+            self._new_namespace_colour.clear()

@@ -3770,7 +3737,7 @@ class ManageURLsPanel( ClientGUIScrolledPanels.ManagePanel ):

             self._EnterURL( url )

-            self._url_input.setText( '' )
+            self._url_input.clear()

         except Exception as e:
@@ -2879,7 +2879,7 @@ class ReviewLocalFileImports( ClientGUIScrolledPanels.ReviewPanel ):

         else:

-            self._delete_after_success_st.setText( '' )
+            self._delete_after_success_st.clear()
@@ -940,7 +940,7 @@ class EditStringMatchPanel( ClientGUIScrolledPanels.EditPanel ):

         if match_type == ClientParsing.STRING_MATCH_FIXED:

-            self._example_string_matches.setText( '' )
+            self._example_string_matches.clear()

         else:
@@ -92,6 +92,12 @@ class EditTagAutocompleteOptionsPanel( ClientGUIScrolledPanels.EditPanel ):

         self._fetch_all_allowed = QW.QCheckBox( self )
         self._fetch_all_allowed.setToolTip( 'If on, a search for "*" will return all tags. On large tag services, these searches are extremely slow.' )

+        self._fetch_results_automatically = QW.QCheckBox( self )
+        self._fetch_results_automatically.setToolTip( 'If on, results will load as you type. If off, you will have to hit Ctrl+Space to load results.' )
+
+        self._exact_match_character_threshold = ClientGUICommon.NoneableSpinCtrl( self, none_phrase = 'always autocomplete (only appropriate for small tag services)', min = 1, max = 256, unit = 'characters' )
+        self._exact_match_character_threshold.setToolTip( 'When the search text has <= this many characters, autocomplete will not occur and you will only get results that exactly match the input. Increasing this value makes autocomplete snappier but reduces the number of results.' )
+
         #

         self._write_autocomplete_tag_domain.SetValue( tag_autocomplete_options.GetWriteAutocompleteTagDomain() )

@@ -101,11 +107,16 @@ class EditTagAutocompleteOptionsPanel( ClientGUIScrolledPanels.EditPanel ):

         self._namespace_bare_fetch_all_allowed.setChecked( tag_autocomplete_options.NamespaceBareFetchAllAllowed() )
         self._namespace_fetch_all_allowed.setChecked( tag_autocomplete_options.NamespaceFetchAllAllowed() )
         self._fetch_all_allowed.setChecked( tag_autocomplete_options.FetchAllAllowed() )
+        self._fetch_results_automatically.setChecked( tag_autocomplete_options.FetchResultsAutomatically() )
+        self._exact_match_character_threshold.SetValue( tag_autocomplete_options.GetExactMatchCharacterThreshold() )

         #

         rows = []

+        rows.append( ( 'Fetch results as you type: ', self._fetch_results_automatically ) )
+        rows.append( ( 'Do-not-autocomplete character threshold: ', self._exact_match_character_threshold ) )
+
         if tag_autocomplete_options.GetServiceKey() == CC.COMBINED_TAG_SERVICE_KEY:

             self._write_autocomplete_tag_domain.setVisible( False )

@@ -114,9 +125,9 @@ class EditTagAutocompleteOptionsPanel( ClientGUIScrolledPanels.EditPanel ):

         else:

-            rows.append( ( 'Set manage tags default autocomplete file domain: ', self._override_write_autocomplete_file_domain ) )
-            rows.append( ( 'Manage tags default autocomplete file domain: ', self._write_autocomplete_file_domain ) )
-            rows.append( ( 'Manage tags default autocomplete tag domain: ', self._write_autocomplete_tag_domain ) )
+            rows.append( ( 'Override default autocomplete file domain in _manage tags_: ', self._override_write_autocomplete_file_domain ) )
+            rows.append( ( 'Default autocomplete file domain in _manage tags_: ', self._write_autocomplete_file_domain ) )
+            rows.append( ( 'Default autocomplete tag domain in _manage tags_: ', self._write_autocomplete_tag_domain ) )

         rows.append( ( 'Search namespaces with normal input: ', self._search_namespaces_into_full_tags ) )

@@ -203,6 +214,9 @@ class EditTagAutocompleteOptionsPanel( ClientGUIScrolledPanels.EditPanel ):

             fetch_all_allowed
         )

+        tag_autocomplete_options.SetFetchResultsAutomatically( self._fetch_results_automatically.isChecked() )
+        tag_autocomplete_options.SetExactMatchCharacterThreshold( self._exact_match_character_threshold.GetValue() )
+
         return tag_autocomplete_options
@@ -516,10 +530,20 @@ class EditTagDisplayManagerPanel( ClientGUIScrolledPanels.EditPanel ):

         if self._service_key == CC.COMBINED_TAG_SERVICE_KEY:

-            message = 'These filters apply to all tag services, or to where the tag domain is "all known tags".'
-
-            QP.AddToLayout( vbox, ClientGUICommon.BetterStaticText( self, message ), CC.FLAGS_EXPAND_PERPENDICULAR )
+            message = 'These options apply to all tag services, or to where the tag domain is "all known tags".'
+            message += os.linesep * 2
+            message += 'This tag domain is the union of all other services, so it can be more computationally expensive. You most often see it on new search pages.'
+
+        else:
+
+            message = 'This is just one tag service. You most often search a specific tag service in the manage tags dialog.'
+
+        st = ClientGUICommon.BetterStaticText( self, message )
+
+        st.setWordWrap( True )
+
+        QP.AddToLayout( vbox, st, CC.FLAGS_EXPAND_PERPENDICULAR )

         QP.AddToLayout( vbox, self._display_box, CC.FLAGS_EXPAND_PERPENDICULAR )
         QP.AddToLayout( vbox, self._tao_box, CC.FLAGS_EXPAND_PERPENDICULAR )
|
|||
|
||||
if whitelist_possible:
|
||||
|
||||
self._simple_whitelist_error_st.setText( '' )
|
||||
self._simple_whitelist_error_st.clear()
|
||||
|
||||
self._simple_whitelist.setEnabled( True )
|
||||
self._simple_whitelist_global_checkboxes.setEnabled( True )
|
||||
|
@ -1396,7 +1420,7 @@ class EditTagFilterPanel( ClientGUIScrolledPanels.EditPanel ):
|
|||
|
||||
if blacklist_possible:
|
||||
|
||||
self._simple_blacklist_error_st.setText( '' )
|
||||
self._simple_blacklist_error_st.clear()
|
||||
|
||||
self._simple_blacklist.setEnabled( True )
|
||||
self._simple_blacklist_global_checkboxes.setEnabled( True )
|
||||
|
@ -1513,7 +1537,7 @@ class EditTagFilterPanel( ClientGUIScrolledPanels.EditPanel ):
|
|||
|
||||
self._test_result_st.setObjectName( '' )
|
||||
|
||||
self._test_result_st.setText( '' )
|
||||
self._test_result_st.clear()
|
||||
self._test_result_st.style().polish( self._test_result_st )
|
||||
|
||||
if self._only_show_blacklist:
|
||||
|
@ -4342,7 +4366,7 @@ class ManageTagSiblings( ClientGUIScrolledPanels.ManagePanel ):
|
|||
|
||||
if len( new_tags ) == 0:
|
||||
|
||||
self._new_sibling.setText( '' )
|
||||
self._new_sibling.clear()
|
||||
|
||||
self._current_new = None
|
||||
|
||||
|
|
|
@@ -2401,15 +2401,116 @@ class ListBoxTags( ListBox ):

service_keys_in_order = HG.client_controller.services_manager.GetServiceKeys( HC.REAL_TAG_SERVICES )

num_siblings = 0
num_parents = 0
num_children = 0
all_siblings = set()

for ( sibling_chain_members, ideal_tag, descendants, ancestors ) in service_keys_to_siblings_and_parents.values():
siblings_to_service_keys = collections.defaultdict( set )
parents_to_service_keys = collections.defaultdict( set )
children_to_service_keys = collections.defaultdict( set )

ideals_to_service_keys = collections.defaultdict( set )

for ( service_key, ( sibling_chain_members, ideal_tag, descendants, ancestors ) ) in service_keys_to_siblings_and_parents.items():

num_siblings += len( sibling_chain_members ) - 1
num_parents += len( ancestors )
num_children += len( descendants )
all_siblings.update( sibling_chain_members )

for sibling in sibling_chain_members:

if sibling == ideal_tag:

ideals_to_service_keys[ ideal_tag ].add( service_key )

continue

if sibling == selected_tag: # don't care about the selected tag unless it is ideal

continue

siblings_to_service_keys[ sibling ].add( service_key )

for ancestor in ancestors:

parents_to_service_keys[ ancestor ].add( service_key )

for descendant in descendants:

children_to_service_keys[ descendant ].add( service_key )

all_siblings.discard( selected_tag )

num_siblings = len( all_siblings )
num_parents = len( parents_to_service_keys )
num_children = len( children_to_service_keys )

service_keys_to_service_names = { service_key : HG.client_controller.services_manager.GetName( service_key ) for service_key in service_keys_in_order }

ALL_SERVICES_LABEL = 'all services'

def convert_service_keys_to_name_string( s_ks ):

if len( s_ks ) == len( service_keys_in_order ):

return ALL_SERVICES_LABEL

return ', '.join( ( service_keys_to_service_names[ service_key ] for service_key in service_keys_in_order if service_key in s_ks ) )

def group_and_sort_siblings_to_service_keys( t_to_s_ks ):

# convert "tag -> everywhere I am" to "sorted groups of locations -> what we have in common, also sorted"

service_key_groups_to_tags = collections.defaultdict( list )

for ( t, s_ks ) in t_to_s_ks.items():

service_key_groups_to_tags[ tuple( s_ks ) ].append( t )

for t_list in service_key_groups_to_tags.values():

ClientTags.SortTags( CC.SORT_BY_LEXICOGRAPHIC_ASC, t_list )

service_key_groups = sorted( service_key_groups_to_tags.keys(), key = lambda s_k_g: ( -len( s_k_g ), convert_service_keys_to_name_string( s_k_g ) ) )

service_key_group_names_and_tags = [ ( convert_service_keys_to_name_string( s_k_g ), service_key_groups_to_tags[ s_k_g ] ) for s_k_g in service_key_groups ]

return service_key_group_names_and_tags

def group_and_sort_parents_to_service_keys( p_to_s_ks, c_to_s_ks ):

# convert two lots of "tag -> everywhere I am" to "sorted groups of locations -> what we have in common, also sorted"

service_key_groups_to_tags = collections.defaultdict( lambda: ( [], [] ) )

for ( p, s_ks ) in p_to_s_ks.items():

service_key_groups_to_tags[ tuple( s_ks ) ][0].append( p )

for ( c, s_ks ) in c_to_s_ks.items():

service_key_groups_to_tags[ tuple( s_ks ) ][1].append( c )

for ( t_list_1, t_list_2 ) in service_key_groups_to_tags.values():

ClientTags.SortTags( CC.SORT_BY_LEXICOGRAPHIC_ASC, t_list_1 )
ClientTags.SortTags( CC.SORT_BY_LEXICOGRAPHIC_ASC, t_list_2 )

service_key_groups = sorted( service_key_groups_to_tags.keys(), key = lambda s_k_g: ( -len( s_k_g ), convert_service_keys_to_name_string( s_k_g ) ) )

service_key_group_names_and_tags = [ ( convert_service_keys_to_name_string( s_k_g ), service_key_groups_to_tags[ s_k_g ] ) for s_k_g in service_key_groups ]

return service_key_group_names_and_tags

if num_siblings == 0:
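The two grouping helpers above drive the reworked menus: tags that sit on exactly the same set of services collapse into one labelled group, and bigger groups sort first, so 'all services' leads. A standalone sketch of the idea, not hydrus code, with plain strings standing in for service keys and a plain sort standing in for ClientTags.SortTags:

    import collections
    
    SERVICES_IN_ORDER = [ 'my tags', 'PTR' ]
    ALL_SERVICES_LABEL = 'all services'
    
    def name_string( s_ks ):
        
        return ALL_SERVICES_LABEL if len( s_ks ) == len( SERVICES_IN_ORDER ) else ', '.join( s_ks )
        
    
    def group_and_sort( tag_to_services ):
        
        groups = collections.defaultdict( list )
        
        for ( tag, services ) in tag_to_services.items():
            
            groups[ tuple( sorted( services ) ) ].append( tag )
            
        
        for tags in groups.values():
            
            tags.sort()
            
        
        # bigger service groups first, then by display name, mirroring the key used above
        ordered = sorted( groups, key = lambda g: ( -len( g ), name_string( g ) ) )
        
        return [ ( name_string( g ), groups[ g ] ) for g in ordered ]
        
    
    # 'samus' is siblinged everywhere, 'samus_aran' only on the PTR
    print( group_and_sort( { 'samus' : { 'my tags', 'PTR' }, 'samus_aran' : { 'PTR' } } ) )
    # [('all services', ['samus']), ('PTR', ['samus_aran'])]

The tuple key ( -len( group ), name ) is what keeps the union group ahead of any single-service group in the menu.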
@@ -2420,64 +2521,42 @@ class ListBoxTags( ListBox ):

siblings_menu.setTitle( '{} siblings'.format( HydrusData.ToHumanInt( num_siblings ) ) )

for service_key in service_keys_in_order:
#

ideals = sorted( ideals_to_service_keys.keys(), key = HydrusTags.ConvertTagToSortable )

for ideal in ideals:

if service_key not in service_keys_to_siblings_and_parents:
if ideal == selected_tag:

continue

( sibling_chain_members, ideal_tag, descendants, ancestors ) = service_keys_to_siblings_and_parents[ service_key ]

if len( sibling_chain_members ) <= 1:

continue

try:

service_name = HG.client_controller.services_manager.GetName( service_key )

except HydrusExceptions.DataMissing:

service_name = 'missing service'

ClientGUIMenus.AppendSeparator( siblings_menu )

ClientGUIMenus.AppendMenuLabel( siblings_menu, '{} ({} siblings)'.format( service_name, HydrusData.ToHumanInt( len( sibling_chain_members ) - 1 ) ) )

ClientGUIMenus.AppendSeparator( siblings_menu )

if ideal_tag == selected_tag:

ideal_label = 'this is the ideal tag'

else:

ideal_label = 'ideal: {}'.format( ideal_tag )
ideal_label = 'ideal is {} on: {}'.format( ideal, convert_service_keys_to_name_string( ideals_to_service_keys[ ideal ] ) )

ClientGUIMenus.AppendMenuItem( siblings_menu, ideal_label, ideal_label, HG.client_controller.pub, 'clipboard', 'text', ideal_tag )

#

for ( s_k_name, tags ) in group_and_sort_siblings_to_service_keys( siblings_to_service_keys ):

ClientGUIMenus.AppendSeparator( siblings_menu )

sibling_chain_members_list = list( sibling_chain_members )

ClientTags.SortTags( CC.SORT_BY_LEXICOGRAPHIC_ASC, sibling_chain_members_list )

for sibling in sibling_chain_members_list:
if s_k_name != ALL_SERVICES_LABEL:

if sibling == ideal_tag:

continue

ClientGUIMenus.AppendMenuLabel( siblings_menu, '--{}--'.format( s_k_name ) )

ClientGUIMenus.AppendMenuLabel( siblings_menu, sibling )

for tag in tags:

ClientGUIMenus.AppendMenuLabel( siblings_menu, tag )

#

if num_parents + num_children == 0:

parents_menu.setTitle( 'no parents' )
@@ -2486,63 +2565,27 @@ class ListBoxTags( ListBox ):

parents_menu.setTitle( '{} parents, {} children'.format( HydrusData.ToHumanInt( num_parents ), HydrusData.ToHumanInt( num_children ) ) )

for service_key in service_keys_in_order:

if service_key not in service_keys_to_siblings_and_parents:

continue

( sibling_chain_members, ideal_tag, descendants, ancestors ) = service_keys_to_siblings_and_parents[ service_key ]

if len( ancestors ) + len( descendants ) == 0:

continue

try:

service_name = HG.client_controller.services_manager.GetName( service_key )

except HydrusExceptions.DataMissing:

service_name = 'missing service'

for ( s_k_name, ( parents, children ) ) in group_and_sort_parents_to_service_keys( parents_to_service_keys, children_to_service_keys ):

ClientGUIMenus.AppendSeparator( parents_menu )

ClientGUIMenus.AppendMenuLabel( parents_menu, '{} ({} parents, {} children)'.format( service_name, HydrusData.ToHumanInt( len( ancestors ) ), HydrusData.ToHumanInt( len( descendants ) ) ) )

ClientGUIMenus.AppendSeparator( parents_menu )

if len( ancestors ) > 0:
if s_k_name != ALL_SERVICES_LABEL:

ancestors_list = list( ancestors )

ClientTags.SortTags( CC.SORT_BY_LEXICOGRAPHIC_ASC, ancestors_list )

for ancestor in ancestors_list:

ancestor_label = 'parent: {}'.format( ancestor )

ClientGUIMenus.AppendMenuItem( parents_menu, ancestor_label, ancestor_label, HG.client_controller.pub, 'clipboard', 'text', ancestor )

ClientGUIMenus.AppendMenuLabel( parents_menu, '--{}--'.format( s_k_name ) )

if len( descendants ) > 0:
for parent in parents:

ClientGUIMenus.AppendSeparator( parents_menu )
parent_label = 'parent: {}'.format( parent )

descendants_list = list( descendants )
ClientGUIMenus.AppendMenuItem( parents_menu, parent_label, parent_label, HG.client_controller.pub, 'clipboard', 'text', parent )

ClientTags.SortTags( CC.SORT_BY_LEXICOGRAPHIC_ASC, descendants_list )

for child in children:

for descendant in descendants_list:

descendant_label = 'child: {}'.format( descendant )

ClientGUIMenus.AppendMenuItem( parents_menu, descendant_label, descendant_label, HG.client_controller.pub, 'clipboard', 'text', descendant )

child_label = 'child: {}'.format( child )

ClientGUIMenus.AppendMenuItem( parents_menu, child_label, child_label, HG.client_controller.pub, 'clipboard', 'text', child )

@@ -198,7 +198,7 @@ def ReadFetch(

is_explicit_wildcard = parsed_autocomplete_text.IsExplicitWildcard()

small_exact_match_search = ShouldDoExactSearch( strict_search_text ) and not is_explicit_wildcard
small_exact_match_search = ShouldDoExactSearch( parsed_autocomplete_text )

matches = []
@@ -385,30 +385,37 @@ def PutAtTopOfMatches( matches: list, predicate: ClientSearch.Predicate, insert_

def ShouldDoExactSearch( entry_text ):
def ShouldDoExactSearch( parsed_autocomplete_text: ClientSearch.ParsedAutocompleteText ):

if entry_text is None:
if parsed_autocomplete_text.IsExplicitWildcard():

return False

autocomplete_exact_match_threshold = HG.client_controller.new_options.GetNoneableInteger( 'autocomplete_exact_match_threshold' )
strict_search_text = parsed_autocomplete_text.GetSearchText( False )

if autocomplete_exact_match_threshold is None:
exact_match_character_threshold = parsed_autocomplete_text.GetTagAutocompleteOptions().GetExactMatchCharacterThreshold()

if exact_match_character_threshold is None:

return False

if ':' in entry_text:
if ':' in strict_search_text:

( namespace, test_text ) = HydrusTags.SplitTag( entry_text )
( namespace, test_text ) = HydrusTags.SplitTag( strict_search_text )

else:

test_text = entry_text
test_text = strict_search_text

return 0 < len( test_text ) <= autocomplete_exact_match_threshold
if len( test_text ) == 0:

return False

return len( test_text ) <= exact_match_character_threshold

def WriteFetch( win, job_key, results_callable, parsed_autocomplete_text: ClientSearch.ParsedAutocompleteText, tag_search_context: ClientSearch.TagSearchContext, file_service_key: bytes, expand_parents: bool, results_cache: ClientSearch.PredicateResultsCache ):
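The rewritten ShouldDoExactSearch now takes the whole ParsedAutocompleteText, bails early on explicit wildcards, and pulls its character threshold from the per-service TagAutocompleteOptions rather than a single global option. A rough standalone sketch of the rule it implements, assuming the default threshold of 2:

    def should_do_exact_search( text, threshold = 2 ):
        
        # a None threshold means the 'do-not-autocomplete' behaviour is turned off
        if threshold is None:
            
            return False
            
        
        # only the subtag's length counts, as with HydrusTags.SplitTag above
        ( namespace, subtag ) = text.split( ':', 1 ) if ':' in text else ( '', text )
        
        return 0 < len( subtag ) <= threshold
        
    
    assert should_do_exact_search( 'sa' )                         # 2 chars <= 2, search exact matches only
    assert not should_do_exact_search( 'sam' )                    # 3 chars, do a normal autocomplete search
    assert should_do_exact_search( 'character:sa' )               # the namespace is ignored for the count
    assert not should_do_exact_search( 'sa', threshold = None )   # option disabled for this service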
@@ -425,7 +432,7 @@ def WriteFetch( win, job_key, results_callable, parsed_autocomplete_text: Client

strict_search_text = parsed_autocomplete_text.GetSearchText( False )
autocomplete_search_text = parsed_autocomplete_text.GetSearchText( True )

small_exact_match_search = ShouldDoExactSearch( strict_search_text ) and not is_explicit_wildcard
small_exact_match_search = ShouldDoExactSearch( parsed_autocomplete_text )

if small_exact_match_search:
@@ -823,7 +830,7 @@ class AutoCompleteDropdown( QW.QWidget ):

self._text_ctrl.blockSignals( True )

self._text_ctrl.setText( '' )
self._text_ctrl.clear()

self._SetResultsToList( [], self._GetParsedAutocompleteText() )
@@ -1229,7 +1236,9 @@ class AutoCompleteDropdown( QW.QWidget ):

else:

if HG.client_controller.new_options.GetBoolean( 'autocomplete_results_fetch_automatically' ):
parsed_autocomplete_text = self._GetParsedAutocompleteText()

if parsed_autocomplete_text.GetTagAutocompleteOptions().FetchResultsAutomatically():

self._ScheduleResultsRefresh( 0.0 )

@@ -18,7 +18,7 @@ class TagAutocompleteOptions( HydrusSerialisable.SerialisableBase ):

SERIALISABLE_TYPE = HydrusSerialisable.SERIALISABLE_TYPE_TAG_AUTOCOMPLETE_OPTIONS
SERIALISABLE_NAME = 'Tag Autocomplete Options'
SERIALISABLE_VERSION = 2
SERIALISABLE_VERSION = 3

def __init__( self, service_key: typing.Optional[ bytes ] = None ):
@@ -48,6 +48,8 @@ class TagAutocompleteOptions( HydrusSerialisable.SerialisableBase ):

self._namespace_bare_fetch_all_allowed = False
self._namespace_fetch_all_allowed = False
self._fetch_all_allowed = False
self._fetch_results_automatically = True
self._exact_match_character_threshold = 2

def _GetSerialisableInfo( self ):
@@ -65,7 +67,9 @@ class TagAutocompleteOptions( HydrusSerialisable.SerialisableBase ):

self._search_namespaces_into_full_tags,
self._namespace_bare_fetch_all_allowed,
self._namespace_fetch_all_allowed,
self._fetch_all_allowed
self._fetch_all_allowed,
self._fetch_results_automatically,
self._exact_match_character_threshold
]

return serialisable_info
@@ -81,7 +85,9 @@ class TagAutocompleteOptions( HydrusSerialisable.SerialisableBase ):

self._search_namespaces_into_full_tags,
self._namespace_bare_fetch_all_allowed,
self._namespace_fetch_all_allowed,
self._fetch_all_allowed
self._fetch_all_allowed,
self._fetch_results_automatically,
self._exact_match_character_threshold
] = serialisable_info

self._service_key = bytes.fromhex( serialisable_service_key )
@@ -120,11 +126,54 @@ class TagAutocompleteOptions( HydrusSerialisable.SerialisableBase ):

return ( 2, new_serialisable_info )

if version == 2:

[
serialisable_service_key,
serialisable_write_autocomplete_tag_domain,
override_write_autocomplete_file_domain,
serialisable_write_autocomplete_file_domain,
search_namespaces_into_full_tags,
namespace_bare_fetch_all_allowed,
namespace_fetch_all_allowed,
fetch_all_allowed
] = old_serialisable_info

fetch_results_automatically = True
exact_match_character_threshold = 2

new_serialisable_info = [
serialisable_service_key,
serialisable_write_autocomplete_tag_domain,
override_write_autocomplete_file_domain,
serialisable_write_autocomplete_file_domain,
search_namespaces_into_full_tags,
namespace_bare_fetch_all_allowed,
namespace_fetch_all_allowed,
fetch_all_allowed,
fetch_results_automatically,
exact_match_character_threshold
]

return ( 3, new_serialisable_info )

def FetchAllAllowed( self ):

return self._fetch_all_allowed

def FetchResultsAutomatically( self ):

return self._fetch_results_automatically

def GetExactMatchCharacterThreshold( self ):

return self._exact_match_character_threshold

def GetServiceKey( self ):

return self._service_key
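Because two fields are appended to the serialisable tuple, SERIALISABLE_VERSION goes 2 to 3 and the update step fills in defaults for objects saved under version 2. A minimal sketch of that versioned-upgrade pattern, with a hypothetical trimmed-down tuple standing in for the full one (in hydrus the serialisable base class drives this loop):

    CURRENT_VERSION = 3
    
    def update_serialisable_info( version, info ):
        
        # one step per call; the loader repeats until the version is current
        if version == 2:
            
            [ serialisable_service_key, fetch_all_allowed ] = info  # trimmed-down v2 tuple
            
            # the new options get their defaults, exactly as the real update above does
            return ( 3, [ serialisable_service_key, fetch_all_allowed, True, 2 ] )
            
        
    
    def load( version, info ):
        
        while version < CURRENT_VERSION:
            
            ( version, info ) = update_serialisable_info( version, info )
            
        
        return info
        
    
    assert load( 2, [ 'abcd', False ] ) == [ 'abcd', False, True, 2 ]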
@@ -182,6 +231,16 @@ class TagAutocompleteOptions( HydrusSerialisable.SerialisableBase ):

return self._search_namespaces_into_full_tags

def SetExactMatchCharacterThreshold( self, exact_match_character_threshold: typing.Optional[ int ] ):

self._exact_match_character_threshold = exact_match_character_threshold

def SetFetchResultsAutomatically( self, fetch_results_automatically: bool ):

self._fetch_results_automatically = fetch_results_automatically

def SetTuple( self,
write_autocomplete_tag_domain: bytes,
override_write_autocomplete_file_domain: bool,

@@ -70,7 +70,7 @@ options = {}

# Misc

NETWORK_VERSION = 19
SOFTWARE_VERSION = 422
SOFTWARE_VERSION = 423
CLIENT_API_VERSION = 15

SERVER_THUMBNAIL_DIMENSIONS = ( 200, 200 )

@@ -581,7 +581,10 @@ class HydrusDB( object ):

for db_name in db_names:

self._c.execute( 'PRAGMA {}.cache_size = -200000;'.format( db_name ) )
# mb -> kb
cache_size = HG.db_cache_size * 1000

self._c.execute( 'PRAGMA {}.cache_size = -{};'.format( db_name, cache_size ) )

self._c.execute( 'PRAGMA {}.journal_mode = {};'.format( db_name, HG.db_journal_mode ) )

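The hardcoded -200000 becomes HG.db_cache_size * 1000: a negative cache_size tells SQLite to size the page cache in kibibytes rather than pages, so 200 (MB) maps to roughly -200000 (the 1000x MB-to-KB conversion is the commit's own approximation). A quick standalone check with the sqlite3 module:

    import sqlite3
    
    db_cache_size_mb = 200  # stand-in for HG.db_cache_size
    
    conn = sqlite3.connect( ':memory:' )
    c = conn.cursor()
    
    # negative value = cache size in KB rather than in pages
    c.execute( 'PRAGMA main.cache_size = -{};'.format( db_cache_size_mb * 1000 ) )
    
    print( c.execute( 'PRAGMA main.cache_size;' ).fetchone() )  # (-200000,)
    
    conn.close()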
@@ -327,11 +327,6 @@ def GetMime( path, ok_to_look_for_hydrus_updates = False ):

HydrusData.PrintException( e, do_wait = False )

if HydrusText.LooksLikeHTML( bit_to_check ):

return HC.TEXT_HTML

if ok_to_look_for_hydrus_updates:

with open( path, 'rb' ) as f:
@@ -358,5 +353,10 @@ def GetMime( path, ok_to_look_for_hydrus_updates = False ):

if HydrusText.LooksLikeHTML( bit_to_check ):

return HC.TEXT_HTML

return HC.APPLICATION_UNKNOWN

@@ -13,6 +13,8 @@ no_db_temp_files = False

boot_debug = False

db_cache_size = 200

# if this is set to 1, transactions are not immediately synced to the journal so multiple can be undone following a power-loss
# if set to 2, all transactions are synced, so once a new one starts you know the last one is on disk
# corruption cannot occur either way, but since we have multiple ATTACH dbs with diff journals, let's not mess around when power-cut during heavy file import or w/e

@@ -95,6 +95,10 @@ def LooksLikeJSON( file_data ):

return False

UNICODE_REPLACEMENT_CHARACTER = u'\ufffd'
NULL_CHARACTER = '\x00'

def NonFailingUnicodeDecode( data, encoding ):

try:
@@ -103,12 +107,9 @@ def NonFailingUnicodeDecode( data, encoding ):

except UnicodeDecodeError:

unicode_replacement_character = u'\ufffd'
null_character = '\x00'

text = str( data, encoding, errors = 'replace' )

error_count = text.count( unicode_replacement_character )
error_count = text.count( UNICODE_REPLACEMENT_CHARACTER )

if CHARDET_OK:
@@ -120,13 +121,13 @@ def NonFailingUnicodeDecode( data, encoding ):

chardet_text = str( data, chardet_encoding, errors = 'replace' )

chardet_error_count = chardet_text.count( unicode_replacement_character )
chardet_error_count = chardet_text.count( UNICODE_REPLACEMENT_CHARACTER )

if chardet_error_count < error_count:

if null_character in chardet_text:
if NULL_CHARACTER in chardet_text:

chardet_text = chardet_text.replace( null_character, '' )
chardet_text = chardet_text.replace( NULL_CHARACTER, '' )

return ( chardet_text, chardet_encoding )
@@ -135,6 +136,15 @@ def NonFailingUnicodeDecode( data, encoding ):

if NULL_CHARACTER in text:

# I guess this is valid in unicode for some reason
# funnily enough, it is not replaced by 'replace'
# nor does it raise an error in normal str creation

text = text.replace( NULL_CHARACTER, '' )

return ( text, encoding )

def RemoveNewlines( text ):

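The hoisted constants make the scoring in NonFailingUnicodeDecode easier to follow: decode with errors = 'replace' under each candidate encoding and prefer whichever produces fewer U+FFFD replacement characters; NUL bytes survive both decodes untouched, which is why the new hunk strips them separately. A minimal sketch of that scoring:

    UNICODE_REPLACEMENT_CHARACTER = u'\ufffd'
    
    def count_decode_errors( data, encoding ):
        
        # every undecodable byte sequence becomes one or more U+FFFD characters
        return str( data, encoding, errors = 'replace' ).count( UNICODE_REPLACEMENT_CHARACTER )
        
    
    data = 'caf\xe9'.encode( 'latin-1' )  # the 0xE9 byte is not valid utf-8 here
    
    assert count_decode_errors( data, 'utf-8' ) == 1    # the accented byte gets replaced
    assert count_decode_errors( data, 'latin-1' ) == 0  # clean decode, so latin-1 would win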
@@ -1128,6 +1128,7 @@ class TestTagObjects( unittest.TestCase ):

self.assertEqual( set( predicate_results_cache.FilterPredicates( CC.COMBINED_TAG_SERVICE_KEY, 'character:samus ar*' ) ), { character_samus_aran } )
self.assertEqual( set( predicate_results_cache.FilterPredicates( CC.COMBINED_TAG_SERVICE_KEY, 'character:samus br*' ) ), set() )
self.assertEqual( set( predicate_results_cache.FilterPredicates( CC.COMBINED_TAG_SERVICE_KEY, 'character:samus aran*' ) ), { character_samus_aran } )
self.assertEqual( set( predicate_results_cache.FilterPredicates( CC.COMBINED_TAG_SERVICE_KEY, 'characte:samus aran*' ) ), set() )

def test_predicate_results_cache_namespace_explicit_fetch_all( self ):
@@ -1807,3 +1808,28 @@ class TestTagObjects( unittest.TestCase ):

self.assertEqual( p.GetTextsAndNamespaces(), or_texts_and_namespaces )

def test_tag_import_options_simple( self ):

tag_autocomplete_options = ClientTagsHandling.TagAutocompleteOptions( CC.COMBINED_TAG_SERVICE_KEY )

self.assertTrue( tag_autocomplete_options.FetchResultsAutomatically() )
self.assertEqual( tag_autocomplete_options.GetExactMatchCharacterThreshold(), 2 )

#

tag_autocomplete_options.SetFetchResultsAutomatically( False )

self.assertFalse( tag_autocomplete_options.FetchResultsAutomatically() )

tag_autocomplete_options.SetFetchResultsAutomatically( True )

self.assertTrue( tag_autocomplete_options.FetchResultsAutomatically() )

tag_autocomplete_options.SetExactMatchCharacterThreshold( None )

self.assertEqual( tag_autocomplete_options.GetExactMatchCharacterThreshold(), None )

tag_autocomplete_options.SetExactMatchCharacterThreshold( 2 )

self.assertEqual( tag_autocomplete_options.GetExactMatchCharacterThreshold(), 2 )

10
server.py
@@ -41,6 +41,7 @@ try:

argparser.add_argument( '-d', '--db_dir', help = 'set an external db location' )
argparser.add_argument( '--temp_dir', help = 'override the program\'s temporary directory' )
argparser.add_argument( '--db_journal_mode', default = 'WAL', choices = [ 'WAL', 'TRUNCATE', 'PERSIST', 'MEMORY' ], help = 'change db journal mode (default=WAL)' )
argparser.add_argument( '--db_cache_size', type = int, help = 'override SQLite cache_size per db file, in MB (default=200)' )
argparser.add_argument( '--db_synchronous_override', type = int, choices = range(4), help = 'override SQLite Synchronous PRAGMA (default=2)' )
argparser.add_argument( '--no_db_temp_files', action='store_true', help = 'run db temp operations entirely in memory' )
argparser.add_argument( '--boot_debug', action='store_true', help = 'print additional bootup information to the log' )
@@ -100,6 +101,15 @@ try:

HG.db_journal_mode = 'MEMORY'

if result.db_cache_size is not None:

HG.db_cache_size = result.db_cache_size

else:

HG.db_cache_size = 200

if result.db_synchronous_override is not None:

HG.db_synchronous = int( result.db_synchronous_override )
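The explicit None-check-plus-fallback keeps server.py symmetrical with the other overrides in all three boot scripts. For what it's worth, a sketch of a hypothetical alternative, letting argparse supply the fallback itself so the 200 also shows up in --help (not what the commit does):

    import argparse
    
    argparser = argparse.ArgumentParser()
    
    # hypothetical variant: argparse owns the default
    argparser.add_argument( '--db_cache_size', type = int, default = 200, help = 'override SQLite cache_size per db file, in MB (default=200)' )
    
    result = argparser.parse_args( [] )
    
    assert result.db_cache_size == 200  # HG.db_cache_size could then be assigned directly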