Mirror of https://github.com/Textualize/rich.git — commit d4e55f7629 ("imports"), parent 9aeb79f831.
|
@ -17,6 +17,11 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
|
|||
- Fix interaction between `Capture` contexts and `Console(record=True)` https://github.com/Textualize/rich/pull/2343
|
||||
- Fixed hash issue in Styles class https://github.com/Textualize/rich/pull/2346
|
||||
|
||||
### Changed
|
||||
|
||||
- `Style.__add__` will no longer return `NotImplemented`
|
||||
- Remove rich.\_lru_cache
|
||||
|
||||
## [12.4.4] - 2022-05-24
|
||||
|
||||
### Changed
|
||||
|
|
|
@ -1,116 +0,0 @@
|
|||
from threading import Lock
from typing import Dict, Generic, List, Optional, TypeVar, Union, overload

CacheKey = TypeVar("CacheKey")
CacheValue = TypeVar("CacheValue")
DefaultValue = TypeVar("DefaultValue")


class LRUCache(Generic[CacheKey, CacheValue]):
    """
    A dictionary-like container that stores a given maximum items.

    If an additional item is added when the LRUCache is full, the least
    recently used key is discarded to make room for the new item.

    The implementation is similar to functools.lru_cache, which uses a linked
    list to keep track of the most recently used items.

    Each entry is stored as [PREV, NEXT, KEY, VALUE] where PREV is a reference
    to the previous entry, and NEXT is a reference to the next value.
    """

    def __init__(self, maxsize: int) -> None:
        # Maximum number of entries held before the LRU entry is evicted.
        self.maxsize = maxsize
        # Maps keys to their linked-list entries for O(1) lookup.
        self.cache: Dict[CacheKey, List[object]] = {}
        # True once the cache has reached maxsize (eviction mode).
        self.full = False
        # Head of the circular doubly linked list; once non-empty, root is
        # also the most recently used entry.
        self.root: List[object] = []
        self._lock = Lock()
        super().__init__()

    def __len__(self) -> int:
        return len(self.cache)

    def _promote(self, link: List[object]) -> None:
        """Move *link* to the front of the list, making it the most recently used.

        The caller must already hold ``self._lock``.
        """
        # Unlink the entry from its current position...
        link[0][1] = link[1]  # type: ignore[index]
        link[1][0] = link[0]  # type: ignore[index]
        # ...and splice it in just before the current root.
        root = self.root
        link[0] = root[0]
        link[1] = root
        root[0][1] = link  # type: ignore[index]
        root[0] = link
        self.root = link

    def set(self, key: CacheKey, value: CacheValue) -> None:
        """Set a value.

        Args:
            key (CacheKey): Key.
            value (CacheValue): Value.
        """
        with self._lock:
            link = self.cache.get(key)
            if link is None:
                root = self.root
                if not root:
                    # First entry: the root entry links to itself.
                    self.root[:] = [self.root, self.root, key, value]
                else:
                    # Insert a new entry before the current root and make it
                    # the new root (most recently used).
                    self.root = [root[0], root, key, value]
                    root[0][1] = self.root  # type: ignore[index]
                    root[0] = self.root
                self.cache[key] = self.root

                if self.full or len(self.cache) > self.maxsize:
                    # Evict the least recently used entry (the one just
                    # before root in the circular list).
                    self.full = True
                    root = self.root
                    last = root[0]
                    last[0][1] = root  # type: ignore[index]
                    root[0] = last[0]  # type: ignore[index]
                    del self.cache[last[2]]  # type: ignore[index]
            else:
                # Bug fix: previously, setting an existing key was a silent
                # no-op (the old value was kept and recency unchanged).
                # Update the stored value and promote the entry to MRU.
                link[3] = value
                if link is not self.root:
                    self._promote(link)

    __setitem__ = set

    @overload
    def get(self, key: CacheKey) -> Optional[CacheValue]:
        ...

    @overload
    def get(
        self, key: CacheKey, default: DefaultValue
    ) -> Union[CacheValue, DefaultValue]:
        ...

    def get(
        self, key: CacheKey, default: Optional[DefaultValue] = None
    ) -> Union[CacheValue, Optional[DefaultValue]]:
        """Get a value from the cache, or return a default if the key is not present.

        Args:
            key (CacheKey): Key
            default (Optional[DefaultValue], optional): Default to return if key is not present. Defaults to None.

        Returns:
            Union[CacheValue, Optional[DefaultValue]]: Either the value or a default.
        """
        link = self.cache.get(key)
        if link is None:
            return default
        if link is not self.root:
            with self._lock:
                self._promote(link)
        return link[3]  # type: ignore[return-value]

    def __getitem__(self, key: CacheKey) -> CacheValue:
        # Raises KeyError (from the dict lookup) when the key is absent.
        link = self.cache[key]
        if link is not self.root:
            with self._lock:
                self._promote(link)
        return link[3]  # type: ignore[return-value]

    def __contains__(self, key: CacheKey) -> bool:
        return key in self.cache
|
|
@ -1,15 +1,15 @@
|
|||
import re
|
||||
from functools import lru_cache
|
||||
from typing import List
|
||||
from typing import Callable, List
|
||||
|
||||
from ._cell_widths import CELL_WIDTHS
|
||||
from ._lru_cache import LRUCache
|
||||
|
||||
# Regex to match sequence of the most common character ranges
|
||||
_is_single_cell_widths = re.compile("^[\u0020-\u006f\u00a0\u02ff\u0370-\u0482]*$").match
|
||||
|
||||
|
||||
def cell_len(text: str, _cache: LRUCache[str, int] = LRUCache(1024 * 4)) -> int:
|
||||
@lru_cache(4096)
def _cached_cell_len(text: str) -> int:
    """Get the number of cells required to display text.

    Args:
        text (str): Text to display.

    Returns:
        int: Get the number of cells required to display text.
    """
    # Sum per-character cell widths; lru_cache memoizes repeated strings.
    return sum(get_character_cell_size(character) for character in text)
|
||||
|
||||
|
||||
def cell_len(text: str, _cell_len: Callable[[str], int] = _cached_cell_len) -> int:
    """Get the number of cells required to display text.

    Args:
        text (str): Text to display.

    Returns:
        int: Get the number of cells required to display text.
    """
    # Short strings go through the memoized helper; long strings are
    # computed directly so they don't bloat the cache.
    if len(text) < 512:
        return _cell_len(text)
    return sum(get_character_cell_size(character) for character in text)
|
||||
|
||||
|
||||
|
|
|
@ -2,10 +2,9 @@ import sys
|
|||
from functools import lru_cache
|
||||
from marshal import dumps, loads
|
||||
from random import randint
|
||||
from typing import Any, Dict, Iterable, List, Optional, Tuple, Type, Union, cast
|
||||
from typing import Any, Dict, Iterable, List, Optional, Type, Union, cast
|
||||
|
||||
from . import errors
|
||||
from ._lru_cache import LRUCache
|
||||
from .color import Color, ColorParseError, ColorSystem, blend_rgb
|
||||
from .repr import Result, rich_repr
|
||||
from .terminal_theme import DEFAULT_TERMINAL_THEME, TerminalTheme
|
||||
|
@ -120,9 +119,6 @@ class Style:
|
|||
"o": "overline",
|
||||
}
|
||||
|
||||
# Caches results of Style.__add__
|
||||
_add_cache: LRUCache[Tuple["Style", Optional["Style"]], "Style"] = LRUCache(1024)
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
*,
|
||||
|
@ -708,13 +704,8 @@ class Style:
|
|||
text = text or str(self)
|
||||
sys.stdout.write(f"{self.render(text)}\n")
|
||||
|
||||
def __add__(self, style: Optional["Style"]) -> "Style":
|
||||
cache_key = (self, style)
|
||||
cached_style = self._add_cache.get(cache_key)
|
||||
if cached_style is not None:
|
||||
return cached_style.copy() if cached_style.link else cached_style
|
||||
if not (isinstance(style, Style) or style is None):
|
||||
return NotImplemented
|
||||
@lru_cache(maxsize=1024)
|
||||
def _add(self, style: Optional["Style"]) -> "Style":
|
||||
if style is None or style._null:
|
||||
return self
|
||||
if self._null:
|
||||
|
@ -736,9 +727,12 @@ class Style:
|
|||
else:
|
||||
new_style._meta = self._meta or style._meta
|
||||
new_style._hash = None
|
||||
self._add_cache[cache_key] = new_style
|
||||
return new_style
|
||||
|
||||
def __add__(self, style: Optional["Style"]) -> "Style":
    """Combine this style with another, delegating to the cached ``_add``."""
    result = self._add(style)
    # Styles with links are copied so each gets a distinct link id.
    if result.link:
        return result.copy()
    return result
|
||||
|
||||
|
||||
NULL_STYLE = Style()
|
||||
|
||||
|
|
|
@ -1,59 +0,0 @@
|
|||
from __future__ import unicode_literals
|
||||
|
||||
from rich._lru_cache import LRUCache
|
||||
|
||||
|
||||
def test_lru_cache():
    """Indexing an LRUCache refreshes recency; inserts past maxsize evict the LRU key."""
    cache = LRUCache(3)

    # Fill the cache to capacity.
    for position, name in enumerate(("foo", "bar", "baz"), start=1):
        cache[name] = position
    for name in ("foo", "bar", "baz"):
        assert name in cache

    # A fourth insertion evicts the least recently used key ("foo").
    cache["egg"] = 4
    assert "foo" not in cache
    assert "egg" in cache

    # Touch two entries so "egg" becomes the oldest.
    cache["bar"]
    cache["baz"]

    # The next insertion should evict "egg".
    cache["eggegg"] = 5
    assert len(cache) == 3
    assert "egg" not in cache
    assert "eggegg" in cache
|
||||
|
||||
|
||||
def test_lru_cache_get():
    """LRUCache.get refreshes recency like indexing, and returns None on a miss."""
    cache = LRUCache(3)

    # Fill the cache to capacity.
    for position, name in enumerate(("foo", "bar", "baz"), start=1):
        cache[name] = position
    assert "foo" in cache

    # A fourth insertion evicts the least recently used key ("foo").
    cache["egg"] = 4
    assert cache.get("foo") is None
    assert "egg" in cache

    # Touch two entries via get() so "egg" becomes the oldest.
    cache.get("bar")
    cache.get("baz")

    # The next insertion should evict "egg".
    cache["eggegg"] = 5
    assert "egg" not in cache
    assert "eggegg" in cache
|
|
@ -1,7 +1,7 @@
|
|||
import pytest
|
||||
|
||||
from rich.color import Color, ColorSystem, ColorType
|
||||
from rich import errors
|
||||
from rich.color import Color, ColorSystem, ColorType
|
||||
from rich.style import Style, StyleStack
|
||||
|
||||
|
||||
|
@ -168,7 +168,6 @@ def test_test():
|
|||
|
||||
def test_add():
|
||||
assert Style(color="red") + None == Style(color="red")
|
||||
assert Style().__add__("foo") == NotImplemented
|
||||
|
||||
|
||||
def test_iadd():
|
||||
|
|
Loading…
Reference in New Issue