Make fsspec requirement the same across subpackages (#19085)

This commit is contained in:
Adrian Wälchli 2023-11-29 16:36:28 +01:00 committed by GitHub
parent 9bcb983d26
commit 710cac4ce9
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
5 changed files with 5 additions and 5 deletions

View File

@@ -3,7 +3,7 @@ packaging
typing-extensions >=4.4.0, <4.8.0
deepdiff >=5.7.0, <6.6.0
starsessions >=1.2.1, <2.0 # strict
-fsspec >=2022.5.0, <2023.11.0
+fsspec[http] >=2022.5.0, <2023.11.0
croniter >=1.3.0, <1.5.0 # strict; TODO: for now until we find something more robust.
traitlets >=5.3.0, <5.10.0
arrow >=1.2.0, <1.3.0

View File

@@ -1,5 +1,5 @@
# NOTE: the upper bound for the package version is only set for CI stability, and it is dropped while installing this package
# in case you want to preserve/enforce restrictions on the latest compatible version, add "strict" as an in-line comment
-fsspec[http] >2021.06.0, <2023.11.0
+fsspec[http] >=2022.5.0, <2023.11.0
s3fs >=2022.5.0, <2023.7.0

View File

@@ -3,7 +3,7 @@
numpy >=1.17.2, <1.27.0
torch >=1.12.0, <2.2.0
-fsspec[http]>2021.06.0, <2023.11.0
+fsspec[http] >=2022.5.0, <2023.11.0
packaging >=20.0, <=23.1
typing-extensions >=4.4.0, <4.8.0
lightning-utilities >=0.8.0, <0.10.0

View File

@@ -5,7 +5,7 @@ numpy >=1.17.2, <1.27.0
torch >=1.12.0, <2.2.0
tqdm >=4.57.0, <4.67.0
PyYAML >=5.4, <6.1.0
-fsspec[http] >2021.06.0, <2023.11.0
+fsspec[http] >=2022.5.0, <2023.11.0
torchmetrics >=0.7.0, <1.3.0 # needed for using fixed compare_version
packaging >=20.0, <=23.1
typing-extensions >=4.4.0, <4.8.0

View File

@@ -164,7 +164,7 @@ def __warn_dataloader_iter_limitations(model: "pl.LightningModule") -> None:
"You are using the `dataloader_iter` step flavor. If you consume the iterator more than once per step, the"
" `batch_idx` argument in any hook that takes it will not match with the batch index of the last batch"
" consumed. This might have unforeseen effects on callbacks or code that expects to get the correct index."
-            " This will also no work well with gradient accumulation. This feature is very experimental and subject to"
+            " This will also not work well with gradient accumulation. This feature is very experimental and subject to"
" change. Here be dragons.",
category=PossibleUserWarning,
)