Relax app dependencies for `lightning` | breaking imports from `L` (#18386)
* make App conditional import
* [pre-commit.ci] auto fixes from pre-commit.com hooks
for more information, see https://pre-commit.ci
* ci: min req.
* data + store
* typo
* module_available
* extra
* [pre-commit.ci] auto fixes from pre-commit.com hooks
for more information, see https://pre-commit.ci
* typo
* [pre-commit.ci] auto fixes from pre-commit.com hooks
for more information, see https://pre-commit.ci
* envs
* doctest
* more
* docs
* **
* rm
* -f
* Apply suggestions from code review
* docs
* Revert "docs"
This reverts commit 20e8e6be98.
* Apply suggestions from code review
* warnings
* breaking
* ...
* fixing source
* fixing examples
* fixing tests
* fixing docs
* fixing source
* fix mock
* fix trainer
---------
Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com>
This commit is contained in:
parent d3ee410100, commit 5f5d99eae6
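The breaking change behind this commit: the top-level `lightning` package now imports its app subpackage conditionally (see "make App conditional import" above), so the examples, docs, and tests in the diff below stop reaching app classes through the `L` alias and import them from `lightning.app` directly. A minimal sketch of the migration pattern applied throughout the diff; the component name is illustrative, not taken from the repo:

# before: app classes accessed through the top-level alias
import lightning as L

class MyWork(L.LightningWork):
    def run(self):
        print("hello")

app = L.LightningApp(MyWork(cloud_compute=L.CloudCompute("cpu")))

# after: import the app symbols explicitly from lightning.app
from lightning.app import CloudCompute, LightningApp, LightningWork

class MyWork(LightningWork):
    def run(self):
        print("hello")

app = LightningApp(MyWork(cloud_compute=CloudCompute("cpu")))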
@@ -36,9 +36,9 @@ REQUIREMENT_FILES = {
"requirements/pytorch/examples.txt",
),
"app": (
"requirements/app/base.txt",
"requirements/app/ui.txt",
"requirements/app/app.txt",
"requirements/app/cloud.txt",
"requirements/app/ui.txt",
),
"fabric": (
"requirements/fabric/base.txt",
@@ -8,6 +8,10 @@ inputs:
pkg-name:
description: Package name to import
required: true
pkg-extra:
description: argument for install extra
required: false
default: ""
pip-flags:
description: Additional pip install flags
required: false
@@ -19,16 +23,24 @@ runs:
- name: Choose package import
working-directory: ${{ inputs.pkg-folder }}
run: |
ls -l
python -c "print('PKG_IMPORT=' + {'app': 'lightning_app', 'fabric': 'lightning_fabric', 'pytorch': 'pytorch_lightning'}.get('${{matrix.pkg-name}}', 'lightning'))" >> $GITHUB_ENV
python -c "import glob ; ls = glob.glob('*.tar.gz') ; print('PKG_SOURCE=' + ls[0])" >> $GITHUB_ENV
python -c "import glob ; ls = glob.glob('*.whl') ; print('PKG_WHEEL=' + ls[0])" >> $GITHUB_ENV
shell: bash
import os, glob
lut = {'app': 'lightning_app', 'fabric': 'lightning_fabric', 'pytorch': 'pytorch_lightning'}
act_pkg = lut.get('${{inputs.pkg-name}}', 'lightning')
pkg_sdist = glob.glob('*.tar.gz')[0]
pkg_wheel = glob.glob('*.whl')[0]
extra = '${{inputs.pkg-extra}}'
extra = f'[{extra}]' if extra else ''
envs = [f'PKG_IMPORT={act_pkg}', f'PKG_SOURCE={pkg_sdist}', f'PKG_WHEEL={pkg_wheel}', f'PKG_EXTRA={extra}']
with open(os.getenv('GITHUB_ENV'), "a") as gh_env:
gh_env.write(os.linesep.join(envs))
shell: python
- name: Install package - wheel
working-directory: ${{ inputs.pkg-folder }}
run: |
pip install ${PKG_WHEEL} ${{ inputs.pip-flags }}
pip install "${PKG_WHEEL}${PKG_EXTRA}" ${{ inputs.pip-flags }}
pip list | grep lightning
python -c "import ${{ env.PKG_IMPORT }}; print(${{ env.PKG_IMPORT }}.__version__)"
shell: bash
@@ -36,7 +48,7 @@ runs:
- name: Install package - archive
working-directory: ${{ inputs.pkg-folder }}
run: |
pip install ${PKG_SOURCE} ${{ inputs.pip-flags }}
pip install "${PKG_SOURCE}${PKG_EXTRA}" ${{ inputs.pip-flags }}
pip list | grep lightning
python -c "import ${{ env.PKG_IMPORT }}; print(${{ env.PKG_IMPORT }}.__version__)"
shell: bash
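The new `pkg-extra` input above is turned into a pip extras suffix and appended to the wheel/sdist path before installation. A rough local equivalent of that step, assuming a single built wheel already sits in `dist/` and that `app` is the extra being exercised (both assumptions, not part of the workflow itself):

# hypothetical local check mirroring the action's env-building step
import glob
import subprocess

extra = "app"  # corresponds to the `pkg-extra` input; empty string means "no extras"
suffix = f"[{extra}]" if extra else ""
wheel = glob.glob("dist/*.whl")[0]  # assumes exactly one wheel was built
# pip accepts extras appended to a local wheel path, e.g. dist/foo.whl[app]
subprocess.run(["pip", "install", f"{wheel}{suffix}"], check=True)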
@@ -42,10 +42,15 @@ jobs:
strategy:
fail-fast: false
matrix:
os: [ubuntu-22.04, macOS-12, windows-2022]
os: ["ubuntu-22.04", "macOS-12", "windows-2022"]
pkg-name: ["app", "fabric", "pytorch", "lightning", "notset"]
python-version: ["3.8" , "3.11"]
# TODO: add also install from source
include:
- {os: "macOS-12", pkg-name: "lightning", python-version: "3.9", pkg-extra: "app"}
- {os: "macOS-12", pkg-name: "notset", python-version: "3.9", pkg-extra: "app"}
- {os: "ubuntu-22.04", pkg-name: "lightning", python-version: "3.9", pkg-extra: "app"}
- {os: "ubuntu-22.04", pkg-name: "notset", python-version: "3.9", pkg-extra: "app"}
steps:
- uses: actions/checkout@v3
- uses: actions/setup-python@v4
@@ -56,19 +61,23 @@ jobs:
name: dist-packages-${{ github.sha }}
path: dist
- run: |
- name: Set package dir
run: |
python -c "print('PKG_DIR=' + {'notset': 'lightning'}.get('${{matrix.pkg-name}}', '${{matrix.pkg-name}}'))" >> $GITHUB_ENV
- name: Install package - wheel & archive
uses: ./.github/actions/pkg-install
with:
pkg-folder: dist/${{ env.PKG_DIR }}
pkg-name: ${{ matrix.pkg-name }}
pkg-extra: ${{ matrix.pkg-extra }}
- name: Run CLI (via python)
if: ${{ matrix.pkg-name == 'lightning' || matrix.pkg-name == 'notset' }}
if: ${{ (matrix.pkg-name == 'lightning' || matrix.pkg-name == 'notset') && matrix.pkg-extra == 'app' }}
run: python -m lightning --version
- name: Run CLI (direct bash)
if: ${{ matrix.pkg-name == 'lightning' || matrix.pkg-name == 'notset' || matrix.pkg-name == 'app' }}
if: |
((matrix.pkg-name == 'lightning' || matrix.pkg-name == 'notset') && matrix.pkg-extra == 'app') ||
matrix.pkg-name == 'app'
run: lightning --version
- name: DocTests actions
@@ -77,25 +86,33 @@ jobs:
pip install -q pytest -r requirements.txt
python -m pytest assistant.py
- name: build source
run: python setup.py build
- name: Adjust code for standalone
if: contains(fromJSON('["app", "fabric", "pytorch"]'), matrix.pkg-name)
run: |
python .actions/assistant.py copy_replace_imports --source_dir="./src" \
--source_import="lightning.pytorch,lightning.fabric,lightning.app" \
--target_import="pytorch_lightning,lightning_fabric,lightning_app"
rm src/lightning/__*.py
- name: Rename src folders
working-directory: src/
run: mv lightning lit
run: |
python -c "n = '${{matrix.pkg-name}}' ; n = n if n in ('app', 'fabric', 'pytorch') else '' ; print('PKG_NAME=' + n)" >> $GITHUB_ENV
rm -f ./*/__*.py
rm -f ./**/__*.py
mv lightning lit # rename lightning folder to prevent accidental local imports
- name: drop App doctest
if: ${{ (matrix.pkg-name == 'lightning' || matrix.pkg-name == 'notset') && matrix.pkg-extra != 'app' }}
working-directory: src/lit
run: |
rm -rf app
rm -rf data
rm -rf store
- name: Install pytest doctest extension
run: |
pip install -q "pytest-doctestplus>=0.9.0"
pip list
- name: DocTest package
env:
LIGHTING_TESTING: 1 # path for require wrapper
PY_IGNORE_IMPORTMISMATCH: 1
run: |
pip install -q "pytest-doctestplus>=0.9.0"
pip list
PKG_NAME=$(python -c "n = '${{matrix.pkg-name}}'; print(n if n in ('app', 'fabric', 'pytorch') else '')")
echo $PKG_NAME
python -m pytest src/lit/${PKG_NAME} --ignore-glob="**/cli/*-template/**" --doctest-plus
run: python -m pytest src/lit/${PKG_NAME} --ignore-glob="**/cli/*-template/**" --doctest-plus
@@ -68,7 +68,7 @@ jobs:
- name: Install package & dependencies
run: |
python -m pip install -q pip -U
pip install -e ".[store-test]" "pytest-timeout" -U -f ${TORCH_URL} --prefer-binary
pip install -e ".[store,store-test]" "pytest-timeout" -U -f ${TORCH_URL} --prefer-binary
pip list
- name: Testing Store
@@ -33,6 +33,7 @@ env:
FREEZE_REQUIREMENTS: "1"
TORCH_URL: "https://download.pytorch.org/whl/cpu/torch_stable.html"
PYPI_CACHE_DIR: "_pip-wheels"
PYPI_LOCAL_DIR: "pypi_pkgs/"
jobs:
docs-checks:
@@ -63,12 +64,12 @@ jobs:
unzip -qq awscli.zip
bash ./aws/install
aws --version
- run: aws s3 sync s3://sphinx-packages/ pypi_pkgs/
- run: aws s3 sync s3://sphinx-packages/ ${PYPI_LOCAL_DIR}
if: ${{ matrix.pkg-name != 'pytorch' }}
env:
AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY }}
AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_KEY }}
AWS_DEFAULT_REGION: ${{ secrets.AWS_REGION }}
if: ${{ matrix.pkg-name != 'pytorch' }}
- name: pip wheels cache
uses: actions/cache/restore@v3
@@ -83,11 +84,11 @@ jobs:
sudo apt-get install -y pandoc texlive-latex-extra dvipng texlive-pictures
- name: Install package & dependencies
run: |
mkdir -p $PYPI_CACHE_DIR # in case cache was not hit
ls -lh $PYPI_CACHE_DIR
mkdir -p pypi_pkgs # in case template is not pulled
pip install .[extra,cloud,ui] -U -r requirements/${{ matrix.pkg-name }}/docs.txt \
-f pypi_pkgs/ -f $PYPI_CACHE_DIR -f ${TORCH_URL}
mkdir -p ${PYPI_CACHE_DIR} # in case cache was not hit
ls -lh ${PYPI_CACHE_DIR}
mkdir -p ${PYPI_LOCAL_DIR} # in case template is not pulled
pip install .[app,extra,cloud,ui] -U -r requirements/${{ matrix.pkg-name }}/docs.txt \
-f ${PYPI_LOCAL_DIR} -f ${PYPI_CACHE_DIR} -f ${TORCH_URL}
pip list
- name: Test Documentation
@@ -128,12 +129,12 @@ jobs:
unzip -qq awscli.zip
bash ./aws/install
aws --version
- run: aws s3 sync s3://sphinx-packages/ pypi_pkgs/
- run: aws s3 sync s3://sphinx-packages/ ${PYPI_LOCAL_DIR}
if: ${{ matrix.pkg-name != 'pytorch' }}
env:
AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY }}
AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_KEY }}
AWS_DEFAULT_REGION: ${{ secrets.AWS_REGION }}
if: ${{ matrix.pkg-name != 'pytorch' }}
- name: pip wheels cache
uses: actions/cache/restore@v3
@@ -148,12 +149,12 @@ jobs:
sudo apt-get install -y pandoc texlive-latex-extra dvipng texlive-pictures
- name: Install package & dependencies
run: |
mkdir -p $PYPI_CACHE_DIR # in case cache was not hit
ls -lh $PYPI_CACHE_DIR
mkdir -p pypi_pkgs # in case template is not pulled
mkdir -p ${PYPI_CACHE_DIR} # in case cache was not hit
ls -lh ${PYPI_CACHE_DIR}
mkdir -p ${PYPI_LOCAL_DIR} # in case template is not pulled
pip --version
pip install . -U -r requirements/${{ matrix.pkg-name }}/docs.txt \
-f pypi_pkgs/ -f $PYPI_CACHE_DIR -f ${TORCH_URL}
pip install .[app] -U -r requirements/${{ matrix.pkg-name }}/docs.txt \
-f ${PYPI_LOCAL_DIR} -f ${PYPI_CACHE_DIR} -f ${TORCH_URL}
pip list
shell: bash
@@ -1,7 +1,7 @@
import lightning as L
from lightning.app import LightningWork
class ExampleWork(L.LightningWork):
class ExampleWork(LightningWork):
def run(self, *args, **kwargs):
print(f"I received the following props: args: {args} kwargs: {kwargs}")
@@ -1,7 +1,7 @@
import lightning as L
from lightning.app import LightningWork
class ExampleWork(L.LightningWork):
class ExampleWork(LightningWork):
def __init__(self):
super().__init__(cache_calls=False)
@@ -1,17 +1,17 @@
import lightning as L
from lightning.app import LightningFlow, LightningApp, CloudCompute
from lightning.app.components import TracerPythonScript
class RootFlow(L.LightningFlow):
class RootFlow(LightningFlow):
def __init__(self):
super().__init__()
self.runner = TracerPythonScript(
"train.py",
cloud_compute=L.CloudCompute("gpu"),
cloud_compute=CloudCompute("gpu"),
)
def run(self):
self.runner.run()
app = L.LightningApp(RootFlow())
app = LightningApp(RootFlow())
@@ -1,9 +1,9 @@
import lightning as L
from lightning.app import LightningWork, LightningFlow, LightningApp
from docs.quickstart.app_02 import HourLongWork
class RootFlow(L.LightningFlow):
def __init__(self, child_work_1: L.LightningWork, child_work_2: L.LightningWork):
class RootFlow(LightningFlow):
def __init__(self, child_work_1: LightningWork, child_work_2: LightningWork):
super().__init__()
self.child_work_1 = child_work_1
self.child_work_2 = child_work_2
@@ -18,4 +18,4 @@ class RootFlow(L.LightningFlow):
print("1 hour later `child_work_2` started!")
app = L.LightningApp(RootFlow(HourLongWork(parallel=True), HourLongWork(parallel=True)))
app = LightningApp(RootFlow(HourLongWork(parallel=True), HourLongWork(parallel=True)))
@@ -2,14 +2,14 @@ import flash
from flash.core.data.utils import download_data
from flash.image import ImageClassificationData, ImageClassifier
import lightning as L
from pytorch_lightning.callbacks import ModelCheckpoint
from lightning.app import LightningWork, LightningFlow, LightningApp, CloudCompute
from lightning.pytorch.callbacks import ModelCheckpoint
# Step 1: Create a training LightningWork component that gets a backbone as input
# and saves the best model and its score
class ImageClassifierTrainWork(L.LightningWork):
def __init__(self, max_epochs: int, backbone: str, cloud_compute: L.CloudCompute):
class ImageClassifierTrainWork(LightningWork):
def __init__(self, max_epochs: int, backbone: str, cloud_compute: CloudCompute):
# parallel is set to True to run asynchronously
super().__init__(parallel=True, cloud_compute=cloud_compute)
# Number of epochs to run
@@ -44,7 +44,7 @@ class ImageClassifierTrainWork(L.LightningWork):
# Step 2: Create a serving LightningWork component that gets a model input and serves it
class ImageClassifierServeWork(L.LightningWork):
class ImageClassifierServeWork(LightningWork):
def run(self, best_model_path: str):
# Load the model from the model path
model = ImageClassifier.load_from_checkpoint(best_model_path)
@@ -53,7 +53,7 @@ class ImageClassifierServeWork(L.LightningWork):
# Step 3: Create a root LightningFlow component that gets number of epochs and a path to
# a dataset as inputs, initialize 2 training components and serves the best model
class RootFlow(L.LightningFlow):
class RootFlow(LightningFlow):
def __init__(self, max_epochs: int, data_dir: str):
super().__init__()
self.data_dir = data_dir
@@ -89,4 +89,4 @@ class RootFlow(L.LightningFlow):
download_data("https://pl-flash-data.s3.amazonaws.com/hymenoptera_data.zip", "./data")
# Initialize your Lightning app with 5 epochs
app = L.LightningApp(RootFlow(5, "./data/hymenoptera_data"))
app = LightningApp(RootFlow(5, "./data/hymenoptera_data"))
@@ -1,8 +1,8 @@
import lightning as L
from lightning.app import LightningWork, LightningFlow, LightningApp
from lightning.app.utilities.app_helpers import pretty_state
class Work(L.LightningWork):
class Work(LightningWork):
def __init__(self):
super().__init__(cache_calls=False)
# Attributes are registered automatically in the state.
@@ -13,7 +13,7 @@ class Work(L.LightningWork):
self.counter += 1
class Flow(L.LightningFlow):
class Flow(LightningFlow):
def __init__(self):
super().__init__()
self.w = Work()
@@ -24,4 +24,4 @@ class Flow(L.LightningFlow):
self.w.run()
app = L.LightningApp(Flow())
app = LightningApp(Flow())
@@ -1,10 +1,10 @@
from time import sleep
import lightning as L
from lightning.app import LightningWork, LightningFlow, LightningApp
# This work takes an hour to run
class HourLongWork(L.LightningWork):
class HourLongWork(LightningWork):
def __init__(self, parallel: bool = False):
super().__init__(parallel=parallel)
self.progress = 0.0
@@ -16,8 +16,8 @@ class HourLongWork(L.LightningWork):
sleep(1)
class RootFlow(L.LightningFlow):
def __init__(self, child_work: L.LightningWork):
class RootFlow(LightningFlow):
def __init__(self, child_work: LightningWork):
super().__init__()
self.child_work = child_work
@@ -29,4 +29,4 @@ class RootFlow(L.LightningFlow):
print("1 hour later!")
app = L.LightningApp(RootFlow(HourLongWork()))
app = LightningApp(RootFlow(HourLongWork()))
@@ -1,9 +1,9 @@
from time import sleep
import lightning as L
from lightning.app import LightningWork, LightningFlow, LightningApp
class HourLongWork(L.LightningWork):
class HourLongWork(LightningWork):
def __init__(self):
super().__init__(cache_calls=False)
self.progress = 0.0
@@ -15,8 +15,8 @@ class HourLongWork(L.LightningWork):
sleep(1)
class RootFlow(L.LightningFlow):
def __init__(self, child_work: L.LightningWork):
class RootFlow(LightningFlow):
def __init__(self, child_work: LightningWork):
super().__init__()
self.child_work = child_work
@@ -28,4 +28,4 @@ class RootFlow(L.LightningFlow):
print("1 hour later!")
app = L.LightningApp(RootFlow(HourLongWork()))
app = LightningApp(RootFlow(HourLongWork()))
@@ -1,8 +1,8 @@
import lightning as L
from lightning.app import LightningFlow, LightningApp
from lightning.app.testing import EmptyFlow, EmptyWork
class FlowB(L.LightningFlow):
class FlowB(LightningFlow):
def __init__(self):
super().__init__()
self.flow_d = EmptyFlow()
@@ -12,7 +12,7 @@ class FlowB(L.LightningFlow):
...
class FlowA(L.LightningFlow):
class FlowA(LightningFlow):
def __init__(self):
super().__init__()
self.flow_b = FlowB()
@@ -23,4 +23,4 @@ class FlowA(L.LightningFlow):
...
app = L.LightningApp(FlowA())
app = LightningApp(FlowA())
@@ -1,8 +1,8 @@
import lightning as L
from lightning.app import LightningFlow, LightningApp
# Step 1: Subclass LightningFlow component to define the app flow.
class HelloWorld(L.LightningFlow):
class HelloWorld(LightningFlow):
# Step 2: Add the app logic to the LightningFlow run method to
# ``print("Hello World!")`.
# The LightningApp executes the run method of the main LightningFlow
@@ -12,4 +12,4 @@ class HelloWorld(L.LightningFlow):
# Step 3: Initialize a LightningApp with the LightningFlow you defined (in step 1)
app = L.LightningApp(HelloWorld())
app = LightningApp(HelloWorld())
@@ -1,12 +1,12 @@
import os
import lightning as L
from lightning.app import LightningFlow, LightningApp
from lightning.app.frontend import StaticWebFrontend, StreamlitFrontend
from lightning.app.utilities.state import AppState
# Step 1: Define your LightningFlow component with the app UI
class UIStreamLit(L.LightningFlow):
class UIStreamLit(LightningFlow):
def __init__(self):
super().__init__()
self.should_print = False
@@ -31,14 +31,14 @@ def render_fn(state: AppState):
# Step 4: Implement a Static Web Frontend. This could be react, vue, etc.
class UIStatic(L.LightningFlow):
class UIStatic(LightningFlow):
# Step 5:
def configure_layout(self):
return StaticWebFrontend(os.path.join(os.path.dirname(__file__), "ui"))
# Step 6: Implement the root flow.
class HelloWorld(L.LightningFlow):
class HelloWorld(LightningFlow):
def __init__(self):
super().__init__()
self.static_ui = UIStatic()
@@ -54,4 +54,4 @@ class HelloWorld(L.LightningFlow):
]
app = L.LightningApp(HelloWorld())
app = LightningApp(HelloWorld())
@ -384,8 +384,8 @@ doctest_test_doctest_blocks = ""
|
|||
doctest_global_setup = """
|
||||
import importlib
|
||||
import os
|
||||
import lightning as L
|
||||
|
||||
from lightning.app import LightningWork, LightningFlow, LightningApp, CloudCompute
|
||||
from lightning.fabric.loggers.tensorboard import _TENSORBOARD_AVAILABLE, _TENSORBOARDX_AVAILABLE
|
||||
"""
|
||||
coverage_skip_undoc_in_source = True
|
||||
|
|
|
@@ -1,8 +1,8 @@
import lightning as L
from lightning.app import LightningWork, LightningFlow, LightningApp
from lightning.app.utilities.app_helpers import pretty_state
class Work(L.LightningWork):
class Work(LightningWork):
def __init__(self):
super().__init__(cache_calls=False)
# Attributes are registered automatically in the state.
@@ -13,7 +13,7 @@ class Work(L.LightningWork):
self.counter += 1
class Flow(L.LightningFlow):
class Flow(LightningFlow):
def __init__(self):
super().__init__()
self.w = Work()
@@ -24,4 +24,4 @@ class Flow(L.LightningFlow):
self.w.run()
app = L.LightningApp(Flow())
app = LightningApp(Flow())
@@ -5,11 +5,11 @@ import uuid
import zipfile
from pathlib import Path
import lightning as L
from lightning.app import LightningWork, LightningApp
from lightning.app.storage import Drive
class FileServer(L.LightningWork):
class FileServer(LightningWork):
def __init__(
self,
drive: Drive,
@@ -26,7 +26,7 @@ class FileServer(L.LightningWork):
"""
super().__init__(
cloud_build_config=L.BuildConfig(["flask, flask-cors"]),
cloud_build_config=BuildConfig(["flask, flask-cors"]),
parallel=True,
**kwargs,
)
@@ -80,8 +80,8 @@ class PyTorchLightningGithubRepoRunner(GithubRepoRunner):
self.best_model_score = None
def configure_tracer(self):
from pytorch_lightning import Trainer
from pytorch_lightning.callbacks import Callback
from lightning.pytorch import Trainer
from lightning.pytorch.callbacks import Callback
tracer = super().configure_tracer()
@@ -1,12 +1,12 @@
# app.py
import lightning as L
from lightning.app import LightningWork, LightningApp, CloudCompute
class YourComponent(L.LightningWork):
class YourComponent(LightningWork):
def run(self):
print('RUN ANY PYTHON CODE HERE')
# run on a cloud machine ("cpu", "gpu", ...)
component = YourComponent(cloud_compute=L.CloudCompute("cpu"))
app = L.LightningApp(component)
component = YourComponent(cloud_compute=CloudCompute("cpu"))
app = LightningApp(component)
@@ -1,5 +1,5 @@
# !pip install torchvision
import lightning as L
from lightning.app import LightningApp, CloudCompute
from lightning.app.components.serve import PythonServer, Image, Number
import base64, io, torchvision, torch
from PIL import Image as PILImage
@@ -26,6 +26,6 @@ class PyTorchServer(PythonServer):
component = PyTorchServer(
input_type=Image, output_type=Number, cloud_compute=L.CloudCompute('gpu')
input_type=Image, output_type=Number, cloud_compute=CloudCompute('gpu')
)
app = L.LightningApp(component)
app = LightningApp(component)
@@ -1,12 +1,12 @@
# app.py
import lightning as L
from lightning.app import LightningWork, LightningApp
class YourComponent(L.LightningWork):
class YourComponent(LightningWork):
def run(self):
print('RUN ANY PYTHON CODE HERE')
component = YourComponent()
app = L.LightningApp(component)
app = LightningApp(component)
@@ -1,12 +1,12 @@
# app.py
import lightning as L
from lightning.app import LightningWork, LightningApp, CloudCompute
class YourComponent(L.LightningWork):
class YourComponent(LightningWork):
def run(self):
print('RUN ANY PYTHON CODE HERE')
# run on a cloud machine ("cpu", "gpu", ...)
compute = L.CloudCompute("gpu")
compute = CloudCompute("gpu")
component = YourComponent(cloud_compute=compute)
app = L.LightningApp(component)
app = LightningApp(component)
@@ -1,9 +1,9 @@
# !pip install torch
import lightning as L
from lightning.app import LightningWork, LightningApp, CloudCompute
from lightning.app.components import MultiNode
class MultiNodeComponent(L.LightningWork):
class MultiNodeComponent(LightningWork):
def run(
self,
main_address: str,
@@ -24,6 +24,6 @@ class MultiNodeComponent(L.LightningWork):
# gpu-multi-fast has 4 GPUs x 8 nodes = 32 GPUs
component = MultiNodeComponent(cloud_compute=L.CloudCompute("gpu-multi-fast"))
component = MultiNodeComponent(cloud_compute=CloudCompute("gpu-multi-fast"))
component = MultiNode(component, nodes=8)
app = L.LightningApp(component)
app = LightningApp(component)
@@ -1,19 +1,20 @@
# app.py
import lightning as L
from lightning import Trainer
from lightning.app import LightningWork, LightningApp, CloudCompute
from lightning.app.components import LightningTrainerMultiNode
from lightning.pytorch.demos.boring_classes import BoringModel
class LightningTrainerDistributed(L.LightningWork):
class LightningTrainerDistributed(LightningWork):
def run(self):
model = BoringModel()
trainer = L.Trainer(max_epochs=10, strategy="ddp")
trainer = Trainer(max_epochs=10, strategy="ddp")
trainer.fit(model)
# 8 GPUs: (2 nodes of 4 x v100)
component = LightningTrainerMultiNode(
LightningTrainerDistributed,
num_nodes=4,
cloud_compute=L.CloudCompute("gpu-fast-multi"), # 4 x v100
cloud_compute=CloudCompute("gpu-fast-multi"), # 4 x v100
)
app = L.LightningApp(component)
app = LightningApp(component)
@@ -1,6 +1,6 @@
# app.py
# ! pip install torch
import lightning as L
from lightning.app import LightningWork, LightningApp, CloudCompute
from lightning.app.components import MultiNode
import torch
from torch.nn.parallel.distributed import DistributedDataParallel
@@ -45,7 +45,7 @@ def distributed_train(local_rank: int, main_address: str, main_port: int, num_no
print("Multi Node Distributed Training Done!")
class PyTorchDistributed(L.LightningWork):
class PyTorchDistributed(LightningWork):
def run(self, main_address: str, main_port: int, num_nodes: int, node_rank: int):
nprocs = torch.cuda.device_count() if torch.cuda.is_available() else 1
torch.multiprocessing.spawn(
@@ -55,6 +55,6 @@ class PyTorchDistributed(L.LightningWork):
)
# 32 GPUs: (8 nodes x 4 v 100)
compute = L.CloudCompute("gpu-fast-multi") # 4xV100
compute = CloudCompute("gpu-fast-multi") # 4xV100
component = MultiNode(PyTorchDistributed, num_nodes=8, cloud_compute=compute)
app = L.LightningApp(component)
app = LightningApp(component)
@@ -1,6 +1,6 @@
# app.py
# !curl https://raw.githubusercontent.com/Lightning-AI/lightning/master/examples/app/multi_node/pl_boring_script.py -o pl_boring_script.py
import lightning as L
from lightning.app import LightningApp, CloudCompute
from lightning.app.components.training import LightningTrainerScript
# run script that trains PyTorch with the Lightning Trainer
@@ -8,6 +8,6 @@ model_script = 'pl_boring_script.py'
component = LightningTrainerScript(
model_script,
num_nodes=1,
cloud_compute=L.CloudCompute("gpu")
cloud_compute=CloudCompute("gpu")
)
app = L.LightningApp(component)
app = LightningApp(component)
@@ -1,7 +1,7 @@
# app.py
# !pip install streamlit omegaconf scipy
# !pip install torch
import lightning as L
from lightning.app import LightningApp
import torch
from io import BytesIO
from functools import partial
@@ -9,7 +9,7 @@ from scipy.io.wavfile import write
import streamlit as st
class StreamlitApp(L.app.components.ServeStreamlit):
class StreamlitApp(app.components.ServeStreamlit):
def build_model(self):
sample_rate = 48000
model, _ = torch.hub.load('snakers4/silero-models', model='silero_tts',speaker="v3_en")
@@ -27,4 +27,4 @@ class StreamlitApp(L.app.components.ServeStreamlit):
audio.seek(0)
st.audio(audio)
app = L.LightningApp(StreamlitApp())
app = LightningApp(StreamlitApp())
@@ -1,15 +1,15 @@
# A hello world component
# app.py
import lightning as L
from lightning.app import LightningWork, LightningApp, CloudCompute
class YourComponent(L.LightningWork):
class YourComponent(LightningWork):
def run(self):
print('RUN ANY PYTHON CODE HERE')
# run on a cloud machine
compute = L.CloudCompute("cpu")
compute = CloudCompute("cpu")
worker = YourComponent(cloud_compute=compute)
app = L.LightningApp(worker)
app = LightningApp(worker)
@@ -1,9 +1,9 @@
# app.py
# ! pip install torch
import lightning as L
from lightning.app import LightningWork, LightningApp, CloudCompute
import torch
class PyTorchComponent(L.LightningWork):
class PyTorchComponent(LightningWork):
def run(self):
device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu")
model = torch.nn.Sequential(torch.nn.Linear(1, 1),
@@ -23,6 +23,6 @@ class PyTorchComponent(L.LightningWork):
loss.backward()
optimizer.step()
compute = L.CloudCompute('gpu')
compute = CloudCompute('gpu')
componet = PyTorchComponent(cloud_compute=compute)
app = L.LightningApp(componet)
app = LightningApp(componet)
@@ -1,11 +1,11 @@
# app.py
# !pip install scikit-learn xgboost
import lightning as L
from lightning.app import LightningWork, LightningApp
from sklearn import datasets
from sklearn.model_selection import train_test_split
from xgboost import XGBClassifier
class XGBoostComponent(L.LightningWork):
class XGBoostComponent(LightningWork):
def run(self):
iris = datasets.load_iris()
X, y = iris.data, iris.target
@@ -18,4 +18,4 @@ class XGBoostComponent(L.LightningWork):
print(f'preds: {preds}')
app = L.LightningApp(XGBoostComponent())
app = LightningApp(XGBoostComponent())
@@ -1,12 +1,12 @@
# app.py
# !pip install sklearn xgboost
# !conda install py-xgboost-gpu
import lightning as L
from lightning.app import LightningWork, LightningApp, CloudCompute
from sklearn import datasets
from sklearn.model_selection import train_test_split
from xgboost import XGBClassifier
class XGBoostComponent(L.LightningWork):
class XGBoostComponent(LightningWork):
def run(self):
iris = datasets.load_iris()
X, y = iris.data, iris.target
@@ -18,5 +18,5 @@ class XGBoostComponent(L.LightningWork):
preds = bst.predict(X_test)
print(f'preds: {preds}')
compute = L.CloudCompute('gpu')
app = L.LightningApp(XGBoostComponent(cloud_compute=compute))
compute = CloudCompute('gpu')
app = LightningApp(XGBoostComponent(cloud_compute=compute))
@@ -1,21 +1,21 @@
# app.py
import lightning as L
from lightning.app import LightningWork, LightningApp, CloudCompute
class YourComponent(L.LightningWork):
class YourComponent(LightningWork):
def run(self):
print('RUN ANY PYTHON CODE HERE')
# custom accelerators
compute = L.CloudCompute('gpu')
compute = CloudCompute('gpu')
component = YourComponent(cloud_compute=compute)
app = L.LightningApp(component)
app = LightningApp(component)
# OTHER ACCELERATORS:
# compute = L.CloudCompute('default') # 1 CPU
# compute = L.CloudCompute('cpu-medium') # 8 CPUs
# compute = L.CloudCompute('gpu') # 1 T4 GPU
# compute = L.CloudCompute('gpu-fast-multi') # 4 V100 GPU
# compute = L.CloudCompute('p4d.24xlarge') # AWS instance name (8 A100 GPU)
# compute = CloudCompute('default') # 1 CPU
# compute = CloudCompute('cpu-medium') # 8 CPUs
# compute = CloudCompute('gpu') # 1 T4 GPU
# compute = CloudCompute('gpu-fast-multi') # 4 V100 GPU
# compute = CloudCompute('p4d.24xlarge') # AWS instance name (8 A100 GPU)
# compute = ...
@@ -1,13 +1,13 @@
# app.py
import lightning as L
from lightning.app import LightningWork, LightningApp, CloudCompute
class YourComponent(L.LightningWork):
class YourComponent(LightningWork):
def run(self):
print('RUN ANY PYTHON CODE HERE')
# if the machine hasn't started after 60 seconds, cancel the work
compute = L.CloudCompute('gpu', wait_timeout=60)
compute = CloudCompute('gpu', wait_timeout=60)
component = YourComponent(cloud_compute=compute)
app = L.LightningApp(component)
app = LightningApp(component)
@@ -1,13 +1,13 @@
# app.py
import lightning as L
from lightning.app import LightningWork, LightningApp
class YourComponent(L.LightningWork):
class YourComponent(LightningWork):
def run(self):
print('RUN ANY PYTHON CODE HERE')
# custom image (from any provider)
config= L.BuildConfig(image="gcr.io/google-samples/hello-app:1.0")
config= BuildConfig(image="gcr.io/google-samples/hello-app:1.0")
component = YourComponent(cloud_build_config=config)
app = L.LightningApp(component)
app = LightningApp(component)
@@ -1,13 +1,13 @@
# app.py
import lightning as L
from lightning.app import LightningWork, LightningApp, CloudCompute
class YourComponent(L.LightningWork):
class YourComponent(LightningWork):
def run(self):
print('RUN ANY PYTHON CODE HERE')
# stop the machine when idle for 10 seconds
compute = L.CloudCompute('gpu', idle_timeout=10)
compute = CloudCompute('gpu', idle_timeout=10)
component = YourComponent(cloud_compute=compute)
app = L.LightningApp(component)
app = LightningApp(component)
@@ -1,13 +1,13 @@
# app.py
import lightning as L
from lightning.app import LightningWork, LightningApp, CloudCompute
class YourComponent(L.LightningWork):
class YourComponent(LightningWork):
def run(self):
print('RUN ANY PYTHON CODE HERE')
# use 100 GB of space on that machine (max size: 64 TB)
compute = L.CloudCompute('gpu', disk_size=100)
compute = CloudCompute('gpu', disk_size=100)
component = YourComponent(cloud_compute=compute)
app = L.LightningApp(component)
app = LightningApp(component)
@@ -1,13 +1,13 @@
import lightning as L
from lightning.app import LightningWork, LightningApp, CloudCompute
import os
class YourComponent(L.LightningWork):
class YourComponent(LightningWork):
def run(self):
os.listdir('/foo')
# mount the files on the s3 bucket under this path
mount = L.Mount(source="s3://lightning-example-public/", mount_path="/foo")
compute = L.CloudCompute(mounts=mount)
mount = Mount(source="s3://lightning-example-public/", mount_path="/foo")
compute = CloudCompute(mounts=mount)
component = YourComponent(cloud_compute=compute)
app = L.LightningApp(component)
app = LightningApp(component)
@@ -1,13 +1,13 @@
# app.py
import lightning as L
from lightning.app import LightningWork, LightningApp, CloudCompute
class YourComponent(L.LightningWork):
class YourComponent(LightningWork):
def run(self):
print('RUN ANY PYTHON CODE HERE')
# spot machines can be turned off without notice, use for non-critical, resumable work
# request a spot machine, after 60 seconds of waiting switch to full-price
compute = L.CloudCompute('gpu', wait_timeout=60, spot=True)
compute = CloudCompute('gpu', wait_timeout=60, spot=True)
component = YourComponent(cloud_compute=compute)
app = L.LightningApp(component)
app = LightningApp(component)
@@ -1,12 +1,12 @@
# app.py
import lightning as L
from lightning.app import LightningWork, LightningFlow, LightningApp
class Component(L.LightningWork):
class Component(LightningWork):
def run(self, x):
print(x)
class WorkflowOrchestrator(L.LightningFlow):
class WorkflowOrchestrator(LightningFlow):
def __init__(self) -> None:
super().__init__()
self.component = Component()
@@ -14,4 +14,4 @@ class WorkflowOrchestrator(L.LightningFlow):
def run(self):
self.component.run('i love Lightning')
app = L.LightningApp(WorkflowOrchestrator())
app = LightningApp(WorkflowOrchestrator())
@@ -1,13 +1,13 @@
# app.py
import lightning as L
from lightning.app import LightningWork, LightningFlow, LightningApp
from lightning.app.pdb import set_trace
class Component(L.LightningWork):
class Component(LightningWork):
def run(self, x):
print(x)
set_trace()
class WorkflowOrchestrator(L.LightningFlow):
class WorkflowOrchestrator(LightningFlow):
def __init__(self) -> None:
super().__init__()
self.component = Component()
@@ -15,4 +15,4 @@ class WorkflowOrchestrator(L.LightningFlow):
def run(self):
self.component.run('i love Lightning')
app = L.LightningApp(WorkflowOrchestrator())
app = LightningApp(WorkflowOrchestrator())
@@ -1,17 +1,17 @@
# app.py
import lightning as L
from lightning.app import LightningWork, LightningFlow, LightningApp
from lightning.app.runners import MultiProcessRuntime
class TrainComponent(L.LightningWork):
class TrainComponent(LightningWork):
def run(self, x):
print(f'train a model on {x}')
class AnalyzeComponent(L.LightningWork):
class AnalyzeComponent(LightningWork):
def run(self, x):
print(f'analyze model on {x}')
class WorkflowOrchestrator(L.LightningFlow):
class WorkflowOrchestrator(LightningFlow):
def __init__(self) -> None:
super().__init__()
self.train = TrainComponent()
@@ -21,5 +21,5 @@ class WorkflowOrchestrator(L.LightningFlow):
self.train.run("GPU machine 1")
self.analyze.run("CPU machine 2")
app = L.LightningApp(WorkflowOrchestrator())
app = LightningApp(WorkflowOrchestrator())
MultiProcessRuntime(app).dispatch()
@@ -1,17 +1,17 @@
# app.py
import lightning as L
from lightning.app import LightningWork, LightningFlow, LightningApp
class TrainComponent(L.LightningWork):
class TrainComponent(LightningWork):
def run(self, x):
print(f'train a model on {x}')
class AnalyzeComponent(L.LightningWork):
class AnalyzeComponent(LightningWork):
def run(self, x):
print(f'analyze model on {x}')
class WorkflowOrchestrator(L.LightningFlow):
class WorkflowOrchestrator(LightningFlow):
def __init__(self) -> None:
super().__init__()
self.train = TrainComponent()
@@ -21,4 +21,4 @@ class WorkflowOrchestrator(L.LightningFlow):
self.train.run("CPU machine 1")
self.analyze.run("CPU machine 2")
app = L.LightningApp(WorkflowOrchestrator())
app = LightningApp(WorkflowOrchestrator())
@@ -1,12 +1,12 @@
# app.py
import lightning as L
from lightning.app import LightningWork, LightningFlow, LightningApp
class Component(L.LightningWork):
class Component(LightningWork):
def run(self, x):
print(x)
class WorkflowOrchestrator(L.LightningFlow):
class WorkflowOrchestrator(LightningFlow):
def __init__(self) -> None:
super().__init__()
self.component = Component()
@@ -14,4 +14,4 @@ class WorkflowOrchestrator(L.LightningFlow):
def run(self):
self.component.run('i love Lightning')
app = L.LightningApp(WorkflowOrchestrator())
app = LightningApp(WorkflowOrchestrator())
@@ -1,13 +1,13 @@
# app.py
import lightning as L
from lightning.app import LightningWork, LightningFlow, LightningApp
from lightning.app.pdb import set_trace
class Component(L.LightningWork):
class Component(LightningWork):
def run(self, x):
print(x)
set_trace()
class WorkflowOrchestrator(L.LightningFlow):
class WorkflowOrchestrator(LightningFlow):
def __init__(self) -> None:
super().__init__()
self.component = Component()
@@ -15,4 +15,4 @@ class WorkflowOrchestrator(L.LightningFlow):
def run(self):
self.component.run('i love Lightning')
app = L.LightningApp(WorkflowOrchestrator())
app = LightningApp(WorkflowOrchestrator())
@@ -1,23 +1,23 @@
# app.py
import lightning as L
from lightning.app import LightningWork, LightningFlow, LightningApp, CloudCompute
class TrainComponent(L.LightningWork):
class TrainComponent(LightningWork):
def run(self, x):
print(f'train a model on {x}')
class AnalyzeComponent(L.LightningWork):
class AnalyzeComponent(LightningWork):
def run(self, x):
print(f'analyze model on {x}')
class WorkflowOrchestrator(L.LightningFlow):
class WorkflowOrchestrator(LightningFlow):
def __init__(self) -> None:
super().__init__()
self.train = TrainComponent(cloud_compute=L.CloudCompute('cpu'))
self.analyze = AnalyzeComponent(cloud_compute=L.CloudCompute('gpu'))
self.train = TrainComponent(cloud_compute=CloudCompute('cpu'))
self.analyze = AnalyzeComponent(cloud_compute=CloudCompute('gpu'))
def run(self):
self.train.run("CPU machine 1")
self.analyze.run("GPU machine 2")
app = L.LightningApp(WorkflowOrchestrator())
app = LightningApp(WorkflowOrchestrator())
@@ -1,20 +1,20 @@
# app.py
import lightning as L
from lightning.app import LightningWork, LightningFlow, LightningApp, CloudCompute
class TrainComponent(L.LightningWork):
class TrainComponent(LightningWork):
def run(self, x):
print(f'train a model on {x}')
class AnalyzeComponent(L.LightningWork):
class AnalyzeComponent(LightningWork):
def run(self, x):
print(f'analyze model on {x}')
class WorkflowOrchestrator(L.LightningFlow):
class WorkflowOrchestrator(LightningFlow):
def __init__(self) -> None:
super().__init__()
self.train = TrainComponent(cloud_compute=L.CloudCompute('cpu'))
self.analyze = AnalyzeComponent(cloud_compute=L.CloudCompute('gpu'))
self.train = TrainComponent(cloud_compute=CloudCompute('cpu'))
self.analyze = AnalyzeComponent(cloud_compute=CloudCompute('gpu'))
def run(self):
# run() starts the machine
@@ -27,4 +27,4 @@ class WorkflowOrchestrator(L.LightningFlow):
if self.train.status.STOPPED:
self.analyze.run("CPU machine 2")
app = L.LightningApp(WorkflowOrchestrator())
app = LightningApp(WorkflowOrchestrator())
@@ -1,20 +1,20 @@
# app.py
import lightning as L
from lightning.app import LightningWork, LightningFlow, LightningApp, CloudCompute
class TrainComponent(L.LightningWork):
class TrainComponent(LightningWork):
def run(self, x):
print(f'train a model on {x}')
class AnalyzeComponent(L.LightningWork):
class AnalyzeComponent(LightningWork):
def run(self, x):
print(f'analyze model on {x}')
class WorkflowOrchestrator(L.LightningFlow):
class WorkflowOrchestrator(LightningFlow):
def __init__(self) -> None:
super().__init__()
self.train = TrainComponent(cloud_compute=L.CloudCompute('cpu'))
self.analyze = AnalyzeComponent(cloud_compute=L.CloudCompute('gpu'))
self.train = TrainComponent(cloud_compute=CloudCompute('cpu'))
self.analyze = AnalyzeComponent(cloud_compute=CloudCompute('gpu'))
def run(self):
# run training once
@@ -24,4 +24,4 @@ class WorkflowOrchestrator(L.LightningFlow):
if self.schedule("5 4 * * *"):
self.analyze.run("CPU machine 2")
app = L.LightningApp(WorkflowOrchestrator())
app = LightningApp(WorkflowOrchestrator())
@@ -1,20 +1,20 @@
# app.py
import lightning as L
from lightning.app import LightningWork, LightningFlow, LightningApp, CloudCompute
class TrainComponent(L.LightningWork):
class TrainComponent(LightningWork):
def run(self, x):
print(f'train a model on {x}')
class AnalyzeComponent(L.LightningWork):
class AnalyzeComponent(LightningWork):
def run(self, x):
print(f'analyze model on {x}')
class WorkflowOrchestrator(L.LightningFlow):
class WorkflowOrchestrator(LightningFlow):
def __init__(self) -> None:
super().__init__()
self.train = TrainComponent(cloud_compute=L.CloudCompute('cpu'))
self.analyze = AnalyzeComponent(cloud_compute=L.CloudCompute('gpu'))
self.train = TrainComponent(cloud_compute=CloudCompute('cpu'))
self.analyze = AnalyzeComponent(cloud_compute=CloudCompute('gpu'))
def run(self):
# run training once
@@ -24,4 +24,4 @@ class WorkflowOrchestrator(L.LightningFlow):
if self.schedule("hourly"):
self.analyze.run("CPU machine 2")
app = L.LightningApp(WorkflowOrchestrator())
app = LightningApp(WorkflowOrchestrator())
@@ -1,9 +1,9 @@
# app.py
import subprocess
import lightning as L
from lightning.app import LightningWork, LightningFlow, LightningApp, CloudCompute
class ExternalModelServer(L.LightningWork):
class ExternalModelServer(LightningWork):
def run(self, x):
# compile
process = subprocess.Popen('g++ model_server.cpp -o model_server')
@@ -11,19 +11,19 @@ class ExternalModelServer(L.LightningWork):
process = subprocess.Popen('./model_server')
process.wait()
class LocustLoadTester(L.LightningWork):
class LocustLoadTester(LightningWork):
def run(self, x):
cmd = f'locust --master-host {self.host} --master-port {self.port}'
process = subprocess.Popen(cmd)
process.wait()
class WorkflowOrchestrator(L.LightningFlow):
class WorkflowOrchestrator(LightningFlow):
def __init__(self) -> None:
super().__init__()
self.serve = ExternalModelServer(
cloud_compute=L.CloudCompute('cpu'), parallel=True
cloud_compute=CloudCompute('cpu'), parallel=True
)
self.load_test = LocustLoadTester(cloud_compute=L.CloudCompute('cpu'))
self.load_test = LocustLoadTester(cloud_compute=CloudCompute('cpu'))
def run(self):
# start the server (on a CPU machine 1)
@@ -33,4 +33,4 @@ class WorkflowOrchestrator(L.LightningFlow):
if self.serve.state.RUNNING:
self.load_test.run()
app = L.LightningApp(WorkflowOrchestrator())
app = LightningApp(WorkflowOrchestrator())
@@ -1,12 +1,12 @@
# app.py
import lightning as L
from lightning.app import LightningWork, LightningFlow, LightningApp
class Component(L.LightningWork):
class Component(LightningWork):
def run(self, x):
print(f'MACHINE 1: this string came from machine 0: "{x}"')
print('MACHINE 1: this string is on machine 1')
class WorkflowOrchestrator(L.LightningFlow):
class WorkflowOrchestrator(LightningFlow):
def __init__(self) -> None:
super().__init__()
self.component = Component()
@@ -15,4 +15,4 @@ class WorkflowOrchestrator(L.LightningFlow):
x = 'hello from machine 0'
self.component.run(x)
app = L.LightningApp(WorkflowOrchestrator())
app = LightningApp(WorkflowOrchestrator())
@@ -1,17 +1,17 @@
# app.py
import lightning as L
from lightning.app import LightningWork, LightningFlow, LightningApp
from lightning.app.runners import MultiProcessRuntime
class TrainComponent(L.LightningWork):
class TrainComponent(LightningWork):
def run(self, x):
print(f'train a model on {x}')
class AnalyzeComponent(L.LightningWork):
class AnalyzeComponent(LightningWork):
def run(self, x):
print(f'analyze model on {x}')
class WorkflowOrchestrator(L.LightningFlow):
class WorkflowOrchestrator(LightningFlow):
def __init__(self) -> None:
super().__init__()
self.train = TrainComponent()
@@ -21,5 +21,5 @@ class WorkflowOrchestrator(L.LightningFlow):
self.train.run("GPU machine 1")
self.analyze.run("CPU machine 2")
app = L.LightningApp(WorkflowOrchestrator())
app = LightningApp(WorkflowOrchestrator())
MultiProcessRuntime(app).dispatch()
@@ -1,17 +1,17 @@
# app.py
import lightning as L
from lightning.app import LightningWork, LightningFlow, LightningApp
class TrainComponent(L.LightningWork):
class TrainComponent(LightningWork):
def run(self, x):
print(f'train a model on {x}')
class AnalyzeComponent(L.LightningWork):
class AnalyzeComponent(LightningWork):
def run(self, x):
print(f'analyze model on {x}')
class WorkflowOrchestrator(L.LightningFlow):
class WorkflowOrchestrator(LightningFlow):
def __init__(self) -> None:
super().__init__()
self.train = TrainComponent()
@@ -21,4 +21,4 @@ class WorkflowOrchestrator(L.LightningFlow):
self.train.run("CPU machine 1")
self.analyze.run("CPU machine 2")
app = L.LightningApp(WorkflowOrchestrator())
app = LightningApp(WorkflowOrchestrator())
@@ -1,8 +1,8 @@
# app.py
import lightning as L
from lightning.app import LightningWork, LightningFlow, LightningApp
class EmbeddingProcessor(L.LightningWork):
class EmbeddingProcessor(LightningWork):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.embeddings = None
@@ -10,15 +10,15 @@ class EmbeddingProcessor(L.LightningWork):
def run(self):
print('PROCESSOR: Generating embeddings...')
fake_embeddings = [[1, 2, 3], [2, 3, 4]]
self.embeddings = L.storage.Payload(fake_embeddings)
self.embeddings = storage.Payload(fake_embeddings)
class EmbeddingServer(L.LightningWork):
class EmbeddingServer(LightningWork):
def run(self, payload):
print('SERVER: Using embeddings from processor', payload)
embeddings = payload.value
print('serving embeddings sent from EmbeddingProcessor: ', embeddings)
class WorkflowOrchestrator(L.LightningFlow):
class WorkflowOrchestrator(LightningFlow):
def __init__(self) -> None:
super().__init__()
self.processor = EmbeddingProcessor()
@@ -28,4 +28,4 @@ class WorkflowOrchestrator(L.LightningFlow):
self.processor.run()
self.server.run(self.processor.embeddings)
app = L.LightningApp(WorkflowOrchestrator())
app = LightningApp(WorkflowOrchestrator())
@@ -1,8 +1,8 @@
# app.py
import lightning as L
from lightning.app import LightningWork, LightningFlow, LightningApp
import time
class A(L.LightningWork):
class A(LightningWork):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.msg_changed = False
@@ -17,11 +17,11 @@ class A(L.LightningWork):
self.new_msg = f'A is at step: {step}'
print(self.new_msg)
class B(L.LightningWork):
class B(LightningWork):
def run(self, msg):
print(f'B: message from A: {msg}')
class Example(L.LightningFlow):
class Example(LightningFlow):
def __init__(self) -> None:
super().__init__()
self.a = A(parallel=True)
@@ -33,4 +33,4 @@ class Example(L.LightningFlow):
self.a.msg_changed = False
self.b.run(self.a.new_msg)
app = L.LightningApp(Example())
app = LightningApp(Example())
@@ -1,8 +1,8 @@
# app.py
import lightning as L
from lightning.app import LightningWork, LightningFlow, LightningApp
import time
class TrainComponent(L.LightningWork):
class TrainComponent(LightningWork):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.last_checkpoint_path = None
@@ -17,11 +17,11 @@ class TrainComponent(L.LightningWork):
self.last_checkpoint_path = f'/some/path/{step=}_{fake_loss=}'
print(f'TRAIN COMPONENT: saved new checkpoint: {self.last_checkpoint_path}')
class ModelDeploymentComponent(L.LightningWork):
class ModelDeploymentComponent(LightningWork):
def run(self, new_checkpoint):
print(f'DEPLOY COMPONENT: load new model from checkpoint: {new_checkpoint}')
class ContinuousDeployment(L.LightningFlow):
class ContinuousDeployment(LightningFlow):
def __init__(self) -> None:
super().__init__()
self.train = TrainComponent(parallel=True)
@@ -32,4 +32,4 @@ class ContinuousDeployment(L.LightningFlow):
if self.train.last_checkpoint_path:
self.model_deployment.run(self.train.last_checkpoint_path)
app = L.LightningApp(ContinuousDeployment())
app = LightningApp(ContinuousDeployment())
@@ -4,11 +4,11 @@ import pandas as pd
import plotly.express as px
from dash import Dash, dcc, html, Input, Output
import lightning as L
from lightning.app import LightningWork, LightningFlow, LightningApp
from lightning.app.storage import Payload
class LitDash(L.LightningWork):
class LitDash(LightningWork):
def __init__(self):
super().__init__(parallel=True)
self.df = None
@@ -57,7 +57,7 @@ class LitDash(L.LightningWork):
dash_app.run_server(host=self.host, port=self.port)
class Processor(L.LightningWork):
class Processor(LightningWork):
def run(self, df: Payload, selected_year: Optional[str]):
if selected_year:
df = df.value
@@ -66,7 +66,7 @@ class Processor(L.LightningWork):
print(filtered_df)
class LitApp(L.LightningFlow):
class LitApp(LightningFlow):
def __init__(self):
super().__init__()
self.lit_dash = LitDash()
@@ -83,4 +83,4 @@ class LitApp(L.LightningFlow):
return tab1
app = L.LightningApp(LitApp())
app = LightningApp(LitApp())
@ -3,11 +3,11 @@ import dash_daq as daq
|
|||
import dash_renderjson
|
||||
from dash import html, Input, Output
|
||||
|
||||
import lightning as L
|
||||
from lightning.app import LightningWork, LightningFlow, LightningApp
|
||||
from lightning.app.utilities.state import AppState
|
||||
|
||||
|
||||
class LitDash(L.LightningWork):
|
||||
class LitDash(LightningWork):
|
||||
def run(self):
|
||||
dash_app = dash.Dash(__name__)
|
||||
|
||||
|
@ -23,7 +23,7 @@ class LitDash(L.LightningWork):
|
|||
dash_app.run_server(host=self.host, port=self.port)
|
||||
|
||||
|
||||
class LitApp(L.LightningFlow):
|
||||
class LitApp(LightningFlow):
|
||||
def __init__(self):
|
||||
super().__init__()
|
||||
self.lit_dash = LitDash(parallel=True)
|
||||
|
@ -36,4 +36,4 @@ class LitApp(L.LightningFlow):
|
|||
return tab1
|
||||
|
||||
|
||||
app = L.LightningApp(LitApp())
|
||||
app = LightningApp(LitApp())
|
||||
|
|
|
@ -1,11 +1,11 @@
|
|||
from commands.notebook.run import RunNotebook, RunNotebookConfig
|
||||
from lit_jupyter import JupyterLab
|
||||
|
||||
import lightning as L
|
||||
from lightning.app import LightningFlow, LightningApp, CloudCompute
|
||||
from lightning.app.structures import Dict
|
||||
|
||||
|
||||
class Flow(L.LightningFlow):
|
||||
class Flow(LightningFlow):
|
||||
def __init__(self):
|
||||
super().__init__()
|
||||
self.notebooks = Dict()
|
||||
|
@ -17,7 +17,7 @@ class Flow(L.LightningFlow):
|
|||
else:
|
||||
# 2. Dynamically creates the Notebook if it doesn't exist and runs it.
|
||||
self.notebooks[config.name] = JupyterLab(
|
||||
cloud_compute=L.CloudCompute(config.cloud_compute)
|
||||
cloud_compute=CloudCompute(config.cloud_compute)
|
||||
)
|
||||
self.notebooks[config.name].run()
|
||||
return f"The Notebook {config.name} was created."
|
||||
|
@ -32,4 +32,4 @@ class Flow(L.LightningFlow):
|
|||
return [{"name": n, "content": w} for n, w in self.notebooks.items()]
|
||||
|
||||
|
||||
app = L.LightningApp(Flow())
|
||||
app = LightningApp(Flow())
|
||||
|
|
|
@ -1,8 +1,8 @@
|
|||
import lightning as L
|
||||
from lightning.app import LightningFlow, LightningApp
|
||||
from lightning.app.api import Post
|
||||
|
||||
|
||||
class Flow(L.LightningFlow):
|
||||
class Flow(LightningFlow):
|
||||
# 1. Define the state
|
||||
def __init__(self):
|
||||
super().__init__()
|
||||
|
@ -23,4 +23,4 @@ class Flow(L.LightningFlow):
|
|||
return [Post(route="/name", method=self.handle_post)]
|
||||
|
||||
|
||||
app = L.LightningApp(Flow())
|
||||
app = LightningApp(Flow())
|
||||
|
|
|
@ -1,8 +1,8 @@
|
|||
import lightning as L
|
||||
from lightning.app import LightningFlow, LightningApp
|
||||
from lightning.app.api import Post
|
||||
|
||||
|
||||
class Flow(L.LightningFlow):
|
||||
class Flow(LightningFlow):
|
||||
# 1. Define the state
|
||||
def __init__(self):
|
||||
super().__init__()
|
||||
|
@ -22,4 +22,4 @@ class Flow(L.LightningFlow):
|
|||
return [Post(route="/name", method=self.handle_post)]
|
||||
|
||||
|
||||
app = L.LightningApp(Flow())
|
||||
app = LightningApp(Flow())
|
||||
|
|
|
@ -1,10 +1,10 @@
|
|||
from models import NamePostConfig # 2. Import your custom model.
|
||||
|
||||
import lightning as L
|
||||
from lightning.app import LightningFlow, LightningApp
|
||||
from lightning.app.api import Post
|
||||
|
||||
|
||||
class Flow(L.LightningFlow):
|
||||
class Flow(LightningFlow):
|
||||
# 1. Define the state
|
||||
def __init__(self):
|
||||
super().__init__()
|
||||
|
@ -29,4 +29,4 @@ class Flow(L.LightningFlow):
|
|||
]
|
||||
|
||||
|
||||
app = L.LightningApp(Flow())
|
||||
app = LightningApp(Flow())
|
||||
|
|
|
@ -1,22 +1,22 @@
|
|||
# app.py
|
||||
import lightning as L
|
||||
from lightning.app import LightningWork, LightningFlow, LightningApp, CloudCompute
|
||||
|
||||
|
||||
class TrainComponent(L.LightningWork):
|
||||
class TrainComponent(LightningWork):
|
||||
def run(self, message):
|
||||
for i in range(100000000000):
|
||||
print(message, i)
|
||||
|
||||
class AnalyzeComponent(L.LightningWork):
|
||||
class AnalyzeComponent(LightningWork):
|
||||
def run(self, message):
|
||||
for i in range(100000000000):
|
||||
print(message, i)
|
||||
|
||||
class LitWorkflow(L.LightningFlow):
|
||||
class LitWorkflow(LightningFlow):
|
||||
def __init__(self) -> None:
|
||||
super().__init__()
|
||||
self.train = TrainComponent(cloud_compute=L.CloudCompute('cpu'))
|
||||
self.analyze = AnalyzeComponent(cloud_compute=L.CloudCompute('cpu'))
|
||||
self.train = TrainComponent(cloud_compute=CloudCompute('cpu'))
|
||||
self.analyze = AnalyzeComponent(cloud_compute=CloudCompute('cpu'))
|
||||
|
||||
|
||||
def run(self):
|
||||
|
@ -24,4 +24,4 @@ class LitWorkflow(L.LightningFlow):
|
|||
self.analyze.run("machine B counting")
|
||||
|
||||
|
||||
app = L.LightningApp(LitWorkflow())
|
||||
app = LightningApp(LitWorkflow())
|
||||
|
|
|
@ -1,22 +1,22 @@
|
|||
# app.py
|
||||
import lightning as L
|
||||
from lightning.app import LightningWork, LightningFlow, LightningApp, CloudCompute
|
||||
|
||||
|
||||
class TrainComponent(L.LightningWork):
|
||||
class TrainComponent(LightningWork):
|
||||
def run(self, message):
|
||||
for i in range(100000000000):
|
||||
print(message, i)
|
||||
|
||||
class AnalyzeComponent(L.LightningWork):
|
||||
class AnalyzeComponent(LightningWork):
|
||||
def run(self, message):
|
||||
for i in range(100000000000):
|
||||
print(message, i)
|
||||
|
||||
class LitWorkflow(L.LightningFlow):
|
||||
class LitWorkflow(LightningFlow):
|
||||
def __init__(self) -> None:
|
||||
super().__init__()
|
||||
self.train = TrainComponent(cloud_compute=L.CloudCompute('cpu'), parallel=True)
|
||||
self.analyze = AnalyzeComponent(cloud_compute=L.CloudCompute('cpu'))
|
||||
self.train = TrainComponent(cloud_compute=CloudCompute('cpu'), parallel=True)
|
||||
self.analyze = AnalyzeComponent(cloud_compute=CloudCompute('cpu'))
|
||||
|
||||
|
||||
def run(self):
|
||||
|
@ -24,4 +24,4 @@ class LitWorkflow(L.LightningFlow):
|
|||
self.analyze.run("machine B counting")
|
||||
|
||||
|
||||
app = L.LightningApp(LitWorkflow())
|
||||
app = LightningApp(LitWorkflow())
|
||||
|
|
|
@ -1,27 +1,27 @@
|
|||
# app.py
|
||||
import lightning as L
|
||||
from lightning.app import LightningWork, LightningFlow, LightningApp, CloudCompute
|
||||
|
||||
|
||||
class TrainComponent(L.LightningWork):
|
||||
class TrainComponent(LightningWork):
|
||||
def run(self, message):
|
||||
for i in range(100000000000):
|
||||
print(message, i)
|
||||
|
||||
class AnalyzeComponent(L.LightningWork):
|
||||
class AnalyzeComponent(LightningWork):
|
||||
def run(self, message):
|
||||
for i in range(100000000000):
|
||||
print(message, i)
|
||||
|
||||
class LitWorkflow(L.LightningFlow):
|
||||
class LitWorkflow(LightningFlow):
|
||||
def __init__(self) -> None:
|
||||
super().__init__()
|
||||
self.train = TrainComponent(cloud_compute=L.CloudCompute('cpu'), parallel=True)
|
||||
self.baseline_1 = TrainComponent(cloud_compute=L.CloudCompute('cpu'), parallel=True)
|
||||
self.analyze = AnalyzeComponent(cloud_compute=L.CloudCompute('cpu'))
|
||||
self.train = TrainComponent(cloud_compute=CloudCompute('cpu'), parallel=True)
|
||||
self.baseline_1 = TrainComponent(cloud_compute=CloudCompute('cpu'), parallel=True)
|
||||
self.analyze = AnalyzeComponent(cloud_compute=CloudCompute('cpu'))
|
||||
|
||||
def run(self):
|
||||
self.train.run("machine A counting")
|
||||
self.baseline_1.run("machine C counting")
|
||||
self.analyze.run("machine B counting")
|
||||
|
||||
app = L.LightningApp(LitWorkflow())
|
||||
app = LightningApp(LitWorkflow())
|
||||
|
|
|
@ -1,27 +1,27 @@
|
|||
# app.py
|
||||
import lightning as L
|
||||
from lightning.app import LightningWork, LightningFlow, LightningApp, CloudCompute
|
||||
|
||||
|
||||
class TrainComponent(L.LightningWork):
|
||||
class TrainComponent(LightningWork):
|
||||
def run(self, message):
|
||||
for i in range(100000000000):
|
||||
print(message, i)
|
||||
|
||||
class AnalyzeComponent(L.LightningWork):
|
||||
class AnalyzeComponent(LightningWork):
|
||||
def run(self, message):
|
||||
for i in range(100000000000):
|
||||
print(message, i)
|
||||
|
||||
class LitWorkflow(L.LightningFlow):
|
||||
class LitWorkflow(LightningFlow):
|
||||
def __init__(self) -> None:
|
||||
super().__init__()
|
||||
self.train = TrainComponent(cloud_compute=L.CloudCompute('cpu'))
|
||||
self.baseline_1 = TrainComponent(cloud_compute=L.CloudCompute('cpu'))
|
||||
self.analyze = AnalyzeComponent(cloud_compute=L.CloudCompute('cpu'))
|
||||
self.train = TrainComponent(cloud_compute=CloudCompute('cpu'))
|
||||
self.baseline_1 = TrainComponent(cloud_compute=CloudCompute('cpu'))
|
||||
self.analyze = AnalyzeComponent(cloud_compute=CloudCompute('cpu'))
|
||||
|
||||
def run(self):
|
||||
self.train.run("machine A counting")
|
||||
self.baseline_1.run("machine C counting")
|
||||
self.analyze.run("machine B counting")
|
||||
|
||||
app = L.LightningApp(LitWorkflow())
|
||||
app = LightningApp(LitWorkflow())
|
||||
|
|
|
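The four variants above differ only in which Works are created with parallel=True and in importing CloudCompute straight from lightning.app. A minimal sketch of what that flag changes (component names are illustrative, not from the PR):

# parallel_sketch.py - contrasting blocking vs. non-blocking works under the new imports.
from lightning.app import CloudCompute, LightningApp, LightningFlow, LightningWork


class Counter(LightningWork):  # illustrative component
    def run(self, tag: str):
        for i in range(3):
            print(tag, i)


class Workflow(LightningFlow):
    def __init__(self):
        super().__init__()
        # parallel=True: run() returns immediately and the work executes on its own machine;
        # without it, the flow blocks until the work finishes.
        self.first = Counter(cloud_compute=CloudCompute("cpu"), parallel=True)
        self.second = Counter(cloud_compute=CloudCompute("cpu"))

    def run(self):
        self.first.run("parallel work")
        self.second.run("blocking work")


app = LightningApp(Workflow())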
@@ -2,11 +2,11 @@ import os

import torch

import lightning as L
from lightning.app import LightningWork, LightningFlow, LightningApp
from lightning.app.storage import Path


class ModelTraining(L.LightningWork):
class ModelTraining(LightningWork):
    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.checkpoints_path = Path("./checkpoints")
@@ -21,7 +21,7 @@ class ModelTraining(L.LightningWork):
        torch.save(checkpoint_2, str(checkpoint_path).format("2"))


class ModelDeploy(L.LightningWork):
class ModelDeploy(LightningWork):
    def __init__(self, ckpt_path, *args, **kwargs):
        super().__init__()
        self.ckpt_path = ckpt_path
@@ -34,7 +34,7 @@ class ModelDeploy(L.LightningWork):
        print(f"Loaded checkpoint_2: {checkpoint_2}")


class LitApp(L.LightningFlow):
class LitApp(LightningFlow):
    def __init__(self):
        super().__init__()
        self.train = ModelTraining()
@@ -45,4 +45,4 @@ class LitApp(L.LightningFlow):
        self.deploy.run()


app = L.LightningApp(LitApp())
app = LightningApp(LitApp())

@@ -1,9 +1,9 @@
import argparse

import lightning as L
from lightning.app import CloudCompute, LightningApp, LightningFlow, LightningWork


class Work(L.LightningWork):
class Work(LightningWork):
    def __init__(self, cloud_compute):
        super().__init__(cloud_compute=cloud_compute)

@@ -11,7 +11,7 @@ class Work(L.LightningWork):
        pass


class Flow(L.LightningFlow):
class Flow(LightningFlow):
    def __init__(self, cloud_compute):
        super().__init__()
        self.work = Work(cloud_compute)
@@ -25,4 +25,4 @@ if __name__ == "__main__":
    parser = argparse.ArgumentParser()
    parser.add_argument("--use_gpu", action="store_true", default=False, help="Whether to use GPU in the cloud")
    hparams = parser.parse_args()
    app = L.LightningApp(Flow(L.CloudCompute("gpu" if hparams.use_gpu else "cpu")))
    app = LightningApp(Flow(CloudCompute("gpu" if hparams.use_gpu else "cpu")))

@@ -1,6 +1,6 @@
import os

import lightning as L
from lightning.app import CloudCompute, LightningApp, LightningFlow, LightningWork
from lightning.app.components import TracerPythonScript
from lightning.app.storage import Path

@@ -12,8 +12,8 @@ Are you already 🤯 ? Stick with us, this is only the beginning. Lightning is
"""


class SourceFileWork(L.LightningWork):
    def __init__(self, cloud_compute: L.CloudCompute = L.CloudCompute(), **kwargs):
class SourceFileWork(LightningWork):
    def __init__(self, cloud_compute: CloudCompute = CloudCompute(), **kwargs):
        super().__init__(parallel=True, **kwargs, cloud_compute=cloud_compute)
        self.boring_path = None

@@ -31,7 +31,7 @@ class DestinationFileAndServeWork(TracerPythonScript):
        super().run()


class BoringApp(L.LightningFlow):
class BoringApp(LightningFlow):
    def __init__(self):
        super().__init__()
        self.source_work = SourceFileWork()
@@ -39,7 +39,7 @@ class BoringApp(L.LightningFlow):
            script_path=os.path.join(os.path.dirname(__file__), "scripts/serve.py"),
            port=1111,
            parallel=False, # runs until killed.
            cloud_compute=L.CloudCompute(),
            cloud_compute=CloudCompute(),
            raise_exception=True,
        )

@@ -58,4 +58,4 @@ class BoringApp(L.LightningFlow):
        return {"name": "Boring Tab", "content": self.dest_work.url + "/file"}


app = L.LightningApp(BoringApp())
app = LightningApp(BoringApp())

@@ -1,6 +1,6 @@
import os

import lightning as L
from lightning.app import CloudCompute, LightningApp, LightningFlow, LightningWork
from lightning.app.components import TracerPythonScript
from lightning.app.storage import Path
from lightning.app.structures import Dict
@@ -13,8 +13,8 @@ Are you already 🤯 ? Stick with us, this is only the beginning. Lightning is
"""


class SourceFileWork(L.LightningWork):
    def __init__(self, cloud_compute: L.CloudCompute = L.CloudCompute(), **kwargs):
class SourceFileWork(LightningWork):
    def __init__(self, cloud_compute: CloudCompute = CloudCompute(), **kwargs):
        super().__init__(parallel=True, **kwargs, cloud_compute=cloud_compute)
        self.boring_path = None

@@ -32,7 +32,7 @@ class DestinationFileAndServeWork(TracerPythonScript):
        super().run()


class BoringApp(L.LightningFlow):
class BoringApp(LightningFlow):
    def __init__(self):
        super().__init__()
        self.dict = Dict()
@@ -57,7 +57,7 @@ class BoringApp(L.LightningFlow):
            script_path=os.path.join(os.path.dirname(__file__), "scripts/serve.py"),
            port=1111,
            parallel=False, # runs until killed.
            cloud_compute=L.CloudCompute(),
            cloud_compute=CloudCompute(),
            raise_exception=True,
        )

@@ -69,4 +69,4 @@ class BoringApp(L.LightningFlow):
        return {"name": "Boring Tab", "content": self.dict["dst_w"].url + "/file" if "dst_w" in self.dict else ""}


app = L.LightningApp(BoringApp(), log_level="debug")
app = LightningApp(BoringApp(), log_level="debug")

@@ -1,11 +1,11 @@
import os
from pathlib import Path

import lightning as L
from examples.components.python.component_tracer import PLTracerPythonScript
from lightning.app import LightningApp, LightningFlow


class RootFlow(L.LightningFlow):
class RootFlow(LightningFlow):
    def __init__(self):
        super().__init__()
        script_path = Path(__file__).parent / "pl_script.py"
@@ -21,4 +21,4 @@ class RootFlow(L.LightningFlow):
        self.stop("tracer script failed")


app = L.LightningApp(RootFlow())
app = LightningApp(RootFlow())

@@ -5,7 +5,7 @@ import requests
import torch
from PIL import Image

import lightning as L
from lightning.app import LightningApp, LightningFlow
from lightning.app.components import ServeGradio


@@ -34,7 +34,7 @@ class AnimeGANv2UI(ServeGradio):
        return partial(face2paint, model=model)


class RootFlow(L.LightningFlow):
class RootFlow(LightningFlow):
    def __init__(self):
        super().__init__()
        self.demo = AnimeGANv2UI()
@@ -49,4 +49,4 @@ class RootFlow(L.LightningFlow):
        return tabs


app = L.LightningApp(RootFlow())
app = LightningApp(RootFlow())

@@ -6,7 +6,7 @@ import pandas as pd
from sklearn import datasets
from sklearn.metrics import mean_squared_error

import lightning as L
from lightning.app import LightningApp, LightningFlow, LightningWork
from lightning.app.components import TracerPythonScript
from lightning.app.storage import Payload
from lightning.app.structures import Dict, List
@@ -16,7 +16,7 @@ def get_path(path):
    return os.path.join(os.path.dirname(__file__), path)


class GetDataWork(L.LightningWork):
class GetDataWork(LightningWork):
    """This component is responsible to download some data and store them with a PayLoad."""

    def __init__(self):
@@ -32,7 +32,7 @@ class GetDataWork(L.LightningWork):
        print("Finished data collection.")


class ModelWork(L.LightningWork):
class ModelWork(LightningWork):
    """This component is receiving some data and train a sklearn model."""

    def __init__(self, model_path: str, parallel: bool):
@@ -50,7 +50,7 @@ class ModelWork(L.LightningWork):
        print(f"Finished training and evaluating {self.model_name}.")


class DAG(L.LightningFlow):
class DAG(LightningFlow):
    """This component is a DAG."""

    def __init__(self, models_paths: list):
@@ -100,7 +100,7 @@ class DAG(L.LightningFlow):
        self.has_completed = True


class ScheduledDAG(L.LightningFlow):
class ScheduledDAG(LightningFlow):
    def __init__(self, dag_cls, **dag_kwargs):
        super().__init__()
        self.dags = List()
@@ -119,7 +119,7 @@ class ScheduledDAG(L.LightningFlow):
            dag.run()


app = L.LightningApp(
app = LightningApp(
    ScheduledDAG(
        DAG,
        models_paths=[

@@ -1,7 +1,7 @@
import lightning as L
from lightning.app import LightningApp, LightningFlow, LightningWork


class Work(L.LightningWork):
class Work(LightningWork):
    def __init__(self, start_with_flow=True):
        super().__init__(start_with_flow=start_with_flow)

@@ -9,7 +9,7 @@ class Work(L.LightningWork):
        pass


class Flow(L.LightningFlow):
class Flow(LightningFlow):
    def __init__(self):
        super().__init__()
        self.w = Work()
@@ -22,4 +22,4 @@ class Flow(L.LightningFlow):
        self.w1.run()


app = L.LightningApp(Flow())
app = LightningApp(Flow())

@@ -1,10 +1,10 @@
import os

import lightning as L
from lightning.app import LightningApp, LightningFlow, LightningWork
from lightning.app.storage import Drive


class Work_1(L.LightningWork):
class Work_1(LightningWork):
    def run(self, drive: Drive):
        # 1. Create a file.
        with open("a.txt", "w") as f:
@@ -17,7 +17,7 @@ class Work_1(L.LightningWork):
        os.remove("a.txt")


class Work_2(L.LightningWork):
class Work_2(LightningWork):
    def __init__(self):
        super().__init__()

@@ -34,7 +34,7 @@ class Work_2(L.LightningWork):
            print(f.readlines()[0]) # Prints Hello World !


class Flow(L.LightningFlow):
class Flow(LightningFlow):
    def __init__(self):
        super().__init__()
        self.drive_1 = Drive("lit://drive_1")
@@ -48,4 +48,4 @@ class Flow(L.LightningFlow):
        self.stop("Application End!")


app = L.LightningApp(Flow())
app = LightningApp(Flow())
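The Drive example above is cut off by its hunks; as a hedged, condensed sketch of the round trip between the two Works (put/get are the lightning.app.storage.Drive methods, the component names here are illustrative):

# drive_sketch.py - one work writes a file into a shared Drive, another reads it back.
from lightning.app import LightningApp, LightningFlow, LightningWork
from lightning.app.storage import Drive


class Producer(LightningWork):
    def run(self, drive: Drive):
        with open("a.txt", "w") as f:
            f.write("Hello World !")
        drive.put("a.txt")  # upload the local file into the shared drive


class Consumer(LightningWork):
    def run(self, drive: Drive):
        drive.get("a.txt")  # download the file produced by the other work
        with open("a.txt") as f:
            print(f.readlines()[0])


class Flow(LightningFlow):
    def __init__(self):
        super().__init__()
        self.drive = Drive("lit://drive_1")
        self.producer = Producer()
        self.consumer = Consumer()

    def run(self):
        self.producer.run(self.drive)
        self.consumer.run(self.drive)


app = LightningApp(Flow())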
@@ -4,11 +4,11 @@ import optuna
from hyperplot import HiPlotFlow
from objective import ObjectiveWork

import lightning as L
from lightning.app import CloudCompute, LightningApp, LightningFlow
from lightning.app.structures import Dict


class RootHPOFlow(L.LightningFlow):
class RootHPOFlow(LightningFlow):
    def __init__(self, script_path, data_dir, total_trials, simultaneous_trials):
        super().__init__()
        self.script_path = script_path
@@ -32,7 +32,7 @@ class RootHPOFlow(L.LightningFlow):
            objective_work = ObjectiveWork(
                script_path=self.script_path,
                data_dir=self.data_dir,
                cloud_compute=L.CloudCompute("cpu"),
                cloud_compute=CloudCompute("cpu"),
            )
            self.ws[work_name] = objective_work
            if not self.ws[work_name].has_started:
@@ -51,7 +51,7 @@ class RootHPOFlow(L.LightningFlow):


if __name__ == "__main__":
    app = L.LightningApp(
    app = LightningApp(
        RootHPOFlow(
            script_path=str(Path(__file__).parent / "pl_script.py"),
            data_dir="data/hymenoptera_data_version_0",

@@ -3,11 +3,11 @@ from pathlib import Path
import optuna
from objective import ObjectiveWork

import lightning as L
from lightning.app import CloudCompute, LightningApp, LightningFlow
from lightning.app.structures import Dict


class RootHPOFlow(L.LightningFlow):
class RootHPOFlow(LightningFlow):
    def __init__(self, script_path, data_dir, total_trials, simultaneous_trials):
        super().__init__()
        self.script_path = script_path
@@ -30,7 +30,7 @@ class RootHPOFlow(L.LightningFlow):
            objective_work = ObjectiveWork(
                script_path=self.script_path,
                data_dir=self.data_dir,
                cloud_compute=L.CloudCompute("cpu"),
                cloud_compute=CloudCompute("cpu"),
            )
            self.ws[work_name] = objective_work
            if not self.ws[work_name].has_started:
@@ -48,7 +48,7 @@ class RootHPOFlow(L.LightningFlow):


if __name__ == "__main__":
    app = L.LightningApp(
    app = LightningApp(
        RootHPOFlow(
            script_path=str(Path(__file__).parent / "pl_script.py"),
            data_dir="data/hymenoptera_data_version_0",

@@ -1,9 +1,9 @@
import lightning as L
from lightning.app import LightningFlow
from lightning.app.frontend import StreamlitFrontend
from lightning.app.utilities.state import AppState


class HiPlotFlow(L.LightningFlow):
class HiPlotFlow(LightningFlow):
    def __init__(self):
        super().__init__()
        self.data = []

@@ -8,12 +8,12 @@ import torch
from optuna.distributions import CategoricalDistribution, LogUniformDistribution
from torchmetrics import Accuracy

import lightning as L
from lightning.app import CloudCompute
from lightning.app.components import TracerPythonScript


class ObjectiveWork(TracerPythonScript):
    def __init__(self, script_path: str, data_dir: str, cloud_compute: Optional[L.CloudCompute]):
    def __init__(self, script_path: str, data_dir: str, cloud_compute: Optional[CloudCompute]):
        timestamp = datetime.now().strftime("%H:%M:%S")
        tmpdir = tempfile.TemporaryDirectory().name
        submission_path = os.path.join(tmpdir, f"{timestamp}.csv")

@@ -4,17 +4,17 @@
# !pip install lmdb
import lmdb

import lightning as L
from lightning.app import CloudCompute, LightningApp, LightningFlow, LightningWork


class YourComponent(L.LightningWork):
class YourComponent(LightningWork):
    def run(self):
        print(lmdb.version())
        print("lmdb successfully installed")
        print("Accessing a module in a Work or Flow body works!")


class RootFlow(L.LightningFlow):
class RootFlow(LightningFlow):
    def __init__(self, work):
        super().__init__()
        self.work = work
@@ -27,6 +27,6 @@ print(f"Accessing an object in main code body works!: version = {lmdb.version()}

# run on a cloud machine
compute = L.CloudCompute("cpu")
compute = CloudCompute("cpu")
worker = YourComponent(cloud_compute=compute)
app = L.LightningApp(RootFlow(worker))
app = LightningApp(RootFlow(worker))

@@ -1,9 +1,9 @@
from time import sleep

import lightning as L
from lightning.app import CloudCompute, LightningApp, LightningFlow, LightningWork


class Work(L.LightningWork):
class Work(LightningWork):
    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.counter = 0
@@ -15,11 +15,11 @@ class Work(L.LightningWork):
            sleep(1)


class Flow(L.LightningFlow):
class Flow(LightningFlow):
    def __init__(self):
        super().__init__()
        self.w = Work(
            cloud_compute=L.CloudCompute("gpu", interruptible=True),
            cloud_compute=CloudCompute("gpu", interruptible=True),
            start_with_flow=False,
            parallel=True,
        )
@@ -29,4 +29,4 @@ class Flow(L.LightningFlow):
        print(self.w.counter)


app = L.LightningApp(Flow())
app = LightningApp(Flow())

@@ -11,11 +11,11 @@ This starts one server for each flow that returns a UI. Access the UI at the lin
import os
from time import sleep

import lightning as L
from lightning.app import LightningApp, LightningFlow
from lightning.app.frontend import StaticWebFrontend, StreamlitFrontend


class C11(L.LightningFlow):
class C11(LightningFlow):
    def __init__(self):
        super().__init__()
        self.message = "Hello Streamlit!"
@@ -33,7 +33,7 @@ def render_c11(state):
    st.write(state.message)


class C21(L.LightningFlow):
class C21(LightningFlow):
    def __init__(self):
        super().__init__()

@@ -44,7 +44,7 @@ class C21(L.LightningFlow):
        return StaticWebFrontend(os.path.join(os.path.dirname(__file__), "ui1"))


class C22(L.LightningFlow):
class C22(LightningFlow):
    def __init__(self):
        super().__init__()

@@ -55,7 +55,7 @@ class C22(L.LightningFlow):
        return StaticWebFrontend(os.path.join(os.path.dirname(__file__), "ui2"))


class C1(L.LightningFlow):
class C1(LightningFlow):
    def __init__(self):
        super().__init__()
        self.c11 = C11()
@@ -64,7 +64,7 @@ class C1(L.LightningFlow):
        pass


class C2(L.LightningFlow):
class C2(LightningFlow):
    def __init__(self):
        super().__init__()
        self.c21 = C21()
@@ -80,7 +80,7 @@ class C2(L.LightningFlow):
        ]


class Root(L.LightningFlow):
class Root(LightningFlow):
    def __init__(self):
        super().__init__()
        self.c1 = C1()
@@ -98,4 +98,4 @@ class Root(L.LightningFlow):
        ]


app = L.LightningApp(Root())
app = LightningApp(Root())

@@ -1,11 +1,10 @@
import os

import lightning as L
from lightning_app import CloudCompute
from lightning_app.storage import Mount
from lightning.app import CloudCompute, LightningApp, LightningFlow, LightningWork
from lightning.app.storage import Mount


class Work(L.LightningWork):
class Work(LightningWork):
    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)

@@ -16,7 +15,7 @@ class Work(L.LightningWork):
        assert "esRedditJson1" in files


class Flow(L.LightningFlow):
class Flow(LightningFlow):
    def __init__(self):
        super().__init__()
        self.work_1 = Work(
@@ -32,4 +31,4 @@ class Flow(L.LightningFlow):
        self.work_1.run()


app = L.LightningApp(Flow())
app = LightningApp(Flow())

@@ -1,7 +1,7 @@
import lightning as L
from lightning.pytorch import Trainer
from lightning.pytorch.demos.boring_classes import BoringModel

if __name__ == "__main__":
    model = BoringModel()
    trainer = L.Trainer(max_epochs=1)
    trainer = Trainer(max_epochs=1)
    trainer.fit(model)
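The last hunk swaps L.Trainer for an explicit import from lightning.pytorch. A minimal runnable sketch of the same pattern (fast_dev_run is added here only to keep the example cheap, it is not part of the diff):

# trainer_sketch.py
from lightning.pytorch import Trainer
from lightning.pytorch.demos.boring_classes import BoringModel

if __name__ == "__main__":
    model = BoringModel()
    # same Trainer class previously reached through the top-level `L` alias
    trainer = Trainer(max_epochs=1, fast_dev_run=True)
    trainer.fit(model)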
@@ -1,8 +1,8 @@
import lightning as L
from lightning.app import CloudCompute, LightningApp, LightningWork
from lightning.app.components import MultiNode


class AnyDistributedComponent(L.LightningWork):
class AnyDistributedComponent(LightningWork):
    def run(
        self,
        main_address: str,
@@ -13,10 +13,10 @@ class AnyDistributedComponent(L.LightningWork):
        print(f"ADD YOUR DISTRIBUTED CODE: {main_address} {main_port} {num_nodes} {node_rank}.")


app = L.LightningApp(
app = LightningApp(
    MultiNode(
        AnyDistributedComponent,
        num_nodes=2,
        cloud_compute=L.CloudCompute("gpu"),
        cloud_compute=CloudCompute("gpu"),
    )
)

@@ -1,11 +1,11 @@
import torch

import lightning as L
from lightning.app import CloudCompute, LightningApp, LightningWork
from lightning.app.components import FabricMultiNode
from lightning.fabric import Fabric


class FabricPyTorchDistributed(L.LightningWork):
class FabricPyTorchDistributed(LightningWork):
    def run(self):
        # 1. Prepare the model
        model = torch.nn.Sequential(
@@ -32,10 +32,10 @@ class FabricPyTorchDistributed(L.LightningWork):


# 8 GPUs: (2 nodes of 4 x v100)
app = L.LightningApp(
app = LightningApp(
    FabricMultiNode(
        FabricPyTorchDistributed,
        cloud_compute=L.CloudCompute("gpu-fast-multi"), # 4 x V100
        cloud_compute=CloudCompute("gpu-fast-multi"), # 4 x V100
        num_nodes=2,
    )
)

@@ -1,13 +1,14 @@
# app.py
import lightning as L
from lightning.app import CloudCompute, LightningApp, LightningWork
from lightning.app.components import LightningTrainerMultiNode
from lightning.pytorch import Trainer
from lightning.pytorch.demos.boring_classes import BoringModel


class LightningTrainerDistributed(L.LightningWork):
class LightningTrainerDistributed(LightningWork):
    def run(self):
        model = BoringModel()
        trainer = L.Trainer(max_epochs=10, strategy="ddp")
        trainer = Trainer(max_epochs=10, strategy="ddp")
        trainer.fit(model)


@@ -15,6 +16,6 @@ class LightningTrainerDistributed(L.LightningWork):
component = LightningTrainerMultiNode(
    LightningTrainerDistributed,
    num_nodes=2,
    cloud_compute=L.CloudCompute("gpu-fast-multi"), # 4 x v100
    cloud_compute=CloudCompute("gpu-fast-multi"), # 4 x v100
)
app = L.LightningApp(component)
app = LightningApp(component)

@@ -1,9 +1,8 @@
import lightning as L
from lightning.app import CloudCompute, LightningApp
from lightning.app.components import LightningTrainerScript
from lightning.app.utilities.packaging.cloud_compute import CloudCompute

# 8 GPUs: (2 nodes of 4 x v100)
app = L.LightningApp(
app = LightningApp(
    LightningTrainerScript(
        "pl_boring_script.py",
        num_nodes=2,

@@ -3,7 +3,7 @@
import torch
from torch.nn.parallel.distributed import DistributedDataParallel

import lightning as L
from lightning.app import CloudCompute, LightningApp, LightningWork
from lightning.app.components import MultiNode


@@ -47,7 +47,7 @@ def distributed_train(local_rank: int, main_address: str, main_port: int, num_no
    print("Multi Node Distributed Training Done!")


class PyTorchDistributed(L.LightningWork):
class PyTorchDistributed(LightningWork):
    def run(self, main_address: str, main_port: int, num_nodes: int, node_rank: int):
        nprocs = torch.cuda.device_count() if torch.cuda.is_available() else 1
        torch.multiprocessing.spawn(
@@ -56,6 +56,6 @@ class PyTorchDistributed(L.LightningWork):

# 8 GPUs: (2 nodes x 4 v 100)
compute = L.CloudCompute("gpu-fast-multi") # 4 x v100
compute = CloudCompute("gpu-fast-multi") # 4 x v100
component = MultiNode(PyTorchDistributed, num_nodes=2, cloud_compute=compute)
app = L.LightningApp(component)
app = LightningApp(component)

@@ -1,11 +1,11 @@
import torch
from torch.nn.parallel.distributed import DistributedDataParallel

import lightning as L
from lightning.app import CloudCompute, LightningApp, LightningWork
from lightning.app.components import PyTorchSpawnMultiNode


class PyTorchDistributed(L.LightningWork):
class PyTorchDistributed(LightningWork):
    def run(
        self,
        world_size: int,
@@ -43,10 +43,10 @@ class PyTorchDistributed(L.LightningWork):


# 8 GPUs: (2 nodes x 4 v 100)
app = L.LightningApp(
app = LightningApp(
    PyTorchSpawnMultiNode(
        PyTorchDistributed,
        num_nodes=2,
        cloud_compute=L.CloudCompute("gpu-fast-multi"), # 4 x v100
        cloud_compute=CloudCompute("gpu-fast-multi"), # 4 x v100
    )
)

@@ -1,8 +1,8 @@
import lightning as L
from lightning.app import LightningApp, LightningFlow, LightningWork
from lightning.app.storage import Payload


class SourceFileWriterWork(L.LightningWork):
class SourceFileWriterWork(LightningWork):
    def __init__(self):
        super().__init__()
        self.value = None
@@ -11,12 +11,12 @@ class SourceFileWriterWork(L.LightningWork):
        self.value = Payload(42)


class DestinationWork(L.LightningWork):
class DestinationWork(LightningWork):
    def run(self, payload):
        assert payload.value == 42


class RootFlow(L.LightningFlow):
class RootFlow(LightningFlow):
    def __init__(self):
        super().__init__()
        self.src = SourceFileWriterWork()
@@ -28,4 +28,4 @@ class RootFlow(L.LightningFlow):
        self.stop("Application End!")


app = L.LightningApp(RootFlow())
app = LightningApp(RootFlow())

@@ -1,11 +1,11 @@
import logging

import lightning as L
from lightning.app import LightningApp, LightningFlow, LightningWork

logger = logging.getLogger(__name__)


class PickleChecker(L.LightningWork):
class PickleChecker(LightningWork):
    def run(self, pickle_image: bytes):
        parsed = self.parse_image(pickle_image)
        if parsed == b"it is a pickle":
@@ -19,7 +19,7 @@ class PickleChecker(L.LightningWork):
        return image_str


class Slack(L.LightningFlow):
class Slack(LightningFlow):
    def __init__(self):
        super().__init__()

@@ -31,7 +31,7 @@ class Slack(L.LightningFlow):
        pass


class RootComponent(L.LightningFlow):
class RootComponent(LightningFlow):
    def __init__(self):
        super().__init__()
        self.pickle_checker = PickleChecker()
@@ -51,4 +51,4 @@ class RootComponent(L.LightningFlow):
        self.stop("Pickle or Not End")


app = L.LightningApp(RootComponent())
app = LightningApp(RootComponent())

@@ -7,7 +7,7 @@ import torchvision
from PIL import Image
from pydantic import BaseModel

import lightning as L
from lightning.app import CloudCompute, LightningApp
from lightning.app.components.serve import Image as InputImage
from lightning.app.components.serve import PythonServer

@@ -38,5 +38,5 @@ class OutputData(BaseModel):
    prediction: int


component = PyTorchServer(input_type=InputImage, output_type=OutputData, cloud_compute=L.CloudCompute("gpu"))
app = L.LightningApp(component)
component = PyTorchServer(input_type=InputImage, output_type=OutputData, cloud_compute=CloudCompute("gpu"))
app = LightningApp(component)

@@ -5,18 +5,18 @@ import torch
import torchvision
from pydantic import BaseModel

import lightning as L
from lightning.app import CloudCompute, LightningApp


class BatchRequestModel(BaseModel):
    inputs: List[L.app.components.Image]
    inputs: List[app.components.Image]


class BatchResponse(BaseModel):
    outputs: List[L.app.components.Number]
    outputs: List[app.components.Number]


class PyTorchServer(L.app.components.PythonServer):
class PyTorchServer(app.components.PythonServer):
    def __init__(self, *args, **kwargs):
        super().__init__(
            input_type=BatchRequestModel,
@@ -44,7 +44,7 @@ class PyTorchServer(L.app.components.PythonServer):
        )
        images = []
        for request in requests.inputs:
            image = L.app.components.serve.types.image.Image.deserialize(request.image)
            image = app.components.serve.types.image.Image.deserialize(request.image)
            image = transforms(image).unsqueeze(0)
            images.append(image)
        images = torch.cat(images)
@@ -54,7 +54,7 @@ class PyTorchServer(L.app.components.PythonServer):
        return BatchResponse(outputs=[{"prediction": pred} for pred in results])


class MyAutoScaler(L.app.components.AutoScaler):
class MyAutoScaler(app.components.AutoScaler):
    def scale(self, replicas: int, metrics: dict) -> int:
        pending_requests = metrics["pending_requests"]
        active_or_pending_works = replicas + metrics["pending_works"]
@@ -77,19 +77,19 @@ class MyAutoScaler(L.app.components.AutoScaler):
        return replicas


app = L.LightningApp(
app = LightningApp(
    MyAutoScaler(
        # work class and args
        PyTorchServer,
        cloud_compute=L.CloudCompute("gpu"),
        cloud_compute=CloudCompute("gpu"),
        # autoscaler specific args
        min_replicas=1,
        max_replicas=4,
        scale_out_interval=10,
        scale_in_interval=10,
        endpoint="predict",
        input_type=L.app.components.Image,
        output_type=L.app.components.Number,
        input_type=app.components.Image,
        output_type=app.components.Number,
        timeout_batching=1,
        max_batch_size=8,
    )
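The AutoScaler hunk above shows only the first lines of MyAutoScaler.scale; the actual thresholds live outside the diff. Purely as an illustration of the contract (a scale method on an AutoScaler subclass returns the desired replica count as an int), a hypothetical policy might look like:

    # illustrative scale() policy only - the example's real thresholds are not shown above
    def scale(self, replicas: int, metrics: dict) -> int:
        pending_requests = metrics["pending_requests"]
        active_or_pending_works = replicas + metrics["pending_works"]
        if active_or_pending_works == 0:
            # nothing running yet: start one replica if there is traffic
            return 1 if pending_requests > 0 else replicas
        pending_per_work = pending_requests / active_or_pending_works
        if pending_per_work >= 10:  # hypothetical scale-out threshold
            return replicas + 1
        if pending_per_work < 1:  # hypothetical scale-in threshold
            return max(replicas - 1, 0)
        return replicas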
@@ -3,11 +3,11 @@ import os
from datetime import datetime
from time import sleep

import lightning as L
from lightning.app import LightningApp, LightningFlow
from lightning.app.frontend import StaticWebFrontend


class Word(L.LightningFlow):
class Word(LightningFlow):
    def __init__(self, letter):
        super().__init__()
        self.letter = letter
@@ -20,7 +20,7 @@ class Word(L.LightningFlow):
        return StaticWebFrontend(os.path.join(os.path.dirname(__file__), f"ui/{self.letter}"))


class V0App(L.LightningFlow):
class V0App(LightningFlow):
    def __init__(self):
        super().__init__()
        self.aas = Word("a")
@@ -46,4 +46,4 @@ class V0App(L.LightningFlow):
        return [tab1, tab2, tab3]


app = L.LightningApp(V0App(), log_level="debug")
app = LightningApp(V0App(), log_level="debug")

@@ -7,8 +7,10 @@ import torch
import torch.nn.functional as F
import torch.optim as optim
import torch.optim.lr_scheduler as lr_scheduler
from torch.utils.data import Dataset

import lightning as L
from lightning import LightningModule, Trainer
from lightning.data import LightningDataset
from lightning.pytorch.utilities.model_helpers import get_torchvision_model

parser = ArgumentParser()
@@ -22,7 +24,7 @@ args = parser.parse_args()
# --------------------------------


class ImageNetLightningModel(L.LightningModule):
class ImageNetLightningModel(LightningModule):
    """
    >>> ImageNetLightningModel(data_path='missing') # doctest: +ELLIPSIS +NORMALIZE_WHITESPACE
    ImageNetLightningModel(
@@ -121,7 +123,7 @@ class ImageNetLightningModel(L.LightningModule):
# -------------------


class S3LightningImagenetDataset(L.LightningDataset):
class S3LightningImagenetDataset(LightningDataset):
    def __init__(
        self,
        data_source: str,
@@ -181,7 +183,7 @@ if __name__ == "__main__":
        batch_size=args.batchsize,
        workers=args.workers,
    )
    trainer = L.Trainer()
    trainer = Trainer()

    print("Train Model")
    if args.evaluate:
@@ -12,38 +12,19 @@ formatter = logging.Formatter("%(levelname)s: %(message)s")
_console.setFormatter(formatter)
_logger.addHandler(_console)

from lightning.__about__ import * # noqa: E402, F401, F403
from lightning.__about__ import * # noqa: E402, F403
from lightning.__version__ import version as __version__ # noqa: E402, F401
from lightning.app import storage # noqa: E402
from lightning.app.core.app import LightningApp # noqa: E402
from lightning.app.core.flow import LightningFlow # noqa: E402
from lightning.app.core.work import LightningWork # noqa: E402
from lightning.app.utilities.packaging.build_config import BuildConfig # noqa: E402
from lightning.app.utilities.packaging.cloud_compute import CloudCompute # noqa: E402
from lightning.data import LightningDataset, LightningIterableDataset # noqa: E402
from lightning.fabric.fabric import Fabric # noqa: E402
from lightning.fabric.utilities.seed import seed_everything # noqa: E402
from lightning.pytorch.callbacks import Callback # noqa: E402
from lightning.pytorch.core import LightningDataModule, LightningModule # noqa: E402
from lightning.pytorch.trainer import Trainer # noqa: E402

import lightning.app # isort: skip # noqa: E402
import lightning.store # isort: skip # noqa: E402, F401


__all__ = [
    "LightningApp",
    "LightningFlow",
    "LightningWork",
    "BuildConfig",
    "CloudCompute",
    "Trainer",
    "LightningDataset",
    "LightningIterableDataset",
    "LightningDataModule",
    "LightningModule",
    "Callback",
    "seed_everything",
    "Fabric",
    "storage",
]
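After this __init__.py change, the examples in this commit reach PyTorch and Fabric symbols through the top-level package while importing the app framework explicitly from lightning.app. A short sketch of the resulting import split (assuming both subpackages and their requirements are installed; the exact set of names re-exported at the top level follows the hunk above):

# import_split_sketch.py
import lightning as L  # Trainer, LightningModule, Fabric, seed_everything, ...
from lightning.app import (  # app framework classes are now imported explicitly
    CloudCompute,
    LightningApp,
    LightningFlow,
    LightningWork,
)

print(L.__version__)  # the top-level package still exposes its version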
@@ -39,20 +39,32 @@ def _prepare_extras() -> Dict[str, Any]:
        for p in req_files
        if p.name not in ("docs.txt", "base.txt") and not p.parent.name.startswith("_")
    }

    # project specific extras groups
    extras["fabric-all"] = extras["fabric-strategies"] + extras["fabric-examples"]
    extras["fabric-dev"] = extras["fabric-all"] + extras["fabric-test"]
    extras["pytorch-all"] = extras["pytorch-extra"] + extras["pytorch-strategies"] + extras["pytorch-examples"]
    extras["pytorch-dev"] = extras["pytorch-all"] + extras["pytorch-test"]
    extras["app-extra"] = extras["app-cloud"] + extras["app-ui"] + extras["app-components"]
    extras["app-extra"] = extras["app-app"] + extras["app-cloud"] + extras["app-ui"] + extras["app-components"]
    extras["app-all"] = extras["app-extra"]
    extras["app-dev"] = extras["app-all"] + extras["app-test"]
    extras["data-data"] += extras["app-app"] # todo: consider cutting/leaning this dependency
    extras["data-all"] = extras["data-data"] + extras["data-cloud"] + extras["data-examples"]
    extras["data-dev"] = extras["data-all"] + extras["data-test"]
    extras["store-store"] = extras["app-app"] # todo: consider cutting/leaning this dependency

    # merge per-project extras of the same category, e.g. `app-test` + `fabric-test`
    for extra in list(extras):
        name = "-".join(extra.split("-")[1:])
        extras[name] = extras.get(name, []) + extras[extra]

    # drop quasi base the req. file has the same name sub-package
    for k in list(extras.keys()):
        kk = k.split("-")
        if not (len(kk) == 2 and kk[0] == kk[1]):
            continue
        extras[kk[0]] = list(extras[k])
        del extras[k]
    extras = {name: sorted(set(reqs)) for name, reqs in extras.items()}
    print("The extras are: ", extras)
    return extras
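The merge loop added above folds per-project extras into shared categories. A toy illustration with dummy requirement names (not the real requirement files):

# extras_merge_sketch.py
extras = {
    "app-test": ["coverage"],
    "fabric-test": ["pytest"],
}
for extra in list(extras):
    name = "-".join(extra.split("-")[1:])  # "app-test" -> "test"
    extras[name] = extras.get(name, []) + extras[extra]

print(extras["test"])  # ['coverage', 'pytest'] - per-project groups merged into one category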