diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml
index 5747fe09e..6ae41a3d9 100644
--- a/.github/workflows/main.yml
+++ b/.github/workflows/main.yml
@@ -92,7 +92,7 @@ jobs:
# install dependencies (requires internet connectivity)
tox -e py --notest
# run tests with loopback only. We need to sudo for unshare, which means we need an absolute path for tox.
- sudo unshare --net -- sh -c "ip link set lo up; $(which tox) -e py"
+ sudo unshare --net -- sh -c "ip link set lo up; $(which tox) -e py"
if: matrix.os == 'ubuntu-latest'
- uses: codecov/codecov-action@a1ed4b322b4b38cb846afb5a0ebfa17086917d27
# mirrored below and at https://github.com/mitmproxy/mitmproxy/settings/actions
@@ -113,9 +113,6 @@ jobs:
platform: linux
runs-on: ${{ matrix.image }}
env:
- CI_BUILD_WHEEL: ${{ matrix.platform == 'linux' }}
- CI_BUILD_PYINSTALLER: 1
- CI_BUILD_WININSTALLER: ${{ matrix.platform == 'windows' }}
CI_BUILD_KEY: ${{ secrets.CI_BUILD_KEY }}
steps:
- uses: actions/checkout@v2
@@ -131,12 +128,24 @@ jobs:
path: release/installbuilder/setup
key: installbuilder
- run: pip install -e .[dev]
- - run: python release/cibuild.py build
- # artifacts must have different names, see https://github.com/actions/upload-artifact/issues/24
- - uses: actions/upload-artifact@v2
+ - if: matrix.platform == 'linux'
+ run: python -u release/build.py standalone-binaries wheel
+ - if: matrix.platform == 'windows'
+ run: python -u release/build.py standalone-binaries installbuilder-installer msix-installer
+ - if: matrix.platform == 'macos'
+ run: python -u release/build.py standalone-binaries
+ - if: matrix.platform == 'windows' # separate artifact because we don't want it on the snapshot server.
+ uses: actions/upload-artifact@v3
with:
+ name: msix-installer
+ path: release/dist/*.msix
+ - uses: actions/upload-artifact@v3
+ with:
+ # artifacts must have different names, see https://github.com/actions/upload-artifact/issues/24
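+ # the "!" pattern below keeps the MSIX out of this artifact; it is uploaded separately above.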
name: binaries.${{ matrix.platform }}
- path: release/dist
+ path: |
+ release/dist
+ !release/dist/*.msix
test-web-ui:
runs-on: ubuntu-latest
@@ -182,7 +191,7 @@ jobs:
sudo dpkg -i hugo*.deb
- run: pip install -e .[dev]
- run: ./docs/build.py
- - uses: actions/upload-artifact@v2
+ - uses: actions/upload-artifact@v3
with:
name: docs
path: docs/public
@@ -190,9 +199,9 @@ jobs:
# Separate from everything else because slow.
build-and-deploy-docker:
if: github.repository == 'mitmproxy/mitmproxy' && (
- github.ref == 'refs/heads/main' ||
- github.ref == 'refs/heads/dockertest' ||
- startsWith(github.ref, 'refs/tags/')
+ github.ref == 'refs/heads/main'
+ || github.ref == 'refs/heads/citest'
+ || startsWith(github.ref, 'refs/tags/')
)
environment: deploy-docker
needs:
@@ -202,7 +211,6 @@ jobs:
- docs
runs-on: ubuntu-latest
env:
- CI_BUILD_DOCKER: 1
DOCKER_USERNAME: mitmbot
DOCKER_PASSWORD: ${{ secrets.DOCKER_PASSWORD }}
steps:
@@ -218,9 +226,7 @@ jobs:
path: release/dist
- uses: docker/setup-qemu-action@27d0a4f181a40b142cce983c5393082c365d1480 # v1.2.0
- uses: docker/setup-buildx-action@b1f1f719c7cd5364be7c82e366366da322d01f7c # v1.6.0
- - run: pip install -e .[dev]
- - run: python release/cibuild.py build
- - run: python release/cibuild.py upload
+ - run: python release/build-and-deploy-docker.py
deploy:
# This action has access to our AWS keys, so we are extra careful here.
@@ -239,6 +245,10 @@ jobs:
AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
AWS_DEFAULT_REGION: us-west-2
+ MSFT_APP_ID: 9NWNDLQMNZD7
+ MSFT_TENANT_ID: ${{ secrets.MSFT_TENANT_ID }}
+ MSFT_CLIENT_ID: ${{ secrets.MSFT_CLIENT_ID }}
+ MSFT_CLIENT_SECRET: ${{ secrets.MSFT_CLIENT_SECRET }}
steps:
- uses: actions/checkout@v2
with:
@@ -248,13 +258,34 @@ jobs:
python-version: '3.10'
- run: sudo apt-get update
- run: sudo apt-get install -y twine awscli
- - uses: actions/download-artifact@v2
+ - uses: actions/download-artifact@v3
with:
+ name: docs
+ path: docs/public
+ - uses: actions/download-artifact@v3
+ with:
+ name: binaries.windows
path: release/dist
- - run: mv release/dist/docs docs/public
- # move artifacts from their subfolders into release/dist
- - run: find release/dist -mindepth 2 -type f -exec mv {} release/dist \;
- # and then delete the empty folders
- - run: find release/dist -type d -empty -delete
+ - uses: actions/download-artifact@v3
+ with:
+ name: binaries.linux
+ path: release/dist
+ - uses: actions/download-artifact@v3
+ with:
+ name: binaries.macos
+ path: release/dist
+ - run: ls docs/public
- run: ls release/dist
- run: ./release/deploy.py
+ # We don't want the MSIX installer on the snapshot server, so we get it only now.
+ - if: github.ref == 'refs/heads/citest' || startsWith(github.ref, 'refs/tags/')
+ uses: actions/download-artifact@v3
+ with:
+ name: msix-installer
+ path: release/dist/
+ - if: github.ref == 'refs/heads/citest'
+ run: ./release/deploy-microsoft-store.py release/dist/*.msix
+ env:
+ MSFT_APP_FLIGHT: 174ca570-8cae-4444-9858-c07293f1f13a
+ - if: startsWith(github.ref, 'refs/tags/')
+ run: ./release/deploy-microsoft-store.py release/dist/*.msix
diff --git a/release/README.md b/release/README.md
index b14737122..5b6bafa17 100644
--- a/release/README.md
+++ b/release/README.md
@@ -1,14 +1,16 @@
# Release Checklist
-These steps assume you are on the correct branch and have a git remote called `origin` that points to the `mitmproxy/mitmproxy` repo. If necessary, create a major version branch starting off the release tag (e.g. `git checkout -b v4.x v4.0.0`) first.
+These steps assume you are on the correct branch and have a git remote called `origin` that points to the
+`mitmproxy/mitmproxy` repo. If necessary, create a major version branch starting off the release tag
+(e.g. `git checkout -b v4.x 4.0.0`) first.
- Update CHANGELOG.
- Verify that the compiled mitmweb assets are up-to-date (`npm start prod`).
- Verify that all CI tests pass.
- Verify that `mitmproxy/version.py` is correct. Remove `.dev` suffix if it exists.
- Tag the release and push to GitHub.
- - `git tag v4.0.0`
- - `git push origin v4.0.0`
+ - `git tag 4.0.0`
+ - `git push origin 4.0.0`
- Wait for tag CI to complete.
### GitHub Releases
diff --git a/release/__init__.py b/release/__init__.py
deleted file mode 100644
index e69de29bb..000000000
diff --git a/release/build-and-deploy-docker.py b/release/build-and-deploy-docker.py
new file mode 100644
index 000000000..15ee80b4d
--- /dev/null
+++ b/release/build-and-deploy-docker.py
@@ -0,0 +1,100 @@
+#!/usr/bin/env python3
+"""
+Building and deploying Docker images is a bit of a special snowflake: we don't get a file that we can
+upload/download as an artifact, so we need to do everything in one job.
+"""
+import os
+import shutil
+import subprocess
+from pathlib import Path
+from typing import Optional
+
+# Security: No third-party dependencies here!
+
+root = Path(__file__).absolute().parent.parent
+
+ref = os.environ["GITHUB_REF"]
+branch: Optional[str] = None
+tag: Optional[str] = None
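+# GITHUB_REF examples: "refs/heads/main" -> branch "main"; "refs/tags/8.0.0" -> tag "8.0.0".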
+if ref.startswith("refs/heads/"):
+ branch = ref.replace("refs/heads/", "")
+elif ref.startswith("refs/tags/"):
+ tag = ref.replace("refs/tags/", "")
+else:
+ raise AssertionError
+
+(whl,) = root.glob("release/dist/mitmproxy-*-py3-none-any.whl")
+docker_build_dir = root / "release/docker"
+shutil.copy(whl, docker_build_dir / whl.name)
+
+# Build for this platform and test if it runs.
+subprocess.check_call(
+ [
+ "docker",
+ "buildx",
+ "build",
+ "--tag",
+ "localtesting",
+ "--load",
+ "--build-arg",
+ f"MITMPROXY_WHEEL={whl.name}",
+ ".",
+ ],
+ cwd=docker_build_dir,
+)
+r = subprocess.run(
+ [
+ "docker",
+ "run",
+ "--rm",
+ "localtesting",
+ "mitmdump",
+ "--version",
+ ],
+ check=True,
+ capture_output=True,
+)
+print(r.stdout.decode())
+assert "Mitmproxy: " in r.stdout.decode()
+
+# Now we can deploy.
+subprocess.check_call(
+ [
+ "docker",
+ "login",
+ "-u",
+ os.environ["DOCKER_USERNAME"],
+ "-p",
+ os.environ["DOCKER_PASSWORD"],
+ ]
+)
+
+
+def _buildx(docker_tag):
+ subprocess.check_call(
+ [
+ "docker",
+ "buildx",
+ "build",
+ "--tag",
+ docker_tag,
+ "--push",
+ "--platform",
+ "linux/amd64,linux/arm64",
+ "--build-arg",
+ f"MITMPROXY_WHEEL={whl.name}",
+ ".",
+ ],
+ cwd=docker_build_dir,
+ )
+
+
+if branch == "main":
+ _buildx("mitmproxy/mitmproxy:dev")
+elif branch == "citest":
+ _buildx("mitmproxy/mitmproxy:citest")
+elif tag:
+ _buildx(f"mitmproxy/mitmproxy:{tag}")
+ _buildx("mitmproxy/mitmproxy:latest")
+else:
+ raise AssertionError
diff --git a/release/build.py b/release/build.py
new file mode 100644
index 000000000..73c6bb78f
--- /dev/null
+++ b/release/build.py
@@ -0,0 +1,255 @@
+#!/usr/bin/env python3
+from __future__ import annotations
+
+import hashlib
+import os
+import platform
+import shutil
+import subprocess
+import tarfile
+import urllib.request
+import zipfile
+from datetime import datetime
+from pathlib import Path
+from typing import Literal
+
+import click
+import cryptography.fernet
+
+here = Path(__file__).absolute().parent
+
+TEMP_DIR = here / "build"
+DIST_DIR = here / "dist"
+
+
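+# A chained click group, so CI can run several build steps in one invocation,
+# e.g. `python -u release/build.py standalone-binaries wheel` (see .github/workflows/main.yml).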
+@click.group(chain=True)
+@click.option("--dirty", is_flag=True)
+def cli(dirty):
+ if dirty:
+ print("Keeping temporary files.")
+ else:
+ print("Cleaning up temporary files...")
+ if TEMP_DIR.exists():
+ shutil.rmtree(TEMP_DIR)
+ if DIST_DIR.exists():
+ shutil.rmtree(DIST_DIR)
+
+ TEMP_DIR.mkdir()
+ DIST_DIR.mkdir()
+
+
+@cli.command()
+def wheel():
+ """Build the wheel for PyPI."""
+ print("Building wheel...")
+ subprocess.check_call(
+ [
+ "python",
+ "setup.py",
+ "-q",
+ "bdist_wheel",
+ "--dist-dir",
+ DIST_DIR,
+ ]
+ )
+ (whl,) = DIST_DIR.glob("mitmproxy-*-py3-none-any.whl")
+ print(f"Found wheel package: {whl}")
+ subprocess.check_call(["tox", "-e", "wheeltest", "--", whl])
+
+
+class ZipFile2(zipfile.ZipFile):
+ # ZipFile and tarfile have slightly different APIs. Let's fix that.
+ def add(self, name: str, arcname: str) -> None:
+ return self.write(name, arcname)
+
+ def __enter__(self) -> ZipFile2:
+ return self
+
+ @property
+ def name(self) -> str:
+ assert self.filename
+ return self.filename
+
+
+def archive(path: Path) -> tarfile.TarFile | ZipFile2:
+ if platform.system() == "Windows":
+ return ZipFile2(path.with_suffix(".zip"), "w")
+ else:
+ return tarfile.open(path.with_suffix(".tar.gz"), "w:gz")
+
+
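+# Version label used in artifact names: the tag or branch name from GITHUB_REF in CI
+# (e.g. "refs/tags/8.0.0" -> "8.0.0"), otherwise BUILD_VERSION or "dev" locally.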
+def version() -> str:
+ if ref := os.environ.get("GITHUB_REF", ""):
+ if ref.startswith("refs/heads/"):
+ return ref.replace("refs/heads/", "")
+ if ref.startswith("refs/tags/"):
+ return ref.replace("refs/tags/", "")
+ return os.environ.get("BUILD_VERSION", "dev")
+
+
+def operating_system() -> Literal["windows", "linux", "macos", "unknown"]:
+ match platform.system():
+ case "Windows":
+ return "windows"
+ case "Linux":
+ return "linux"
+ case "Darwin":
+ return "macos"
+ case _:
+ return "unknown"
+
+
+def _pyinstaller(specfile: str) -> None:
+ print(f"Invoking PyInstaller with {specfile}...")
+ subprocess.check_call(
+ [
+ "pyinstaller",
+ "--clean",
+ "--workpath",
+ TEMP_DIR / "pyinstaller/temp",
+ "--distpath",
+ TEMP_DIR / "pyinstaller/dist",
+ specfile,
+ ],
+ cwd=here / "specs",
+ )
+
+
+@cli.command()
+def standalone_binaries():
+ """All platforms: Build the standalone binaries generated with PyInstaller"""
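+ # Produces e.g. dist/mitmproxy-8.0.0-linux.tar.gz (a .zip on Windows) containing the
+ # one-file mitmproxy, mitmdump, and mitmweb binaries.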
+ with archive(DIST_DIR / f"mitmproxy-{version()}-{operating_system()}") as f:
+ _pyinstaller("standalone.spec")
+
+ for tool in ["mitmproxy", "mitmdump", "mitmweb"]:
+ executable = TEMP_DIR / "pyinstaller/dist" / tool
+ if platform.system() == "Windows":
+ executable = executable.with_suffix(".exe")
+
+ # Test if it works at all O:-)
+ print(f"> {executable} --version")
+ subprocess.check_call([executable, "--version"])
+
+ f.add(str(executable), str(executable.name))
+ print(f"Packed {f.name}.")
+
+
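+# Both Windows installer targets package the same PyInstaller one-folder build,
+# so build it lazily and only once.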
+def _ensure_pyinstaller_onedir():
+ if not (TEMP_DIR / "pyinstaller/dist/onedir").exists():
+ _pyinstaller("windows-dir.spec")
+
+ for tool in ["mitmproxy", "mitmdump", "mitmweb"]:
+ print(f"> {tool} --version")
+ executable = (TEMP_DIR / "pyinstaller/dist/onedir" / tool).with_suffix(".exe")
+ subprocess.check_call([executable, "--version"])
+
+
+@cli.command()
+def msix_installer():
+ """Windows: Build the MSIX installer for the Windows Store."""
+ _ensure_pyinstaller_onedir()
+
+ shutil.copytree(
+ TEMP_DIR / "pyinstaller/dist/onedir",
+ TEMP_DIR / "msix",
+ dirs_exist_ok=True,
+ )
+ shutil.copytree(here / "windows-installer", TEMP_DIR / "msix", dirs_exist_ok=True)
+
+ manifest = TEMP_DIR / "msix/AppxManifest.xml"
+ app_version = version()
+ if app_version in ["citest", "dev"]:
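+ # "citest"/"dev" are not valid numeric MSIX versions, so substitute a timestamp; the chained
+ # replace() calls strip leading zeros from each dotted part (e.g. ".0005" -> ".005" -> ".05" -> ".5").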
+ app_version = datetime.now().strftime("%y%m.%d.%H%M").replace(".0", ".").replace(".0", ".").replace(".0", ".")
+ manifest.write_text(manifest.read_text().replace("1.2.3", app_version))
+
+ makeappx_exe = (
+ Path(os.environ["ProgramFiles(x86)"])
+ / "Windows Kits/10/App Certification Kit/makeappx.exe"
+ )
+ subprocess.check_call(
+ [
+ makeappx_exe,
+ "pack",
+ "/d",
+ TEMP_DIR / "msix",
+ "/p",
+ DIST_DIR / f"mitmproxy-{version()}-installer.msix",
+ ],
+ )
+ assert (DIST_DIR / f"mitmproxy-{version()}-installer.msix").exists()
+
+
+@cli.command()
+def installbuilder_installer():
+ """Windows: Build the InstallBuilder installer."""
+ _ensure_pyinstaller_onedir()
+
+ IB_VERSION = "21.6.0"
+ IB_SETUP_SHA256 = "2bc9f9945cb727ad176aa31fa2fa5a8c57a975bad879c169b93e312af9d05814"
+ IB_DIR = here / "installbuilder"
+ IB_SETUP = IB_DIR / "setup" / f"{IB_VERSION}-installer.exe"
+ IB_CLI = Path(
+ rf"C:\Program Files\VMware InstallBuilder Enterprise {IB_VERSION}\bin\builder-cli.exe"
+ )
+ IB_LICENSE = IB_DIR / "license.xml"
+
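+ # The encrypted license (license.xml.enc) is decrypted with CI_BUILD_KEY, the Fernet key
+ # provided as a CI secret (see main.yml).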
+ if not IB_LICENSE.exists():
+ print("Decrypt InstallBuilder license...")
+ f = cryptography.fernet.Fernet(os.environ["CI_BUILD_KEY"].encode())
+ with open(IB_LICENSE.with_suffix(".xml.enc"), "rb") as infile, open(
+ IB_LICENSE, "wb"
+ ) as outfile:
+ outfile.write(f.decrypt(infile.read()))
+
+ if not IB_CLI.exists():
+ if not IB_SETUP.exists():
+ print("Downloading InstallBuilder...")
+
+ def report(block, blocksize, total):
+ done = block * blocksize
+ if round(100 * done / total) != round(100 * (done - blocksize) / total):
+ print(f"Downloading... {round(100 * done / total)}%")
+
+ tmp = IB_SETUP.with_suffix(".tmp")
+ urllib.request.urlretrieve(
+ f"https://clients.bitrock.com/installbuilder/installbuilder-enterprise-{IB_VERSION}-windows-x64-installer.exe",
+ tmp,
+ reporthook=report,
+ )
+ tmp.rename(IB_SETUP)
+
+ ib_setup_hash = hashlib.sha256()
+ with IB_SETUP.open("rb") as fp:
+ while True:
+ data = fp.read(65_536)
+ if not data:
+ break
+ ib_setup_hash.update(data)
+ if ib_setup_hash.hexdigest() != IB_SETUP_SHA256: # pragma: no cover
+ raise RuntimeError("InstallBuilder hashes don't match.")
+
+ print("Install InstallBuilder...")
+ subprocess.run(
+ [IB_SETUP, "--mode", "unattended", "--unattendedmodeui", "none"], check=True
+ )
+ assert IB_CLI.is_file()
+
+ print("Run InstallBuilder...")
+ subprocess.check_call(
+ [
+ IB_CLI,
+ "build",
+ str(IB_DIR / "mitmproxy.xml"),
+ "windows-x64",
+ "--license",
+ str(IB_LICENSE),
+ "--setvars",
+ f"project.version={version()}",
+ "--verbose",
+ ]
+ )
+ assert (DIST_DIR / f"mitmproxy-{version()}-windows-x64-installer.exe").exists()
+
+
+if __name__ == "__main__":
+ cli()
diff --git a/release/cibuild.py b/release/cibuild.py
deleted file mode 100755
index a1c6ac5e6..000000000
--- a/release/cibuild.py
+++ /dev/null
@@ -1,633 +0,0 @@
-#!/usr/bin/env python3
-
-import contextlib
-import hashlib
-import os
-import platform
-import re
-import shutil
-import subprocess
-import sys
-import tarfile
-import urllib.request
-import zipfile
-from dataclasses import dataclass
-from pathlib import Path
-from typing import Optional, Union
-
-import click
-import cryptography.fernet
-import parver
-
-
-@contextlib.contextmanager
-def chdir(path: Path): # pragma: no cover
- old_dir = os.getcwd()
- os.chdir(path)
- yield
- os.chdir(old_dir)
-
-
-class BuildError(Exception):
- pass
-
-
-def bool_from_env(envvar: str) -> bool:
- val = os.environ.get(envvar, "")
- if not val or val.lower() in ("0", "false"):
- return False
- else:
- return True
-
-
-class ZipFile2(zipfile.ZipFile):
- # ZipFile and tarfile have slightly different APIs. Let's fix that.
- def add(self, name: str, arcname: str) -> None:
- return self.write(name, arcname)
-
- def __enter__(self) -> "ZipFile2":
- return self
-
-
-@dataclass(frozen=True, repr=False)
-class BuildEnviron:
- PLATFORM_TAGS = {
- "Darwin": "osx",
- "Windows": "windows",
- "Linux": "linux",
- }
-
- system: str
- root_dir: Path
- branch: Optional[str] = None
- tag: Optional[str] = None
- is_pull_request: bool = True
- should_build_wheel: bool = False
- should_build_docker: bool = False
- should_build_pyinstaller: bool = False
- should_build_wininstaller: bool = False
- has_aws_creds: bool = False
- has_twine_creds: bool = False
- docker_username: Optional[str] = None
- docker_password: Optional[str] = None
- build_key: Optional[str] = None
-
- @classmethod
- def from_env(cls) -> "BuildEnviron":
- branch = None
- tag = None
-
- if ref := os.environ.get("GITHUB_REF", ""):
- if ref.startswith("refs/heads/"):
- branch = ref.replace("refs/heads/", "")
- if ref.startswith("refs/pull/"):
- branch = "pr-" + ref.split("/")[2]
- if ref.startswith("refs/tags/"):
- tag = ref.replace("refs/tags/", "")
-
- is_pull_request = (
- os.environ.get("GITHUB_EVENT_NAME", "pull_request") == "pull_request"
- )
-
- return cls(
- system=platform.system(),
- root_dir=Path(__file__).parent.parent,
- branch=branch,
- tag=tag,
- is_pull_request=is_pull_request,
- should_build_wheel=bool_from_env("CI_BUILD_WHEEL"),
- should_build_pyinstaller=bool_from_env("CI_BUILD_PYINSTALLER"),
- should_build_wininstaller=bool_from_env("CI_BUILD_WININSTALLER"),
- should_build_docker=bool_from_env("CI_BUILD_DOCKER"),
- has_aws_creds=bool_from_env("AWS_ACCESS_KEY_ID"),
- has_twine_creds=bool_from_env("TWINE_USERNAME")
- and bool_from_env("TWINE_PASSWORD"),
- docker_username=os.environ.get("DOCKER_USERNAME", None),
- docker_password=os.environ.get("DOCKER_PASSWORD", None),
- build_key=os.environ.get("CI_BUILD_KEY", None),
- )
-
- def archive(self, path: Path) -> Union[tarfile.TarFile, ZipFile2]:
- if self.system == "Windows":
- return ZipFile2(path, "w")
- else:
- return tarfile.open(path, "w:gz")
-
- @property
- def archive_path(self) -> Path:
- if self.system == "Windows":
- ext = "zip"
- else:
- ext = "tar.gz"
- return self.dist_dir / f"mitmproxy-{self.version}-{self.platform_tag}.{ext}"
-
- @property
- def build_dir(self) -> Path:
- return self.release_dir / "build"
-
- @property
- def dist_dir(self) -> Path:
- return self.release_dir / "dist"
-
- @property
- def docker_tag(self) -> str:
- if self.branch == "main":
- t = "dev"
- else:
- t = self.version
- return f"mitmproxy/mitmproxy:{t}"
-
- def dump_info(self, fp=sys.stdout) -> None:
- lst = [
- "version",
- "tag",
- "branch",
- "platform_tag",
- "root_dir",
- "release_dir",
- "build_dir",
- "dist_dir",
- "upload_dir",
- "should_build_wheel",
- "should_build_pyinstaller",
- "should_build_wininstaller",
- "should_build_docker",
- "should_upload_aws",
- "should_upload_docker",
- "should_upload_pypi",
- ]
- for attr in lst:
- print(f"cibuild.{attr}={getattr(self, attr)}", file=fp)
-
- def check_version(self) -> None:
- """
- Check that version numbers match our conventions.
- Raises a ValueError if there is a mismatch.
- """
- contents = (self.root_dir / "mitmproxy" / "version.py").read_text("utf8")
- match = re.search(r'^VERSION = "(.+?)"', contents, re.M)
- assert match
- version = match.group(1)
-
- if self.is_prod_release:
- # For production releases, we require strict version equality
- if self.version != version:
- raise ValueError(
- f"Tag is {self.tag}, but mitmproxy/version.py is {version}."
- )
- elif not self.is_maintenance_branch:
- # Commits on maintenance branches don't need the dev suffix. This
- # allows us to incorporate and test commits between tagged releases.
- # For snapshots, we only ensure that mitmproxy/version.py contains a
- # dev release.
- version_info = parver.Version.parse(version)
- if not version_info.is_devrelease:
- raise ValueError(
- f"Non-production releases must have dev suffix: {version}"
- )
-
- @property
- def is_maintenance_branch(self) -> bool:
- """
- Is this an untagged commit on a maintenance branch?
- """
- if not self.tag and self.branch and re.match(r"v\d+\.x", self.branch):
- return True
- return False
-
- @property
- def has_docker_creds(self) -> bool:
- return bool(self.docker_username and self.docker_password)
-
- @property
- def is_prod_release(self) -> bool:
- if not self.tag or not self.tag.startswith("v"):
- return False
- try:
- v = parver.Version.parse(self.version, strict=True)
- except (parver.ParseError, BuildError):
- return False
- return not v.is_prerelease
-
- @property
- def platform_tag(self) -> str:
- if self.system in self.PLATFORM_TAGS:
- return self.PLATFORM_TAGS[self.system]
- raise BuildError(f"Unsupported platform: {self.system}")
-
- @property
- def release_dir(self) -> Path:
- return self.root_dir / "release"
-
- @property
- def should_upload_docker(self) -> bool:
- return all(
- [
- (self.is_prod_release or self.branch in ["main", "dockertest"]),
- self.should_build_docker,
- self.has_docker_creds,
- ]
- )
-
- @property
- def should_upload_aws(self) -> bool:
- return all(
- [
- self.has_aws_creds,
- (
- self.should_build_wheel
- or self.should_build_pyinstaller
- or self.should_build_wininstaller
- ),
- ]
- )
-
- @property
- def should_upload_pypi(self) -> bool:
- return all(
- [
- self.is_prod_release,
- self.should_build_wheel,
- self.has_twine_creds,
- ]
- )
-
- @property
- def upload_dir(self) -> str:
- if self.tag:
- return self.version
- else:
- return f"branches/{self.version}"
-
- @property
- def version(self) -> str:
- if self.tag:
- if self.tag.startswith("v"):
- try:
- parver.Version.parse(self.tag[1:], strict=True)
- except parver.ParseError:
- return self.tag
- return self.tag[1:]
- return self.tag
- elif self.branch:
- return self.branch
- else:
- raise BuildError(
- "We're on neither a tag nor a branch - could not establish version"
- )
-
-
-def build_wheel(be: BuildEnviron) -> None: # pragma: no cover
- click.echo("Building wheel...")
- subprocess.check_call(
- [
- "python",
- "setup.py",
- "-q",
- "bdist_wheel",
- "--dist-dir",
- be.dist_dir,
- ]
- )
- (whl,) = be.dist_dir.glob("mitmproxy-*-py3-none-any.whl")
- click.echo(f"Found wheel package: {whl}")
- subprocess.check_call(["tox", "-e", "wheeltest", "--", whl])
-
-
-DOCKER_PLATFORMS = "linux/amd64,linux/arm64"
-
-
-def build_docker_image(be: BuildEnviron) -> None: # pragma: no cover
- click.echo("Building Docker images...")
-
- (whl,) = be.dist_dir.glob("mitmproxy-*-py3-none-any.whl")
- docker_build_dir = be.release_dir / "docker"
- shutil.copy(whl, docker_build_dir / whl.name)
-
- subprocess.check_call(
- [
- "docker",
- "buildx",
- "build",
- "--tag",
- be.docker_tag,
- "--platform",
- DOCKER_PLATFORMS,
- "--build-arg",
- f"MITMPROXY_WHEEL={whl.name}",
- ".",
- ],
- cwd=docker_build_dir,
- )
- # smoke-test the newly built docker image
-
- # build again without --platform but with --load to make the tag available,
- # see https://github.com/docker/buildx/issues/59#issuecomment-616050491
- subprocess.check_call(
- [
- "docker",
- "buildx",
- "build",
- "--tag",
- be.docker_tag,
- "--load",
- "--build-arg",
- f"MITMPROXY_WHEEL={whl.name}",
- ".",
- ],
- cwd=docker_build_dir,
- )
- r = subprocess.run(
- [
- "docker",
- "run",
- "--rm",
- be.docker_tag,
- "mitmdump",
- "--version",
- ],
- check=True,
- capture_output=True,
- )
- print(r.stdout.decode())
- assert "Mitmproxy: " in r.stdout.decode()
-
-
-def build_pyinstaller(be: BuildEnviron) -> None: # pragma: no cover
- click.echo("Building pyinstaller package...")
-
- PYINSTALLER_SPEC = be.release_dir / "specs"
- PYINSTALLER_TEMP = be.build_dir / "pyinstaller"
- PYINSTALLER_DIST = be.build_dir / "binaries" / be.platform_tag
-
- if PYINSTALLER_TEMP.exists():
- shutil.rmtree(PYINSTALLER_TEMP)
- if PYINSTALLER_DIST.exists():
- shutil.rmtree(PYINSTALLER_DIST)
-
- if be.platform_tag == "windows":
- with chdir(PYINSTALLER_SPEC):
- click.echo("Building PyInstaller binaries in directory mode...")
- subprocess.check_call(
- [
- "pyinstaller",
- "--clean",
- "--workpath",
- PYINSTALLER_TEMP,
- "--distpath",
- PYINSTALLER_DIST,
- "./windows-dir.spec",
- ]
- )
- for tool in ["mitmproxy", "mitmdump", "mitmweb"]:
- click.echo(f"> {tool} --version")
- executable = (PYINSTALLER_DIST / "onedir" / tool).with_suffix(".exe")
- click.echo(subprocess.check_output([executable, "--version"]).decode())
-
- with be.archive(be.archive_path) as archive:
- for tool in ["mitmproxy", "mitmdump", "mitmweb"]:
- # We can't have a folder and a file with the same name.
- if tool == "mitmproxy":
- tool = "mitmproxy_main"
- # Make sure that we are in the spec folder.
- with chdir(PYINSTALLER_SPEC):
- click.echo(f"Building PyInstaller {tool} binary...")
- excludes = []
- if tool != "mitmweb":
- excludes.append("mitmproxy.tools.web")
- if tool != "mitmproxy_main":
- excludes.append("mitmproxy.tools.console")
-
- subprocess.check_call(
- [ # type: ignore
- "pyinstaller",
- "--clean",
- "--workpath",
- PYINSTALLER_TEMP,
- "--distpath",
- PYINSTALLER_DIST,
- "--onefile",
- "--console",
- "--icon",
- "icon.ico",
- ]
- + [x for e in excludes for x in ["--exclude-module", e]]
- + [tool]
- )
- # Delete the spec file - we're good without.
- os.remove(f"{tool}.spec")
-
- executable = PYINSTALLER_DIST / tool
- if be.platform_tag == "windows":
- executable = executable.with_suffix(".exe")
-
- # Remove _main suffix from mitmproxy executable
- if "_main" in executable.name:
- executable = executable.rename(
- executable.with_name(executable.name.replace("_main", ""))
- )
-
- # Test if it works at all O:-)
- click.echo(f"> {executable} --version")
- click.echo(subprocess.check_output([executable, "--version"]).decode())
-
- archive.add(str(executable), str(executable.name))
- click.echo(f"Packed {be.archive_path.name}.")
-
-
-def build_wininstaller(be: BuildEnviron) -> None: # pragma: no cover
- click.echo("Building wininstaller package...")
-
- IB_VERSION = "21.6.0"
- IB_SETUP_SHA256 = "2bc9f9945cb727ad176aa31fa2fa5a8c57a975bad879c169b93e312af9d05814"
- IB_DIR = be.release_dir / "installbuilder"
- IB_SETUP = IB_DIR / "setup" / f"{IB_VERSION}-installer.exe"
- IB_CLI = Path(
- fr"C:\Program Files\VMware InstallBuilder Enterprise {IB_VERSION}\bin\builder-cli.exe"
- )
- IB_LICENSE = IB_DIR / "license.xml"
-
- if not IB_LICENSE.exists() and not be.build_key:
- click.echo("Cannot build windows installer without secret key.")
- return
-
- if not IB_CLI.exists():
- if not IB_SETUP.exists():
- click.echo("Downloading InstallBuilder...")
-
- def report(block, blocksize, total):
- done = block * blocksize
- if round(100 * done / total) != round(100 * (done - blocksize) / total):
- click.secho(f"Downloading... {round(100 * done / total)}%")
-
- tmp = IB_SETUP.with_suffix(".tmp")
- urllib.request.urlretrieve(
- f"https://clients.bitrock.com/installbuilder/installbuilder-enterprise-{IB_VERSION}-windows-x64-installer.exe",
- tmp,
- reporthook=report,
- )
- tmp.rename(IB_SETUP)
-
- ib_setup_hash = hashlib.sha256()
- with IB_SETUP.open("rb") as fp:
- while True:
- data = fp.read(65_536)
- if not data:
- break
- ib_setup_hash.update(data)
- if ib_setup_hash.hexdigest() != IB_SETUP_SHA256: # pragma: no cover
- raise RuntimeError("InstallBuilder hashes don't match.")
-
- click.echo("Install InstallBuilder...")
- subprocess.run(
- [IB_SETUP, "--mode", "unattended", "--unattendedmodeui", "none"], check=True
- )
- assert IB_CLI.is_file()
-
- if not IB_LICENSE.exists():
- assert be.build_key
- click.echo("Decrypt InstallBuilder license...")
- f = cryptography.fernet.Fernet(be.build_key.encode())
- with open(IB_LICENSE.with_suffix(".xml.enc"), "rb") as infile, open(
- IB_LICENSE, "wb"
- ) as outfile:
- outfile.write(f.decrypt(infile.read()))
-
- click.echo("Run InstallBuilder...")
- subprocess.run(
- [
- IB_CLI,
- "build",
- str(IB_DIR / "mitmproxy.xml"),
- "windows-x64",
- "--license",
- str(IB_LICENSE),
- "--setvars",
- f"project.version={be.version}",
- "--verbose",
- ],
- check=True,
- )
- assert (be.dist_dir / f"mitmproxy-{be.version}-windows-x64-installer.exe").exists()
-
-
-@click.group(chain=True)
-def cli(): # pragma: no cover
- """
- mitmproxy build tool
- """
-
-
-@cli.command("build")
-def build(): # pragma: no cover
- """
- Build a binary distribution
- """
- be = BuildEnviron.from_env()
- be.dump_info()
-
- be.check_version()
- os.makedirs(be.dist_dir, exist_ok=True)
-
- if be.should_build_wheel:
- build_wheel(be)
- if be.should_build_docker:
- build_docker_image(be)
- if be.should_build_pyinstaller:
- build_pyinstaller(be)
- if be.should_build_wininstaller:
- build_wininstaller(be)
-
-
-@cli.command("upload")
-def upload(): # pragma: no cover
- """
- Upload build artifacts
-
- Uploads the wheels package to PyPi.
- Uploads the Pyinstaller and wheels packages to the snapshot server.
- Pushes the Docker image to Docker Hub.
- """
- be = BuildEnviron.from_env()
- be.dump_info()
-
- if be.is_pull_request:
- click.echo("Refusing to upload artifacts from a pull request!")
- return
-
- if be.should_upload_aws:
- num_files = len([name for name in be.dist_dir.iterdir() if name.is_file()])
- click.echo(f"Uploading {num_files} files to AWS dir {be.upload_dir}...")
- subprocess.check_call(
- [
- "aws",
- "s3",
- "cp",
- "--acl",
- "public-read",
- f"{be.dist_dir}/",
- f"s3://snapshots.mitmproxy.org/{be.upload_dir}/",
- "--recursive",
- ]
- )
-
- if be.should_upload_pypi:
- (whl,) = be.dist_dir.glob("mitmproxy-*-py3-none-any.whl")
- click.echo(f"Uploading {whl} to PyPi...")
- subprocess.check_call(["twine", "upload", whl])
-
- if be.should_upload_docker:
- click.echo(f"Uploading Docker image to tag={be.docker_tag}...")
- subprocess.check_call(
- [
- "docker",
- "login",
- "-u",
- be.docker_username,
- "-p",
- be.docker_password,
- ]
- )
-
- (whl,) = be.dist_dir.glob("mitmproxy-*-py3-none-any.whl")
- docker_build_dir = be.release_dir / "docker"
- shutil.copy(whl, docker_build_dir / whl.name)
- # buildx is a bit weird in that we need to reinvoke build, but oh well.
- subprocess.check_call(
- [
- "docker",
- "buildx",
- "build",
- "--tag",
- be.docker_tag,
- "--push",
- "--platform",
- DOCKER_PLATFORMS,
- "--build-arg",
- f"MITMPROXY_WHEEL={whl.name}",
- ".",
- ],
- cwd=docker_build_dir,
- )
-
- if be.is_prod_release:
- subprocess.check_call(
- [
- "docker",
- "buildx",
- "build",
- "--tag",
- "mitmproxy/mitmproxy:latest",
- "--push",
- "--platform",
- DOCKER_PLATFORMS,
- "--build-arg",
- f"MITMPROXY_WHEEL={whl.name}",
- ".",
- ],
- cwd=docker_build_dir,
- )
-
-
-if __name__ == "__main__": # pragma: no cover
- cli()
diff --git a/release/deploy-microsoft-store.py b/release/deploy-microsoft-store.py
new file mode 100755
index 000000000..ca99e9f00
--- /dev/null
+++ b/release/deploy-microsoft-store.py
@@ -0,0 +1,173 @@
+#!/usr/bin/env python3
+"""
+This script submits a single MSIX installer to the Microsoft Store.
+
+The client_secret will expire after 24 months and needs to be recreated (see docstring below).
+
+References:
+ - https://docs.microsoft.com/en-us/windows/uwp/monetize/manage-app-submissions
+ - https://docs.microsoft.com/en-us/windows/uwp/monetize/python-code-examples-for-the-windows-store-submission-api
+ - https://docs.microsoft.com/en-us/windows/uwp/monetize/python-code-examples-for-submissions-game-options-and-trailers
+"""
+import http.client
+import json
+import os
+import sys
+import tempfile
+import urllib.parse
+from zipfile import ZipFile
+
+# Security: No third-party dependencies here!
+
+assert (
+ os.environ["GITHUB_REF"].startswith("refs/tags/")
+ or os.environ["GITHUB_REF"] == "refs/heads/citest"
+)
+
+app_id = os.environ["MSFT_APP_ID"]
+"""
+The public application ID / product ID of the app.
+For https://www.microsoft.com/store/productId/9NWNDLQMNZD7, the app id is 9NWNDLQMNZD7.
+"""
+app_flight = os.environ.get("MSFT_APP_FLIGHT", "")
+"""
+The application flight we want to target. This is useful for deploying CI test builds to a subset of users.
+"""
+tenant_id = os.environ["MSFT_TENANT_ID"]
+"""
+The tenant ID for the Azure AD application.
+https://partner.microsoft.com/en-us/dashboard/account/v3/usermanagement
+"""
+client_id = os.environ["MSFT_CLIENT_ID"]
+"""
+The client ID for the Azure AD application.
+https://partner.microsoft.com/en-us/dashboard/account/v3/usermanagement
+"""
+client_secret = os.environ["MSFT_CLIENT_SECRET"]
+"""
+The client secret. Expires every 24 months and needs to be recreated at
+https://partner.microsoft.com/en-us/dashboard/account/v3/usermanagement
+or at https://portal.azure.com/ -> App registrations -> Certificates & Secrets -> Client secrets.
+"""
+
+
+try:
+ _, msi_file = sys.argv
+except ValueError:
+ print(f"Usage: {sys.argv[0]} installer.msix")
+ sys.exit(1)
+
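+# Flight submissions are addressed as a sub-resource of the app and use flight-specific JSON keys.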
+if app_flight:
+ app_id = f"{app_id}/flights/{app_flight}"
+ pending_submission = "pendingFlightSubmission"
+ packages = "flightPackages"
+else:
+ pending_submission = "pendingApplicationSubmission"
+ packages = "applicationPackages"
+
+print("Obtaining auth token...")
+auth = http.client.HTTPSConnection("login.microsoftonline.com")
+auth.request(
+ "POST",
+ f"/{tenant_id}/oauth2/token",
+ body=urllib.parse.urlencode(
+ {
+ "grant_type": "client_credentials",
+ "client_id": client_id,
+ "client_secret": client_secret,
+ "resource": "https://manage.devcenter.microsoft.com",
+ }
+ ),
+ headers={"Content-Type": "application/x-www-form-urlencoded; charset=utf-8"},
+)
+token = json.loads(auth.getresponse().read())["access_token"]
+auth.close()
+headers = {
+ "Authorization": f"Bearer {token}",
+ "Content-type": "application/json",
+ "User-Agent": "Python/mitmproxy",
+}
+
+
+def request(method: str, path: str, body: str = "") -> bytes:
+ print(f"{method} {path}")
+ conn.request(method, path, body, headers=headers)
+ resp = conn.getresponse()
+ data = resp.read()
+ print(f"{resp.status} {resp.reason}")
+ # noinspection PyUnreachableCode
+ if False:
+ assert "CI" not in os.environ
+ # This contains sensitive data such as the fileUploadUrl, so don't print it in production.
+ print(data.decode(errors="ignore"))
+ assert 200 <= resp.status < 300
+ return data
+
+
+print("Getting app info...")
+conn = http.client.HTTPSConnection("manage.devcenter.microsoft.com")
+# print(request("GET", f"/v1.0/my/applications/{app_id}/listflights"))
+app_info = json.loads(request("GET", f"/v1.0/my/applications/{app_id}"))
+
+if pending_submission in app_info:
+ print("Deleting pending submission...")
+ request(
+ "DELETE",
+ f"/v1.0/my/applications/{app_id}/submissions/{app_info[pending_submission]['id']}",
+ )
+
+print("Creating new submission...")
+submission = json.loads(request("POST", f"/v1.0/my/applications/{app_id}/submissions"))
+
+print("Updating submission...")
+# Mark all existing packages for deletion.
+for package in submission[packages]:
+ package["fileStatus"] = "PendingDelete"
+submission[packages].append(
+ {
+ "fileName": f"installer.msix",
+ "fileStatus": "PendingUpload",
+ "minimumDirectXVersion": "None",
+ "minimumSystemRam": "None",
+ }
+)
+request(
+ "PUT",
+ f"/v1.0/my/applications/{app_id}/submissions/{submission['id']}",
+ json.dumps(submission),
+)
+conn.close()
+
+print(f"Zipping {msi_file}...")
+with tempfile.TemporaryFile() as zipfile:
+ with ZipFile(zipfile, "w") as f:
+ f.write(msi_file, f"installer.msix")
+ zip_size = zipfile.tell()
+ zipfile.seek(0)
+
+ print("Uploading zip file...")
+ host, _, path = submission["fileUploadUrl"].removeprefix("https://").partition("/")
+ upload = http.client.HTTPSConnection(host)
+ upload.request(
+ "PUT",
+ "/" + path,
+ zipfile,
+ {
+ "x-ms-blob-type": "BlockBlob",
+ "x-ms-version": "2019-12-12",
+ "Content-Length": str(zip_size),
+ },
+ )
+resp = upload.getresponse()
+resp.read()
+print(resp.status, resp.reason)
+assert 200 <= resp.status < 300
+upload.close()
+
+print("Publishing submission...")
+# The previous connection has timed out during the upload, so open a new one.
+conn = http.client.HTTPSConnection("manage.devcenter.microsoft.com")
+request("POST", f"/v1.0/my/applications/{app_id}/submissions/{submission['id']}/commit")
+# We could wait until it's published here, but CI is billed by the minute.
+# resp = request("GET", f"/v1.0/my/applications/{app_id}/submissions/{submission['id']}/status")
+conn.close()
diff --git a/release/deploy.py b/release/deploy.py
index 3c4d4aaaa..6fce5e1e2 100755
--- a/release/deploy.py
+++ b/release/deploy.py
@@ -1,12 +1,13 @@
#!/usr/bin/env python3
import os
-import re
import subprocess
from pathlib import Path
from typing import Optional
# Security: No third-party dependencies here!
+root = Path(__file__).absolute().parent.parent
+
if __name__ == "__main__":
ref = os.environ["GITHUB_REF"]
branch: Optional[str] = None
@@ -20,10 +21,10 @@ if __name__ == "__main__":
# Upload binaries (be it release or snapshot)
if tag:
- # remove "v" prefix from version tags.
- upload_dir = re.sub(r"^v([\d.]+)$", r"\1", tag)
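+        # Tags no longer carry a "v" prefix (see release/README.md), so use the tag verbatim.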
+ upload_dir = tag
else:
upload_dir = f"branches/{branch}"
+ print(f"Uploading binaries to snapshots.mitmproxy.org/{upload_dir}...")
subprocess.check_call(
[
"aws",
@@ -31,7 +32,7 @@ if __name__ == "__main__":
"cp",
"--acl",
"public-read",
- f"./release/dist/",
+ root / "release/dist",
f"s3://snapshots.mitmproxy.org/{upload_dir}/",
"--recursive",
]
@@ -39,11 +40,13 @@ if __name__ == "__main__":
# Upload releases to PyPI
if tag:
- (whl,) = Path("release/dist/").glob("mitmproxy-*-py3-none-any.whl")
+ print(f"Uploading wheel to PyPI...")
+ (whl,) = root.glob("release/dist/mitmproxy-*-py3-none-any.whl")
subprocess.check_call(["twine", "upload", whl])
# Upload dev docs
- if branch == "main" or branch == "actions-hardening": # FIXME remove
+ if branch == "main":
+ print(f"Uploading dev docs...")
subprocess.check_call(["aws", "configure", "set", "preview.cloudfront", "true"])
subprocess.check_call(
[
@@ -53,7 +56,7 @@ if __name__ == "__main__":
"--delete",
"--acl",
"public-read",
- "docs/public",
+ root / "docs/public",
"s3://docs.mitmproxy.org/dev",
]
)
diff --git a/release/specs/mitmproxy_main b/release/specs/mitmproxy_main
deleted file mode 100644
index 59160ff79..000000000
--- a/release/specs/mitmproxy_main
+++ /dev/null
@@ -1,3 +0,0 @@
-#!/usr/bin/env python
-from mitmproxy.tools.main import mitmproxy
-mitmproxy()
diff --git a/release/specs/standalone.spec b/release/specs/standalone.spec
new file mode 100644
index 000000000..768b2ade0
--- /dev/null
+++ b/release/specs/standalone.spec
@@ -0,0 +1,26 @@
+# -*- mode: python ; coding: utf-8 -*-
+
+for tool in ["mitmproxy", "mitmdump", "mitmweb"]:
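+    # each binary excludes the front-ends it does not use (e.g. mitmdump ships neither the web
+    # nor the console UI), which keeps the one-file builds smaller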
+ excludes = []
+ if tool != "mitmweb":
+ excludes.append("mitmproxy.tools.web")
+ if tool != "mitmproxy":
+ excludes.append("mitmproxy.tools.console")
+
+ a = Analysis(
+ [tool],
+ excludes=excludes,
+ )
+ pyz = PYZ(a.pure, a.zipped_data)
+
+ EXE(
+ pyz,
+ a.scripts,
+ a.binaries,
+ a.zipfiles,
+ a.datas,
+ [],
+ name=tool,
+ console=True,
+ icon='icon.ico',
+ )
diff --git a/release/windows-installer/AppxManifest.xml b/release/windows-installer/AppxManifest.xml
new file mode 100644
index 000000000..25699f6c6
--- /dev/null
+++ b/release/windows-installer/AppxManifest.xml
@@ -0,0 +1,85 @@
+  [XML markup lost in extraction. Recoverable manifest text: "mitmproxy", "mitmproxy.org",
+   "mitmproxy is a free and open source interactive HTTPS proxy.", "Assets\logo.150x150.png"]
diff --git a/release/windows-store-experiment/Assets/logo.150x150.png b/release/windows-installer/Assets/logo.150x150.png
similarity index 100%
rename from release/windows-store-experiment/Assets/logo.150x150.png
rename to release/windows-installer/Assets/logo.150x150.png
diff --git a/release/windows-store-experiment/Assets/logo.44x44.png b/release/windows-installer/Assets/logo.44x44.png
similarity index 100%
rename from release/windows-store-experiment/Assets/logo.44x44.png
rename to release/windows-installer/Assets/logo.44x44.png
diff --git a/release/windows-store-experiment/Assets/logo.50x50.png b/release/windows-installer/Assets/logo.50x50.png
similarity index 100%
rename from release/windows-store-experiment/Assets/logo.50x50.png
rename to release/windows-installer/Assets/logo.50x50.png
diff --git a/release/windows-store-experiment/AppxManifest.xml b/release/windows-store-experiment/AppxManifest.xml
deleted file mode 100644
index 5e687f006..000000000
--- a/release/windows-store-experiment/AppxManifest.xml
+++ /dev/null
@@ -1,35 +0,0 @@
-  [XML markup lost in extraction. Recoverable manifest text: "Mitmproxy", "Maximilian Hils",
-   "Assets\logo.44x44.png"]
\ No newline at end of file
diff --git a/release/windows-store-experiment/README.md b/release/windows-store-experiment/README.md
deleted file mode 100644
index 3118aa23e..000000000
--- a/release/windows-store-experiment/README.md
+++ /dev/null
@@ -1,20 +0,0 @@
-# Mitmproxy on the Windows Store
-
-@mhils experimented with bringing mitmproxy to the Windows Store using the Desktop Bridge. This would replace our current InstallBuilder setup and allow for clean installs and - more importantly - automatic updates.
-
-## Advantages
-
-- Automatic updates
-- Clean installs
-- Very simple setup on our end
-- Possibility to roll out experimental releases to a subset of users
-
-## Disadvantages
-
-- No support for mitmproxy. That only runs under WSL. Making WSL nicer is a complementary effort.
-- "Your developer account doesn’t have permission to submit apps converted with the Desktop Bridge at this time." (requested)
-- New releases need to be submitted manually (Submission API is in preview).
-
-## Notes
-
-We do not want to force anyone to use this, we would of course keep our portable binaries (and, of course, WSL).
diff --git a/test/release/test_cibuild.py b/test/release/test_cibuild.py
deleted file mode 100644
index c1ec0aed2..000000000
--- a/test/release/test_cibuild.py
+++ /dev/null
@@ -1,240 +0,0 @@
-import io
-from pathlib import Path
-
-import pytest
-
-from release import cibuild
-
-root = Path(__file__).parent.parent.parent
-
-
-def test_buildenviron_live():
- be = cibuild.BuildEnviron.from_env()
- assert be.release_dir
-
-
-def test_buildenviron_common():
- be = cibuild.BuildEnviron(
- system="Linux",
- root_dir=root,
- branch="main",
- )
- assert be.release_dir == be.root_dir / "release"
- assert be.dist_dir == be.root_dir / "release" / "dist"
- assert be.build_dir == be.root_dir / "release" / "build"
- assert not be.has_docker_creds
-
- cs = io.StringIO()
- be.dump_info(cs)
- assert cs.getvalue()
-
- be = cibuild.BuildEnviron(
- system="Unknown",
- root_dir=root,
- )
- with pytest.raises(cibuild.BuildError):
- be.version
- with pytest.raises(cibuild.BuildError):
- be.platform_tag
-
-
-def test_buildenviron_pr(monkeypatch):
- # Simulates a PR. We build everything, but don't have access to secret
- # credential env variables.
- monkeypatch.setenv("GITHUB_REF", "refs/pull/42/merge")
- monkeypatch.setenv("CI_BUILD_WHEEL", "1")
- monkeypatch.setenv("GITHUB_EVENT_NAME", "pull_request")
-
- be = cibuild.BuildEnviron.from_env()
- assert be.branch == "pr-42"
- assert be.is_pull_request
- assert be.should_build_wheel
- assert not be.should_upload_pypi
-
-
-def test_buildenviron_commit():
- # Simulates an ordinary commit on the master branch.
- be = cibuild.BuildEnviron(
- system="Linux",
- root_dir=root,
- branch="main",
- is_pull_request=False,
- should_build_wheel=True,
- should_build_pyinstaller=True,
- should_build_docker=True,
- docker_username="foo",
- docker_password="bar",
- has_aws_creds=True,
- )
- assert be.docker_tag == "mitmproxy/mitmproxy:dev"
- assert be.should_upload_docker
- assert not be.should_upload_pypi
- assert be.should_upload_docker
- assert be.should_upload_aws
- assert not be.is_prod_release
- assert not be.is_maintenance_branch
-
-
-def test_buildenviron_releasetag():
- # Simulates a tagged release on a release branch.
- be = cibuild.BuildEnviron(
- system="Linux",
- root_dir=root,
- tag="v0.0.1",
- should_build_wheel=True,
- should_build_docker=True,
- should_build_pyinstaller=True,
- has_twine_creds=True,
- docker_username="foo",
- docker_password="bar",
- )
- assert be.tag == "v0.0.1"
- assert be.branch is None
- assert be.version == "0.0.1"
- assert be.upload_dir == "0.0.1"
- assert be.docker_tag == "mitmproxy/mitmproxy:0.0.1"
- assert be.should_upload_pypi
- assert be.should_upload_docker
- assert be.is_prod_release
- assert not be.is_maintenance_branch
-
-
-def test_buildenviron_namedtag():
- # Simulates a non-release tag on a branch.
- be = cibuild.BuildEnviron(
- system="Linux",
- root_dir=root,
- tag="anyname",
- should_build_wheel=True,
- should_build_docker=True,
- should_build_pyinstaller=True,
- has_twine_creds=True,
- docker_username="foo",
- docker_password="bar",
- )
- assert be.tag == "anyname"
- assert be.branch is None
- assert be.version == "anyname"
- assert be.upload_dir == "anyname"
- assert be.docker_tag == "mitmproxy/mitmproxy:anyname"
- assert not be.should_upload_pypi
- assert not be.should_upload_docker
- assert not be.is_prod_release
- assert not be.is_maintenance_branch
-
-
-def test_buildenviron_dev_branch():
- # Simulates a commit on a development branch on the main repo
- be = cibuild.BuildEnviron(
- system="Linux",
- root_dir=root,
- branch="mybranch",
- should_build_wheel=True,
- should_build_docker=True,
- should_build_pyinstaller=True,
- has_twine_creds=True,
- docker_username="foo",
- docker_password="bar",
- )
- assert be.tag is None
- assert be.branch == "mybranch"
- assert be.version == "mybranch"
- assert be.upload_dir == "branches/mybranch"
- assert not be.should_upload_pypi
- assert not be.should_upload_docker
- assert not be.is_maintenance_branch
-
-
-def test_buildenviron_maintenance_branch():
- # Simulates a commit on a release maintenance branch on the main repo
- be = cibuild.BuildEnviron(
- system="Linux",
- root_dir=root,
- branch="v0.x",
- should_build_wheel=True,
- should_build_docker=True,
- should_build_pyinstaller=True,
- has_twine_creds=True,
- docker_username="foo",
- docker_password="bar",
- )
- assert be.tag is None
- assert be.branch == "v0.x"
- assert be.version == "v0.x"
- assert be.upload_dir == "branches/v0.x"
- assert not be.should_upload_pypi
- assert not be.should_upload_docker
- assert be.is_maintenance_branch
-
-
-def test_buildenviron_osx(tmp_path):
- be = cibuild.BuildEnviron(
- system="Darwin",
- root_dir=root,
- tag="v0.0.1",
- )
- assert be.platform_tag == "osx"
- assert be.archive_path == be.dist_dir / "mitmproxy-0.0.1-osx.tar.gz"
-
- with be.archive(tmp_path / "arch"):
- pass
- assert (tmp_path / "arch").exists()
-
-
-def test_buildenviron_windows(tmp_path):
- be = cibuild.BuildEnviron(
- system="Windows",
- root_dir=root,
- tag="v0.0.1",
- )
- assert be.platform_tag == "windows"
- assert be.archive_path == be.dist_dir / "mitmproxy-0.0.1-windows.zip"
-
- with be.archive(tmp_path / "arch"):
- pass
- assert (tmp_path / "arch").exists()
-
-
-@pytest.mark.parametrize(
- "version, tag, ok",
- [
- ("3.0.0.dev", "", True), # regular snapshot
- ("3.0.0.dev", "v3.0.0", False), # forgot to remove ".dev" on bump
- ("3.0.0", "", False), # forgot to re-add ".dev"
- ("3.0.0", "v4.0.0", False), # version mismatch
- ("3.0.0", "v3.0.0", True), # regular release
- ("3.0.0.rc1", "v3.0.0.rc1", False), # non-canonical.
- ("3.0.0.dev", "anyname", True), # tagged test/dev release
- ("3.0.0", "3.0.0", False), # tagged, but without v prefix
- ],
-)
-def test_buildenviron_check_version(version, tag, ok, tmpdir):
- tmpdir.mkdir("mitmproxy").join("version.py").write(f'VERSION = "{version}"')
-
- be = cibuild.BuildEnviron(
- root_dir=tmpdir,
- system="Windows",
- tag=tag,
- )
- if ok:
- be.check_version()
- else:
- with pytest.raises(ValueError):
- be.check_version()
-
-
-def test_bool_from_env(monkeypatch):
- monkeypatch.setenv("FOO", "1")
- assert cibuild.bool_from_env("FOO")
-
- monkeypatch.setenv("FOO", "0")
- assert not cibuild.bool_from_env("FOO")
-
- monkeypatch.setenv("FOO", "false")
- assert not cibuild.bool_from_env("FOO")
-
- monkeypatch.setenv("FOO", "")
- assert not cibuild.bool_from_env("FOO")
-
- monkeypatch.delenv("FOO")
- assert not cibuild.bool_from_env("FOO")