2020-12-06 13:08:22 +00:00
|
|
|
import random
|
2022-05-25 02:06:31 +00:00
|
|
|
from typing import Any
|
2020-12-06 13:08:22 +00:00
|
|
|
|
2018-06-23 11:53:56 +00:00
|
|
|
import pytest
|
|
|
|
|
|
|
|
|
2022-05-25 02:06:31 +00:00
|
|
|
def generate_largish_json(n_rows: int = 91746) -> dict[str, Any]:
    """Build a column-oriented dict of random data suitable for JSON serialization.

    With n_rows = 91746 the serialized output weighs roughly ~15 MB/10k rows.

    Note: the random seed is deliberately not fixed here — callers only care
    about the shape of the data, not the actual values.
    """
    experiment_branches = [
        "notification-interval-longer",
        "notification-interval-short",
        "control",
    ]
    # One zero-argument generator per column; the five integer columns share
    # the same distribution, so they are added in a loop.
    generators: dict[str, Any] = {
        "column0": lambda: "XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX",
        "column1": lambda: random.choice(experiment_branches),
        "column2": lambda: random.choice([True, False]),
    }
    for col_idx in range(3, 8):
        generators[f"column{col_idx}"] = lambda: random.randint(0, 4)

    return {
        name: [make_value() for _ in range(n_rows)]
        for name, make_value in generators.items()
    }
|
|
|
|
|
|
|
|
|
2021-04-08 07:01:51 +00:00
|
|
|
@pytest.mark.driver_timeout(30)
def test_extra_import(selenium, request):
    """Check that names re-exported from the pandas top-level package can be
    imported inside the pyodide runtime after loading the package."""
    selenium.load_package("pandas")
    selenium.run("from pandas import Series, DataFrame")
|
2018-09-12 14:14:51 +00:00
|
|
|
|
|
|
|
|
2022-05-25 20:34:40 +00:00
|
|
|
@pytest.mark.xfail_browsers(
    chrome="test_load_largish_file triggers a fatal runtime error in Chrome 89 see #1495",
    node="open_url doesn't work in node",
)
@pytest.mark.driver_timeout(40)
@pytest.mark.skip_refcount_check
def test_load_largish_file(selenium_standalone, request, httpserver):
    """Serve a large generated JSON payload over HTTP and check that pyodide's
    pandas can fetch it with ``pyodide.http.open_url`` and parse it into a
    DataFrame of the expected shape."""
    selenium = selenium_standalone
    for package in ("pandas", "matplotlib"):
        selenium.load_package(package)

    n_rows = 91746
    payload = generate_largish_json(n_rows)

    # The CORS header allows the page under test to fetch from the local
    # test HTTP server's origin.
    handler = httpserver.expect_oneshot_request("/pandas_largish")
    handler.respond_with_json(
        payload, headers={"Access-Control-Allow-Origin": "*"}
    )
    request_url = httpserver.url_for("/pandas_largish")

    selenium.run(
        f"""
        import pyodide.http
        import matplotlib.pyplot as plt
        import pandas as pd

        df = pd.read_json(pyodide.http.open_url('{request_url}'))
        assert df.shape == ({n_rows}, 8)
        """
    )
|