ENH: upgrade CI tests to python>=3.10 #837

Merged 14 commits on Dec 23, 2024
36 changes: 16 additions & 20 deletions .github/workflows/python.yaml
@@ -77,29 +77,28 @@ jobs:
matrix:
os: ["ubuntu-latest", "macos-14", "windows-latest"]
python-version: ["3.9", "3.10", "3.11", "3.12"]
module: ["xorbits", "xorbits/numpy", "xorbits/pandas"]
module: ["xorbits"]
exclude:
- { os: macos-14, python-version: 3.10}
- { os: macos-14, python-version: 3.9}
- { os: windows-latest, python-version: 3.10}
- { os: windows-latest, python-version: 3.9}
- { os: windows-latest, module: kubernetes}
- { os: macos-14, module: kubernetes}
include:
- { os: ubuntu-latest, module: _mars/dataframe, python-version: 3.9 }
- { os: ubuntu-latest, module: learn, python-version: 3.9 }
- { os: ubuntu-latest, module: mars-core, python-version: 3.9 }
- { os: ubuntu-20.04, module: hadoop, python-version: 3.9 }
- { os: ubuntu-latest, module: vineyard, python-version: 3.11 }
- { os: ubuntu-latest, module: external-storage, python-version: 3.11 }
- { os: ubuntu-latest, module: doc-build, python-version: 3.9 }
- { os: self-hosted, module: gpu, python-version: 3.11}
- { os: ubuntu-latest, module: jax, python-version: 3.9 }
- { os: ubuntu-latest, module: datasets, python-version: 3.9 }
- { os: ubuntu-latest, module: kubernetes, python-version: 3.11 }
- { os: ubuntu-latest, module: _mars/dataframe, python-version: "3.11" }
- { os: ubuntu-latest, module: learn, python-version: "3.11" }
- { os: ubuntu-latest, module: mars-core, python-version: "3.11" }
- { os: ubuntu-20.04, module: hadoop, python-version: "3.10" }
- { os: ubuntu-latest, module: vineyard, python-version: "3.11" }
- { os: ubuntu-latest, module: external-storage, python-version: "3.11" }
- { os: ubuntu-latest, module: doc-build, python-version: "3.11" }
- { os: self-hosted, module: gpu, python-version: "3.11" }
- { os: ubuntu-latest, module: jax, python-version: "3.10" }
- { os: ubuntu-latest, module: datasets, python-version: "3.10" }
# a self-hosted runner that needs computing resources; activate when necessary
# - { os: juicefs-ci, module: kubernetes-juicefs, python-version: 3.9 }
# TODO: slurm & kubernetes tests are not stable
# - { os: ubuntu-latest, module: slurm, python-version: 3.9 }
# - { os: ubuntu-latest, module: kubernetes, python-version: 3.11 }
# always test compatibility with the latest version
# - { os: ubuntu-latest, module: compatibility, python-version: 3.9 }
steps:
@@ -156,11 +155,8 @@ jobs:
pip install --upgrade --upgrade-strategy only-if-needed --no-cache-dir ".[doc]"
else
pip install -e "git+https://github.com/xorbitsai/xoscar.git@main#subdirectory=python&egg=xoscar"
pip install -U numpy scipy cython pyftpdlib coverage flaky numexpr
pip install -U numpy scipy cython pyftpdlib coverage flaky numexpr openpyxl

if [[ "$MODULE" == "xorbits/pandas" ]]; then
pip install openpyxl
fi
if [[ "$MODULE" == "mars-core" ]]; then
pip install oss2
fi
@@ -312,12 +308,12 @@ jobs:
-W ignore::PendingDeprecationWarning \
--cov-config=setup.cfg --cov-report=xml --cov=xorbits \
xorbits
elif [[ "$MODULE" == "xorbits/pandas" ]]; then
# xorbits/pandas
pytest --timeout=1500 \
-W ignore::PendingDeprecationWarning \
--cov-config=setup.cfg --cov-report=xml \
--cov=xorbits xorbits/pandas
elif [[ "$MODULE" == "xorbits/numpy" ]]; then
# xorbits/numpy
pytest --timeout=1500 \
-W ignore::PendingDeprecationWarning \
--cov-config=setup.cfg --cov-report=xml --cov=xorbits \
30 changes: 15 additions & 15 deletions python/xorbits/_mars/lib/sparse/tests/test_sparse.py
@@ -54,13 +54,13 @@ def test_sparse_creation():
s = SparseNDArray(s1_data)
assert s.ndim == 2
assert isinstance(s, SparseMatrix)
assert_array_equal(s.toarray(), s1_data.A)
assert_array_equal(s.todense(), s1_data.A)
assert_array_equal(s.toarray(), s1_data.toarray())
assert_array_equal(s.todense(), s1_data.todense())

ss = pickle.loads(pickle.dumps(s))
assert s == ss
assert_array_equal(ss.toarray(), s1_data.A)
assert_array_equal(ss.todense(), s1_data.A)
assert_array_equal(ss.toarray(), s1_data.toarray())
assert_array_equal(ss.todense(), s1_data.todense())

v = SparseNDArray(v1, shape=(3,))
assert s.ndim
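Context for the `.A` → `.toarray()` swaps throughout this PR: on `scipy.sparse` matrices, `.A` is just a property alias for `.toarray()`, and newer SciPy releases deprecate it (the newer sparse-array API drops it entirely). A minimal sketch of the equivalence, assuming SciPy is installed:

```python
import numpy as np
from scipy import sparse

m = sparse.csr_matrix(np.eye(3))
# .toarray() always returns a numpy.ndarray; the .A shorthand is a
# deprecated alias on spmatrix, so the tests spell out
# .toarray()/.todense() to stay portable across SciPy versions.
np.testing.assert_array_equal(m.toarray(), np.eye(3))
assert isinstance(m.todense(), np.matrix)  # todense() keeps matrix semantics
```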
@@ -330,12 +330,12 @@ def test_sparse_dot():

assert_array_equal(mls.dot(s1, v1_s), s1.dot(v1_data))
assert_array_equal(mls.dot(s2, v1_s), s2.dot(v1_data))
assert_array_equal(mls.dot(v2_s, s1), v2_data.dot(s1_data.A))
assert_array_equal(mls.dot(v2_s, s2), v2_data.dot(s2_data.A))
assert_array_equal(mls.dot(v2_s, s1), v2_data.dot(s1_data.toarray()))
assert_array_equal(mls.dot(v2_s, s2), v2_data.dot(s2_data.toarray()))
assert_array_equal(mls.dot(v1_s, v1_s), v1_data.dot(v1_data), almost=True)
assert_array_equal(mls.dot(v2_s, v2_s), v2_data.dot(v2_data), almost=True)

assert_array_equal(mls.dot(v2_s, s1, sparse=False), v2_data.dot(s1_data.A))
assert_array_equal(mls.dot(v2_s, s1, sparse=False), v2_data.dot(s1_data.toarray()))
assert_array_equal(mls.dot(v1_s, v1_s, sparse=False), v1_data.dot(v1_data))


@@ -389,7 +389,7 @@ def test_sparse_fill_diagonal():
arr = SparseNDArray(s1)
arr.fill_diagonal(3)

expected = s1.copy().A
expected = s1.copy().toarray()
np.fill_diagonal(expected, 3)

np.testing.assert_array_equal(arr.toarray(), expected)
@@ -398,7 +398,7 @@ def test_sparse_fill_diagonal():
arr = SparseNDArray(s1)
arr.fill_diagonal(3, wrap=True)

expected = s1.copy().A
expected = s1.copy().toarray()
np.fill_diagonal(expected, 3, wrap=True)

np.testing.assert_array_equal(arr.toarray(), expected)
@@ -407,7 +407,7 @@ def test_sparse_fill_diagonal():
arr = SparseNDArray(s1)
arr.fill_diagonal([1, 2, 3])

expected = s1.copy().A
expected = s1.copy().toarray()
np.fill_diagonal(expected, [1, 2, 3])

np.testing.assert_array_equal(arr.toarray(), expected)
@@ -416,7 +416,7 @@ def test_sparse_fill_diagonal():
arr = SparseNDArray(s1)
arr.fill_diagonal([1, 2, 3], wrap=True)

expected = s1.copy().A
expected = s1.copy().toarray()
np.fill_diagonal(expected, [1, 2, 3], wrap=True)

np.testing.assert_array_equal(arr.toarray(), expected)
@@ -426,7 +426,7 @@ def test_sparse_fill_diagonal():
arr = SparseNDArray(s1)
arr.fill_diagonal(val)

expected = s1.copy().A
expected = s1.copy().toarray()
np.fill_diagonal(expected, val)

np.testing.assert_array_equal(arr.toarray(), expected)
@@ -436,7 +436,7 @@ def test_sparse_fill_diagonal():
arr = SparseNDArray(s1)
arr.fill_diagonal(val, wrap=True)

expected = s1.copy().A
expected = s1.copy().toarray()
np.fill_diagonal(expected, val, wrap=True)

np.testing.assert_array_equal(arr.toarray(), expected)
@@ -446,7 +446,7 @@ def test_sparse_fill_diagonal():
arr = SparseNDArray(s1)
arr.fill_diagonal(val)

expected = s1.copy().A
expected = s1.copy().toarray()
np.fill_diagonal(expected, val)

np.testing.assert_array_equal(arr.toarray(), expected)
@@ -456,7 +456,7 @@ def test_sparse_fill_diagonal():
arr = SparseNDArray(s1)
arr.fill_diagonal(val, wrap=True)

expected = s1.copy().A
expected = s1.copy().toarray()
np.fill_diagonal(expected, val, wrap=True)

np.testing.assert_array_equal(arr.toarray(), expected)
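For readers unfamiliar with the `wrap` flag exercised above: on tall matrices, `np.fill_diagonal` stops at the last column unless `wrap=True`, in which case the diagonal restarts below. A quick illustration:

```python
import numpy as np

a = np.zeros((5, 3), dtype=int)
np.fill_diagonal(a, 1)             # writes (0,0), (1,1), (2,2) and stops
b = np.zeros((5, 3), dtype=int)
np.fill_diagonal(b, 1, wrap=True)  # also writes (4,0) after skipping row 3
assert a[4, 0] == 0 and b[4, 0] == 1
```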
@@ -67,27 +67,33 @@ class FakeClusterAPI(ClusterAPI):
async def create(cls, address: str, **kw):
dones, _ = await asyncio.wait(
[
mo.create_actor(
SupervisorPeerLocatorActor,
"fixed",
address,
uid=SupervisorPeerLocatorActor.default_uid(),
address=address,
asyncio.create_task(
mo.create_actor(
SupervisorPeerLocatorActor,
"fixed",
address,
uid=SupervisorPeerLocatorActor.default_uid(),
address=address,
)
),
mo.create_actor(
MockNodeInfoCollectorActor,
with_gpu=kw.get("with_gpu", False),
uid=NodeInfoCollectorActor.default_uid(),
address=address,
asyncio.create_task(
mo.create_actor(
MockNodeInfoCollectorActor,
with_gpu=kw.get("with_gpu", False),
uid=NodeInfoCollectorActor.default_uid(),
address=address,
)
),
mo.create_actor(
NodeInfoUploaderActor,
NodeRole.WORKER,
interval=kw.get("upload_interval"),
band_to_resource=kw.get("band_to_resource"),
use_gpu=kw.get("use_gpu", False),
uid=NodeInfoUploaderActor.default_uid(),
address=address,
asyncio.create_task(
mo.create_actor(
NodeInfoUploaderActor,
NodeRole.WORKER,
interval=kw.get("upload_interval"),
band_to_resource=kw.get("band_to_resource"),
use_gpu=kw.get("use_gpu", False),
uid=NodeInfoUploaderActor.default_uid(),
address=address,
)
),
]
)
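The `asyncio.create_task` wrappers above are the compatibility fix driving this change: passing bare coroutine objects to `asyncio.wait` was deprecated in Python 3.8 and is rejected outright on 3.11+, so a CI matrix that drops older interpreters has to schedule the coroutines first. A self-contained sketch of the pattern (the `work`/`main` helpers are illustrative):

```python
import asyncio

async def work(n: int) -> int:
    await asyncio.sleep(0)
    return n

async def main() -> None:
    # asyncio.wait() requires Tasks/Futures; wrapping each coroutine in
    # create_task() schedules it and keeps the code valid on Python 3.11+.
    tasks = [asyncio.create_task(work(i)) for i in range(3)]
    done, _pending = await asyncio.wait(tasks)
    assert {t.result() for t in done} == {0, 1, 2}

asyncio.run(main())
```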
@@ -70,10 +70,10 @@ async def test_global_resource(actor_pool):
)

wait_coro = global_resource_ref.wait_band_idle(band)
(done, pending) = await asyncio.wait([wait_coro], timeout=0.5)
(done, pending) = await asyncio.wait([asyncio.create_task(wait_coro)], timeout=0.5)
assert not done
await global_resource_ref.release_subtask_resource(band, session_id, "subtask0")
(done, pending) = await asyncio.wait([wait_coro], timeout=0.5)
(done, pending) = await asyncio.wait([asyncio.create_task(wait_coro)], timeout=0.5)
assert done
assert band in await global_resource_ref.get_idle_bands(0)
assert ["subtask1"] == await global_resource_ref.apply_subtask_resources(
@@ -68,26 +68,32 @@ class FakeClusterAPI(ClusterAPI):
async def create(cls, address: str, **kw):
dones, _ = await asyncio.wait(
[
mo.create_actor(
SupervisorPeerLocatorActor,
"fixed",
address,
uid=SupervisorPeerLocatorActor.default_uid(),
address=address,
asyncio.create_task(
mo.create_actor(
SupervisorPeerLocatorActor,
"fixed",
address,
uid=SupervisorPeerLocatorActor.default_uid(),
address=address,
)
),
mo.create_actor(
MockNodeInfoCollectorActor,
uid=NodeInfoCollectorActor.default_uid(),
address=address,
asyncio.create_task(
mo.create_actor(
MockNodeInfoCollectorActor,
uid=NodeInfoCollectorActor.default_uid(),
address=address,
)
),
mo.create_actor(
NodeInfoUploaderActor,
NodeRole.WORKER,
interval=kw.get("upload_interval"),
band_to_resource=kw.get("band_to_resource"),
use_gpu=kw.get("use_gpu", False),
uid=NodeInfoUploaderActor.default_uid(),
address=address,
asyncio.create_task(
mo.create_actor(
NodeInfoUploaderActor,
NodeRole.WORKER,
interval=kw.get("upload_interval"),
band_to_resource=kw.get("band_to_resource"),
use_gpu=kw.get("use_gpu", False),
uid=NodeInfoUploaderActor.default_uid(),
address=address,
)
),
]
)
@@ -333,19 +333,19 @@ def test_arctan2_execution(setup):

assert y.issparse() is False
result = y.execute().fetch()
np.testing.assert_equal(result, np.arctan2(raw1, raw2.A))
np.testing.assert_equal(result, np.arctan2(raw1, raw2.toarray()))

y = arctan2(raw2, raw2)

assert y.issparse() is True
result = y.execute().fetch()
np.testing.assert_equal(result, np.arctan2(raw2.A, raw2.A))
np.testing.assert_equal(result, np.arctan2(raw2.toarray(), raw2.toarray()))

y = arctan2(0, raw2)

assert y.issparse() is True
result = y.execute().fetch()
np.testing.assert_equal(result, np.arctan2(0, raw2.A))
np.testing.assert_equal(result, np.arctan2(0, raw2.toarray()))
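The sparsity assertions in this test come down to `arctan2(0, 0) == 0`: the empty cells of a sparse second operand map back to zero, so the result can stay sparse, whereas a dense first operand (`raw1`) can produce nonzeros everywhere. A quick check of that property:

```python
import numpy as np

assert np.arctan2(0.0, 0.0) == 0.0  # empty sparse cells stay empty
assert np.arctan2(1.0, 0.0) != 0.0  # dense first operand: zeros map to pi/2
```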


@pytest.mark.ray_dag
4 changes: 2 additions & 2 deletions python/xorbits/_mars/tensor/core.py
@@ -43,7 +43,7 @@
TupleField,
)
from ..utils import on_deserialize_shape, on_serialize_shape
from .utils import fetch_corner_data, get_chunk_slices
from .utils import get_chunk_slices

logger = logging.getLogger(__name__)

@@ -227,7 +227,7 @@ def _to_str(self, representation=False):
print_options = np.get_printoptions()
threshold = print_options["threshold"]

corner_data = fetch_corner_data(self, session=self._executed_sessions[-1])
corner_data = self.fetch(session=self._executed_sessions[-1])
# if the size is within the default threshold, keep the default;
# if not, set it to corner_data.size - 1 to make sure ... appears in the repr
threshold = threshold if self.size <= threshold else corner_data.size - 1
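The threshold juggling above relies on NumPy's summarization rule: an array repr is abbreviated with `...` only when its size exceeds `threshold`, so forcing `threshold` below the fetched data's size guarantees the ellipsis shows up. A small demonstration:

```python
import numpy as np

arr = np.arange(10)
with np.printoptions(threshold=arr.size - 1):
    # size > threshold triggers the summarized "[0, 1, 2, ..., 9]" form
    assert "..." in repr(arr)
```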
@@ -719,7 +719,7 @@ def test_fill_diagonal_execution(setup):
def copy(x):
if hasattr(x, "nnz"):
# sparse
return x.A
return x.toarray()
else:
return x.copy()

@@ -389,7 +389,7 @@ def test_lu_execution(setup):

t = P.dot(L).dot(U)
res = t.execute().fetch()
np.testing.assert_array_almost_equal(data.A, res)
np.testing.assert_array_almost_equal(data.toarray(), res)

a = tensor(data, chunk_size=5)
P, L, U = lu(a)
@@ -404,7 +404,7 @@

t = P.dot(L).dot(U)
res = t.execute().fetch()
np.testing.assert_array_almost_equal(data.A, res)
np.testing.assert_array_almost_equal(data.toarray(), res)


def test_solve_triangular(setup):
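The round-trip asserted above is the defining property of the factorization: `scipy.linalg.lu` decomposes `A` as `P @ L @ U` with a permutation matrix `P`, so multiplying the factors back recovers the (densified) input. A standalone sketch:

```python
import numpy as np
from scipy.linalg import lu

a = np.random.RandomState(0).rand(4, 4)
p, l, u = lu(a)
# P is a permutation matrix, L is unit lower triangular, U upper triangular
np.testing.assert_array_almost_equal(a, p @ l @ u)
```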
@@ -56,7 +56,9 @@ def test_concatenate_execution(setup):

d = concatenate([a, b, c], axis=-1)
res = d.execute().fetch()
expected = np.concatenate([a_data.A, b_data.A, c_data.A], axis=-1)
expected = np.concatenate(
[a_data.toarray(), b_data.toarray(), c_data.toarray()], axis=-1
)
np.testing.assert_array_equal(res.toarray(), expected)
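The densify-then-concatenate pattern in the expected value exists because `np.concatenate` does not accept scipy sparse inputs; `scipy.sparse.hstack` is the sparse-native equivalent of `axis=-1` on 2-D operands. A sketch of the equivalence (the `a`/`b` fixtures here are illustrative):

```python
import numpy as np
from scipy import sparse

a = sparse.random(3, 2, density=0.5, format="csr", random_state=0)
b = sparse.random(3, 4, density=0.5, format="csr", random_state=1)
# build the expected value from dense copies, then compare against the
# sparse-native horizontal stack
expected = np.concatenate([a.toarray(), b.toarray()], axis=-1)
np.testing.assert_array_equal(sparse.hstack([a, b]).toarray(), expected)
```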

