Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Implementing Unit testing for Python #50

Closed
wants to merge 6 commits into from
Closed
Show file tree
Hide file tree
Changes from 4 commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
13 changes: 10 additions & 3 deletions .github/workflows/rust.yml
Original file line number Diff line number Diff line change
Expand Up @@ -32,14 +32,14 @@ jobs:
runs-on: ubuntu-latest

steps:
- uses: actions/checkout@v3
- uses: actions/checkout@v4
- name: Install protobuf compiler
shell: bash
run: sudo apt-get install protobuf-compiler
- name: Build Rust code
run: cargo build --verbose
- name: Set up Python
uses: actions/setup-python@v2
uses: actions/setup-python@v5
with:
python-version: ${{ env.PYTHON_VERSION }}
- name: Install test dependencies
Expand All @@ -49,5 +49,12 @@ jobs:
- name: Generate test data
run: |
./scripts/gen-test-data.sh
- name: Run tests
- name: Run Rust tests
run: cargo test --verbose
- name: Run Python tests
run: |
python -m venv venv
source venv/bin/activate
pip install -r requirements-in.txt
maturin develop
python -m pytest
File renamed without changes.
7 changes: 2 additions & 5 deletions src/query_stage.rs
Original file line number Diff line number Diff line change
Expand Up @@ -18,7 +18,7 @@
use crate::context::serialize_execution_plan;
use crate::shuffle::{ShuffleCodec, ShuffleReaderExec};
use datafusion::error::Result;
use datafusion::physical_plan::{ExecutionPlan, Partitioning};
use datafusion::physical_plan::{ExecutionPlan, ExecutionPlanProperties, Partitioning};
use datafusion::prelude::SessionContext;
use datafusion_proto::bytes::physical_plan_from_bytes_with_extension_codec;
use pyo3::prelude::*;
Expand Down Expand Up @@ -99,10 +99,7 @@ impl QueryStage {
/// Get the input partition count. This is the same as the number of concurrent tasks
/// when we schedule this query stage for execution
pub fn get_input_partition_count(&self) -> usize {
self.plan.children()[0]
.properties()
.output_partitioning()
.partition_count()
self.plan.output_partitioning().partition_count()
}

pub fn get_output_partition_count(&self) -> usize {
Expand Down
16 changes: 12 additions & 4 deletions datafusion_ray/tests/test_context.py → tests/test_context.py
Original file line number Diff line number Diff line change
Expand Up @@ -15,12 +15,20 @@
# specific language governing permissions and limitations
# under the License.

from datafusion_ray import Context
from datafusion_ray.context import DatafusionRayContext
from datafusion import SessionContext


def test():
def test_basic_query_succeed():
df_ctx = SessionContext()
ctx = Context(df_ctx, False)
ctx = DatafusionRayContext(df_ctx)
df_ctx.register_csv("tips", "examples/tips.csv", has_header=True)
ctx.plan("SELECT * FROM tips")
record_batch = ctx.sql("SELECT * FROM tips")
assert record_batch.num_rows == 244


# Verify that a DDL statement which produces no result rows (CREATE VIEW)
# runs through the Ray context without raising. No assertion is made on a
# return value — the test passes if ctx.sql() completes without error.
def test_no_result_query():
# Fresh DataFusion session for isolation from other tests.
df_ctx = SessionContext()
# Wrap the DataFusion session in the Ray-distributed execution context.
ctx = DatafusionRayContext(df_ctx)
# Register the sample CSV so the view definition below can reference it.
# NOTE(review): path is relative to the repo root — assumes pytest is run
# from there; confirm against the CI working directory.
df_ctx.register_csv("tips", "examples/tips.csv", has_header=True)
# DDL statement: creates a view, returns no record batches.
ctx.sql("CREATE VIEW tips_view AS SELECT * FROM tips")
Loading