Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
18 changes: 15 additions & 3 deletions annotationengine/views.py
Original file line number Diff line number Diff line change
Expand Up @@ -22,7 +22,7 @@
def wkb_to_numpy(wkb, convert_to_nm=None):
"""Fixes single geometry column"""
shp = to_shape(wkb)
xyz_voxel = np.array([shp.xy[0][0], shp.xy[1][0], shp.z], dtype=np.int)
xyz_voxel = np.array([shp.xy[0][0], shp.xy[1][0], shp.z], dtype=int)
if convert_to_nm is not None:
return xyz_voxel * convert_to_nm
else:
Expand Down Expand Up @@ -68,7 +68,13 @@ def aligned_volume_view(aligned_volume_name):
.filter(Metadata.deleted == None)
.filter(Metadata.valid == True)
)
df = pd.read_sql(query.statement, db.database.engine)
statement = str(
query.statement.compile(
db.database.engine, compile_kwargs={"literal_binds": True}
)
)
with db.database.engine.connect() as conn:
df = pd.read_sql(statement, conn.connection)
base_user_url = "https://{auth_uri}/api/v1/user/{user_id}"
auth_uri = os.environ["AUTH_URI"]
base_schema_url = (
Expand Down Expand Up @@ -120,7 +126,13 @@ def table_view(aligned_volume_name, table_name):
Model = db.database._get_model_from_table_name(table_name)
table_size = db.database.get_annotation_table_size(table_name)
query = db.database.cached_session.query(Model).limit(15)
top15_df = pd.read_sql(query.statement, db.database.engine)
statement = str(
query.statement.compile(
db.database.engine, compile_kwargs={"literal_binds": True}
)
)
with db.database.engine.connect() as conn:
top15_df = pd.read_sql(statement, conn.connection)
top15_df = fix_wkb_columns(top15_df)
return render_template(
"table.html",
Expand Down
32 changes: 26 additions & 6 deletions dev.Dockerfile
Original file line number Diff line number Diff line change
@@ -1,11 +1,31 @@
FROM tiangolo/uwsgi-nginx-flask:python3.9

RUN git config --global http.sslVerify false && \
mkdir -p /home/nginx/.cloudvolume/secrets && chown -R nginx /home/nginx && usermod -d /home/nginx -s /bin/bash nginx
RUN apt-get update && apt-get install -y gcc curl ca-certificates \
&& rm -rf /var/lib/apt/lists/*

RUN pip install uv

# Enable bytecode compilation and avoid hardlinks in mounted cache volumes.
ENV UV_COMPILE_BYTECODE=1
ENV UV_LINK_MODE=copy
ENV UV_PYTHON_DOWNLOADS=0

RUN git config --global http.sslVerify false && \
mkdir -p /home/nginx/.cloudvolume/secrets && \
chown -R nginx /home/nginx && \
usermod -d /home/nginx -s /bin/bash nginx

WORKDIR /app

# Install runtime + dev/test dependencies from the lockfile before copying source.
COPY uv.lock pyproject.toml ./
ENV UV_PROJECT_ENVIRONMENT="/usr/local/"
RUN --mount=type=cache,target=/root/.cache/uv \
UV_VENV_ARGS="--system-site-packages" uv sync --frozen --no-install-project --group dev

ENV UWSGI_INI /app/uwsgi.ini
ENV PATH /app/.venv/bin:/home/nginx/google-cloud-sdk/bin:/root/google-cloud-sdk/bin:$PATH
ENV PYTHONNOUSERSITE=1

RUN python -m pip install --upgrade pip
COPY dev_requirements.txt /app/.
RUN pip install -r dev_requirements.txt
COPY timeout.conf /etc/nginx/conf.d/
COPY . /app

18 changes: 18 additions & 0 deletions docker-compose.yml
Original file line number Diff line number Diff line change
Expand Up @@ -33,6 +33,24 @@ services:
depends_on:
- db

test:
image: "app:tag"
build:
context: .
dockerfile: dev.Dockerfile
env_file:
- ./.env-docker.dev
environment:
FLASK_CONFIGURATION: docker_testing
AUTH_DISABLED: "true"
volumes:
- .:/app
depends_on:
db:
condition: service_healthy
entrypoint: []
command: ["python", "-m", "pytest"]

volumes:
postgres_data:
driver: local
47 changes: 47 additions & 0 deletions scripts/run_tests_with_compose.sh
Original file line number Diff line number Diff line change
@@ -0,0 +1,47 @@
#!/usr/bin/env bash
# Run the test suite inside a Docker container using the docker-compose stack.
#
# Usage:
#   ./scripts/run_tests_with_compose.sh [--down] [pytest args...]
#
# Examples:
#   ./scripts/run_tests_with_compose.sh                        # run tests, leave db up
#   ./scripts/run_tests_with_compose.sh --down                 # run tests, then tear down
#   ./scripts/run_tests_with_compose.sh -v test/test_api.py    # run specific tests with verbose

set -e

SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
PROJECT_ROOT="$(cd "$SCRIPT_DIR/.." && pwd)"
cd "$PROJECT_ROOT"

TEAR_DOWN=false
EXTRA_ARGS=()
for arg in "$@"; do
    case "$arg" in
        --down)
            TEAR_DOWN=true
            ;;
        *)
            EXTRA_ARGS+=("$arg")
            ;;
    esac
done

# Prefer docker compose (v2) over docker-compose (v1)
COMPOSE_CMD="docker compose"
if ! docker compose version &>/dev/null; then
    COMPOSE_CMD="docker-compose"
fi

echo "Starting Postgres and running tests in Docker..."
# FIX: under `set -e` a failing test run aborted the script before RESULT was
# captured, so `--down` teardown never ran on failure (and when RESULT *was*
# reached it was always 0). Suspend errexit around the run so the real pytest
# exit status is recorded and teardown still happens.
set +e
$COMPOSE_CMD run --build --rm test python -m pytest "${EXTRA_ARGS[@]}"
RESULT=$?
set -e

if [[ "$TEAR_DOWN" == "true" ]]; then
    echo "Tearing down docker-compose services..."
    $COMPOSE_CMD down
fi

# Propagate the test run's exit status to the caller (CI relies on this).
exit $RESULT
3 changes: 2 additions & 1 deletion test/conftest.py
Original file line number Diff line number Diff line change
Expand Up @@ -59,7 +59,8 @@ def test_aligned_volume():

@pytest.fixture(scope="module")
def client():
flask_app = create_app(config_name="testing")
config_name = os.environ.get("FLASK_CONFIGURATION", "testing")
flask_app = create_app(config_name=config_name)
test_logger.info("Starting test flask app...")

# Create a test client using the Flask application configured for testing
Expand Down
57 changes: 57 additions & 0 deletions test/test_views_pandas_compat.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,57 @@
"""
Regression test for pd.read_sql compatibility with SQLAlchemy 1.4+ and pandas 2.x.

pandas 2.3+ requires SQLAlchemy >= 2.0.0. When SQLAlchemy 1.4 is installed,
pandas' import_optional_dependency("sqlalchemy") returns None, so pd.read_sql
falls through to the SQLiteDatabase DBAPI2 path and raises:
TypeError: Query must be a string unless using sqlalchemy.

The fix is to compile the query to a literal SQL string and hand pd.read_sql a
raw DBAPI connection, so pandas takes its plain-string (DBAPI2) code path:
    statement = str(
        query.statement.compile(engine, compile_kwargs={"literal_binds": True})
    )
    with engine.connect() as conn:
        df = pd.read_sql(statement, conn.connection)
"""

import json
import os
from unittest import mock

import pytest

aligned_volume_name = "test_aligned_volume"
table_name = "test_view_table"


@pytest.fixture(scope="module", autouse=True)
def create_test_table(client):
    """Create the synapse-schema annotation table that the view tests render."""
    payload = {
        "table_name": table_name,
        "schema_type": "synapse",
        "metadata": {
            "user_id": "1",
            "description": "Test table for views",
            "flat_segmentation_source": "precomputed://gs://test/image",
            "voxel_resolution_x": 4,
            "voxel_resolution_y": 4,
            "voxel_resolution_z": 40,
        },
    }
    endpoint = f"/annotation/api/v2/aligned_volume/{aligned_volume_name}/table"
    # Stub the aligned-volume validation so the fixture stays hermetic.
    with mock.patch("annotationengine.api.check_aligned_volume") as mocked_check:
        mocked_check.return_value = aligned_volume_name
        client.post(
            endpoint,
            json=payload,
            content_type="application/json",
            follow_redirects=True,
        )


class TestViewPages:
    """Smoke tests: the HTML view endpoints render without server errors."""

    def test_aligned_volume_view_returns_200(self, client):
        # The view builds user links from AUTH_URI, so it must be set.
        env = {"AUTH_URI": "auth.test.example.com"}
        with mock.patch.dict(os.environ, env):
            resp = client.get(
                f"/annotation/views/aligned_volume/{aligned_volume_name}",
                follow_redirects=False,
            )
            assert resp.status_code == 200

    def test_table_view_returns_200(self, client):
        env = {"AUTH_URI": "auth.test.example.com"}
        with mock.patch.dict(os.environ, env):
            resp = client.get(
                f"/annotation/views/aligned_volume/{aligned_volume_name}"
                f"/table/{table_name}",
                follow_redirects=False,
            )
            assert resp.status_code == 200
Loading