mirror of https://github.com/geoserver/geoserver-cloud.git (synced 2025-12-08 20:16:08 +00:00)

Consolidate ImageMosaic acceptance tests

- Add comprehensive ImageMosaic test coverage: direct directory, manual granules, empty store workflows, and XML-based store creation
- Fix file path handling: use direct paths instead of file:// URLs for local files
- Update documentation to mention the shared mount volume at /mnt/geoserver_data
- Add version testing examples (TAG=2.27.1.0, TAG=2.26.2.0) to README

Tests pass with the datadir backend but show limitations with the pgconfig backend. This provides comprehensive test coverage for realistic ImageMosaic workflows.

This commit is contained in:
parent ba0ef2c323
commit 47ff586e2b
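The path-handling fix above means local granules are now registered by posting the plain filesystem path (not a file:// URL) to a store's external.imagemosaic endpoint. A minimal sketch, with illustrative workspace and store names (credentials and URL are the acceptance defaults):

```shell
# Plain path, as the updated tests do
curl -u admin:geoserver -XPOST -H "Content-type: text/plain" \
  -d "/mnt/geoserver_data/sampledata/ne/pyramid/NE1_LR_LC_SR_W_DR_1_1.tif" \
  "http://localhost:9090/geoserver/cloud/rest/workspaces/demo_ws/coveragestores/demo_store/external.imagemosaic"

# The file:// form previously used for local files is what this commit moves away from:
# -d "file:///mnt/geoserver_data/sampledata/ne/pyramid/NE1_LR_LC_SR_W_DR_1_1.tif"
```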
Makefile (12 changed lines)
@@ -1,7 +1,7 @@
 .PHONY: all
 all: install test build-image
 
-TAG=$(shell mvn help:evaluate -Dexpression=project.version -q -DforceStdout)
+TAG?=$(shell mvn help:evaluate -Dexpression=project.version -q -DforceStdout)
 
 COSIGN_PASSWORD := $(COSIGN_PASSWORD)
 
@@ -158,14 +158,14 @@ verify-image:
 
 .PHONY: build-acceptance
 build-acceptance:
-	docker build --tag=acceptance:$(TAG) acceptance_tests
+	docker build --tag=geoservercloud/acceptance:latest acceptance_tests
 
 .PHONY: acceptance-tests-datadir
 acceptance-tests-datadir: build-acceptance start-acceptance-tests-datadir run-acceptance-tests-datadir
 
 .PHONY: start-acceptance-tests-datadir
 start-acceptance-tests-datadir:
-	(cd compose/ && ./acceptance_datadir up -d)
+	(cd compose/ && TAG=$(TAG) ./acceptance_datadir up -d)
 
 .PHONY: run-acceptance-tests-datadir
 run-acceptance-tests-datadir:
@@ -173,14 +173,14 @@ run-acceptance-tests-datadir:
 
 .PHONY: clean-acceptance-tests-datadir
 clean-acceptance-tests-datadir:
-	(cd compose/ && ./acceptance_datadir down -v)
+	(cd compose/ && TAG=$(TAG) ./acceptance_datadir down -v)
 
 .PHONY: acceptance-tests-pgconfig
 acceptance-tests-pgconfig: build-acceptance start-acceptance-tests-pgconfig run-acceptance-tests-pgconfig
 
 .PHONY: start-acceptance-tests-pgconfig
 start-acceptance-tests-pgconfig:
-	(cd compose/ && ./acceptance_pgconfig up -d)
+	(cd compose/ && TAG=$(TAG) ./acceptance_pgconfig up -d)
 
 .PHONY: run-acceptance-tests-pgconfig
 run-acceptance-tests-pgconfig:
@@ -188,7 +188,7 @@ run-acceptance-tests-pgconfig:
 
 .PHONY: clean-acceptance-tests-pgconfig
 clean-acceptance-tests-pgconfig:
-	(cd compose/ && ./acceptance_pgconfig down -v)
+	(cd compose/ && TAG=$(TAG) ./acceptance_pgconfig down -v)
 
 .PHONY: acceptance-tests-jdbcconfig
 acceptance-tests-jdbcconfig: build-acceptance start-acceptance-tests-jdbcconfig run-acceptance-tests-jdbcconfig
acceptance_tests/README.md
@@ -2,17 +2,190 @@
 
 ## Requirements
 
-[Poetry](https://python-poetry.org/docs/#installing-with-the-official-installer)
+- Python 3.8+
+- [Poetry](https://python-poetry.org/docs/#installing-with-the-official-installer) (recommended)
+- Or a Python virtual environment
 
 ## Installation
 
+### Option 1: Using Poetry (recommended)
+
 ```shell
 poetry install
 ```
 
-# Run the tests
-First start the docker composition then run:
+### Option 2: Using Python virtual environment
 
 ```shell
-GEOSERVER_URL=http://localhost:9090/geoserver/cloud poetry run pytest -vvv .
-```
+# Create virtual environment
+python -m venv .venv
+
+# Activate virtual environment
+# On Linux/macOS:
+source .venv/bin/activate
+# On Windows:
+.venv\Scripts\activate
+
+# Install dependencies
+pip install -e .
+```
+
+## Running the tests
+
+### Option 1: Using make (runs full docker composition)
+
+```shell
+# Run tests with datadir backend
+make acceptance-tests-datadir
+
+# Run tests with pgconfig backend
+make acceptance-tests-pgconfig
+```
+
+### Option 2: Manual execution
+
+#### Run tests inside Docker container (recommended for all tests)
+
+```shell
+# Start GeoServer services
+cd ../compose
+./acceptance_datadir up -d  # or ./acceptance_pgconfig up -d
+
+# Optional: Start webui service if needed (not started by default in acceptance composition)
+./acceptance_datadir scale webui=1
+
+# Run all tests inside the container
+./acceptance_datadir exec acceptance pytest . -vvv --color=yes
+
+# Run specific tests inside the container
+./acceptance_datadir exec acceptance pytest tests/test_cog.py -v --color=yes
+```
+
+#### Run tests from host machine (full functionality)
+
+**Note:** This requires the geodatabase port to be exposed (port 5433). The acceptance composition now exposes this port automatically.
+
+```shell
+# Start GeoServer services (geodatabase port 5433 will be exposed)
+cd ../compose
+./acceptance_datadir up -d  # or ./acceptance_pgconfig up -d
+
+# Optional: Start webui service if needed
+./acceptance_datadir scale webui=1
+
+# From acceptance_tests directory, run tests from host
+cd ../acceptance_tests
+./run_tests_locally.sh tests/test_cog.py              # Run COG tests
+./run_tests_locally.sh tests/test_imagemosaic_cog.py  # Run ImageMosaic tests
+./run_tests_locally.sh tests/test_workspace.py        # Run workspace tests
+./run_tests_locally.sh                                # Run all tests
+
+# Run specific test functions
+./run_tests_locally.sh tests/test_imagemosaic_cog.py::test_create_imagemosaic_local_files
+./run_tests_locally.sh tests/test_cog.py::test_create_cog_coverage
+```
+
+### Run specific tests with make
+
+```shell
+# To run specific tests with make, you can modify the Makefile or use the manual Docker approach above
+```
+
+## Debugging
+
+If you need to debug the GeoServer services, you can run the acceptance test composition with local ports exposed:
+
+```shell
+cd ../compose
+
+# Start the acceptance test compose with local ports
+./acceptance_datadir -f localports.yml up -d
+
+# Enable the webui service if needed
+./acceptance_datadir -f localports.yml scale webui=1
+
+# Shut down the rest service if you're going to launch it from your IDE
+./acceptance_datadir -f localports.yml down rest
+
+# Now you can run from the IDE with the `local` spring profile enabled
+# and the required catalog backend profile (datadir/pgconfig)
+```
+
+### Accessing Sample Data
+
+When debugging, you may need to access the sample data that's available in the containers. The sample data is extracted to `/mnt/geoserver_data/sampledata` inside the containers. To access it from your local development environment:
+
+```shell
+# Check what sample data is available
+./acceptance_datadir exec wms find /mnt/geoserver_data/sampledata
+
+# Copy sample data to your local machine for testing
+docker cp $(./acceptance_datadir ps -q wms | head -1):/mnt/geoserver_data/sampledata ./local_sampledata
+
+# Or mount the geoserver_data volume directly to a local directory
+# Add this to your docker-compose override file:
+# volumes:
+#   geoserver_data:
+#     driver: local
+#     driver_opts:
+#       type: none
+#       o: bind
+#       device: /path/to/local/sampledata
+```
+
+## Testing Different GeoServer Cloud Versions
+
+You can test different versions of GeoServer Cloud without modifying the `.env` file by setting the TAG environment variable.
+
+### Option 1: Using Make Commands (Recommended)
+
+```shell
+# Test with GeoServer Cloud 2.27.2-SNAPSHOT (datadir backend)
+TAG=2.27.2-SNAPSHOT make start-acceptance-tests-datadir
+cd acceptance_tests && ./run_tests_locally.sh tests/test_imagemosaic.py
+TAG=2.27.2-SNAPSHOT make clean-acceptance-tests-datadir
+
+# Test with GeoServer Cloud 2.26.2.0 (pgconfig backend)
+TAG=2.26.2.0 make start-acceptance-tests-pgconfig
+cd acceptance_tests && ./run_tests_locally.sh tests/test_imagemosaic.py
+TAG=2.26.2.0 make clean-acceptance-tests-pgconfig
+
+# Test with default version (from Maven project.version)
+make start-acceptance-tests-datadir
+cd acceptance_tests && ./run_tests_locally.sh
+make clean-acceptance-tests-datadir
+```
+
+### Option 2: Manual Docker Compose Commands
+
+```shell
+# Test with GeoServer Cloud 2.27.1.0
+cd ../compose
+TAG=2.27.1.0 ./acceptance_datadir up -d
+
+# Run your tests
+cd ../acceptance_tests
+./run_tests_locally.sh
+
+# Test with GeoServer Cloud 2.26.2.0
+cd ../compose
+./acceptance_datadir down
+TAG=2.26.2.0 ./acceptance_datadir up -d
+
+# Run your tests again
+cd ../acceptance_tests
+./run_tests_locally.sh
+
+# Return to default version (check .env file for current default)
+cd ../compose
+./acceptance_datadir down
+./acceptance_datadir up -d
+```
+
+## Cleanup
+
+```shell
+# Stop and remove containers
+cd ../compose
+./acceptance_datadir down -v  # or ./acceptance_pgconfig down -v
+```
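The "Run specific tests with make" section above leaves the exact combination open; one workable sequence that reuses the targets added in this commit (the test path is only an example) is:

```shell
make start-acceptance-tests-datadir
(cd acceptance_tests && ./run_tests_locally.sh tests/test_cog.py::test_create_cog_coverage)
make clean-acceptance-tests-datadir
```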
acceptance_tests/run_tests_locally.sh (new executable file, 139 lines)
@@ -0,0 +1,139 @@
#!/bin/bash

# Default GeoServer URL
GEOSERVER_URL=${GEOSERVER_URL:-"http://localhost:9090/geoserver/cloud"}

# Default database connection for local testing (requires geodatabase port exposed on 5433)
PGHOST=${PGHOST:-"localhost"}
PGPORT=${PGPORT:-"5433"}
PGDATABASE=${PGDATABASE:-"geodata"}
PGUSER=${PGUSER:-"geodata"}
PGPASSWORD=${PGPASSWORD:-"geodata"}
PGSCHEMA=${PGSCHEMA:-"test1"}

# Note: This script runs tests from the host machine and requires the geodatabase
# port to be exposed (5433). Start services with: ./acceptance_datadir up -d

# Help function
show_help() {
    echo "Usage: $0 [OPTIONS] [TEST_PATH]"
    echo ""
    echo "Run GeoServer Cloud acceptance tests locally"
    echo ""
    echo "OPTIONS:"
    echo "  -h, --help     Show this help message"
    echo "  -v, --verbose  Run with verbose output (-vvv)"
    echo "  -q, --quiet    Run with minimal output (-q)"
    echo ""
    echo "TEST_PATH:"
    echo "  Optional path to specific test file or test function"
    echo "  Examples:"
    echo "    $0 tests/test_cog.py"
    echo "    $0 tests/test_cog_imagemosaic.py::test_create_imagemosaic_cogs_http"
    echo "    $0 tests/test_workspace.py"
    echo ""
    echo "Environment variables:"
    echo "  GEOSERVER_URL  GeoServer URL (default: http://localhost:9090/geoserver/cloud)"
    echo "  PGHOST         Database host (default: localhost)"
    echo "  PGPORT         Database port (default: 5433)"
    echo ""
    echo "Examples:"
    echo "  $0                                    # Run all tests"
    echo "  $0 tests/test_cog.py                  # Run COG tests only"
    echo "  $0 -v                                 # Run all tests with verbose output"
    echo "  GEOSERVER_URL=http://localhost:8080/geoserver $0  # Use different URL"
}

# Parse command line arguments
VERBOSE_FLAG="-v"
TEST_PATH=""

while [[ $# -gt 0 ]]; do
    case $1 in
        -h|--help)
            show_help
            exit 0
            ;;
        -v|--verbose)
            VERBOSE_FLAG="-vvv"
            shift
            ;;
        -q|--quiet)
            VERBOSE_FLAG="-q"
            shift
            ;;
        -*)
            echo "Unknown option: $1" >&2
            show_help >&2
            exit 1
            ;;
        *)
            TEST_PATH="$1"
            shift
            ;;
    esac
done

# Set the target (all tests or specific test)
if [[ -n "$TEST_PATH" ]]; then
    TARGET="$TEST_PATH"
else
    TARGET="."
fi

echo "Running GeoServer Cloud acceptance tests..."
echo "GeoServer URL: $GEOSERVER_URL"
echo "Test target: $TARGET"
echo "Verbosity: $VERBOSE_FLAG"
echo ""

# Wait for GeoServer to be available
echo "Waiting for GeoServer to be available at $GEOSERVER_URL/rest/workspaces..."
max_attempts=60
attempt=0
while [ $attempt -lt $max_attempts ]; do
    if curl -s -u admin:geoserver --fail "$GEOSERVER_URL/rest/workspaces" > /dev/null 2>&1; then
        echo "✓ GeoServer is ready!"
        break
    fi
    attempt=$((attempt + 1))
    echo "  Attempt $attempt/$max_attempts - waiting 5 seconds..."
    sleep 5
done

if [ $attempt -eq $max_attempts ]; then
    echo "✗ Timeout: GeoServer did not become available at $GEOSERVER_URL/rest/workspaces"
    echo "  Please ensure the services are running with: cd ../compose && ./acceptance_datadir up -d"
    exit 1
fi

echo ""

# Check if we're using Poetry or virtual environment
if command -v poetry &> /dev/null && [[ -f "pyproject.toml" ]]; then
    echo "Using Poetry to run tests..."
    echo "Installing dependencies if needed..."
    poetry install
    export GEOSERVER_URL PGHOST PGPORT PGDATABASE PGUSER PGPASSWORD PGSCHEMA
    poetry run pytest $VERBOSE_FLAG --color=yes $TARGET
elif [[ -n "$VIRTUAL_ENV" ]]; then
    echo "Using activated virtual environment..."
    # Check if pytest is available
    if ! command -v pytest &> /dev/null; then
        echo "Error: pytest not found in virtual environment"
        echo "Please install dependencies: pip install -e ."
        exit 1
    fi
    export GEOSERVER_URL PGHOST PGPORT PGDATABASE PGUSER PGPASSWORD PGSCHEMA
    pytest $VERBOSE_FLAG --color=yes $TARGET
else
    echo "Error: Please either:"
    echo "  1. Install Poetry (https://python-poetry.org/docs/#installing-with-the-official-installer) and run this script again, or"
    echo "  2. Create and activate a virtual environment:"
    echo "     python -m venv .venv"
    echo "     source .venv/bin/activate  # Linux/macOS"
    echo "     pip install -e ."
    echo "     ./run_tests_locally.sh"
    echo ""
    exit 1
fi
acceptance_tests/tests/conftest.py
@@ -7,13 +7,13 @@ from geoservercloud import GeoServerCloud
 GEOSERVER_URL = os.getenv("GEOSERVER_URL", "http://gateway:8080/geoserver/cloud")
 RESOURCE_DIR = Path(__file__).parent / "resources"
-# Database connection
-PGHOST = "geodatabase"
-PGPORT = 5432
-PGDATABASE = "geodata"
-PGUSER = "geodata"
-PGPASSWORD = "geodata"
-PGSCHEMA = "test1"
+# Database connection - defaults for container, can be overridden for local testing
+PGHOST = os.getenv("PGHOST", "geodatabase")
+PGPORT = int(os.getenv("PGPORT", "5432"))
+PGDATABASE = os.getenv("PGDATABASE", "geodata")
+PGUSER = os.getenv("PGUSER", "geodata")
+PGPASSWORD = os.getenv("PGPASSWORD", "geodata")
+PGSCHEMA = os.getenv("PGSCHEMA", "test1")
 WORKSPACE = "test_workspace"
 DATASTORE = "test_datastore"
 
 
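With these getenv defaults the same conftest works both inside the acceptance container and from the host; for a host run the variables can be exported explicitly, for example with the local defaults that run_tests_locally.sh assumes:

```shell
export GEOSERVER_URL=http://localhost:9090/geoserver/cloud
export PGHOST=localhost PGPORT=5433 PGDATABASE=geodata PGUSER=geodata PGPASSWORD=geodata PGSCHEMA=test1
poetry run pytest tests/test_workspace.py -vvv --color=yes
```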
acceptance_tests/tests/test_cog.py (new file, 78 lines)
@@ -0,0 +1,78 @@
import pytest
from geoservercloud import GeoServerCloud
from conftest import GEOSERVER_URL


def test_create_cog_coverage():
    """Test creating a COG coverage store and coverage"""
    geoserver = GeoServerCloud(GEOSERVER_URL)
    workspace = "cog"
    store_name = "land_shallow_topo_21600_NW_cog"
    coverage_name = "land_shallow_topo_NW"

    # Delete and recreate workspace
    geoserver.delete_workspace(workspace)
    response = geoserver.create_workspace(workspace)
    assert response.status_code == 201

    # Create COG coverage store
    store_xml = f"""<coverageStore>
<name>{store_name}</name>
<type>GeoTIFF</type>
<enabled>true</enabled>
<workspace><name>{workspace}</name></workspace>
<url>cog://https://test-data-cog-public.s3.amazonaws.com/public/land_shallow_topo_21600_NW_cog.tif</url>
<metadata>
<entry key="CogSettings.Key">
<cogSettings>
<rangeReaderSettings>HTTP</rangeReaderSettings>
</cogSettings>
</entry>
</metadata>
</coverageStore>"""

    response = geoserver.post_request(
        f"/rest/workspaces/{workspace}/coveragestores",
        data=store_xml,
        headers={"Content-Type": "application/xml"}
    )
    assert response.status_code == 201

    # Create coverage
    coverage_xml = f"""<coverage>
<name>{coverage_name}</name>
<nativeName>{store_name}</nativeName>
</coverage>"""

    response = geoserver.post_request(
        f"/rest/workspaces/{workspace}/coveragestores/{store_name}/coverages",
        data=coverage_xml,
        headers={"Content-Type": "application/xml"}
    )
    assert response.status_code == 201

    # Verify the coverage was created - try listing coverages first
    list_response = geoserver.get_request(f"/rest/workspaces/{workspace}/coveragestores/{store_name}/coverages.json")
    if list_response.status_code != 200:
        print(f"Coverage listing failed: {list_response.status_code} - {list_response.text}")
    assert list_response.status_code == 200

    # Check specific coverage
    response = geoserver.get_request(f"/rest/workspaces/{workspace}/coveragestores/{store_name}/coverages/{coverage_name}.json")
    assert response.status_code == 200

    # Verify coverage properties
    coverage_data = response.json()["coverage"]
    assert coverage_data["name"] == coverage_name
    assert coverage_data["nativeName"] == coverage_name
    assert coverage_data["enabled"] == True

    # Test WMS GetMap request
    wms_response = geoserver.get_request(
        f"/wms?SERVICE=WMS&VERSION=1.1.0&REQUEST=GetMap&LAYERS={workspace}:{coverage_name}&STYLES=&BBOX=-180,-90,180,90&WIDTH=256&HEIGHT=256&FORMAT=image/jpeg&SRS=EPSG:4326"
    )
    assert wms_response.status_code == 200
    assert wms_response.headers.get("content-type").startswith("image/jpeg")

    # Cleanup
    geoserver.delete_workspace(workspace)
acceptance_tests/tests/test_imagemosaic.py (new file, 539 lines)
@@ -0,0 +1,539 @@
"""
ImageMosaic acceptance tests for GeoServer Cloud

Tests various workflows for creating ImageMosaic stores and layers:
- Direct directory creation (like web UI)
- Manual granule addition
- Empty store creation with directory/file harvesting
- XML-based store creation

All tests use sample data from a shared mount volume at /mnt/geoserver_data
that is accessible to both the test environment and GeoServer containers.
"""
import os
import tempfile
import zipfile
from pathlib import Path
import pytest
from geoservercloud import GeoServerCloud
from conftest import GEOSERVER_URL


def test_create_imagemosaic_local_files():
    """Test creating an ImageMosaic using local sample data files via direct directory approach"""
    geoserver = GeoServerCloud(GEOSERVER_URL)
    workspace = "local_sampledata"
    store_name = "ne_pyramid_store"

    # Delete and recreate workspace
    geoserver.delete_workspace(workspace)
    response = geoserver.create_workspace(workspace)
    assert response.status_code == 201

    # Use direct directory approach (like web UI) instead of individual file URLs
    directory_path = "/mnt/geoserver_data/sampledata/ne/pyramid/"

    # Create ImageMosaic store directly from directory
    response = geoserver.put_request(
        f"/rest/workspaces/{workspace}/coveragestores/{store_name}/external.imagemosaic",
        data=directory_path,
        headers={"Content-Type": "text/plain"}
    )
    assert response.status_code in [201, 202], f"Failed to create ImageMosaic from directory: {response.text}"

    # List available coverages (should be auto-discovered)
    response = geoserver.get_request(
        f"/rest/workspaces/{workspace}/coveragestores/{store_name}/coverages.xml?list=all"
    )
    assert response.status_code == 200, f"Failed to list coverages: {response.text}"

    # Extract the auto-discovered coverage name
    response_text = response.text
    import re
    coverage_match = re.search(r'<coverageName>([^<]+)</coverageName>', response_text)
    assert coverage_match, f"No coverage found in response: {response_text}"

    coverage_name = coverage_match.group(1)

    # Check if coverage was auto-created (likely scenario)
    coverage_response = geoserver.get_request(
        f"/rest/workspaces/{workspace}/coveragestores/{store_name}/coverages/{coverage_name}.json"
    )

    if coverage_response.status_code == 200:
        # Coverage was auto-created - this is the normal case
        coverage_data = coverage_response.json()["coverage"]
        assert coverage_data["name"] == coverage_name
        assert coverage_data["nativeName"] == coverage_name
        assert coverage_data["enabled"] == True
    else:
        # Coverage not auto-created, create it manually
        coverage_xml = f"""<coverage>
<name>{coverage_name}</name>
<title>Natural Earth Pyramid Mosaic</title>
<nativeName>{coverage_name}</nativeName>
<enabled>true</enabled>
</coverage>"""

        response = geoserver.post_request(
            f"/rest/workspaces/{workspace}/coveragestores/{store_name}/coverages",
            data=coverage_xml,
            headers={"Content-Type": "text/xml"}
        )
        assert response.status_code == 201, f"Failed to create coverage: {response.text}"

    # Verify the coverage was created
    response = geoserver.get_request(f"/rest/workspaces/{workspace}/coveragestores/{store_name}/coverages/{coverage_name}.json")
    assert response.status_code == 200

    coverage_data = response.json()["coverage"]
    assert coverage_data["name"] == coverage_name
    assert coverage_data["nativeName"] == coverage_name
    assert coverage_data["enabled"] == True

    # Test WMS GetMap request (verify local file mosaic works)
    wms_response = geoserver.get_request(
        f"/wms?SERVICE=WMS&VERSION=1.1.0&REQUEST=GetMap&LAYERS={workspace}:{coverage_name}&STYLES=&BBOX=-180,-90,180,90&WIDTH=256&HEIGHT=256&FORMAT=image/png&SRS=EPSG:4326"
    )
    assert wms_response.status_code == 200, f"WMS GetMap failed: {wms_response.text}"
    assert wms_response.headers.get("content-type").startswith("image/png")

    # Cleanup
    geoserver.delete_workspace(workspace)


def test_create_imagemosaic_manual_granules():
    """Test creating an ImageMosaic by manually adding individual granules"""
    geoserver = GeoServerCloud(GEOSERVER_URL)
    workspace = "manual_granules"
    store_name = "manual_granules_store"
    coverage_name = "manual_granules_coverage"

    # Delete and recreate workspace
    geoserver.delete_workspace(workspace)
    response = geoserver.create_workspace(workspace)
    assert response.status_code == 201

    # Create temporary directory for mosaic configuration
    with tempfile.TemporaryDirectory() as tmp_dir:
        tmp_path = Path(tmp_dir)

        # Create indexer.properties for manual granule addition
        indexer_content = f"""MosaicCRS=EPSG\\:4326
Name={coverage_name}
PropertyCollectors=CRSExtractorSPI(crs),ResolutionExtractorSPI(resolution)
Schema=*the_geom:Polygon,location:String,crs:String,resolution:String
CanBeEmpty=true
AbsolutePath=true"""

        indexer_file = tmp_path / "indexer.properties"
        indexer_file.write_text(indexer_content)

        # Create datastore.properties (using JNDI like in COG tests)
        datastore_content = """SPI=org.geotools.data.postgis.PostgisNGJNDIDataStoreFactory
# JNDI data source
jndiReferenceName=java:comp/env/jdbc/postgis

#Boolean
# perform only primary filter on bbox
# Default Boolean.TRUE
Loose\\ bbox=true

#Boolean
# use prepared statements
#Default Boolean.FALSE
preparedStatements=false
"""
        datastore_file = tmp_path / "datastore.properties"
        datastore_file.write_text(datastore_content)

        # Create ZIP file with both configuration files
        zip_file = tmp_path / "manual-granules-config.zip"
        with zipfile.ZipFile(zip_file, 'w') as zf:
            zf.write(indexer_file, "indexer.properties")
            zf.write(datastore_file, "datastore.properties")

        # Create empty ImageMosaic store
        with open(zip_file, 'rb') as f:
            zip_data = f.read()

        response = geoserver.put_request(
            f"/rest/workspaces/{workspace}/coveragestores/{store_name}/file.imagemosaic?configure=none",
            data=zip_data,
            headers={"Content-Type": "application/zip"}
        )
        assert response.status_code == 201, f"Failed to create ImageMosaic store: {response.text}"

        # Manually add individual granules from the sample data
        granule_paths = [
            "/mnt/geoserver_data/sampledata/ne/pyramid/NE1_LR_LC_SR_W_DR_1_1.tif",
            "/mnt/geoserver_data/sampledata/ne/pyramid/NE1_LR_LC_SR_W_DR_1_2.tif",
            "/mnt/geoserver_data/sampledata/ne/pyramid/NE1_LR_LC_SR_W_DR_2_1.tif",
            "/mnt/geoserver_data/sampledata/ne/pyramid/NE1_LR_LC_SR_W_DR_2_2.tif"
        ]

        for granule_path in granule_paths:
            # Use direct file paths (without file:// protocol) for external.imagemosaic
            response = geoserver.post_request(
                f"/rest/workspaces/{workspace}/coveragestores/{store_name}/external.imagemosaic",
                data=granule_path,
                headers={"Content-Type": "text/plain"}
            )
            assert response.status_code in [201, 202], f"Failed to add granule {granule_path}: {response.text}"

        # Initialize the store (list available coverages)
        response = geoserver.get_request(
            f"/rest/workspaces/{workspace}/coveragestores/{store_name}/coverages.xml?list=all"
        )
        assert response.status_code == 200, f"Failed to list coverages: {response.text}"

        # Verify coverage name is available
        response_text = response.text
        assert f"<coverageName>{coverage_name}</coverageName>" in response_text, \
            f"Coverage name '{coverage_name}' not found in response: {response_text}"

        # Create layer/coverage
        coverage_xml = f"""<coverage>
<name>{coverage_name}</name>
<title>Manual Granules Test Coverage</title>
<nativeName>{coverage_name}</nativeName>
<enabled>true</enabled>
</coverage>"""

        response = geoserver.post_request(
            f"/rest/workspaces/{workspace}/coveragestores/{store_name}/coverages",
            data=coverage_xml,
            headers={"Content-Type": "text/xml"}
        )
        assert response.status_code == 201, f"Failed to create coverage: {response.text}"

        # Verify the coverage was created successfully
        response = geoserver.get_request(
            f"/rest/workspaces/{workspace}/coveragestores/{store_name}/coverages/{coverage_name}.json"
        )
        assert response.status_code == 200, f"Failed to get coverage details: {response.text}"

        coverage_data = response.json()["coverage"]
        assert coverage_data["name"] == coverage_name
        assert coverage_data["nativeName"] == coverage_name
        assert coverage_data["enabled"] == True

        # Test WMS GetMap request (verify manual granule addition works)
        wms_response = geoserver.get_request(
            f"/wms?SERVICE=WMS&VERSION=1.1.0&REQUEST=GetMap&LAYERS={workspace}:{coverage_name}&STYLES=&BBOX=-180,-90,180,90&WIDTH=256&HEIGHT=256&FORMAT=image/png&SRS=EPSG:4326"
        )
        assert wms_response.status_code == 200, f"WMS GetMap failed: {wms_response.text}"
        assert wms_response.headers.get("content-type").startswith("image/png")

        # Cleanup
        geoserver.delete_workspace(workspace)


def test_create_imagemosaic_empty_store_with_directory_harvest():
    """
    Test creating an empty ImageMosaic store first, then harvesting granules from a directory.
    This tests the workflow: create store -> harvest directory -> create layer.
    """
    geoserver = GeoServerCloud(GEOSERVER_URL)
    workspace = "directory_harvest"
    store_name = "directory_harvest_store"
    coverage_name = "directory_harvest_coverage"

    # Clean up any existing workspace
    geoserver.delete_workspace(workspace)

    # Step 1: Create workspace
    response = geoserver.create_workspace(workspace)
    assert response.status_code == 201, f"Failed to create workspace: {response.text}"

    # Step 2: Create ImageMosaic store with configuration
    with tempfile.TemporaryDirectory() as tmp_dir:
        tmp_path = Path(tmp_dir)

        # Create indexer.properties
        indexer_content = f"""MosaicCRS=EPSG\\:4326
Name={coverage_name}
PropertyCollectors=CRSExtractorSPI(crs),ResolutionExtractorSPI(resolution)
Schema=*the_geom:Polygon,location:String,crs:String,resolution:String
CanBeEmpty=true
AbsolutePath=true"""

        indexer_file = tmp_path / "indexer.properties"
        indexer_file.write_text(indexer_content)

        # Create datastore.properties (using JNDI)
        datastore_content = """SPI=org.geotools.data.postgis.PostgisNGJNDIDataStoreFactory
# JNDI data source
jndiReferenceName=java:comp/env/jdbc/postgis

#Boolean
# perform only primary filter on bbox
# Default Boolean.TRUE
Loose\\ bbox=true

#Boolean
# use prepared statements
#Default Boolean.FALSE
preparedStatements=false
"""
        datastore_file = tmp_path / "datastore.properties"
        datastore_file.write_text(datastore_content)

        # Create ZIP file with both configuration files
        zip_file = tmp_path / "mosaic-config.zip"
        with zipfile.ZipFile(zip_file, 'w') as zf:
            zf.write(indexer_file, "indexer.properties")
            zf.write(datastore_file, "datastore.properties")

        # Upload ZIP to create empty ImageMosaic store
        with open(zip_file, 'rb') as f:
            zip_data = f.read()

        response = geoserver.put_request(
            f"/rest/workspaces/{workspace}/coveragestores/{store_name}/file.imagemosaic?configure=none",
            data=zip_data,
            headers={"Content-Type": "application/zip"}
        )
        assert response.status_code == 201, f"Failed to create ImageMosaic store: {response.text}"

        # Step 3: Harvest granules from directory
        harvest_path = "/mnt/geoserver_data/sampledata/ne/pyramid/"

        response = geoserver.post_request(
            f"/rest/workspaces/{workspace}/coveragestores/{store_name}/external.imagemosaic",
            data=harvest_path,
            headers={"Content-Type": "text/plain"}
        )
        assert response.status_code in [201, 202], f"Failed to harvest directory {harvest_path}: {response.text}"

        # Step 4: List available coverages
        response = geoserver.get_request(
            f"/rest/workspaces/{workspace}/coveragestores/{store_name}/coverages.xml?list=all"
        )
        assert response.status_code == 200, f"Failed to list coverages: {response.text}"

        # Verify coverage name is available
        response_text = response.text
        assert f"<coverageName>{coverage_name}</coverageName>" in response_text, \
            f"Coverage name '{coverage_name}' not found in response: {response_text}"

        # Step 5: Create layer/coverage
        coverage_xml = f"""<coverage>
<name>{coverage_name}</name>
<title>Directory Harvest Test Coverage</title>
<nativeName>{coverage_name}</nativeName>
<enabled>true</enabled>
</coverage>"""

        response = geoserver.post_request(
            f"/rest/workspaces/{workspace}/coveragestores/{store_name}/coverages",
            data=coverage_xml,
            headers={"Content-Type": "text/xml"}
        )
        assert response.status_code == 201, f"Layer creation failed: {response.text}"

        # Step 6: Verify the coverage was created successfully
        response = geoserver.get_request(
            f"/rest/workspaces/{workspace}/coveragestores/{store_name}/coverages/{coverage_name}.json"
        )
        assert response.status_code == 200, f"Failed to get coverage details: {response.text}"

        coverage_data = response.json()["coverage"]
        assert coverage_data["name"] == coverage_name
        assert coverage_data["nativeName"] == coverage_name
        assert coverage_data["enabled"] == True

        # Step 7: Test WMS GetMap request
        wms_response = geoserver.get_request(
            f"/wms?SERVICE=WMS&VERSION=1.1.0&REQUEST=GetMap&LAYERS={workspace}:{coverage_name}"
            f"&STYLES=&BBOX=-180,-90,180,90&WIDTH=256&HEIGHT=256&FORMAT=image/png&SRS=EPSG:4326"
        )
        assert wms_response.status_code == 200, f"WMS GetMap failed: {wms_response.text}"
        assert wms_response.headers.get("content-type").startswith("image/png")

        # Cleanup
        geoserver.delete_workspace(workspace)


def test_create_imagemosaic_empty_store_with_single_file_harvest():
    """
    Test creating an empty ImageMosaic store first, then harvesting a single file.
    This tests the workflow: create store -> harvest single file -> create layer.
    """
    geoserver = GeoServerCloud(GEOSERVER_URL)
    workspace = "single_file_harvest"
    store_name = "single_file_harvest_store"
    coverage_name = "single_file_harvest_coverage"

    # Clean up any existing workspace
    geoserver.delete_workspace(workspace)

    # Step 1: Create workspace
    response = geoserver.create_workspace(workspace)
    assert response.status_code == 201, f"Failed to create workspace: {response.text}"

    # Step 2: Create ImageMosaic store
    with tempfile.TemporaryDirectory() as tmp_dir:
        tmp_path = Path(tmp_dir)

        # Create indexer.properties for single file
        indexer_content = f"""MosaicCRS=EPSG\\:4326
Name={coverage_name}
PropertyCollectors=CRSExtractorSPI(crs),ResolutionExtractorSPI(resolution)
Schema=*the_geom:Polygon,location:String,crs:String,resolution:String
CanBeEmpty=true
AbsolutePath=true"""

        indexer_file = tmp_path / "indexer.properties"
        indexer_file.write_text(indexer_content)

        # Create datastore.properties (using JNDI)
        datastore_content = """SPI=org.geotools.data.postgis.PostgisNGJNDIDataStoreFactory
# JNDI data source
jndiReferenceName=java:comp/env/jdbc/postgis

#Boolean
# perform only primary filter on bbox
# Default Boolean.TRUE
Loose\\ bbox=true

#Boolean
# use prepared statements
#Default Boolean.FALSE
preparedStatements=false
"""
        datastore_file = tmp_path / "datastore.properties"
        datastore_file.write_text(datastore_content)

        # Create ZIP file with both files
        zip_file = tmp_path / "mosaic-single-config.zip"
        with zipfile.ZipFile(zip_file, 'w') as zf:
            zf.write(indexer_file, "indexer.properties")
            zf.write(datastore_file, "datastore.properties")

        # Upload ZIP to create ImageMosaic store
        with open(zip_file, 'rb') as f:
            zip_data = f.read()

        response = geoserver.put_request(
            f"/rest/workspaces/{workspace}/coveragestores/{store_name}/file.imagemosaic?configure=none",
            data=zip_data,
            headers={"Content-Type": "application/zip"}
        )
        assert response.status_code == 201, f"Failed to create ImageMosaic store: {response.text}"

        # Step 3: Harvest single file
        single_file_path = "/mnt/geoserver_data/sampledata/ne/NE1_LR_LC_SR_W_DR.tif"

        response = geoserver.post_request(
            f"/rest/workspaces/{workspace}/coveragestores/{store_name}/external.imagemosaic",
            data=single_file_path,
            headers={"Content-Type": "text/plain"}
        )
        assert response.status_code in [201, 202], f"Failed to harvest file {single_file_path}: {response.text}"

        # Step 4: List and create layer
        response = geoserver.get_request(
            f"/rest/workspaces/{workspace}/coveragestores/{store_name}/coverages.xml?list=all"
        )
        assert response.status_code == 200, f"Failed to list coverages: {response.text}"

        # Create layer/coverage
        coverage_xml = f"""<coverage>
<name>{coverage_name}</name>
<title>Single File Harvest Test Coverage</title>
<nativeName>{coverage_name}</nativeName>
<enabled>true</enabled>
</coverage>"""

        response = geoserver.post_request(
            f"/rest/workspaces/{workspace}/coveragestores/{store_name}/coverages",
            data=coverage_xml,
            headers={"Content-Type": "text/xml"}
        )
        assert response.status_code == 201, f"Layer creation failed: {response.text}"

        # Verify WMS works
        wms_response = geoserver.get_request(
            f"/wms?SERVICE=WMS&VERSION=1.1.0&REQUEST=GetMap&LAYERS={workspace}:{coverage_name}"
            f"&STYLES=&BBOX=-180,-90,180,90&WIDTH=256&HEIGHT=256&FORMAT=image/png&SRS=EPSG:4326"
        )
        assert wms_response.status_code == 200, f"WMS GetMap failed: {wms_response.text}"
        assert wms_response.headers.get("content-type").startswith("image/png")

        # Cleanup
        geoserver.delete_workspace(workspace)


def test_create_imagemosaic_via_xml_store_creation():
    """
    Test creating an ImageMosaic store via XML store creation (not file upload).
    This tests direct store creation pointing to a directory.
    """
    geoserver = GeoServerCloud(GEOSERVER_URL)
    workspace = "xml_store_creation"
    store_name = "xml_store_creation_store"

    # Clean up any existing workspace
    geoserver.delete_workspace(workspace)

    # Step 1: Create workspace
    response = geoserver.create_workspace(workspace)
    assert response.status_code == 201, f"Failed to create workspace: {response.text}"

    # Step 2: Create ImageMosaic store via XML store creation
    store_xml = f"""<coverageStore>
<name>{store_name}</name>
<workspace>
<name>{workspace}</name>
</workspace>
<type>ImageMosaic</type>
<enabled>true</enabled>
<url>/mnt/geoserver_data/sampledata/ne/pyramid/</url>
</coverageStore>"""

    response = geoserver.post_request(
        f"/rest/workspaces/{workspace}/coveragestores",
        data=store_xml,
        headers={"Content-Type": "text/xml"}
    )
    assert response.status_code == 201, f"Store creation via XML failed: {response.text}"

    # Step 3: List available coverages
    response = geoserver.get_request(
        f"/rest/workspaces/{workspace}/coveragestores/{store_name}/coverages.xml?list=all"
    )
    assert response.status_code == 200, f"Failed to list coverages: {response.text}"
    assert "coverageName" in response.text, f"No coverage found in response: {response.text}"

    # Extract coverage name
    import re
    coverage_match = re.search(r'<coverageName>([^<]+)</coverageName>', response.text)
    assert coverage_match, f"Could not extract coverage name from: {response.text}"
    coverage_name = coverage_match.group(1)

    # Create layer
    coverage_xml = f"""<coverage>
<name>{coverage_name}</name>
<title>XML Store Creation Test Coverage</title>
<nativeName>{coverage_name}</nativeName>
<enabled>true</enabled>
</coverage>"""

    response = geoserver.post_request(
        f"/rest/workspaces/{workspace}/coveragestores/{store_name}/coverages",
        data=coverage_xml,
        headers={"Content-Type": "text/xml"}
    )
    assert response.status_code == 201, f"Layer creation failed: {response.text}"

    # Verify WMS works
    wms_response = geoserver.get_request(
        f"/wms?SERVICE=WMS&VERSION=1.1.0&REQUEST=GetMap&LAYERS={workspace}:{coverage_name}"
        f"&STYLES=&BBOX=-180,-90,180,90&WIDTH=256&HEIGHT=256&FORMAT=image/png&SRS=EPSG:4326"
    )
    assert wms_response.status_code == 200, f"WMS GetMap failed: {wms_response.text}"
    assert wms_response.headers.get("content-type").startswith("image/png")

    # Cleanup
    geoserver.delete_workspace(workspace)
acceptance_tests/tests/test_imagemosaic_cog.py (new file, 184 lines)
@@ -0,0 +1,184 @@
import os
import tempfile
import zipfile
from pathlib import Path
import pytest
from geoservercloud import GeoServerCloud
from conftest import GEOSERVER_URL


def _create_imagemosaic(geoserver, workspace, coverage, granules, indexer_content, title="ImageMosaic Coverage"):
    """Helper function to create an ImageMosaic with COG granules"""
    # Delete and recreate workspace
    geoserver.delete_workspace(workspace)
    response = geoserver.create_workspace(workspace)
    assert response.status_code == 201

    # Create temporary directory for mosaic files
    with tempfile.TemporaryDirectory() as tmp_dir:
        tmp_path = Path(tmp_dir)

        # Create indexer.properties
        indexer_file = tmp_path / "indexer.properties"
        indexer_file.write_text(indexer_content)

        # Create datastore.properties (using JNDI)
        datastore_content = """SPI=org.geotools.data.postgis.PostgisNGJNDIDataStoreFactory
# JNDI data source
jndiReferenceName=java:comp/env/jdbc/postgis

#Boolean
# perform only primary filter on bbox
# Default Boolean.TRUE
Loose\\ bbox=true

#Boolean
# use prepared statements
#Default Boolean.FALSE
preparedStatements=false
"""
        datastore_file = tmp_path / "datastore.properties"
        datastore_file.write_text(datastore_content)

        # Create zip file
        zip_file = tmp_path / f"{coverage}.zip"
        with zipfile.ZipFile(zip_file, 'w') as zf:
            zf.write(indexer_file, "indexer.properties")
            zf.write(datastore_file, "datastore.properties")

            # Create timeregex.properties if needed for time-based PropertyCollector
            if "timeregex" in indexer_content:
                # Regex pattern to extract date from MODIS filename format: 2018.01.01
                timeregex_content = "regex=(?<=\\.)([0-9]{4}\\.[0-9]{2}\\.[0-9]{2})(?=\\.),format=yyyy.MM.dd"
                timeregex_file = tmp_path / "timeregex.properties"
                timeregex_file.write_text(timeregex_content)
                zf.write(timeregex_file, "timeregex.properties")

        # Create empty imagemosaic
        with open(zip_file, 'rb') as f:
            zip_data = f.read()

        response = geoserver.put_request(
            f"/rest/workspaces/{workspace}/coveragestores/{coverage}/file.imagemosaic?configure=none",
            data=zip_data,
            headers={"Content-Type": "application/zip"}
        )
        assert response.status_code == 201

        # Add granules
        for uri in granules:
            response = geoserver.post_request(
                f"/rest/workspaces/{workspace}/coveragestores/{coverage}/remote.imagemosaic",
                data=uri,
                headers={"Content-Type": "text/plain"}
            )
            # Accept both 202 (Accepted) and 201 (Created) as valid responses
            assert response.status_code in [201, 202]

        # Initialize the store (list available coverages)
        response = geoserver.get_request(
            f"/rest/workspaces/{workspace}/coveragestores/{coverage}/coverages.xml?list=all"
        )
        assert response.status_code == 200

        # Verify coverage name in response
        response_text = response.text
        assert f"<coverageName>{coverage}</coverageName>" in response_text

        # Configure the coverage
        coverage_xml = f"""<coverage>
<name>{coverage}</name>
<title>{title}</title>
<nativeName>{coverage}</nativeName>
<enabled>true</enabled>
</coverage>"""

        response = geoserver.post_request(
            f"/rest/workspaces/{workspace}/coveragestores/{coverage}/coverages",
            data=coverage_xml,
            headers={"Content-Type": "text/xml"}
        )
        assert response.status_code == 201

        # Verify the coverage was created
        response = geoserver.get_request(f"/rest/workspaces/{workspace}/coveragestores/{coverage}/coverages/{coverage}.json")
        assert response.status_code == 200

        # Verify coverage properties
        coverage_data = response.json()["coverage"]
        assert coverage_data["name"] == coverage
        assert coverage_data["nativeName"] == coverage
        assert coverage_data["enabled"] == True
        assert coverage_data["title"] == title

        # Test WMS GetMap request
        wms_response = geoserver.get_request(
            f"/wms?SERVICE=WMS&VERSION=1.1.0&REQUEST=GetMap&LAYERS={workspace}:{coverage}&STYLES=&BBOX=-180,-90,180,90&WIDTH=256&HEIGHT=256&FORMAT=image/png&SRS=EPSG:4326"
        )
        assert wms_response.status_code == 200
        assert wms_response.headers.get("content-type").startswith("image/png")

        return coverage_data


def test_create_imagemosaic_landshallow_topo():
    """Test creating an ImageMosaic coverage store with multiple COG granules"""
    geoserver = GeoServerCloud(GEOSERVER_URL)
    workspace = "s3cog_public"
    coverage = "land_shallow_topo_http"

    # HTTP granules
    granules = [
        "https://test-data-cog-public.s3.amazonaws.com/public/land_shallow_topo_21600_NE_cog.tif",
        "https://test-data-cog-public.s3.amazonaws.com/public/land_shallow_topo_21600_NW_cog.tif",
        "https://test-data-cog-public.s3.amazonaws.com/public/land_shallow_topo_21600_SE_cog.tif",
        "https://test-data-cog-public.s3.amazonaws.com/public/land_shallow_topo_21600_SW_cog.tif",
    ]

    # Create indexer.properties
    indexer_content = f"""Cog=true
CogRangeReader=it.geosolutions.imageioimpl.plugins.cog.HttpRangeReader
Schema=*the_geom:Polygon,location:String
CanBeEmpty=true
Name={coverage}"""

    _create_imagemosaic(geoserver, workspace, coverage, granules, indexer_content, "Land Shallow Topo HTTP")

    # Cleanup
    geoserver.delete_workspace(workspace)


@pytest.mark.skip(reason="Takes too long - enable for full testing")
def test_create_imagemosaic_modis():
    """Test creating a MODIS ImageMosaic coverage with time dimension (reproduces official tutorial)"""
    geoserver = GeoServerCloud(GEOSERVER_URL)
    workspace = "modis_cog"
    coverage = "modisvi"

    # MODIS COG datasets from NASA EarthData
    modis_granules = [
        "https://modis-vi-nasa.s3-us-west-2.amazonaws.com/MOD13A1.006/2018.01.01.tif",
        "https://modis-vi-nasa.s3-us-west-2.amazonaws.com/MOD13A1.006/2018.01.17.tif",
    ]

    # Create indexer.properties (based on MODIS tutorial)
    indexer_content = f"""Cog=true
PropertyCollectors=TimestampFileNameExtractorSPI[timeregex](time)
TimeAttribute=time
Schema=*the_geom:Polygon,location:String,time:java.util.Date
CanBeEmpty=true
Name={coverage}"""

    coverage_data = _create_imagemosaic(geoserver, workspace, coverage, modis_granules, indexer_content, "MODIS Vegetation Index")

    # Additional test for time-based query (since MODIS has time dimension)
    time_wms_response = geoserver.get_request(
        f"/wms?SERVICE=WMS&VERSION=1.1.0&REQUEST=GetMap&LAYERS={workspace}:{coverage}&STYLES=&BBOX=-180,-90,180,90&WIDTH=256&HEIGHT=256&FORMAT=image/png&SRS=EPSG:4326&TIME=2018-01-01"
    )
    assert time_wms_response.status_code == 200
    assert time_wms_response.headers.get("content-type").startswith("image/png")

    # Cleanup
    #geoserver.delete_workspace(workspace)
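Note the two harvesting endpoints used across these tests: local filesystem paths are posted to external.imagemosaic, while remote COG granule URLs, as in the helper above, go to remote.imagemosaic. A curl equivalent of the helper's granule loop, shown only for illustration with names taken from the first test:

```shell
curl -u admin:geoserver -XPOST -H "Content-type: text/plain" \
  -d "https://test-data-cog-public.s3.amazonaws.com/public/land_shallow_topo_21600_NE_cog.tif" \
  "http://localhost:9090/geoserver/cloud/rest/workspaces/s3cog_public/coveragestores/land_shallow_topo_http/remote.imagemosaic"
```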
@@ -8,6 +8,8 @@ services:
     restart: always
     volumes:
       - ./acceptance_pg_entrypoint:/docker-entrypoint-initdb.d:ro
+    ports:
+      - "5433:5432" # Expose on port 5433 to avoid conflict with local PostgreSQL
     healthcheck:
       test: ["CMD-SHELL", "pg_isready -U geodata"]
       interval: 30s
@@ -22,7 +24,7 @@ services:
           memory: 512M
 
   acceptance:
-    image: acceptance:${TAG}
+    image: geoservercloud/acceptance:latest
     user: ${GS_USER}
     depends_on:
       geodatabase:
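Exposing port 5433 makes the geodatabase reachable from the host, which run_tests_locally.sh relies on. A quick connectivity check, using the compose defaults for the credentials, could look like:

```shell
PGPASSWORD=geodata psql -h localhost -p 5433 -U geodata -d geodata -c '\conninfo'
```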
@@ -3,7 +3,8 @@
 GSUID=$(id -u)
 GSGID=$(id -g)
 
-GS_USER=$GSUID:$GSGID COMPOSE_PROJECT_NAME=gscloud-acceptance-datadir \
+GS_USER="$GSUID:$GSGID" \
+COMPOSE_PROJECT_NAME=gscloud-acceptance-datadir \
 docker compose \
   -f compose.yml \
   -f catalog-datadir.yml \
@@ -1,5 +1,9 @@
 #/bin/bash
 
+GSUID=$(id -u)
+GSGID=$(id -g)
+
+GS_USER="$GSUID:$GSGID" \
 COMPOSE_PROJECT_NAME=gscloud-acceptance-jdbcconfig \
 docker compose \
   -f compose.yml \
@@ -1,5 +1,9 @@
 #/bin/bash
 
+GSUID=$(id -u)
+GSGID=$(id -g)
+
+GS_USER="$GSUID:$GSGID" \
 COMPOSE_PROJECT_NAME=gscloud-acceptance-pgconfig \
 docker compose \
   -f compose.yml \
@@ -28,8 +28,8 @@ x-geoserver-env: &geoserver_environment
 services:
   init-datadir:
     image: alpine:3.18.4
-    user: ${GS_USER}
-    command: sh -c "cd /opt/app/data_directory; if [ ! -f global.xml ]; then tar xvzf /tmp/datadir.tgz; fi"
+    user: root
+    command: sh -c "cd /opt/app/data_directory; if [ ! -f global.xml ]; then tar xvzf /tmp/datadir.tgz; fi; chown -R ${GS_USER} /opt/app/data_directory"
     volumes:
       - data_directory:/opt/app/data_directory
       - ./catalog-datadir.tgz:/tmp/datadir.tgz
@@ -1,9 +1,12 @@
+volumes:
+  # geowebcache tiles shared volume
+  geowebcache_data:
+  # geoserver data files shared volume (not datadir, data)
+  geoserver_data:
+
 include:
   - ./infra.yml
 
-volumes:
-  geowebcache_data:
-
 x-gs-dependencies: &gs-dependencies
   rabbitmq:
     condition: service_healthy
@@ -17,8 +20,19 @@ x-gs-dependencies: &gs-dependencies
   postgis:
     condition: service_started
     required: true
+  init-test-data:
+    condition: service_completed_successfully
+    required: true
 
 services:
+  init-test-data:
+    image: alpine:3.18.4
+    user: root
+    volumes:
+      - geoserver_data:/mnt/geoserver_data
+      - geowebcache_data:/mnt/geowebcache_data
+      - ./sampledata.tgz:/tmp/sampledata.tgz
+    command: sh -c "chown -R ${GS_USER} /mnt/geoserver_data /mnt/geowebcache_data && cd /mnt/geoserver_data && if [ ! -d sampledata ]; then tar xvzf /tmp/sampledata.tgz && chown -R ${GS_USER} sampledata; fi"
 
   acl:
     image: ${ACL_REPOSITORY}/geoserver-acl:${ACL_TAG}
@@ -1,3 +1,7 @@
 #/bin/sh
 
+GSUID=$(id -u)
+GSGID=$(id -g)
+
+GS_USER="$GSUID:$GSGID" \
 docker compose -f compose.yml -f catalog-datadir.yml $@
@@ -1,3 +1,7 @@
 #/bin/sh
 
+GSUID=$(id -u)
+GSGID=$(id -g)
+
+GS_USER="$GSUID:$GSGID" \
 docker compose -f compose.yml -f catalog-jdbcconfig.yml $@
@@ -1,3 +1,7 @@
 #/bin/sh
 
+GSUID=$(id -u)
+GSGID=$(id -g)
+
+GS_USER="$GSUID:$GSGID" \
 docker compose -f compose.yml -f catalog-pgconfig.yml $@
compose/sampledata.tgz (new binary file, not shown)
@@ -1,13 +1,14 @@
 # Define reusable volume mounts as an anchor
 x-geoserver-volume-mounts: &geoserver_volumes
-  - geowebcache_data:/data/geowebcache
+  - geowebcache_data:/mnt/geowebcache_data
+  - geoserver_data:/mnt/geoserver_data
 
 # Define reusable environment variables
 x-geoserver-env: &geoserver_environment
   SPRING_PROFILES_ACTIVE: "${GEOSERVER_DEFAULT_PROFILES}"
   # Enable the PostGIS JNDI datasource (for development purposes)
   JNDI_POSTGIS_ENABLED: true
-  GEOWEBCACHE_CACHE_DIR: /data/geowebcache
+  GEOWEBCACHE_CACHE_DIR: /mnt/geowebcache_data
   JAVA_OPTS: "${JAVA_OPTS_GEOSERVER}"
 
 services: