diff --git a/GEOCHECK_API.md b/GEOCHECK_API.md new file mode 100644 index 0000000..44102e4 --- /dev/null +++ b/GEOCHECK_API.md @@ -0,0 +1,384 @@ +# GeoCheck API Implementation + +This document describes the GeoCheck (Geometry Check) API implementation for the MPC-Plus backend. + +## Overview + +The GeoCheck API provides endpoints for managing geometry check data from radiation therapy machines. Geometry checks include measurements for: +- **IsoCenterGroup**: ISO center size and offsets (MV/KV) +- **BeamGroup**: Relative output, uniformity, and center shift +- **CollimationGroup**: Collimation rotation offset +- **GantryGroup**: Gantry absolute and relative measurements +- **EnhancedCouchGroup**: Couch position errors and movements +- **MLCGroup**: Multi-Leaf Collimator (MLC) positions for 40 leaves (banks A & B) +- **MLCBacklashGroup**: MLC backlash measurements for 40 leaves (banks A & B) +- **JawsGroup**: Jaw positions (X1, X2, Y1, Y2) +- **JawsParallelismGroup**: Jaw parallelism measurements + +## Architecture + +The implementation follows the repository pattern used throughout the API: + +``` +Models/ + └── GeoCheck.cs # Domain model +Repositories/ + ├── Abstractions/ + │ └── IGeoCheckRepository.cs # Repository interface + ├── Entities/ + │ └── GeoCheckEntity.cs # Database entity with Supabase attributes + └── Supabase/ + └── SupabaseGeoCheckRepository.cs # Supabase implementation +Controllers/ + └── GeoCheckController.cs # REST API endpoints +Extensions/ + └── ServiceCollectionExtensions.cs # DI configuration +``` + +## API Endpoints + +All endpoints are prefixed with `/api/geocheck` + +### GET /api/geocheck +Get all geometry checks with optional filtering. + +**Query Parameters:** +- `type` (string): Filter by beam type (e.g., "6xff") +- `machine-id` (string): Filter by machine ID +- `date` (string): Filter by specific date (YYYY-MM-DD) +- `start-date` (string): Filter by date range start +- `end-date` (string): Filter by date range end + +**Response:** `200 OK` +```json +[ + { + "id": "geo-001", + "type": "6xff", + "date": "2025-01-15", + "machineId": "NDS-WKS-SN6543", + "path": "/data/geochecks/...", + "isoCenterSize": 0.5, + "relativeOutput": 1.002, + "mlcLeavesA": { + "Leaf11": 0.1, + "Leaf12": 0.15, + ... + }, + "mlcLeavesB": { ... }, + ... + } +] +``` + +### GET /api/geocheck/{id} +Get a specific geometry check by ID. + +**Response:** `200 OK` or `404 Not Found` + +### POST /api/geocheck +Create a new geometry check. + +**Request Body:** +```json +{ + "id": "geo-002", + "type": "6xff", + "date": "2025-01-15", + "machineId": "NDS-WKS-SN6543", + "relativeOutput": 1.001, + "relativeUniformity": 0.998, + "mlcLeavesA": { + "Leaf11": 0.1, + "Leaf12": 0.12, + ... + }, + ... +} +``` + +**Response:** `201 Created` with Location header + +### PUT /api/geocheck/{id} +Update an existing geometry check. + +**Response:** `204 No Content` or `404 Not Found` + +### DELETE /api/geocheck/{id} +Delete a geometry check. + +**Response:** `204 No Content` or `404 Not Found` + +## Data Model + +### GeoCheck Model Properties + +| Property | Type | Required | Description | +|----------|------|----------|-------------| +| Id | string | Yes | Unique identifier | +| Type | string | Yes | Beam type (e.g., "6xff") | +| Date | DateOnly | Yes | Check date | +| MachineId | string | Yes | Associated machine ID | +| Path | string | No | File path to raw data | +| IsoCenterSize | double? | No | ISO center size | +| IsoCenterMVOffset | double? | No | MV offset | +| IsoCenterKVOffset | double? 
| No | KV offset |
+| RelativeOutput | double? | No | Relative output |
+| RelativeUniformity | double? | No | Relative uniformity |
+| CenterShift | double? | No | Center shift |
+| CollimationRotationOffset | double? | No | Collimation rotation |
+| GantryAbsolute | double? | No | Gantry absolute position |
+| GantryRelative | double? | No | Gantry relative position |
+| CouchMaxPositionError | double? | No | Max couch position error |
+| CouchLat | double? | No | Couch lateral position |
+| CouchLng | double? | No | Couch longitudinal position |
+| CouchVrt | double? | No | Couch vertical position |
+| CouchRtnFine | double? | No | Fine couch rotation |
+| CouchRtnLarge | double? | No | Large couch rotation |
+| RotationInducedCouchShiftFullRange | double? | No | Rotation-induced shift |
+| MLCLeavesA | Dictionary&lt;string, double&gt;? | No | MLC bank A positions (40 leaves) |
+| MLCLeavesB | Dictionary&lt;string, double&gt;? | No | MLC bank B positions (40 leaves) |
+| MaxOffsetA | double? | No | Max offset bank A |
+| MaxOffsetB | double? | No | Max offset bank B |
+| MeanOffsetA | double? | No | Mean offset bank A |
+| MeanOffsetB | double? | No | Mean offset bank B |
+| MLCBacklashA | Dictionary&lt;string, double&gt;? | No | MLC backlash bank A (40 leaves) |
+| MLCBacklashB | Dictionary&lt;string, double&gt;? | No | MLC backlash bank B (40 leaves) |
+| MLCBacklashMaxA | double? | No | Max backlash bank A |
+| MLCBacklashMaxB | double? | No | Max backlash bank B |
+| MLCBacklashMeanA | double? | No | Mean backlash bank A |
+| MLCBacklashMeanB | double? | No | Mean backlash bank B |
+| JawX1 | double? | No | Jaw X1 position |
+| JawX2 | double? | No | Jaw X2 position |
+| JawY1 | double? | No | Jaw Y1 position |
+| JawY2 | double? | No | Jaw Y2 position |
+| JawParallelismX1 | double? | No | Jaw X1 parallelism |
+| JawParallelismX2 | double? | No | Jaw X2 parallelism |
+| JawParallelismY1 | double? | No | Jaw Y1 parallelism |
+| JawParallelismY2 | double? | No | Jaw Y2 parallelism |
+| Note | string | No | Additional notes |
+
+### MLC Leaves Structure
+
+Both `MLCLeavesA/B` and `MLCBacklashA/B` use dictionaries with keys `"Leaf11"` through `"Leaf50"` (40 leaves total):
+
+```json
+{
+  "Leaf11": 0.1,
+  "Leaf12": 0.12,
+  "Leaf13": 0.11,
+  ...
+  "Leaf50": 0.09
+}
+```
+
+## Database Schema
+
+The `geochecks` table uses JSONB columns for MLC leaf data to avoid column explosion (40 leaves × 4 groups = 160 potential columns).
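+
+As a quick sketch of how these dictionaries travel to and from the JSONB columns (variable names and values below are illustrative, not part of the API):
+
+```csharp
+using System.Collections.Generic;
+using System.Linq;
+using System.Text.Json;
+
+// Leaf11 .. Leaf50 — the 40 keys the model expects (placeholder values).
+var leavesA = Enumerable.Range(11, 40).ToDictionary(i => $"Leaf{i}", _ => 0.0);
+
+// Serialized string, as stored in the mlc_leaves_a JSONB column.
+string json = JsonSerializer.Serialize(leavesA);
+
+// Reading the column back restores the same dictionary shape.
+var restored = JsonSerializer.Deserialize<Dictionary<string, double>>(json);
+```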
+
+### Table: geochecks
+
+```sql
+CREATE TABLE geochecks (
+    id VARCHAR(255) PRIMARY KEY,
+    type VARCHAR(50) NOT NULL,
+    date DATE NOT NULL,
+    machine_id VARCHAR(255) NOT NULL,
+    path TEXT,
+
+    -- IsoCenterGroup
+    iso_center_size DOUBLE PRECISION,
+    iso_center_mv_offset DOUBLE PRECISION,
+    iso_center_kv_offset DOUBLE PRECISION,
+
+    -- BeamGroup
+    relative_output DOUBLE PRECISION,
+    relative_uniformity DOUBLE PRECISION,
+    center_shift DOUBLE PRECISION,
+
+    -- CollimationGroup
+    collimation_rotation_offset DOUBLE PRECISION,
+
+    -- GantryGroup
+    gantry_absolute DOUBLE PRECISION,
+    gantry_relative DOUBLE PRECISION,
+
+    -- EnhancedCouchGroup
+    couch_max_position_error DOUBLE PRECISION,
+    couch_lat DOUBLE PRECISION,
+    couch_lng DOUBLE PRECISION,
+    couch_vrt DOUBLE PRECISION,
+    couch_rtn_fine DOUBLE PRECISION,
+    couch_rtn_large DOUBLE PRECISION,
+    rotation_induced_couch_shift_full_range DOUBLE PRECISION,
+
+    -- MLCGroup (JSONB for 40 leaves each)
+    mlc_leaves_a JSONB,
+    mlc_leaves_b JSONB,
+    max_offset_a DOUBLE PRECISION,
+    max_offset_b DOUBLE PRECISION,
+    mean_offset_a DOUBLE PRECISION,
+    mean_offset_b DOUBLE PRECISION,
+
+    -- MLCBacklashGroup (JSONB for 40 leaves each)
+    mlc_backlash_a JSONB,
+    mlc_backlash_b JSONB,
+    mlc_backlash_max_a DOUBLE PRECISION,
+    mlc_backlash_max_b DOUBLE PRECISION,
+    mlc_backlash_mean_a DOUBLE PRECISION,
+    mlc_backlash_mean_b DOUBLE PRECISION,
+
+    -- JawsGroup
+    jaw_x1 DOUBLE PRECISION,
+    jaw_x2 DOUBLE PRECISION,
+    jaw_y1 DOUBLE PRECISION,
+    jaw_y2 DOUBLE PRECISION,
+
+    -- JawsParallelismGroup
+    jaw_parallelism_x1 DOUBLE PRECISION,
+    jaw_parallelism_x2 DOUBLE PRECISION,
+    jaw_parallelism_y1 DOUBLE PRECISION,
+    jaw_parallelism_y2 DOUBLE PRECISION,
+
+    note TEXT,
+
+    CONSTRAINT fk_machine FOREIGN KEY (machine_id)
+        REFERENCES machines(id) ON DELETE CASCADE
+);
+
+-- Indexes for performance
+CREATE INDEX idx_geochecks_machine_id ON geochecks(machine_id);
+CREATE INDEX idx_geochecks_date ON geochecks(date);
+CREATE INDEX idx_geochecks_type ON geochecks(type);
+CREATE INDEX idx_geochecks_machine_date ON geochecks(machine_id, date);
+CREATE INDEX idx_geochecks_machine_type ON geochecks(machine_id, type);
+```
+
+## Configuration
+
+The GeoCheck repository is registered in `Program.cs`:
+
+```csharp
+builder.Services.AddGeoCheckDataAccess(builder.Configuration);
+```
+
+This uses Supabase as the data store and requires valid Supabase credentials in the `.env` file:
+
+```env
+SUPABASE_URL=https://your-project.supabase.co
+SUPABASE_KEY=your-service-role-key
+```
+
+## Implementation Details
+
+### JSON Serialization for MLC Leaves
+
+The entity uses JSON serialization to store MLC leaf dictionaries in the database:
+
+```csharp
+// In GeoCheckEntity.cs
+[Column("mlc_leaves_a")]
+public string? MLCLeavesAJson { get; set; }
+
+private static Dictionary<string, double>? DeserializeLeaves(string? json)
+{
+    if (string.IsNullOrWhiteSpace(json))
+        return null;
+    return JsonSerializer.Deserialize<Dictionary<string, double>>(json);
+}
+
+private static string? SerializeLeaves(Dictionary<string, double>? leaves)
+{
+    if (leaves == null || leaves.Count == 0)
+        return null;
+    return JsonSerializer.Serialize(leaves);
+}
+```
+
+### Error Handling
+
+The controller includes comprehensive error handling:
+- `400 Bad Request` for invalid date formats or mismatched IDs
+- `404 Not Found` when a geometry check doesn't exist
+- `409 Conflict` for duplicate IDs
+- `500 Internal Server Error` for unexpected errors
+
+All errors are logged using `ILogger`; the condensed example below shows how `Create` applies this mapping.
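+
+```csharp
+// Condensed from GeoCheckController.Create in this PR — not additional API surface.
+[HttpPost]
+public async Task<ActionResult<GeoCheck>> Create([FromBody] GeoCheck geoCheck, CancellationToken cancellationToken)
+{
+    try
+    {
+        var created = await _repository.CreateAsync(geoCheck, cancellationToken);
+        return CreatedAtAction(nameof(GetById), new { id = created.Id }, created); // 201 + Location header
+    }
+    catch (InvalidOperationException exception)
+    {
+        _logger.LogWarning(exception, "Conflict creating geometry check");
+        return Conflict(exception.Message); // duplicate id -> 409
+    }
+    catch (Exception ex)
+    {
+        _logger.LogError(ex, "Error creating geometry check");
+        return StatusCode(500, "An error occurred while creating the geometry check."); // anything else -> 500
+    }
+}
+```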
+ +### Filtering + +The repository supports filtering by: +- Machine ID +- Beam type +- Specific date +- Date range (start/end dates) + +Results are ordered by date (descending) and type. + +## Testing + +To test the GeoCheck endpoints: + +```bash +# Get all geometry checks +curl http://localhost:5000/api/geocheck + +# Get geometry checks for a specific machine +curl "http://localhost:5000/api/geocheck?machine-id=NDS-WKS-SN6543" + +# Get geometry checks by type +curl "http://localhost:5000/api/geocheck?type=6xff" + +# Get geometry check by ID +curl http://localhost:5000/api/geocheck/geo-001 + +# Create a new geometry check +curl -X POST http://localhost:5000/api/geocheck \ + -H "Content-Type: application/json" \ + -d '{ + "id": "geo-001", + "type": "6xff", + "date": "2025-01-15", + "machineId": "NDS-WKS-SN6543", + "relativeOutput": 1.002, + "mlcLeavesA": { + "Leaf11": 0.1, + "Leaf12": 0.12 + } + }' +``` + +## Integration with Python Pipeline + +The Python data extraction pipeline (`Geo6xfffModel.py`) maps to this C# model. The `Uploader.py` should be updated to upload to the `geochecks` table: + +```python +# In Uploader.py or data extraction +geo_check_data = { + "id": f"geo-{timestamp}", + "type": "6xff", + "date": check_date, + "machine_id": machine_id, + "mlc_leaves_a": {f"Leaf{i}": value for i in range(11, 51)}, + "mlc_leaves_b": {f"Leaf{i}": value for i in range(11, 51)}, + # ... other fields +} + +# Upload to Supabase +response = supabase.table("geochecks").insert(geo_check_data).execute() +``` + +## Future Enhancements + +1. **Thresholds**: Add threshold checking for geometry measurements +2. **Alerts**: Notify when measurements exceed acceptable ranges +3. **Trending**: API endpoints for trending analysis over time +4. **Comparison**: Compare geometry checks across dates +5. **Validation**: Add stricter validation for MLC leaf ranges (11-50) +6. 
**Batch Operations**: Support bulk upload/update for multiple checks
+
+## Related Documentation
+
+- See `api.yaml` for OpenAPI specification
+- See `Geo6xfffModel.py` for Python model structure
+- See `create_geochecks_table.sql` for database migration
diff --git a/requirements.txt b/requirements.txt
index 08a3b1d..b730b9c 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -1,3 +1,4 @@
 watchdog==3.0.0
 pylinac==3.38.0
 supabase==2.0.0
+python-dotenv==1.0.0
diff --git a/src/api.Tests/Controllers/BeamControllerTests.cs b/src/api.Tests/Controllers/BeamControllerTests.cs
index 29c51af..2ed2d45 100644
--- a/src/api.Tests/Controllers/BeamControllerTests.cs
+++ b/src/api.Tests/Controllers/BeamControllerTests.cs
@@ -8,12 +8,12 @@ namespace Api.Tests.Controllers;
 public class BeamControllerTests
 {
     private readonly Mock<IBeamRepository> _mockRepository;
-    private readonly BeamController _controller;
+    private readonly BeamsController _controller;
 
     public BeamControllerTests()
     {
         _mockRepository = new Mock<IBeamRepository>();
-        _controller = new BeamController(_mockRepository.Object);
+        _controller = new BeamsController(_mockRepository.Object);
     }
 
     [Fact]
@@ -136,7 +136,7 @@ public async Task Create_WithValidBeam_ReturnsCreatedAtActionResult()
 
         // Assert
         var createdResult = result.Result.Should().BeOfType<CreatedAtActionResult>().Subject;
-        createdResult.ActionName.Should().Be(nameof(BeamController.GetById));
+        createdResult.ActionName.Should().Be(nameof(BeamsController.GetById));
         createdResult.RouteValues!["id"].Should().Be("beam-new");
     }
 
diff --git a/src/api.Tests/Controllers/MachineControllerTests.cs b/src/api.Tests/Controllers/MachineControllerTests.cs
index 55f3ba8..b2c6be1 100644
--- a/src/api.Tests/Controllers/MachineControllerTests.cs
+++ b/src/api.Tests/Controllers/MachineControllerTests.cs
@@ -8,12 +8,12 @@ namespace Api.Tests.Controllers;
 public class MachineControllerTests
 {
     private readonly Mock<IMachineRepository> _mockRepository;
-    private readonly MachineController _controller;
+    private readonly MachinesController _controller;
 
     public MachineControllerTests()
     {
         _mockRepository = new Mock<IMachineRepository>();
-        _controller = new MachineController(_mockRepository.Object);
+        _controller = new MachinesController(_mockRepository.Object);
     }
 
     [Fact]
@@ -82,7 +82,7 @@ public async Task Create_WithValidMachine_ReturnsCreatedAtActionResult()
 
         // Assert
         var createdResult = result.Result.Should().BeOfType<CreatedAtActionResult>().Subject;
-        createdResult.ActionName.Should().Be(nameof(MachineController.GetById));
+        createdResult.ActionName.Should().Be(nameof(MachinesController.GetById));
         createdResult.RouteValues!["id"].Should().Be("4");
         var returnedMachine = createdResult.Value.Should().BeOfType<Machine>().Subject;
         returnedMachine.Id.Should().Be("4");
diff --git a/src/api.Tests/Controllers/ResultsControllerTests.cs b/src/api.Tests/Controllers/ResultsControllerTests.cs
new file mode 100644
index 0000000..d162955
--- /dev/null
+++ b/src/api.Tests/Controllers/ResultsControllerTests.cs
@@ -0,0 +1,239 @@
+using Api.Controllers;
+using Api.Models;
+using Api.Repositories.Abstractions;
+using Microsoft.AspNetCore.Mvc;
+
+namespace Api.Tests.Controllers;
+
+public class ResultsControllerTests
+{
+    private readonly Mock<IBeamRepository> _mockBeamRepository;
+    private readonly Mock<IGeoCheckRepository> _mockGeoCheckRepository;
+    private readonly ResultsController _controller;
+
+    public ResultsControllerTests()
+    {
+        _mockBeamRepository = new Mock<IBeamRepository>();
+        // ResultsController also takes an IGeoCheckRepository; default it to an
+        // empty result set so the beam-focused tests below remain valid.
+        _mockGeoCheckRepository = new Mock<IGeoCheckRepository>();
+        _mockGeoCheckRepository.Setup(r => r.GetAllAsync(
+                It.IsAny<string?>(), It.IsAny<string?>(), It.IsAny<DateOnly?>(),
+                It.IsAny<DateOnly?>(), It.IsAny<DateOnly?>(), It.IsAny<CancellationToken>()))
+            .ReturnsAsync(new List<GeoCheck>().AsReadOnly());
+        _controller = new ResultsController(_mockBeamRepository.Object, _mockGeoCheckRepository.Object);
+    }
+
+    [Fact]
+    public async Task Get_WithValidParameters_ReturnsOkWithMonthlyResults()
+    {
+        // Arrange
+        var beams = new List<Beam>
+        {
+            new Beam
+            {
+                Id = "beam-1",
+                MachineId = "1",
+                Date = new
DateOnly(2025, 9, 5), + Type = "15x" + }, + new Beam + { + Id = "beam-2", + MachineId = "1", + Date = new DateOnly(2025, 9, 10), + Type = "6e" + } + }; + + _mockBeamRepository.Setup(r => r.GetAllAsync( + "1", + null, + null, + new DateOnly(2025, 9, 1), + new DateOnly(2025, 9, 30), + It.IsAny())) + .ReturnsAsync(beams.AsReadOnly()); + + // Act + var result = await _controller.Get(9, 2025, "1", CancellationToken.None); + + // Assert + var okResult = result.Result.Should().BeOfType().Subject; + var returnedResults = okResult.Value.Should().BeAssignableTo().Subject; + returnedResults.Month.Should().Be(9); + returnedResults.Year.Should().Be(2025); + returnedResults.MachineId.Should().Be("1"); + returnedResults.Checks.Should().HaveCount(2); + } + + [Fact] + public async Task Get_WithMonthLessThanOne_ReturnsBadRequest() + { + // Act + var result = await _controller.Get(0, 2025, "1", CancellationToken.None); + + // Assert + result.Result.Should().BeOfType(); + var badRequest = result.Result as BadRequestObjectResult; + badRequest!.Value.Should().Be("Month must be between 1 and 12."); + _mockBeamRepository.Verify(r => r.GetAllAsync( + It.IsAny(), It.IsAny(), It.IsAny(), + It.IsAny(), It.IsAny(), It.IsAny()), Times.Never); + } + + [Fact] + public async Task Get_WithMonthGreaterThanTwelve_ReturnsBadRequest() + { + // Act + var result = await _controller.Get(13, 2025, "1", CancellationToken.None); + + // Assert + result.Result.Should().BeOfType(); + var badRequest = result.Result as BadRequestObjectResult; + badRequest!.Value.Should().Be("Month must be between 1 and 12."); + _mockBeamRepository.Verify(r => r.GetAllAsync( + It.IsAny(), It.IsAny(), It.IsAny(), + It.IsAny(), It.IsAny(), It.IsAny()), Times.Never); + } + + [Fact] + public async Task Get_WithYearLessThan1900_ReturnsBadRequest() + { + // Act + var result = await _controller.Get(9, 1899, "1", CancellationToken.None); + + // Assert + result.Result.Should().BeOfType(); + var badRequest = result.Result as BadRequestObjectResult; + badRequest!.Value.Should().Be("Year must be between 1900 and 2100."); + _mockBeamRepository.Verify(r => r.GetAllAsync( + It.IsAny(), It.IsAny(), It.IsAny(), + It.IsAny(), It.IsAny(), It.IsAny()), Times.Never); + } + + [Fact] + public async Task Get_WithYearGreaterThan2100_ReturnsBadRequest() + { + // Act + var result = await _controller.Get(9, 2101, "1", CancellationToken.None); + + // Assert + result.Result.Should().BeOfType(); + var badRequest = result.Result as BadRequestObjectResult; + badRequest!.Value.Should().Be("Year must be between 1900 and 2100."); + _mockBeamRepository.Verify(r => r.GetAllAsync( + It.IsAny(), It.IsAny(), It.IsAny(), + It.IsAny(), It.IsAny(), It.IsAny()), Times.Never); + } + + [Fact] + public async Task Get_WithEmptyMachineId_ReturnsBadRequest() + { + // Act + var result = await _controller.Get(9, 2025, "", CancellationToken.None); + + // Assert + result.Result.Should().BeOfType(); + var badRequest = result.Result as BadRequestObjectResult; + badRequest!.Value.Should().Be("MachineId is required."); + _mockBeamRepository.Verify(r => r.GetAllAsync( + It.IsAny(), It.IsAny(), It.IsAny(), + It.IsAny(), It.IsAny(), It.IsAny()), Times.Never); + } + + [Fact] + public async Task Get_WithNullMachineId_ReturnsBadRequest() + { + // Act + var result = await _controller.Get(9, 2025, null!, CancellationToken.None); + + // Assert + result.Result.Should().BeOfType(); + var badRequest = result.Result as BadRequestObjectResult; + badRequest!.Value.Should().Be("MachineId is required."); + 
_mockBeamRepository.Verify(r => r.GetAllAsync( + It.IsAny(), It.IsAny(), It.IsAny(), + It.IsAny(), It.IsAny(), It.IsAny()), Times.Never); + } + + [Fact] + public async Task Get_WithValidBoundaryValues_ReturnsOk() + { + // Arrange + var beams = new List().AsReadOnly(); + _mockBeamRepository.Setup(r => r.GetAllAsync( + It.IsAny(), + null, + null, + It.IsAny(), + It.IsAny(), + It.IsAny())) + .ReturnsAsync(beams); + + // Act + var result1 = await _controller.Get(1, 1900, "1", CancellationToken.None); + var result2 = await _controller.Get(12, 2100, "2", CancellationToken.None); + + // Assert + result1.Result.Should().BeOfType(); + result2.Result.Should().BeOfType(); + } + + [Fact] + public async Task Get_WithNoBeams_ReturnsOkWithEmptyChecks() + { + // Arrange + var emptyBeams = new List().AsReadOnly(); + _mockBeamRepository.Setup(r => r.GetAllAsync( + "1", + null, + null, + new DateOnly(2024, 6, 1), + new DateOnly(2024, 6, 30), + It.IsAny())) + .ReturnsAsync(emptyBeams); + + // Act + var result = await _controller.Get(6, 2024, "1", CancellationToken.None); + + // Assert + var okResult = result.Result.Should().BeOfType().Subject; + var returnedResults = okResult.Value.Should().BeAssignableTo().Subject; + returnedResults.Checks.Should().BeEmpty(); + } + + [Fact] + public async Task Get_WithMultipleBeamsOnSameDay_AggregatesStatus() + { + // Arrange + var beams = new List + { + new Beam + { + Id = "beam-1", + MachineId = "1", + Date = new DateOnly(2025, 9, 5), + Type = "15x" + }, + new Beam + { + Id = "beam-2", + MachineId = "1", + Date = new DateOnly(2025, 9, 5), + Type = "6e" + } + }; + + _mockBeamRepository.Setup(r => r.GetAllAsync( + "1", + null, + null, + new DateOnly(2025, 9, 1), + new DateOnly(2025, 9, 30), + It.IsAny())) + .ReturnsAsync(beams.AsReadOnly()); + + // Act + var result = await _controller.Get(9, 2025, "1", CancellationToken.None); + + // Assert + var okResult = result.Result.Should().BeOfType().Subject; + var returnedResults = okResult.Value.Should().BeAssignableTo().Subject; + returnedResults.Checks.Should().HaveCount(1); + returnedResults.Checks.First().Date.Should().Be(new DateTime(2025, 9, 5)); + } +} + diff --git a/src/api/Controllers/BeamController.cs b/src/api/Controllers/BeamsController.cs similarity index 96% rename from src/api/Controllers/BeamController.cs rename to src/api/Controllers/BeamsController.cs index dbcd50e..1e659a8 100644 --- a/src/api/Controllers/BeamController.cs +++ b/src/api/Controllers/BeamsController.cs @@ -6,11 +6,11 @@ namespace Api.Controllers; [ApiController] [Route("api/[controller]")] -public class BeamController : ControllerBase +public class BeamsController : ControllerBase { private readonly IBeamRepository _repository; - public BeamController(IBeamRepository repository) + public BeamsController(IBeamRepository repository) { _repository = repository; } diff --git a/src/api/Controllers/GeoCheckController.cs b/src/api/Controllers/GeoCheckController.cs new file mode 100644 index 0000000..7e00b1d --- /dev/null +++ b/src/api/Controllers/GeoCheckController.cs @@ -0,0 +1,145 @@ +using Api.Models; +using Api.Repositories.Abstractions; +using Microsoft.AspNetCore.Mvc; + +namespace Api.Controllers; + +[ApiController] +[Route("api/[controller]")] +public class GeoCheckController : ControllerBase +{ + private readonly IGeoCheckRepository _repository; + private readonly ILogger _logger; + + public GeoCheckController(IGeoCheckRepository repository, ILogger logger) + { + _repository = repository; + _logger = logger; + } + + /// + /// Get geometry check 
data filtered by various parameters. + /// + [HttpGet] + public async Task>> GetAll( + [FromQuery] string? type = null, + [FromQuery(Name = "machine-id")] string? machineId = null, + [FromQuery] string? date = null, + [FromQuery(Name = "start-date")] string? startDate = null, + [FromQuery(Name = "end-date")] string? endDate = null, + CancellationToken cancellationToken = default) + { + DateOnly? dateOnly = null; + if (!string.IsNullOrWhiteSpace(date)) + { + if (!DateOnly.TryParse(date, out var parsedDate)) + { + return BadRequest($"Invalid date format: {date}"); + } + dateOnly = parsedDate; + } + + DateOnly? startDateOnly = null; + if (!string.IsNullOrWhiteSpace(startDate)) + { + if (!DateOnly.TryParse(startDate, out var parsedStartDate)) + { + return BadRequest($"Invalid start-date format: {startDate}"); + } + startDateOnly = parsedStartDate; + } + + DateOnly? endDateOnly = null; + if (!string.IsNullOrWhiteSpace(endDate)) + { + if (!DateOnly.TryParse(endDate, out var parsedEndDate)) + { + return BadRequest($"Invalid end-date format: {endDate}"); + } + endDateOnly = parsedEndDate; + } + + var geoChecks = await _repository.GetAllAsync( + machineId: machineId, + type: type, + date: dateOnly, + startDate: startDateOnly, + endDate: endDateOnly, + cancellationToken: cancellationToken); + + return Ok(geoChecks); + } + + /// + /// Get a specific geometry check by ID. + /// + [HttpGet("{id}")] + public async Task> GetById(string id, CancellationToken cancellationToken) + { + var geoCheck = await _repository.GetByIdAsync(id, cancellationToken); + if (geoCheck is null) + { + return NotFound($"Geometry check with id '{id}' was not found."); + } + + return Ok(geoCheck); + } + + /// + /// Create a new geometry check. + /// + [HttpPost] + public async Task> Create([FromBody] GeoCheck geoCheck, CancellationToken cancellationToken) + { + try + { + var created = await _repository.CreateAsync(geoCheck, cancellationToken); + return CreatedAtAction(nameof(GetById), new { id = created.Id }, created); + } + catch (InvalidOperationException exception) + { + _logger.LogWarning(exception, "Conflict creating geometry check"); + return Conflict(exception.Message); + } + catch (Exception ex) + { + _logger.LogError(ex, "Error creating geometry check"); + return StatusCode(500, "An error occurred while creating the geometry check."); + } + } + + /// + /// Update an existing geometry check. + /// + [HttpPut("{id}")] + public async Task Update(string id, [FromBody] GeoCheck geoCheck, CancellationToken cancellationToken) + { + if (!string.Equals(id, geoCheck.Id, StringComparison.OrdinalIgnoreCase)) + { + return BadRequest("The geometry check id in the route must match the payload."); + } + + var updated = await _repository.UpdateAsync(geoCheck, cancellationToken); + if (!updated) + { + return NotFound($"Geometry check with id '{id}' was not found."); + } + + return NoContent(); + } + + /// + /// Delete a geometry check. 
+ /// + [HttpDelete("{id}")] + public async Task Delete(string id, CancellationToken cancellationToken) + { + var deleted = await _repository.DeleteAsync(id, cancellationToken); + if (!deleted) + { + return NotFound($"Geometry check with id '{id}' was not found."); + } + + return NoContent(); + } +} diff --git a/src/api/Controllers/MachineController.cs b/src/api/Controllers/MachinesController.cs similarity index 95% rename from src/api/Controllers/MachineController.cs rename to src/api/Controllers/MachinesController.cs index 4da5140..273dc23 100644 --- a/src/api/Controllers/MachineController.cs +++ b/src/api/Controllers/MachinesController.cs @@ -6,11 +6,11 @@ namespace Api.Controllers; [ApiController] [Route("api/[controller]")] -public class MachineController : ControllerBase +public class MachinesController : ControllerBase { private readonly IMachineRepository _repository; - public MachineController(IMachineRepository repository) + public MachinesController(IMachineRepository repository) { _repository = repository; } diff --git a/src/api/Controllers/ResultsController.cs b/src/api/Controllers/ResultsController.cs new file mode 100644 index 0000000..323e80a --- /dev/null +++ b/src/api/Controllers/ResultsController.cs @@ -0,0 +1,159 @@ +using Api.Models; +using Api.Repositories.Abstractions; +using Microsoft.AspNetCore.Mvc; + +namespace Api.Controllers; + +[ApiController] +[Route("api/[controller]")] +public class ResultsController : ControllerBase +{ + private readonly IBeamRepository _beamRepository; + private readonly IGeoCheckRepository _geoCheckRepository; + + public ResultsController(IBeamRepository beamRepository, IGeoCheckRepository geoCheckRepository) + { + _beamRepository = beamRepository; + _geoCheckRepository = geoCheckRepository; + } + + [HttpGet] + public async Task> Get( + [FromQuery] int month, + [FromQuery] int year, + [FromQuery] string machineId, + CancellationToken cancellationToken = default) + { + // Validate month range (1-12) + if (month < 1 || month > 12) + { + return BadRequest("Month must be between 1 and 12."); + } + + // Validate year (reasonable range) + if (year < 1900 || year > 2100) + { + return BadRequest("Year must be between 1900 and 2100."); + } + + // Validate machineId + if (string.IsNullOrWhiteSpace(machineId)) + { + return BadRequest("MachineId is required."); + } + + // Get all beam checks for this machine, month, and year + var startDate = new DateOnly(year, month, 1); + var endDate = month == 12 + ? new DateOnly(year + 1, 1, 1).AddDays(-1) + : new DateOnly(year, month + 1, 1).AddDays(-1); + + var beamChecks = await _beamRepository.GetAllAsync( + machineId: machineId, + startDate: startDate, + endDate: endDate, + cancellationToken: cancellationToken); + + var geoChecks = await _geoCheckRepository.GetAllAsync( + machineId: machineId, + startDate: startDate, + endDate: endDate, + cancellationToken: cancellationToken); + + // Group by date and aggregate status + example display values + var dailyChecks = new Dictionary(); + + // Process beam checks + foreach (var check in beamChecks) + { + var date = check.Date; + var status = DetermineCheckStatus(check); + // derive a single numeric value for display (prefer RelOutput, then RelUniformity, then CenterShift) + double? value = check.RelOutput ?? check.RelUniformity ?? 
check.CenterShift; + + if (dailyChecks.ContainsKey(date)) + { + var (existingBeamStatus, existingBeamValue, geoStatus, geoValue) = dailyChecks[date]; + dailyChecks[date] = (AggregateStatuses(existingBeamStatus, status), existingBeamValue ?? value, geoStatus, geoValue); + } + else + { + dailyChecks[date] = (status, value, null, null); + } + } + + // Process geometry checks + foreach (var check in geoChecks) + { + var date = check.Date; + var status = DetermineGeoCheckStatus(check); + // derive a single numeric value for display (prefer RelativeOutput, then RelativeUniformity, then CenterShift, then IsoCenterSize) + double? value = check.RelativeOutput ?? check.RelativeUniformity ?? check.CenterShift ?? check.IsoCenterSize; + + if (dailyChecks.ContainsKey(date)) + { + var (beamStatus, beamValue, existingGeoStatus, existingGeoValue) = dailyChecks[date]; + dailyChecks[date] = (beamStatus, beamValue, AggregateStatuses(existingGeoStatus, status), existingGeoValue ?? value); + } + else + { + dailyChecks[date] = (null, null, status, value); + } + } + + var checks = dailyChecks + .OrderBy(kvp => kvp.Key) + .Select(kvp => new DayCheckStatus + { + Date = kvp.Key.ToDateTime(TimeOnly.MinValue), + BeamCheckStatus = kvp.Value.beamStatus, + GeometryCheckStatus = kvp.Value.geoStatus, + BeamValue = kvp.Value.beamValue, + GeometryValue = kvp.Value.geoValue + }) + .ToList(); + + var monthlyResults = new MonthlyResults + { + Month = month, + Year = year, + MachineId = machineId, + Checks = checks.AsReadOnly() + }; + + return Ok(monthlyResults); + } + + /// + /// Determine the status of a single beam check based on pass criteria. + /// + private static string DetermineCheckStatus(Beam beam) + { + // TODO: Implement actual pass/warning/fail logic based on beam metrics + // For now, return "pass" as default + return "pass"; + } + + /// + /// Determine the status of a geometry check based on pass criteria. + /// + private static string DetermineGeoCheckStatus(GeoCheck geoCheck) + { + // TODO: Implement actual pass/warning/fail logic based on geometry check metrics + // For now, return "pass" as default + return "pass"; + } + + /// + /// Aggregate two statuses, returning the worse one. + /// Hierarchy: fail > warning > pass + /// + private static string AggregateStatuses(string? status1, string? 
status2) + { + if (status1 == "fail" || status2 == "fail") return "fail"; + if (status1 == "warning" || status2 == "warning") return "warning"; + return "pass"; + } + + +} diff --git a/src/api/Controllers/UpdatesController.cs b/src/api/Controllers/UpdatesController.cs new file mode 100644 index 0000000..84fae03 --- /dev/null +++ b/src/api/Controllers/UpdatesController.cs @@ -0,0 +1,126 @@ +using Api.Models; +using Api.Repositories.Abstractions; +using Microsoft.AspNetCore.Mvc; + +namespace Api.Controllers; + +[ApiController] +[Route("api/[controller]")] +public class UpdatesController : ControllerBase +{ + private readonly IUpdateRepository _repository; + private readonly ILogger _logger; + + public UpdatesController(IUpdateRepository repository, ILogger logger) + { + _repository = repository; + _logger = logger; + } + + /// + /// Get all updates + /// + [HttpGet] + public async Task>> GetAll(CancellationToken cancellationToken) + { + try + { + var updates = await _repository.GetAllAsync(cancellationToken); + return Ok(updates); + } + catch (Exception ex) + { + _logger.LogError(ex, "Error retrieving updates"); + return StatusCode(500, "An error occurred while retrieving updates"); + } + } + + /// + /// Get update by ID + /// + [HttpGet("{id}")] + public async Task> GetById(string id, CancellationToken cancellationToken) + { + try + { + var update = await _repository.GetByIdAsync(id, cancellationToken); + if (update == null) + { + return NotFound($"Update with ID '{id}' not found"); + } + return Ok(update); + } + catch (Exception ex) + { + _logger.LogError(ex, "Error retrieving update {UpdateId}", id); + return StatusCode(500, "An error occurred while retrieving the update"); + } + } + + /// + /// Create a new update + /// + [HttpPost] + public async Task> Create([FromBody] Update update, CancellationToken cancellationToken) + { + try + { + var created = await _repository.CreateAsync(update, cancellationToken); + return CreatedAtAction(nameof(GetById), new { id = created.Id }, created); + } + catch (InvalidOperationException ex) + { + _logger.LogWarning(ex, "Update creation failed: {Message}", ex.Message); + return BadRequest(ex.Message); + } + catch (Exception ex) + { + _logger.LogError(ex, "Error creating update"); + return StatusCode(500, "An error occurred while creating the update"); + } + } + + /// + /// Update an existing update + /// + [HttpPut] + public async Task Update([FromBody] Update update, CancellationToken cancellationToken) + { + try + { + var success = await _repository.UpdateAsync(update, cancellationToken); + if (!success) + { + return NotFound($"Update with ID '{update.Id}' not found"); + } + return Ok(); + } + catch (Exception ex) + { + _logger.LogError(ex, "Error updating update {UpdateId}", update.Id); + return StatusCode(500, "An error occurred while updating the update"); + } + } + + /// + /// Delete an update + /// + [HttpDelete] + public async Task Delete([FromQuery] string id, CancellationToken cancellationToken) + { + try + { + var success = await _repository.DeleteAsync(id, cancellationToken); + if (!success) + { + return NotFound($"Update with ID '{id}' not found"); + } + return NoContent(); + } + catch (Exception ex) + { + _logger.LogError(ex, "Error deleting update {UpdateId}", id); + return StatusCode(500, "An error occurred while deleting the update"); + } + } +} diff --git a/src/api/Extensions/ServiceCollectionExtensions.cs b/src/api/Extensions/ServiceCollectionExtensions.cs index 2879285..f174f0d 100644 --- 
a/src/api/Extensions/ServiceCollectionExtensions.cs +++ b/src/api/Extensions/ServiceCollectionExtensions.cs @@ -132,5 +132,63 @@ private static IServiceCollection AddInMemoryBeamDataAccess(this IServiceCollect services.AddSingleton(provider => provider.GetRequiredService()); return services; } + + public static IServiceCollection AddUpdateDataAccess(this IServiceCollection services, IConfiguration configuration) + { + services.Configure(configuration.GetSection(SupabaseSettings.SectionName)); + + services.AddScoped(provider => + { + var settings = provider.GetRequiredService>().Value; + var loggerFactory = provider.GetRequiredService(); + + if (string.IsNullOrWhiteSpace(settings.Url) || string.IsNullOrWhiteSpace(settings.Key)) + { + throw new InvalidOperationException("Supabase credentials are required for Update repository."); + } + + var options = new SupabaseOptions + { + AutoConnectRealtime = false + }; + + var client = new Client(settings.Url, settings.Key, options); + client.InitializeAsync().GetAwaiter().GetResult(); + + var logger = loggerFactory.CreateLogger(); + return new SupabaseUpdateRepository(client, logger); + }); + + return services; + } + + public static IServiceCollection AddGeoCheckDataAccess(this IServiceCollection services, IConfiguration configuration) + { + services.Configure(configuration.GetSection(SupabaseSettings.SectionName)); + + services.AddScoped(provider => + { + var settings = provider.GetRequiredService>().Value; + var loggerFactory = provider.GetRequiredService(); + + if (string.IsNullOrWhiteSpace(settings.Url) || string.IsNullOrWhiteSpace(settings.Key)) + { + throw new InvalidOperationException("Supabase credentials are required for GeoCheck repository."); + } + + var options = new SupabaseOptions + { + AutoConnectRealtime = false + }; + + var client = new Client(settings.Url, settings.Key, options); + client.InitializeAsync().GetAwaiter().GetResult(); + + var logger = loggerFactory.CreateLogger(); + return new SupabaseGeoCheckRepository(client, logger); + }); + + return services; + } } diff --git a/src/api/Models/Beam.cs b/src/api/Models/Beam.cs index b8c6536..a0600fd 100644 --- a/src/api/Models/Beam.cs +++ b/src/api/Models/Beam.cs @@ -31,5 +31,12 @@ public class Beam /// Notes about the beam. public string? Note { get; set; } + + /// + /// Convenience property representing a single numeric value to display in UIs. + /// Priority: RelOutput, RelUniformity, CenterShift. + /// This is not persisted to the database; it's computed by repositories. + /// + public double? Value { get; set; } } diff --git a/src/api/Models/DayCheckStatus.cs b/src/api/Models/DayCheckStatus.cs new file mode 100644 index 0000000..9a52dea --- /dev/null +++ b/src/api/Models/DayCheckStatus.cs @@ -0,0 +1,28 @@ +namespace Api.Models; + +/// +/// Represents the check statuses for a specific day in the calendar view. +/// +public class DayCheckStatus +{ + /// Date of the checks. + public required DateTime Date { get; set; } + + /// Overall beam check status for the day (null if no checks). + public string? BeamCheckStatus { get; set; } + + /// Overall geometry check status for the day (null if no checks). + public string? GeometryCheckStatus { get; set; } + + /// + /// Convenience numeric value to show in the UI table for the beam check. + /// This is derived from beam metrics (relOutput, relUniformity, centerShift) when available. + /// + public double? BeamValue { get; set; } + + /// + /// Convenience numeric value to show in the UI table for the geometry check. 
+ /// This is derived from geo metrics (relativeOutput, relativeUniformity, centerShift, isoCenterSize) when available. + /// + public double? GeometryValue { get; set; } +} diff --git a/src/api/Models/GeoCheck.cs b/src/api/Models/GeoCheck.cs new file mode 100644 index 0000000..f6e573c --- /dev/null +++ b/src/api/Models/GeoCheck.cs @@ -0,0 +1,142 @@ +namespace Api.Models; + +/// +/// Represents geometry check data with all measurement groups. +/// +public class GeoCheck +{ + /// Unique identifier for the geometry check. + public required string Id { get; set; } + + /// Type of beam (e.g., 6xff). + public required string Type { get; set; } + + /// Date of the geometry check. + public required DateOnly Date { get; set; } + + /// Associated machine identifier. + public required string MachineId { get; set; } + + /// File path to the geometry check data. + public string? Path { get; set; } + + // ---- IsoCenterGroup ---- + /// Iso center size measurement. + public double? IsoCenterSize { get; set; } + + /// Iso center MV offset measurement. + public double? IsoCenterMVOffset { get; set; } + + /// Iso center KV offset measurement. + public double? IsoCenterKVOffset { get; set; } + + // ---- BeamGroup ---- + /// Relative output value. + public double? RelativeOutput { get; set; } + + /// Relative uniformity value. + public double? RelativeUniformity { get; set; } + + /// Center shift value. + public double? CenterShift { get; set; } + + // ---- CollimationGroup ---- + /// Collimation rotation offset measurement. + public double? CollimationRotationOffset { get; set; } + + // ---- GantryGroup ---- + /// Gantry absolute measurement. + public double? GantryAbsolute { get; set; } + + /// Gantry relative measurement. + public double? GantryRelative { get; set; } + + // ---- EnhancedCouchGroup ---- + /// Couch maximum position error. + public double? CouchMaxPositionError { get; set; } + + /// Couch lateral measurement. + public double? CouchLat { get; set; } + + /// Couch longitudinal measurement. + public double? CouchLng { get; set; } + + /// Couch vertical measurement. + public double? CouchVrt { get; set; } + + /// Couch rotation fine measurement. + public double? CouchRtnFine { get; set; } + + /// Couch rotation large measurement. + public double? CouchRtnLarge { get; set; } + + /// Rotation induced couch shift full range. + public double? RotationInducedCouchShiftFullRange { get; set; } + + // ---- MLCGroup ---- + /// MLC leaves A measurements (Leaf11-Leaf50). + public Dictionary? MLCLeavesA { get; set; } + + /// MLC leaves B measurements (Leaf11-Leaf50). + public Dictionary? MLCLeavesB { get; set; } + + /// Maximum offset for MLC bank A. + public double? MaxOffsetA { get; set; } + + /// Maximum offset for MLC bank B. + public double? MaxOffsetB { get; set; } + + /// Mean offset for MLC bank A. + public double? MeanOffsetA { get; set; } + + /// Mean offset for MLC bank B. + public double? MeanOffsetB { get; set; } + + // ---- MLCBacklashGroup ---- + /// MLC backlash A measurements (Leaf11-Leaf50). + public Dictionary? MLCBacklashA { get; set; } + + /// MLC backlash B measurements (Leaf11-Leaf50). + public Dictionary? MLCBacklashB { get; set; } + + /// Maximum backlash for MLC bank A. + public double? MLCBacklashMaxA { get; set; } + + /// Maximum backlash for MLC bank B. + public double? MLCBacklashMaxB { get; set; } + + /// Mean backlash for MLC bank A. + public double? MLCBacklashMeanA { get; set; } + + /// Mean backlash for MLC bank B. + public double? 
MLCBacklashMeanB { get; set; } + + // ---- JawsGroup ---- + /// Jaw X1 measurement. + public double? JawX1 { get; set; } + + /// Jaw X2 measurement. + public double? JawX2 { get; set; } + + /// Jaw Y1 measurement. + public double? JawY1 { get; set; } + + /// Jaw Y2 measurement. + public double? JawY2 { get; set; } + + // ---- JawsParallelismGroup ---- + /// Jaw parallelism X1 measurement. + public double? JawParallelismX1 { get; set; } + + /// Jaw parallelism X2 measurement. + public double? JawParallelismX2 { get; set; } + + /// Jaw parallelism Y1 measurement. + public double? JawParallelismY1 { get; set; } + + /// Jaw parallelism Y2 measurement. + public double? JawParallelismY2 { get; set; } + + /// Notes about the geometry check. + public string? Note { get; set; } +} diff --git a/src/api/Models/MonthlyResults.cs b/src/api/Models/MonthlyResults.cs new file mode 100644 index 0000000..148db3a --- /dev/null +++ b/src/api/Models/MonthlyResults.cs @@ -0,0 +1,19 @@ +namespace Api.Models; + +/// +/// Represents monthly results showing calendar view of check statuses. +/// +public class MonthlyResults +{ + /// Month queried (1-12). + public int Month { get; set; } + + /// Year queried. + public int Year { get; set; } + + /// Machine identifier. + public required string MachineId { get; set; } + + /// Daily check statuses for the month. + public IReadOnlyList Checks { get; set; } = []; +} diff --git a/src/api/Models/Result.cs b/src/api/Models/Result.cs index fdbf117..d556f46 100644 --- a/src/api/Models/Result.cs +++ b/src/api/Models/Result.cs @@ -10,6 +10,12 @@ public class Result /// Unique identifier for the result. public required string Id { get; set; } + /// Machine identifier. + public required string MachineId { get; set; } + + /// Date of the result. + public DateTime? Date { get; set; } + /// Month of the result. public int? Month { get; set; } diff --git a/src/api/Models/Update.cs b/src/api/Models/Update.cs index b12c529..fc8cb86 100644 --- a/src/api/Models/Update.cs +++ b/src/api/Models/Update.cs @@ -9,7 +9,7 @@ public class Update public required string Id { get; set; } /// Associated machine. - public required string Machine { get; set; } + public required string MachineId { get; set; } /// Update information. public required string Info { get; set; } diff --git a/src/api/Program.cs b/src/api/Program.cs index 8182601..d8b2017 100644 --- a/src/api/Program.cs +++ b/src/api/Program.cs @@ -1,14 +1,40 @@ using Api.Extensions; +using DotNetEnv; + +// Load environment variables from .env file +Env.Load(); var builder = WebApplication.CreateBuilder(args); +// Override configuration with environment variables +var supabaseUrl = Environment.GetEnvironmentVariable("SUPABASE_URL"); +var supabaseKey = Environment.GetEnvironmentVariable("SUPABASE_KEY"); + +Console.WriteLine($"[DEBUG] SUPABASE_URL: {supabaseUrl}"); +Console.WriteLine($"[DEBUG] SUPABASE_KEY: {(string.IsNullOrWhiteSpace(supabaseKey) ? "EMPTY" : "SET")}"); + +builder.Configuration["Supabase:Url"] = supabaseUrl; +builder.Configuration["Supabase:Key"] = supabaseKey; + builder.Services.AddMachineDataAccess(builder.Configuration); builder.Services.AddBeamDataAccess(builder.Configuration); +builder.Services.AddUpdateDataAccess(builder.Configuration); +builder.Services.AddGeoCheckDataAccess(builder.Configuration); // Add services to the container. 
builder.Services.AddControllers(); builder.Services.AddOpenApi(); +builder.Services.AddCors(options => +{ + options.AddPolicy("AllowFrontend", policy => + { + policy.WithOrigins("http://localhost:3000") + .AllowAnyMethod() + .AllowAnyHeader(); + }); +}); + var app = builder.Build(); // Configure the HTTP request pipeline. @@ -19,6 +45,8 @@ app.UseHttpsRedirection(); +app.UseCors("AllowFrontend"); + app.MapControllers(); app.Run(); diff --git a/src/api/Repositories/Abstractions/IGeoCheckRepository.cs b/src/api/Repositories/Abstractions/IGeoCheckRepository.cs new file mode 100644 index 0000000..31cf103 --- /dev/null +++ b/src/api/Repositories/Abstractions/IGeoCheckRepository.cs @@ -0,0 +1,40 @@ +using Api.Models; + +namespace Api.Repositories.Abstractions; + +/// +/// Repository interface for GeoCheck operations. +/// +public interface IGeoCheckRepository +{ + /// + /// Gets all geometry checks, optionally filtered by machine ID, type, and date range. + /// + Task> GetAllAsync( + string? machineId = null, + string? type = null, + DateOnly? date = null, + DateOnly? startDate = null, + DateOnly? endDate = null, + CancellationToken cancellationToken = default); + + /// + /// Gets a geometry check by ID. + /// + Task GetByIdAsync(string id, CancellationToken cancellationToken = default); + + /// + /// Creates a new geometry check. + /// + Task CreateAsync(GeoCheck geoCheck, CancellationToken cancellationToken = default); + + /// + /// Updates an existing geometry check. + /// + Task UpdateAsync(GeoCheck geoCheck, CancellationToken cancellationToken = default); + + /// + /// Deletes a geometry check by ID. + /// + Task DeleteAsync(string id, CancellationToken cancellationToken = default); +} diff --git a/src/api/Repositories/Abstractions/IUpdateRepository.cs b/src/api/Repositories/Abstractions/IUpdateRepository.cs new file mode 100644 index 0000000..0d13e02 --- /dev/null +++ b/src/api/Repositories/Abstractions/IUpdateRepository.cs @@ -0,0 +1,12 @@ +using Api.Models; + +namespace Api.Repositories.Abstractions; + +public interface IUpdateRepository +{ + Task> GetAllAsync(CancellationToken cancellationToken = default); + Task GetByIdAsync(string id, CancellationToken cancellationToken = default); + Task CreateAsync(Update update, CancellationToken cancellationToken = default); + Task UpdateAsync(Update update, CancellationToken cancellationToken = default); + Task DeleteAsync(string id, CancellationToken cancellationToken = default); +} diff --git a/src/api/Repositories/Entities/GeoCheckEntity.cs b/src/api/Repositories/Entities/GeoCheckEntity.cs new file mode 100644 index 0000000..f04bdf0 --- /dev/null +++ b/src/api/Repositories/Entities/GeoCheckEntity.cs @@ -0,0 +1,275 @@ +using Api.Models; +using Supabase.Postgrest.Attributes; +using Supabase.Postgrest.Models; +using System.Text.Json; + +namespace Api.Repositories.Entities; + +/// +/// Supabase entity model for geometry check data. +/// +[Table("geochecks")] +public class GeoCheckEntity : BaseModel +{ + [PrimaryKey("id", false)] + public string Id { get; set; } = default!; + + [Column("type")] + public string Type { get; set; } = default!; + + [Column("date")] + public DateOnly Date { get; set; } + + [Column("machine_id")] + public string MachineId { get; set; } = default!; + + [Column("path")] + public string? Path { get; set; } + + // ---- IsoCenterGroup ---- + [Column("iso_center_size")] + public double? IsoCenterSize { get; set; } + + [Column("iso_center_mv_offset")] + public double? 
IsoCenterMVOffset { get; set; } + + [Column("iso_center_kv_offset")] + public double? IsoCenterKVOffset { get; set; } + + // ---- BeamGroup ---- + [Column("relative_output")] + public double? RelativeOutput { get; set; } + + [Column("relative_uniformity")] + public double? RelativeUniformity { get; set; } + + [Column("center_shift")] + public double? CenterShift { get; set; } + + // ---- CollimationGroup ---- + [Column("collimation_rotation_offset")] + public double? CollimationRotationOffset { get; set; } + + // ---- GantryGroup ---- + [Column("gantry_absolute")] + public double? GantryAbsolute { get; set; } + + [Column("gantry_relative")] + public double? GantryRelative { get; set; } + + // ---- EnhancedCouchGroup ---- + [Column("couch_max_position_error")] + public double? CouchMaxPositionError { get; set; } + + [Column("couch_lat")] + public double? CouchLat { get; set; } + + [Column("couch_lng")] + public double? CouchLng { get; set; } + + [Column("couch_vrt")] + public double? CouchVrt { get; set; } + + [Column("couch_rtn_fine")] + public double? CouchRtnFine { get; set; } + + [Column("couch_rtn_large")] + public double? CouchRtnLarge { get; set; } + + [Column("rotation_induced_couch_shift_full_range")] + public double? RotationInducedCouchShiftFullRange { get; set; } + + // ---- MLCGroup ---- + [Column("mlc_leaves_a")] + public string? MLCLeavesAJson { get; set; } + + [Column("mlc_leaves_b")] + public string? MLCLeavesBJson { get; set; } + + [Column("max_offset_a")] + public double? MaxOffsetA { get; set; } + + [Column("max_offset_b")] + public double? MaxOffsetB { get; set; } + + [Column("mean_offset_a")] + public double? MeanOffsetA { get; set; } + + [Column("mean_offset_b")] + public double? MeanOffsetB { get; set; } + + // ---- MLCBacklashGroup ---- + [Column("mlc_backlash_a")] + public string? MLCBacklashAJson { get; set; } + + [Column("mlc_backlash_b")] + public string? MLCBacklashBJson { get; set; } + + [Column("mlc_backlash_max_a")] + public double? MLCBacklashMaxA { get; set; } + + [Column("mlc_backlash_max_b")] + public double? MLCBacklashMaxB { get; set; } + + [Column("mlc_backlash_mean_a")] + public double? MLCBacklashMeanA { get; set; } + + [Column("mlc_backlash_mean_b")] + public double? MLCBacklashMeanB { get; set; } + + // ---- JawsGroup ---- + [Column("jaw_x1")] + public double? JawX1 { get; set; } + + [Column("jaw_x2")] + public double? JawX2 { get; set; } + + [Column("jaw_y1")] + public double? JawY1 { get; set; } + + [Column("jaw_y2")] + public double? JawY2 { get; set; } + + // ---- JawsParallelismGroup ---- + [Column("jaw_parallelism_x1")] + public double? JawParallelismX1 { get; set; } + + [Column("jaw_parallelism_x2")] + public double? JawParallelismX2 { get; set; } + + [Column("jaw_parallelism_y1")] + public double? JawParallelismY1 { get; set; } + + [Column("jaw_parallelism_y2")] + public double? JawParallelismY2 { get; set; } + + [Column("note")] + public string? Note { get; set; } + + /// + /// Converts this entity to a domain model. 
+ /// + public GeoCheck ToModel() + { + return new GeoCheck + { + Id = Id, + Type = Type, + Date = Date, + MachineId = MachineId, + Path = Path, + IsoCenterSize = IsoCenterSize, + IsoCenterMVOffset = IsoCenterMVOffset, + IsoCenterKVOffset = IsoCenterKVOffset, + RelativeOutput = RelativeOutput, + RelativeUniformity = RelativeUniformity, + CenterShift = CenterShift, + CollimationRotationOffset = CollimationRotationOffset, + GantryAbsolute = GantryAbsolute, + GantryRelative = GantryRelative, + CouchMaxPositionError = CouchMaxPositionError, + CouchLat = CouchLat, + CouchLng = CouchLng, + CouchVrt = CouchVrt, + CouchRtnFine = CouchRtnFine, + CouchRtnLarge = CouchRtnLarge, + RotationInducedCouchShiftFullRange = RotationInducedCouchShiftFullRange, + MLCLeavesA = DeserializeLeaves(MLCLeavesAJson), + MLCLeavesB = DeserializeLeaves(MLCLeavesBJson), + MaxOffsetA = MaxOffsetA, + MaxOffsetB = MaxOffsetB, + MeanOffsetA = MeanOffsetA, + MeanOffsetB = MeanOffsetB, + MLCBacklashA = DeserializeLeaves(MLCBacklashAJson), + MLCBacklashB = DeserializeLeaves(MLCBacklashBJson), + MLCBacklashMaxA = MLCBacklashMaxA, + MLCBacklashMaxB = MLCBacklashMaxB, + MLCBacklashMeanA = MLCBacklashMeanA, + MLCBacklashMeanB = MLCBacklashMeanB, + JawX1 = JawX1, + JawX2 = JawX2, + JawY1 = JawY1, + JawY2 = JawY2, + JawParallelismX1 = JawParallelismX1, + JawParallelismX2 = JawParallelismX2, + JawParallelismY1 = JawParallelismY1, + JawParallelismY2 = JawParallelismY2, + Note = Note + }; + } + + /// + /// Converts a domain model to this entity. + /// + public static GeoCheckEntity FromModel(GeoCheck geoCheck) + { + return new GeoCheckEntity + { + Id = geoCheck.Id, + Type = geoCheck.Type, + Date = geoCheck.Date, + MachineId = geoCheck.MachineId, + Path = geoCheck.Path, + IsoCenterSize = geoCheck.IsoCenterSize, + IsoCenterMVOffset = geoCheck.IsoCenterMVOffset, + IsoCenterKVOffset = geoCheck.IsoCenterKVOffset, + RelativeOutput = geoCheck.RelativeOutput, + RelativeUniformity = geoCheck.RelativeUniformity, + CenterShift = geoCheck.CenterShift, + CollimationRotationOffset = geoCheck.CollimationRotationOffset, + GantryAbsolute = geoCheck.GantryAbsolute, + GantryRelative = geoCheck.GantryRelative, + CouchMaxPositionError = geoCheck.CouchMaxPositionError, + CouchLat = geoCheck.CouchLat, + CouchLng = geoCheck.CouchLng, + CouchVrt = geoCheck.CouchVrt, + CouchRtnFine = geoCheck.CouchRtnFine, + CouchRtnLarge = geoCheck.CouchRtnLarge, + RotationInducedCouchShiftFullRange = geoCheck.RotationInducedCouchShiftFullRange, + MLCLeavesAJson = SerializeLeaves(geoCheck.MLCLeavesA), + MLCLeavesBJson = SerializeLeaves(geoCheck.MLCLeavesB), + MaxOffsetA = geoCheck.MaxOffsetA, + MaxOffsetB = geoCheck.MaxOffsetB, + MeanOffsetA = geoCheck.MeanOffsetA, + MeanOffsetB = geoCheck.MeanOffsetB, + MLCBacklashAJson = SerializeLeaves(geoCheck.MLCBacklashA), + MLCBacklashBJson = SerializeLeaves(geoCheck.MLCBacklashB), + MLCBacklashMaxA = geoCheck.MLCBacklashMaxA, + MLCBacklashMaxB = geoCheck.MLCBacklashMaxB, + MLCBacklashMeanA = geoCheck.MLCBacklashMeanA, + MLCBacklashMeanB = geoCheck.MLCBacklashMeanB, + JawX1 = geoCheck.JawX1, + JawX2 = geoCheck.JawX2, + JawY1 = geoCheck.JawY1, + JawY2 = geoCheck.JawY2, + JawParallelismX1 = geoCheck.JawParallelismX1, + JawParallelismX2 = geoCheck.JawParallelismX2, + JawParallelismY1 = geoCheck.JawParallelismY1, + JawParallelismY2 = geoCheck.JawParallelismY2, + Note = geoCheck.Note + }; + } + + private static Dictionary? DeserializeLeaves(string? 
json) + { + if (string.IsNullOrWhiteSpace(json)) + return null; + + try + { + return JsonSerializer.Deserialize>(json); + } + catch + { + return null; + } + } + + private static string? SerializeLeaves(Dictionary? leaves) + { + if (leaves == null || leaves.Count == 0) + return null; + + return JsonSerializer.Serialize(leaves); + } +} diff --git a/src/api/Repositories/Entities/UpdateEntity.cs b/src/api/Repositories/Entities/UpdateEntity.cs new file mode 100644 index 0000000..afd17c3 --- /dev/null +++ b/src/api/Repositories/Entities/UpdateEntity.cs @@ -0,0 +1,44 @@ +using Api.Models; +using Supabase.Postgrest.Attributes; +using Supabase.Postgrest.Models; + +namespace Api.Repositories.Entities; + +[Table("updates")] +public class UpdateEntity : BaseModel +{ + [PrimaryKey("id", false)] + [Column("id")] + public string Id { get; set; } = string.Empty; + + [Column("machine_id")] + public string MachineId { get; set; } = string.Empty; + + [Column("info")] + public string Info { get; set; } = string.Empty; + + [Column("type")] + public string Type { get; set; } = string.Empty; + + public Update ToModel() + { + return new Update + { + Id = Id, + MachineId = MachineId, + Info = Info, + Type = Type + }; + } + + public static UpdateEntity FromModel(Update update) + { + return new UpdateEntity + { + Id = update.Id, + MachineId = update.MachineId, + Info = update.Info, + Type = update.Type + }; + } +} diff --git a/src/api/Repositories/InMemory/InMemoryBeamRepository.cs b/src/api/Repositories/InMemory/InMemoryBeamRepository.cs index 6011111..fae1896 100644 --- a/src/api/Repositories/InMemory/InMemoryBeamRepository.cs +++ b/src/api/Repositories/InMemory/InMemoryBeamRepository.cs @@ -15,23 +15,23 @@ public class InMemoryBeamRepository : IBeamRepository private static readonly IReadOnlyList SeedBeams = [ // MPC-001 (Primary Gantry) - Multiple beam types across several days - new Beam { Id = "beam-001", Type = "6e", Date = DateOnly.FromDateTime(DateTime.Now.AddDays(-5)), MachineId = "MPC-001", RelOutput = 98.5, RelUniformity = 99.2 }, - new Beam { Id = "beam-002", Type = "6e", Date = DateOnly.FromDateTime(DateTime.Now.AddDays(-4)), MachineId = "MPC-001", RelOutput = 98.7, RelUniformity = 99.1 }, - new Beam { Id = "beam-003", Type = "6e", Date = DateOnly.FromDateTime(DateTime.Now.AddDays(-3)), MachineId = "MPC-001", RelOutput = 98.9, RelUniformity = 99.3 }, - new Beam { Id = "beam-004", Type = "15x", Date = DateOnly.FromDateTime(DateTime.Now.AddDays(-5)), MachineId = "MPC-001", RelOutput = 97.2, RelUniformity = 98.5, CenterShift = 0.15 }, - new Beam { Id = "beam-005", Type = "15x", Date = DateOnly.FromDateTime(DateTime.Now.AddDays(-4)), MachineId = "MPC-001", RelOutput = 97.5, RelUniformity = 98.7, CenterShift = 0.12 }, + new Beam { Id = "beam-001", Type = "6e", Date = new DateOnly(2025, 11, 9), MachineId = "MPC-001", RelOutput = 98.5, RelUniformity = 99.2 }, + new Beam { Id = "beam-002", Type = "6e", Date = new DateOnly(2025, 11, 8), MachineId = "MPC-001", RelOutput = 98.7, RelUniformity = 99.1 }, + new Beam { Id = "beam-003", Type = "6e", Date = new DateOnly(2025, 11, 7), MachineId = "MPC-001", RelOutput = 98.9, RelUniformity = 99.3 }, + new Beam { Id = "beam-004", Type = "15x", Date = new DateOnly(2025, 11, 9), MachineId = "MPC-001", RelOutput = 97.2, RelUniformity = 98.5, CenterShift = 0.15 }, + new Beam { Id = "beam-005", Type = "15x", Date = new DateOnly(2025, 11, 8), MachineId = "MPC-001", RelOutput = 97.5, RelUniformity = 98.7, CenterShift = 0.12 }, // MPC-002 (Secondary Gantry) - Different beam types 
diff --git a/src/api/Repositories/InMemory/InMemoryBeamRepository.cs b/src/api/Repositories/InMemory/InMemoryBeamRepository.cs index 6011111..fae1896 100644 --- a/src/api/Repositories/InMemory/InMemoryBeamRepository.cs +++ b/src/api/Repositories/InMemory/InMemoryBeamRepository.cs @@ -15,23 +15,23 @@ public class InMemoryBeamRepository : IBeamRepository private static readonly IReadOnlyList<Beam> SeedBeams = [ // MPC-001 (Primary Gantry) - Multiple beam types across several days - new Beam { Id = "beam-001", Type = "6e", Date = DateOnly.FromDateTime(DateTime.Now.AddDays(-5)), MachineId = "MPC-001", RelOutput = 98.5, RelUniformity = 99.2 }, - new Beam { Id = "beam-002", Type = "6e", Date = DateOnly.FromDateTime(DateTime.Now.AddDays(-4)), MachineId = "MPC-001", RelOutput = 98.7, RelUniformity = 99.1 }, - new Beam { Id = "beam-003", Type = "6e", Date = DateOnly.FromDateTime(DateTime.Now.AddDays(-3)), MachineId = "MPC-001", RelOutput = 98.9, RelUniformity = 99.3 }, - new Beam { Id = "beam-004", Type = "15x", Date = DateOnly.FromDateTime(DateTime.Now.AddDays(-5)), MachineId = "MPC-001", RelOutput = 97.2, RelUniformity = 98.5, CenterShift = 0.15 }, - new Beam { Id = "beam-005", Type = "15x", Date = DateOnly.FromDateTime(DateTime.Now.AddDays(-4)), MachineId = "MPC-001", RelOutput = 97.5, RelUniformity = 98.7, CenterShift = 0.12 }, + new Beam { Id = "beam-001", Type = "6e", Date = new DateOnly(2025, 11, 9), MachineId = "MPC-001", RelOutput = 98.5, RelUniformity = 99.2 }, + new Beam { Id = "beam-002", Type = "6e", Date = new DateOnly(2025, 11, 8), MachineId = "MPC-001", RelOutput = 98.7, RelUniformity = 99.1 }, + new Beam { Id = "beam-003", Type = "6e", Date = new DateOnly(2025, 11, 7), MachineId = "MPC-001", RelOutput = 98.9, RelUniformity = 99.3 }, + new Beam { Id = "beam-004", Type = "15x", Date = new DateOnly(2025, 11, 9), MachineId = "MPC-001", RelOutput = 97.2, RelUniformity = 98.5, CenterShift = 0.15 }, + new Beam { Id = "beam-005", Type = "15x", Date = new DateOnly(2025, 11, 8), MachineId = "MPC-001", RelOutput = 97.5, RelUniformity = 98.7, CenterShift = 0.12 }, // MPC-002 (Secondary Gantry) - Different beam types - new Beam { Id = "beam-006", Type = "6e", Date = DateOnly.FromDateTime(DateTime.Now.AddDays(-5)), MachineId = "MPC-002", RelOutput = 99.1, RelUniformity = 99.4 }, - new Beam { Id = "beam-007", Type = "6e", Date = DateOnly.FromDateTime(DateTime.Now.AddDays(-3)), MachineId = "MPC-002", RelOutput = 99.2, RelUniformity = 99.5 }, - new Beam { Id = "beam-008", Type = "10x", Date = DateOnly.FromDateTime(DateTime.Now.AddDays(-5)), MachineId = "MPC-002", RelOutput = 96.8, RelUniformity = 98.2, CenterShift = 0.08 }, - new Beam { Id = "beam-009", Type = "10x", Date = DateOnly.FromDateTime(DateTime.Now.AddDays(-4)), MachineId = "MPC-002", RelOutput = 96.9, RelUniformity = 98.3, CenterShift = 0.10 }, + new Beam { Id = "beam-006", Type = "6e", Date = new DateOnly(2025, 11, 9), MachineId = "MPC-002", RelOutput = 99.1, RelUniformity = 99.4 }, + new Beam { Id = "beam-007", Type = "6e", Date = new DateOnly(2025, 11, 7), MachineId = "MPC-002", RelOutput = 99.2, RelUniformity = 99.5 }, + new Beam { Id = "beam-008", Type = "10x", Date = new DateOnly(2025, 11, 9), MachineId = "MPC-002", RelOutput = 96.8, RelUniformity = 98.2, CenterShift = 0.08 }, + new Beam { Id = "beam-009", Type = "10x", Date = new DateOnly(2025, 11, 8), MachineId = "MPC-002", RelOutput = 96.9, RelUniformity = 98.3, CenterShift = 0.10 }, // MPC-003 (QA Test Bench) - Diagnostic machine with various beams - new Beam { Id = "beam-010", Type = "6e", Date = DateOnly.FromDateTime(DateTime.Now.AddDays(-5)), MachineId = "MPC-003", RelOutput = 98.0, RelUniformity = 99.0 }, - new Beam { Id = "beam-011", Type = "9e", Date = DateOnly.FromDateTime(DateTime.Now.AddDays(-5)), MachineId = "MPC-003", RelOutput = 97.5, RelUniformity = 98.8 }, - new Beam { Id = "beam-012", Type = "12e", Date = DateOnly.FromDateTime(DateTime.Now.AddDays(-4)), MachineId = "MPC-003", RelOutput = 96.9, RelUniformity = 98.6 }, - new Beam { Id = "beam-013", Type = "16e", Date = DateOnly.FromDateTime(DateTime.Now.AddDays(-3)), MachineId = "MPC-003", RelOutput = 96.2, RelUniformity = 98.4 }, + new Beam { Id = "beam-010", Type = "6e", Date = new DateOnly(2025, 11, 9), MachineId = "MPC-003", RelOutput = 98.0, RelUniformity = 99.0 }, + new Beam { Id = "beam-011", Type = "9e", Date = new DateOnly(2025, 11, 9), MachineId = "MPC-003", RelOutput = 97.5, RelUniformity = 98.8 }, + new Beam { Id = "beam-012", Type = "12e", Date = new DateOnly(2025, 11, 8), MachineId = "MPC-003", RelOutput = 96.9, RelUniformity = 98.6 }, + new Beam { Id = "beam-013", Type = "16e", Date = new DateOnly(2025, 11, 7), MachineId = "MPC-003", RelOutput = 96.2, RelUniformity = 98.4 }, ]; private readonly ConcurrentDictionary<string, Beam> _beams;
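Pinning the seed rows to fixed dates (rather than offsets from `DateTime.Now`) makes date filtering reproducible: a query for a concrete range returns the same beams on every run. A small sketch of the kind of assertion this enables (the `in_range` helper is a hypothetical stand-in for the repository's start/end-date filter):

```python
from datetime import date

# The pinned seed dates used above; seeded from DateTime.Now these would
# shift every day and the assertion below would eventually break.
seed = {
    "beam-001": date(2025, 11, 9),
    "beam-002": date(2025, 11, 8),
    "beam-003": date(2025, 11, 7),
}

def in_range(d, start, end):
    # Hypothetical stand-in for the start-date/end-date filtering.
    return start <= d <= end

hits = [beam_id for beam_id, d in seed.items()
        if in_range(d, date(2025, 11, 8), date(2025, 11, 9))]
assert hits == ["beam-001", "beam-002"]
```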
diff --git a/src/api/Repositories/Supabase/SupabaseGeoCheckRepository.cs b/src/api/Repositories/Supabase/SupabaseGeoCheckRepository.cs new file mode 100644 index 0000000..e2365c2 --- /dev/null +++ b/src/api/Repositories/Supabase/SupabaseGeoCheckRepository.cs @@ -0,0 +1,129 @@ +using Api.Models; +using Api.Repositories.Abstractions; +using Api.Repositories.Entities; +using Microsoft.Extensions.Logging; +using Supabase; +using Supabase.Postgrest.Exceptions; + +namespace Api.Repositories; + +public class SupabaseGeoCheckRepository : IGeoCheckRepository +{ + private readonly Client _client; + private readonly ILogger<SupabaseGeoCheckRepository> _logger; + + public SupabaseGeoCheckRepository(Client client, ILogger<SupabaseGeoCheckRepository> logger) + { + _client = client; + _logger = logger; + } + + public async Task<IReadOnlyList<GeoCheck>> GetAllAsync( + string? machineId = null, + string? type = null, + DateOnly? date = null, + DateOnly? startDate = null, + DateOnly? endDate = null, + CancellationToken cancellationToken = default) + { + cancellationToken.ThrowIfCancellationRequested(); + try + { + var response = await _client.From<GeoCheckEntity>().Get(); + var geoChecks = response.Models.Select(e => e.ToModel()).ToList(); + + if (!string.IsNullOrWhiteSpace(machineId)) + geoChecks = geoChecks.Where(g => g.MachineId == machineId).ToList(); + if (!string.IsNullOrWhiteSpace(type)) + geoChecks = geoChecks.Where(g => g.Type == type).ToList(); + if (date.HasValue) + geoChecks = geoChecks.Where(g => g.Date == date.Value).ToList(); + if (startDate.HasValue) + geoChecks = geoChecks.Where(g => g.Date >= startDate.Value).ToList(); + if (endDate.HasValue) + geoChecks = geoChecks.Where(g => g.Date <= endDate.Value).ToList(); + + return geoChecks.OrderByDescending(g => g.Date).ThenBy(g => g.Type).ToList().AsReadOnly(); + } + catch (Exception ex) + { + _logger.LogError(ex, "Error retrieving geometry checks"); + throw; + } + } + + public async Task<GeoCheck?> GetByIdAsync(string id, CancellationToken cancellationToken = default) + { + cancellationToken.ThrowIfCancellationRequested(); + try + { + var response = await _client.From<GeoCheckEntity>() + .Filter(nameof(GeoCheckEntity.Id), Supabase.Postgrest.Constants.Operator.Equals, id) + .Get(); + return response.Models.FirstOrDefault()?.ToModel(); + } + catch (Exception ex) + { + _logger.LogError(ex, "Error retrieving geometry check {GeoCheckId}", id); + return null; + } + } + + public async Task<GeoCheck> CreateAsync(GeoCheck geoCheck, CancellationToken cancellationToken = default) + { + cancellationToken.ThrowIfCancellationRequested(); + try + { + var response = await _client.From<GeoCheckEntity>().Insert(GeoCheckEntity.FromModel(geoCheck)); + return response.Models.First().ToModel(); + } + catch (Exception ex) + { + _logger.LogError(ex, "Error creating geometry check"); + throw; + } + } + + public async Task<bool> UpdateAsync(GeoCheck geoCheck, CancellationToken cancellationToken = default) + { + cancellationToken.ThrowIfCancellationRequested(); + try + { + var entity = GeoCheckEntity.FromModel(geoCheck); + await _client.From<GeoCheckEntity>().Update(entity); + return true; + } + catch (PostgrestException ex) when (ex.Message.Contains("no rows")) + { + _logger.LogWarning("Geometry check {GeoCheckId} not found for update", geoCheck.Id); + return false; + } + catch (Exception ex) + { + _logger.LogError(ex, "Error updating geometry check {GeoCheckId}", geoCheck.Id); + throw; + } + } + + public async Task<bool> DeleteAsync(string id, CancellationToken cancellationToken = default) + { + cancellationToken.ThrowIfCancellationRequested(); + try + { + await _client.From<GeoCheckEntity>() + .Filter(nameof(GeoCheckEntity.Id), Supabase.Postgrest.Constants.Operator.Equals, id) + .Delete(); + return true; + } + catch (PostgrestException ex) when (ex.Message.Contains("no rows")) + { + _logger.LogWarning("Geometry check {GeoCheckId} not found for deletion", id); + return false; + } + catch (Exception ex) + { + _logger.LogError(ex, "Error deleting geometry check {GeoCheckId}", id); + throw; + } + } +}
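`GetAllAsync` above pulls the whole table and filters in memory, which is simple but grows linearly with the table. For comparison, the ETL side's supabase-py client can push the same predicates down to PostgREST; a sketch under assumed credentials (the URL, key, and machine ID are placeholders):

```python
from supabase import create_client

client = create_client("https://example.supabase.co", "<service-key>")  # placeholder credentials

# Server-side equivalent of GetAllAsync(machineId=..., startDate=..., endDate=...):
# only matching rows cross the wire, already sorted newest-first.
rows = (
    client.table("geochecks")
    .select("*")
    .eq("machine_id", "MPC-001")
    .gte("date", "2025-11-07")
    .lte("date", "2025-11-09")
    .order("date", desc=True)
    .execute()
    .data
)
```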
diff --git a/src/api/Repositories/Supabase/SupabaseUpdateRepository.cs b/src/api/Repositories/Supabase/SupabaseUpdateRepository.cs new file mode 100644 index 0000000..f959824 --- /dev/null +++ b/src/api/Repositories/Supabase/SupabaseUpdateRepository.cs @@ -0,0 +1,135 @@ +using Api.Models; +using Api.Repositories.Abstractions; +using Api.Repositories.Entities; +using Microsoft.Extensions.Logging; +using Supabase; +using Supabase.Postgrest.Exceptions; + +namespace Api.Repositories; + +public class SupabaseUpdateRepository : IUpdateRepository +{ + private readonly Client _client; + private readonly ILogger<SupabaseUpdateRepository> _logger; + + public SupabaseUpdateRepository(Client client, ILogger<SupabaseUpdateRepository> logger) + { + _client = client; + _logger = logger; + } + + public async Task<IReadOnlyList<Update>> GetAllAsync(CancellationToken cancellationToken = default) + { + cancellationToken.ThrowIfCancellationRequested(); + + try + { + var response = await _client + .From<UpdateEntity>() + .Get(); + + return response.Models.Select(entity => entity.ToModel()).ToList(); + } + catch (Exception ex) + { + _logger.LogError(ex, "Error retrieving updates"); + throw; + } + } + + public async Task<Update?> GetByIdAsync(string id, CancellationToken cancellationToken = default) + { + cancellationToken.ThrowIfCancellationRequested(); + + try + { + var response = await _client + .From<UpdateEntity>() + .Filter(nameof(UpdateEntity.Id), Supabase.Postgrest.Constants.Operator.Equals, id) + .Get(); + + return response.Models.FirstOrDefault()?.ToModel(); + } + catch (Exception ex) + { + _logger.LogError(ex, "Error retrieving update {UpdateId}", id); + return null; + } + } + + public async Task<Update> CreateAsync(Update update, CancellationToken cancellationToken = default) + { + cancellationToken.ThrowIfCancellationRequested(); + + var entity = UpdateEntity.FromModel(update); + + try + { + var response = await _client.From<UpdateEntity>().Insert(entity); + var created = response.Models.FirstOrDefault(); + + if (created is null) + { + _logger.LogWarning("Supabase insert returned no models for update {UpdateId}", update.Id); + return update; + } + + return created.ToModel(); + } + catch (PostgrestException exception) when (IsUniqueConstraintViolation(exception)) + { + throw new InvalidOperationException($"Update with id '{update.Id}' already exists.", exception); + } + } + + public async Task<bool> UpdateAsync(Update update, CancellationToken cancellationToken = default) + { + cancellationToken.ThrowIfCancellationRequested(); + + var entity = UpdateEntity.FromModel(update); + + try + { + var response = await _client + .From<UpdateEntity>() + .Filter(nameof(UpdateEntity.Id), Supabase.Postgrest.Constants.Operator.Equals, update.Id) + .Update(entity); + + return response.Models.Any(); + } + catch (Exception ex) + { + _logger.LogError(ex, "Error updating update {UpdateId}", update.Id); + return false; + } + } + + public async Task<bool> DeleteAsync(string id, CancellationToken cancellationToken = default) + { + cancellationToken.ThrowIfCancellationRequested(); + + try + { + var existing = await GetByIdAsync(id, cancellationToken); + if (existing is null) + { + return false; + } + + await _client + .From<UpdateEntity>() + .Filter(nameof(UpdateEntity.Id), Supabase.Postgrest.Constants.Operator.Equals, id) + .Delete(); + + return true; + } + catch (Exception ex) + { + _logger.LogError(ex, "Error deleting update {UpdateId}", id); + return false; + } + } + + private static bool IsUniqueConstraintViolation(PostgrestException exception) => + exception.Message.Contains("duplicate key", StringComparison.OrdinalIgnoreCase); +} diff --git a/src/api/api.csproj b/src/api/api.csproj index 2db7bb9..c4d5b42 100644 --- a/src/api/api.csproj +++ b/src/api/api.csproj @@ -7,6 +7,7 @@ + diff --git a/src/api/api.yaml b/src/api/api.yaml index 9bf4b2e..fe77c4f 100644 --- a/src/api/api.yaml +++ b/src/api/api.yaml @@ -32,7 +32,7 @@ tags: description: Image operations paths: - /beam: + /beams: get: tags: - Beam @@ -151,7 +151,7 @@ paths: '404': description: Beam not found - /beam/{beamId}: + /beams/{beamId}: get: tags: - Beam @@ -174,7 +174,7 @@ paths: '404': description: Beam not found - /beam/types: + /beams/types: get: tags: - Beam @@
-277,8 +277,8 @@ paths: get: tags: - Results - summary: Get results for a specific month and year - description: Returns beam check results for the specified time period + summary: Get monthly calendar view of check results + description: Returns a calendar view showing beam checks and geometry checks for a specific month, year, and machine with pass/warning/fail status parameters: - name: month in: query @@ -294,15 +294,19 @@ paths: schema: type: integer description: Year + - name: machineId + in: query + required: true + schema: + type: string + description: Machine identifier responses: '200': description: Successful response content: application/json: schema: - type: array - items: - $ref: '#/components/schemas/Result' + $ref: '#/components/schemas/MonthlyResults' '400': description: Bad request @@ -596,19 +600,149 @@ paths: type: string format: binary - /geocheck: + + /geochecks: get: tags: - Geocheck - summary: Get geometry check data - description: Retrieves geometry check information + summary: Get geometry checks + description: Retrieves geometry check data, optionally filtered by machine, type, and date range + parameters: + - name: machineId + in: query + required: false + schema: + type: string + description: Machine identifier + - name: type + in: query + required: false + schema: + type: string + description: Type of beam (e.g., 6xff) + - name: date + in: query + required: false + schema: + type: string + format: date + description: Specific date for geometry check + - name: startDate + in: query + required: false + schema: + type: string + format: date + description: Start date for date range + - name: endDate + in: query + required: false + schema: + type: string + format: date + description: End date for date range responses: '200': description: Successful response content: application/json: schema: - type: object + type: array + items: + $ref: '#/components/schemas/GeoCheck' + + post: + tags: + - Geocheck + summary: Create a geometry check + description: Creates a new geometry check entry + requestBody: + required: true + content: + application/json: + schema: + $ref: '#/components/schemas/GeoCheck' + responses: + '201': + description: Geometry check created successfully + content: + application/json: + schema: + $ref: '#/components/schemas/GeoCheck' + '400': + description: Bad request + + /geochecks/{id}: + get: + tags: + - Geocheck + summary: Get geometry check by ID + description: Retrieves a single geometry check by its identifier + parameters: + - name: id + in: path + required: true + schema: + type: string + description: Unique geometry check identifier + responses: + '200': + description: Successful response + content: + application/json: + schema: + $ref: '#/components/schemas/GeoCheck' + '404': + description: Geometry check not found + + put: + tags: + - Geocheck + summary: Update geometry check + description: Updates an existing geometry check + parameters: + - name: id + in: path + required: true + schema: + type: string + description: Unique geometry check identifier + requestBody: + required: true + content: + application/json: + schema: + $ref: '#/components/schemas/GeoCheck' + responses: + '200': + description: Geometry check updated successfully + content: + application/json: + schema: + $ref: '#/components/schemas/GeoCheck' + '400': + description: Bad request + '404': + description: Geometry check not found + + delete: + tags: + - Geocheck + summary: Delete geometry check + description: Removes a geometry check from the system + parameters: + - name: id 
+ in: path + required: true + schema: + type: string + description: Unique geometry check identifier + responses: + '204': + description: Geometry check deleted successfully + '404': + description: Geometry check not found + components: schemas: @@ -692,6 +826,110 @@ components: type: string description: Status of the check + MonthlyResults: + type: object + properties: + month: + type: integer + minimum: 1 + maximum: 12 + description: Month queried + year: + type: integer + description: Year queried + machineId: + type: string + description: Machine identifier + checks: + type: array + items: + $ref: '#/components/schemas/DayCheckStatus' + description: Daily check statuses for the month + required: + - month + - year + - machineId + - checks + + DayCheckStatus: + type: object + properties: + date: + type: string + format: date + description: Date of checks + beamCheckStatus: + type: string + nullable: true + enum: [pass, warning, fail] + description: Overall beam check status for the day (null if no checks) + geometryCheckStatus: + type: string + nullable: true + enum: [pass, warning, fail] + description: Overall geometry check status for the day (null if no checks) + required: + - date + - beamCheckStatus + - geometryCheckStatus + + GeoCheck: + type: object + properties: + id: + type: string + description: Unique identifier for the geometry check + type: + type: string + description: Type of beam (e.g., 6xff) + date: + type: string + format: date + description: Date of the geometry check + machineId: + type: string + description: Associated machine identifier + path: + type: string + description: File path to the geometry check data + isoCenterSize: + type: number + format: double + description: Iso center size measurement + isoCenterMVOffset: + type: number + format: double + description: Iso center MV offset measurement + isoCenterKVOffset: + type: number + format: double + description: Iso center KV offset measurement + relativeOutput: + type: number + format: double + description: Relative output value + relativeUniformity: + type: number + format: double + description: Relative uniformity value + centerShift: + type: number + format: double + description: Center shift value + collimationRotationOffset: + type: number + format: double + description: Collimation rotation offset measurement + gantryAbsolute: + type: number + format: double + description: Gantry absolute measurement + required: + - id + - type + - date + - machineId + Baseline: type: object properties: diff --git a/src/api/appsettings.Development.json b/src/api/appsettings.Development.json index 8b057e5..f669e19 100644 --- a/src/api/appsettings.Development.json +++ b/src/api/appsettings.Development.json @@ -6,10 +6,10 @@ } }, "Database": { - "Provider": "InMemory" + "Provider": "Supabase" }, "Supabase": { - "Url": "", - "Key": "" + "Url": "", + "Key": "" } } diff --git a/src/data_manipulation/ETL/DataProcessor.py b/src/data_manipulation/ETL/DataProcessor.py index 35b6a15..f58c538 100644 --- a/src/data_manipulation/ETL/DataProcessor.py +++ b/src/data_manipulation/ETL/DataProcessor.py @@ -1,4 +1,7 @@ import os +import logging +from pathlib import Path +from dotenv import load_dotenv from .data_extractor import data_extractor from .image_extractor import image_extractor from .Uploader import Uploader @@ -8,6 +11,17 @@ from ..models.Geo6xfffModel import Geo6xfffModel from ..models.ImageModel import ImageModel +# Set up logger for this module +logger = logging.getLogger(__name__) + +# Load environment variables from .env 
directory in project root +project_root = Path(__file__).parent.parent.parent.parent +# Try .env/apicreds.txt first (existing structure), then fall back to .env file +env_path = project_root / '.env' / 'apicreds.txt' +if not env_path.exists(): + env_path = project_root / '.env' +load_dotenv(env_path) + class DataProcessor: """ @@ -20,6 +34,7 @@ def __init__(self, path: str): """ Initialize the DataProcessor with the directory path containing beam data. """ + self.folder_path = path # Store the folder path for uploads self.data_path = os.path.join(path, "Results.csv") self.image_path = os.path.join(path, "BeamProfileCheck.xim") @@ -39,7 +54,7 @@ def _init_beam_model(self, model_class, beam_type): Sets path, type, date, and machine SN automatically. """ model = model_class() - model.set_path(self.data_path) + model.set_path(self.folder_path) # Use folder path instead of data_path for database model.set_type(beam_type) model.set_date(model._getDateFromPathName(self.data_path)) model.set_machine_SN(model._getSNFromPathName(self.data_path)) @@ -73,6 +88,11 @@ def _process_beam(self, is_test=False): Detects the beam type, initializes the model, and sends it to the correct extractor method. """ + + # Skip EnhancedMLCCheckTemplate6x - these have leaves we don't want to ingest + if "EnhancedMLCCheckTemplate6x" in self.data_path: + logger.info(f"Skipping EnhancedMLCCheckTemplate6x path (leaves not ingested): {self.data_path}") + return beam_map = { "6e": (EBeamModel, "6e"), @@ -81,38 +101,60 @@ def _process_beam(self, is_test=False): "16e": (EBeamModel, "16e"), "10x": (XBeamModel, "10x"), "15x": (XBeamModel, "15x"), - "6x": (Geo6xfffModel, "6x"), + "6x": (Geo6xfffModel, "6x"), # Geometry checks use 6x as the beam type } for key, (model_class, beam_type) in beam_map.items(): if key in self.data_path: - print(f"{beam_type.upper()} Beam detected") + # Special handling for 6x: use "6xFFF" only for BeamCheckTemplate6xFFF + if key == "6x": + if "BeamCheckTemplate6xFFF" in self.data_path: + beam_type = "6xFFF" + # For other 6x templates (like GeometryCheckTemplate6xMVkVEnhancedCouch), use "6x" + + logger.info(f"{beam_type.upper()} Beam detected") # Initialize the correct beam model (EBeam, XBeam, etc.) 
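# For example, a path containing "15x" (say a hypothetical .../BeamCheckTemplate15x/... folder) selects (XBeamModel, "15x"), while a GeometryCheckTemplate6xMVkVEnhancedCouch path matches the "6x" key and keeps that type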
beam = self._init_beam_model(model_class, beam_type) if is_test: - print("Running test extraction...") + logger.info("Running test extraction...") self.data_ex.extractTest(beam) else: - print("Running normal extraction...") + logger.info("Running normal extraction...") self.data_ex.extract(beam) - print("Uploading to SupaBase...") + logger.info("Uploading to Supabase...") #Set Up DataBase - # Connect to database + # Connect to database using environment variables + # Connect to database using credentials from .env file connection_params = { - 'url': 'your-supabase-url', - 'key': 'your-supabase-key' + 'url': os.getenv('SUPABASE_URL'), + 'key': os.getenv('SUPABASE_KEY') } - self.up.connect(connection_params) - # self.up.upload(beam) - print("Uploading Complete") - self.up.close(); + + if not connection_params['url'] or not connection_params['key']: + error_msg = "Error: SUPABASE_URL and SUPABASE_KEY must be set in .env file" + logger.error(error_msg) + return + + logger.info(f"Connecting to Supabase with URL: {connection_params['url'][:30]}...") + if self.up.connect(connection_params): + logger.info("Successfully connected to Supabase, uploading beam data...") + upload_result = self.up.upload(beam) # Actually upload the data + if upload_result: + logger.info("Upload completed successfully") + else: + logger.warning("Upload returned False - check for errors above") + else: + logger.error("Failed to connect to Supabase") + + self.up.close() + logger.info("Supabase connection closed") # --- Image Extraction for all beam types --- - print(f"Extracting image data for {beam_type} beam...") + logger.debug(f"Extracting image data for {beam_type} beam...") self._init_beam_image(beam_type) return diff --git a/src/data_manipulation/ETL/Uploader.py b/src/data_manipulation/ETL/Uploader.py index c2c1a4d..7a83d9d 100644 --- a/src/data_manipulation/ETL/Uploader.py +++ b/src/data_manipulation/ETL/Uploader.py @@ -17,9 +17,15 @@ """ from abc import ABC, abstractmethod -from datetime import datetime +from datetime import datetime, date from decimal import Decimal from typing import Dict, Any, Optional +import logging +import os +import json + +# Set up logger for this module +logger = logging.getLogger(__name__) class DatabaseAdapter(ABC): @@ -43,13 +49,14 @@ def connect(self, connection_params: Dict[str, Any]) -> bool: pass @abstractmethod - def upload_beam_data(self, table_name: str, data: Dict[str, Any]) -> bool: + def upload_beam_data(self, table_name: str, data: Dict[str, Any], path: str = None) -> bool: """ Upload beam data to the specified table. Args: table_name: Name of the database table data: Dictionary containing the data to upload + path: Optional path to extract location from for machine creation Returns: bool: True if upload successful, False otherwise @@ -89,53 +96,259 @@ def connect(self, connection_params: Dict[str, Any]) -> bool: key = connection_params.get('key') if not url or not key: - print("Error: Supabase connection requires 'url' and 'key' parameters") + logger.error("Supabase connection requires 'url' and 'key' parameters") return False self.client: Client = create_client(url, key) self.connected = True - print("Successfully connected to Supabase") + logger.info("Successfully connected to Supabase") return True except ImportError: - print("Error: supabase-py library not installed. Install with: pip install supabase") + logger.error("supabase-py library not installed. 
Install with: pip install supabase") return False except Exception as e: - print(f"Error connecting to Supabase: {e}") + logger.error(f"Error connecting to Supabase: {e}") self.connected = False return False - def upload_beam_data(self, table_name: str, data: Dict[str, Any]) -> bool: + def ensure_machine_exists(self, machine_id: str, path: str = None) -> bool: + """ + Ensure a machine exists in the machines table before uploading beams. + Creates the machine if it doesn't exist. + + Args: + machine_id: The machine ID (serial number) + path: Optional path to extract location from (e.g., "/Volumes/Lexar/MPC Data/Arlington/...") + + Returns: + bool: True if machine exists or was created successfully, False otherwise + """ + if not self.connected or not self.client: + logger.error("Not connected to Supabase") + return False + + try: + # Check if machine exists + response = self.client.table('machines').select('id').eq('id', machine_id).execute() + + if response.data and len(response.data) > 0: + logger.debug(f"Machine {machine_id} already exists") + return True + + # Machine doesn't exist, create it + logger.info(f"Creating machine {machine_id}...") + + # Extract location from path if provided + location = "Unknown" + if path: + # Try to extract location from path (e.g., "/Volumes/Lexar/MPC Data/Arlington/..." -> "Arlington") + path_parts = path.split(os.sep) + for part in path_parts: + if part in ["Arlington", "Weatherford"]: + location = part + break + + # Create machine with default values + machine_data = { + 'id': machine_id, + 'name': f"Machine {machine_id}", + 'location': location, + 'type': 'NDS-WKS' # Default type based on folder naming pattern + } + + response = self.client.table('machines').insert(machine_data).execute() + + if response.data: + logger.info(f"Created machine {machine_id} in location {location}") + return True + else: + logger.warning(f"No data returned when creating machine {machine_id}") + return False + + except Exception as e: + logger.error(f"Error ensuring machine exists: {e}", exc_info=True) + return False + + def upload_beam_data(self, table_name: str, data: Dict[str, Any], path: str = None) -> bool: """ Upload beam data to Supabase table. 
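If the payload carries a 'machineId', the referenced machine row is created on demand via ensure_machine_exists before the insert is attempted.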
Args: table_name: Name of the Supabase table data: Dictionary containing the data to upload + path: Optional path to extract location from for machine creation Returns: bool: True if upload successful, False otherwise """ if not self.connected or not self.client: - print("Error: Not connected to Supabase") + logger.error("Not connected to Supabase") return False try: + # Ensure machine exists before uploading beam + machine_id = data.get('machineId') + if machine_id: + if not self.ensure_machine_exists(machine_id, path): + logger.warning(f"Could not ensure machine {machine_id} exists, but continuing with upload attempt") + # Convert Decimal to float for JSON serialization serialized_data = self._serialize_data(data) + logger.debug(f"Uploading data to {table_name}: {serialized_data}") # Insert data into Supabase table response = self.client.table(table_name).insert(serialized_data).execute() if response.data: - print(f"Successfully uploaded data to {table_name}") + logger.info(f"Successfully uploaded data to {table_name}") return True else: - print(f"Warning: No data returned from Supabase insert") + logger.warning("No data returned from Supabase insert") return False except Exception as e: - print(f"Error uploading data to Supabase: {e}") + logger.error(f"Error uploading data to Supabase: {e}", exc_info=True) + return False + + def upload_geocheck_data(self, data: Dict[str, Any], path: str = None) -> Optional[str]: + """ + Upload geometry check data to geochecks table. + Note: MLC leaves and backlash should NOT be included here - they go to separate tables. + + Args: + data: Dictionary containing the geocheck data to upload (geometry data only: jaws, couch, gantry, etc.) + path: Optional path to extract location from for machine creation + + Returns: + str: The geocheck_id if upload successful, None otherwise + """ + if not self.connected or not self.client: + logger.error("Not connected to Supabase") + return None + + try: + # Ensure machine exists before uploading geocheck + machine_id = data.get('machine_id') + if machine_id: + if not self.ensure_machine_exists(machine_id, path): + logger.warning(f"Could not ensure machine {machine_id} exists, but continuing with upload attempt") + + # Remove MLC data if accidentally included (it goes to separate tables) + data.pop('mlc_leaves_a', None) + data.pop('mlc_leaves_b', None) + data.pop('mlc_backlash_a', None) + data.pop('mlc_backlash_b', None) + + # Convert Decimal to float for JSON serialization + serialized_data = self._serialize_data(data) + logger.debug(f"Uploading geocheck data: {serialized_data}") + + # Insert data into geochecks table + response = self.client.table('geochecks').insert(serialized_data).execute() + + if response.data and len(response.data) > 0: + geocheck_id = response.data[0].get('id') + logger.info(f"Successfully uploaded geocheck data with id: {geocheck_id}") + return geocheck_id + else: + logger.warning("No data returned from Supabase geocheck insert") + return None + + except Exception as e: + logger.error(f"Error uploading geocheck data to Supabase: {e}", exc_info=True) + return None + + def upload_mlc_leaves(self, geocheck_id: str, leaves_data: list, bank: str) -> bool: + """ + Upload MLC leaves data to geocheck_mlc_leaves_a or geocheck_mlc_leaves_b table. 
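All leaf rows for a bank are written in a single batched insert rather than one call per leaf.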
+ + Args: + geocheck_id: The geocheck ID to associate leaves with + leaves_data: List of dictionaries with 'leaf_number' and 'leaf_value' + bank: Either 'a' or 'b' to determine which table to use + + Returns: + bool: True if all leaves uploaded successfully, False otherwise + """ + if not self.connected or not self.client: + logger.error("Not connected to Supabase") + return False + + if not geocheck_id or not leaves_data: + return False + + table_name = f'geocheck_mlc_leaves_{bank.lower()}' + + try: + # Prepare data with geocheck_id + upload_data = [] + for leaf in leaves_data: + leaf_record = { + 'geocheck_id': geocheck_id, + 'leaf_number': leaf.get('leaf_number'), + 'leaf_value': float(leaf.get('leaf_value')) if leaf.get('leaf_value') is not None else None + } + upload_data.append(leaf_record) + + # Insert all leaves at once + response = self.client.table(table_name).insert(upload_data).execute() + + if response.data: + logger.info(f"Successfully uploaded {len(upload_data)} MLC leaves to {table_name}") + return True + else: + logger.warning(f"No data returned from {table_name} insert") + return False + + except Exception as e: + logger.error(f"Error uploading MLC leaves to {table_name}: {e}", exc_info=True) + return False + + def upload_mlc_backlash(self, geocheck_id: str, backlash_data: list, bank: str) -> bool: + """ + Upload MLC backlash data to geocheck_mlc_backlash_a or geocheck_mlc_backlash_b table. + + Args: + geocheck_id: The geocheck ID to associate backlash with + backlash_data: List of dictionaries with 'leaf_number' and 'backlash_value' + bank: Either 'a' or 'b' to determine which table to use + + Returns: + bool: True if all backlash data uploaded successfully, False otherwise + """ + if not self.connected or not self.client: + logger.error("Not connected to Supabase") + return False + + if not geocheck_id or not backlash_data: + return False + + table_name = f'geocheck_mlc_backlash_{bank.lower()}' + + try: + # Prepare data with geocheck_id + upload_data = [] + for backlash in backlash_data: + backlash_record = { + 'geocheck_id': geocheck_id, + 'leaf_number': backlash.get('leaf_number'), + 'backlash_value': float(backlash.get('backlash_value')) if backlash.get('backlash_value') is not None else None + } + upload_data.append(backlash_record) + + # Insert all backlash records at once + response = self.client.table(table_name).insert(upload_data).execute() + + if response.data: + logger.info(f"Successfully uploaded {len(upload_data)} MLC backlash records to {table_name}") + return True + else: + logger.warning(f"No data returned from {table_name} insert") + return False + + except Exception as e: + logger.error(f"Error uploading MLC backlash to {table_name}: {e}", exc_info=True) return False def _serialize_data(self, data: Dict[str, Any]) -> Dict[str, Any]: @@ -152,7 +365,8 @@ def _serialize_data(self, data: Dict[str, Any]) -> Dict[str, Any]: for key, value in data.items(): if isinstance(value, Decimal): serialized[key] = float(value) - elif isinstance(value, datetime): + elif isinstance(value, (datetime, date)): + # Convert both datetime and date objects to ISO format strings serialized[key] = value.isoformat() elif value is None: serialized[key] = None @@ -164,7 +378,7 @@ def close(self): """Close the Supabase connection.""" self.client = None self.connected = False - print("Supabase connection closed") + logger.info("Supabase connection closed") class Uploader: @@ -198,6 +412,14 @@ def connect(self, connection_params: Dict[str, Any]) -> bool: self.connected = 
self.db_adapter.connect(connection_params) return self.connected + def close(self): + """ + Close the database connection using the adapter. + """ + if self.db_adapter: + self.db_adapter.close() + self.connected = False + def upload(self, model): """ Automatically calls the correct upload method @@ -209,7 +431,7 @@ def upload(self, model): - Geo6xfffModel """ if not self.connected: - print("Error: Not connected to database. Call connect() first.") + logger.error("Not connected to database. Call connect() first.") return False model_type = type(model).__name__.lower() @@ -234,7 +456,7 @@ def uploadTest(self, model): - Geo6xfffModel """ if not self.connected: - print("Error: Not connected to database. Call connect() first.") + logger.error("Not connected to database. Call connect() first.") return False model_type = type(model).__name__.lower() @@ -254,22 +476,24 @@ def eModelUpload(self, eBeam): Upload data for E-beam model to database. """ try: - # Prepare data dictionary using model getters + # Prepare data dictionary mapped to 'beams' table schema data = { - 'date': eBeam.get_date(), - 'machine_sn': eBeam.get_machine_SN(), - 'beam_type': eBeam.get_type(), - 'is_baseline': eBeam.get_baseline(), - 'relative_output': eBeam.get_relative_output(), - 'relative_uniformity': eBeam.get_relative_uniformity(), + 'type': eBeam.get_type(), + 'date': eBeam.get_date().date() if hasattr(eBeam.get_date(), 'date') else eBeam.get_date(), + 'path': eBeam.get_path(), + 'relOutput': float(eBeam.get_relative_output()) if eBeam.get_relative_output() is not None else None, + 'relUniformity': float(eBeam.get_relative_uniformity()) if eBeam.get_relative_uniformity() is not None else None, + 'centerShift': None, # E-beams don't have center shift + 'machineId': eBeam.get_machine_SN(), + 'note': f"Baseline: {eBeam.get_baseline()}" if eBeam.get_baseline() else None, } - # Upload to database - table_name = 'ebeam_data' # Adjust table name as needed - return self.db_adapter.upload_beam_data(table_name, data) + # Upload to database, passing path for machine creation + table_name = 'beams' + return self.db_adapter.upload_beam_data(table_name, data, path=eBeam.get_path()) except Exception as e: - print(f"Error during E-beam upload: {e}") + logger.error(f"Error during E-beam upload: {e}", exc_info=True) return False @@ -279,23 +503,29 @@ def xModelUpload(self, xBeam): Upload data for X-beam model to database.
""" try: - # Prepare data dictionary using model getters + # Prepare data dictionary mapped to 'beams' table schema + # Get values, handling Decimal('0.0') which is falsy but valid + rel_output = xBeam.get_relative_output() + rel_uniformity = xBeam.get_relative_uniformity() + center_shift = xBeam.get_center_shift() + data = { - 'date': xBeam.get_date(), - 'machine_sn': xBeam.get_machine_SN(), - 'beam_type': xBeam.get_type(), - 'is_baseline': xBeam.get_baseline(), - 'relative_output': xBeam.get_relative_output(), - 'relative_uniformity': xBeam.get_relative_uniformity(), - 'center_shift': xBeam.get_center_shift(), + 'type': xBeam.get_type(), + 'date': xBeam.get_date().date() if hasattr(xBeam.get_date(), 'date') else xBeam.get_date(), + 'path': xBeam.get_path(), + 'relOutput': float(rel_output) if rel_output is not None else None, + 'relUniformity': float(rel_uniformity) if rel_uniformity is not None else None, + 'centerShift': float(center_shift) if center_shift is not None else None, + 'machineId': xBeam.get_machine_SN(), + 'note': f"Baseline: {xBeam.get_baseline()}" if xBeam.get_baseline() else None, } - # Upload to database - table_name = 'xbeam_data' # Adjust table name as needed - return self.db_adapter.upload_beam_data(table_name, data) + # Upload to database, passing path for machine creation + table_name = 'beams' + return self.db_adapter.upload_beam_data(table_name, data, path=xBeam.get_path()) except Exception as e: - print(f"Error during X-beam upload: {e}") + logger.error(f"Error during X-beam upload: {e}", exc_info=True) return False @@ -303,78 +533,158 @@ def xModelUpload(self, xBeam): def geoModelUpload(self, geoModel): """ Upload data for Geo6xfffModel to database. + The 6x beam data (relative_output, relative_uniformity, center_shift) goes to 'beams' table as an X-beam. + Geometry-specific data (jaws, couch, gantry, etc.) goes to 'geochecks' table. + MLC leaves and backlash go to separate tables: geocheck_mlc_leaves_a/b and geocheck_mlc_backlash_a/b. 
""" try: - # Prepare data dictionary using model getters - data = { - 'date': geoModel.get_date(), - 'machine_sn': geoModel.get_machine_SN(), - 'beam_type': geoModel.get_type(), - 'is_baseline': geoModel.get_baseline(), - + # Step 1: Extract and upload 6x beam data to 'beams' table (as an X-beam) + rel_output = geoModel.get_relative_output() + rel_uniformity = geoModel.get_relative_uniformity() + center_shift = geoModel.get_center_shift() + + beam_data = { + 'type': geoModel.get_type(), # "6x" - treated as an X-beam + 'date': geoModel.get_date().date() if hasattr(geoModel.get_date(), 'date') else geoModel.get_date(), + 'path': geoModel.get_path(), + 'relOutput': float(rel_output) if rel_output is not None else None, + 'relUniformity': float(rel_uniformity) if rel_uniformity is not None else None, + 'centerShift': float(center_shift) if center_shift is not None else None, + 'machineId': geoModel.get_machine_SN(), + 'note': f"Baseline: {geoModel.get_baseline()}, Geometry check data available" if geoModel.get_baseline() else "Geometry check data available", + } + + # Upload 6x beam to beams table + beam_result = self.db_adapter.upload_beam_data('beams', beam_data, path=geoModel.get_path()) + if not beam_result: + logger.warning("Failed to upload 6x beam data, but continuing with geometry data upload") + + # Check if this is BeamCheckTemplate6xFFF - these should NOT go to geochecks + path = geoModel.get_path() + is_beamcheck_6xfff = "BeamCheckTemplate6xFFF" in path + + if is_beamcheck_6xfff: + # BeamCheckTemplate6xFFF only goes to beams table, not geochecks + logger.info("BeamCheckTemplate6xFFF detected - skipping geochecks upload (beam data only)") + return beam_result + + # Step 2: Upload geometry data to 'geochecks' table (without MLC leaves/backlash) + # ID will be auto-generated by upload_geocheck_data if not provided + # Note: 'type' column doesn't exist in geochecks table - beam type is stored in beams table + geocheck_data = { + 'path': geoModel.get_path(), + 'machine_id': geoModel.get_machine_SN(), + 'date': geoModel.get_date().date() if hasattr(geoModel.get_date(), 'date') else geoModel.get_date(), # IsoCenterGroup - 'iso_center_size': geoModel.get_IsoCenterSize(), - 'iso_center_mv_offset': geoModel.get_IsoCenterMVOffset(), - 'iso_center_kv_offset': geoModel.get_IsoCenterKVOffset(), - - # BeamGroup - 'relative_output': geoModel.get_relative_output(), - 'relative_uniformity': geoModel.get_relative_uniformity(), - 'center_shift': geoModel.get_center_shift(), - + 'iso_center_size': float(geoModel.get_IsoCenterSize()) if geoModel.get_IsoCenterSize() is not None else None, + 'iso_center_mv_offset': float(geoModel.get_IsoCenterMVOffset()) if geoModel.get_IsoCenterMVOffset() is not None else None, + 'iso_center_kv_offset': float(geoModel.get_IsoCenterKVOffset()) if geoModel.get_IsoCenterKVOffset() is not None else None, + # BeamGroup (already in beams table, but also in geochecks for reference) + 'relative_output': float(rel_output) if rel_output is not None else None, + 'relative_uniformity': float(rel_uniformity) if rel_uniformity is not None else None, + 'center_shift': float(center_shift) if center_shift is not None else None, # CollimationGroup - 'collimation_rotation_offset': geoModel.get_CollimationRotationOffset(), - + 'collimation_rotation_offset': float(geoModel.get_CollimationRotationOffset()) if geoModel.get_CollimationRotationOffset() is not None else None, # GantryGroup - 'gantry_absolute': geoModel.get_GantryAbsolute(), - 'gantry_relative': geoModel.get_GantryRelative(), - + 
'gantry_absolute': float(geoModel.get_GantryAbsolute()) if geoModel.get_GantryAbsolute() is not None else None, + 'gantry_relative': float(geoModel.get_GantryRelative()) if geoModel.get_GantryRelative() is not None else None, # EnhancedCouchGroup - 'couch_max_position_error': geoModel.get_CouchMaxPositionError(), - 'couch_lat': geoModel.get_CouchLat(), - 'couch_lng': geoModel.get_CouchLng(), - 'couch_vrt': geoModel.get_CouchVrt(), - 'couch_rtn_fine': geoModel.get_CouchRtnFine(), - 'couch_rtn_large': geoModel.get_CouchRtnLarge(), - 'rotation_induced_couch_shift_full_range': geoModel.get_RotationInducedCouchShiftFullRange(), - - # MLC Offsets - 'max_offset_a': geoModel.get_MaxOffsetA(), - 'max_offset_b': geoModel.get_MaxOffsetB(), - 'mean_offset_a': geoModel.get_MeanOffsetA(), - 'mean_offset_b': geoModel.get_MeanOffsetB(), - - # MLC Backlash - 'mlc_backlash_max_a': geoModel.get_MLCBacklashMaxA(), - 'mlc_backlash_max_b': geoModel.get_MLCBacklashMaxB(), - 'mlc_backlash_mean_a': geoModel.get_MLCBacklashMeanA(), - 'mlc_backlash_mean_b': geoModel.get_MLCBacklashMeanB(), + 'couch_max_position_error': float(geoModel.get_CouchMaxPositionError()) if geoModel.get_CouchMaxPositionError() is not None else None, + 'couch_lat': float(geoModel.get_CouchLat()) if geoModel.get_CouchLat() is not None else None, + 'couch_lng': float(geoModel.get_CouchLng()) if geoModel.get_CouchLng() is not None else None, + 'couch_vrt': float(geoModel.get_CouchVrt()) if geoModel.get_CouchVrt() is not None else None, + 'couch_rtn_fine': float(geoModel.get_CouchRtnFine()) if geoModel.get_CouchRtnFine() is not None else None, + 'couch_rtn_large': float(geoModel.get_CouchRtnLarge()) if geoModel.get_CouchRtnLarge() is not None else None, + 'rotation_induced_couch_shift_full_range': float(geoModel.get_RotationInducedCouchShiftFullRange()) if geoModel.get_RotationInducedCouchShiftFullRange() is not None else None, + # MLCGroup - Offsets (summary stats only, not individual leaves) + 'max_offset_a': float(geoModel.get_MaxOffsetA()) if geoModel.get_MaxOffsetA() is not None else None, + 'max_offset_b': float(geoModel.get_MaxOffsetB()) if geoModel.get_MaxOffsetB() is not None else None, + 'mean_offset_a': float(geoModel.get_MeanOffsetA()) if geoModel.get_MeanOffsetA() is not None else None, + 'mean_offset_b': float(geoModel.get_MeanOffsetB()) if geoModel.get_MeanOffsetB() is not None else None, + # MLCBacklashGroup - Summary stats only (not individual leaves) + 'mlc_backlash_max_a': float(geoModel.get_MLCBacklashMaxA()) if geoModel.get_MLCBacklashMaxA() is not None else None, + 'mlc_backlash_max_b': float(geoModel.get_MLCBacklashMaxB()) if geoModel.get_MLCBacklashMaxB() is not None else None, + 'mlc_backlash_mean_a': float(geoModel.get_MLCBacklashMeanA()) if geoModel.get_MLCBacklashMeanA() is not None else None, + 'mlc_backlash_mean_b': float(geoModel.get_MLCBacklashMeanB()) if geoModel.get_MLCBacklashMeanB() is not None else None, + # JawsGroup + 'jaw_x1': float(geoModel.get_JawX1()) if geoModel.get_JawX1() is not None else None, + 'jaw_x2': float(geoModel.get_JawX2()) if geoModel.get_JawX2() is not None else None, + 'jaw_y1': float(geoModel.get_JawY1()) if geoModel.get_JawY1() is not None else None, + 'jaw_y2': float(geoModel.get_JawY2()) if geoModel.get_JawY2() is not None else None, + # JawsParallelismGroup + 'jaw_parallelism_x1': float(geoModel.get_JawParallelismX1()) if geoModel.get_JawParallelismX1() is not None else None, + 'jaw_parallelism_x2': float(geoModel.get_JawParallelismX2()) if geoModel.get_JawParallelismX2() is not None 
else None, + 'jaw_parallelism_y1': float(geoModel.get_JawParallelismY1()) if geoModel.get_JawParallelismY1() is not None else None, + 'jaw_parallelism_y2': float(geoModel.get_JawParallelismY2()) if geoModel.get_JawParallelismY2() is not None else None, + } + + geocheck_id_result = self.db_adapter.upload_geocheck_data(geocheck_data, path=geoModel.get_path()) + if not geocheck_id_result: + logger.error("Failed to upload geocheck data, cannot proceed with MLC data") + return False + + # Step 3: Upload MLC leaves data to separate tables + # Determine leaf range based on template type + # GeometryCheckTemplate6xMVkVEnhancedCouch only has leaves 11-50 + if "GeometryCheckTemplate6xMVkVEnhancedCouch" in path: + leaf_range = range(11, 51) # Only leaves 11-50 for this template + else: + leaf_range = range(1, 61) # Leaves 1-60 for other templates (e.g., BeamCheckTemplate6xFFF) + + leaves_a_data = [] + leaves_b_data = [] + for i in leaf_range: + leaf_a_val = geoModel.get_MLCLeafA(i) + leaf_b_val = geoModel.get_MLCLeafB(i) - # Jaws Group - 'jaw_x1': geoModel.get_JawX1(), - 'jaw_x2': geoModel.get_JawX2(), - 'jaw_y1': geoModel.get_JawY1(), - 'jaw_y2': geoModel.get_JawY2(), + leaves_a_data.append({ + 'leaf_number': i, + 'leaf_value': float(leaf_a_val) if leaf_a_val is not None else None + }) + leaves_b_data.append({ + 'leaf_number': i, + 'leaf_value': float(leaf_b_val) if leaf_b_val is not None else None + }) + + leaves_a_result = self.db_adapter.upload_mlc_leaves(geocheck_id_result, leaves_a_data, 'a') + leaves_b_result = self.db_adapter.upload_mlc_leaves(geocheck_id_result, leaves_b_data, 'b') + + # Step 4: Upload MLC backlash data to separate tables + # Use same leaf range as determined above + backlash_a_data = [] + backlash_b_data = [] + for i in leaf_range: + backlash_a_val = geoModel.get_MLCBacklashA(i) + backlash_b_val = geoModel.get_MLCBacklashB(i) - # Jaw Parallelism - 'jaw_parallelism_x1': geoModel.get_JawParallelismX1(), - 'jaw_parallelism_x2': geoModel.get_JawParallelismX2(), - 'jaw_parallelism_y1': geoModel.get_JawParallelismY1(), - 'jaw_parallelism_y2': geoModel.get_JawParallelismY2(), - } - - # Upload to database - table_name = 'geo6xfff_data' # Adjust table name as needed - result = self.db_adapter.upload_beam_data(table_name, data) + backlash_a_data.append({ + 'leaf_number': i, + 'backlash_value': float(backlash_a_val) if backlash_a_val is not None else None + }) + backlash_b_data.append({ + 'leaf_number': i, + 'backlash_value': float(backlash_b_val) if backlash_b_val is not None else None + }) + + backlash_a_result = self.db_adapter.upload_mlc_backlash(geocheck_id_result, backlash_a_data, 'a') + backlash_b_result = self.db_adapter.upload_mlc_backlash(geocheck_id_result, backlash_b_data, 'b') + + # Return True if all critical uploads succeeded + # Beam upload is optional (we log warning but continue) + # Geocheck, leaves, and backlash are all required + overall_success = (geocheck_id_result is not None and + leaves_a_result and leaves_b_result and + backlash_a_result and backlash_b_result) - # Optionally upload MLC leaf data to separate tables - # This could be done in a separate method or as part of this method - # For now, we'll skip individual leaf data to keep the main record simple + if overall_success: + logger.info("Successfully uploaded all geometry check data") + else: + logger.warning("Some geometry check data uploads may have failed") - return result + return overall_success except Exception as e: - print(f"Error during Geo model upload: {e}") + logger.error(f"Error during Geo 
model upload: {e}", exc_info=True) return False def uploadMLCLeaves(self, geoModel, table_name: str = 'mlc_leaves_data'): @@ -386,23 +696,21 @@ def uploadMLCLeaves(self, geoModel, table_name: str = 'mlc_leaves_data'): try: leaves_data = [] - # Collect all MLC leaf A data - for i in range(11, 51): + # Collect all MLC leaf A data (leaves 1-60) + for i in range(1, 61): leaves_data.append({ 'date': geoModel.get_date(), 'machine_sn': geoModel.get_machine_SN(), - 'beam_type': geoModel.get_type(), 'leaf_bank': 'A', 'leaf_index': i, 'leaf_value': geoModel.get_MLCLeafA(i), }) - # Collect all MLC leaf B data - for i in range(11, 51): + # Collect all MLC leaf B data (leaves 1-60) + for i in range(1, 61): leaves_data.append({ 'date': geoModel.get_date(), 'machine_sn': geoModel.get_machine_SN(), - 'beam_type': geoModel.get_type(), 'leaf_bank': 'B', 'leaf_index': i, 'leaf_value': geoModel.get_MLCLeafB(i), @@ -414,11 +722,11 @@ def uploadMLCLeaves(self, geoModel, table_name: str = 'mlc_leaves_data'): if self.db_adapter.upload_beam_data(table_name, leaf_data): success_count += 1 - print(f"Uploaded {success_count}/{len(leaves_data)} MLC leaf records") + logger.info(f"Uploaded {success_count}/{len(leaves_data)} MLC leaf records") return success_count == len(leaves_data) except Exception as e: - print(f"Error uploading MLC leaves: {e}") + logger.error(f"Error uploading MLC leaves: {e}", exc_info=True) return False def uploadMLCBacklash(self, geoModel, table_name: str = 'mlc_backlash_data'): @@ -430,23 +738,21 @@ def uploadMLCBacklash(self, geoModel, table_name: str = 'mlc_backlash_data'): try: backlash_data = [] - # Collect all MLC backlash A data - for i in range(11, 51): + # Collect all MLC backlash A data (leaves 1-60) + for i in range(1, 61): backlash_data.append({ 'date': geoModel.get_date(), 'machine_sn': geoModel.get_machine_SN(), - 'beam_type': geoModel.get_type(), 'leaf_bank': 'A', 'leaf_index': i, 'backlash_value': geoModel.get_MLCBacklashA(i), }) - # Collect all MLC backlash B data - for i in range(11, 51): + # Collect all MLC backlash B data (leaves 1-60) + for i in range(1, 61): backlash_data.append({ 'date': geoModel.get_date(), 'machine_sn': geoModel.get_machine_SN(), - 'beam_type': geoModel.get_type(), 'leaf_bank': 'B', 'leaf_index': i, 'backlash_value': geoModel.get_MLCBacklashB(i), @@ -458,11 +764,11 @@ def uploadMLCBacklash(self, geoModel, table_name: str = 'mlc_backlash_data'): if self.db_adapter.upload_beam_data(table_name, backlash_record): success_count += 1 - print(f"Uploaded {success_count}/{len(backlash_data)} MLC backlash records") + logger.info(f"Uploaded {success_count}/{len(backlash_data)} MLC backlash records") return success_count == len(backlash_data) except Exception as e: - print(f"Error uploading MLC backlash: {e}") + logger.error(f"Error uploading MLC backlash: {e}", exc_info=True) return False diff --git a/src/data_manipulation/ETL/data_extractor.py b/src/data_manipulation/ETL/data_extractor.py index 99ecc8f..191e292 100644 --- a/src/data_manipulation/ETL/data_extractor.py +++ b/src/data_manipulation/ETL/data_extractor.py @@ -16,8 +16,12 @@ import csv import decimal +import logging from decimal import Decimal +# Set up logger for this module +logger = logging.getLogger(__name__) + class data_extractor: """ Handles data extraction from CSV files for various beam models.
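The hunks that follow rework how the extractor recovers MLC leaf indices from row names such as `"CollimationGroup/MLCGroup/MLCLeavesA/MLCLeaf11 [mm]"`. A compact regex sketch of that parsing and its 1-60 range check (a simplification of the string splitting used below):

```python
import re
from decimal import Decimal

# Matches bare and fully qualified leaf rows, e.g. "MLCLeavesA/MLCLeaf11 [mm]"
# or "CollimationGroup/MLCGroup/MLCLeavesB/MLCLeaf42 [mm]"; backlash rows
# ("MLCBacklashLeavesA/...") do not contain "MLCLeaves" and fall through.
LEAF_RE = re.compile(r"MLCLeaves(?P<bank>[AB])/MLCLeaf(?P<index>\d+)")

def parse_leaf(name, value):
    """Return (bank, index, Decimal value), or None for non-leaf or out-of-range rows."""
    match = LEAF_RE.search(name)
    if not match:
        return None
    index = int(match.group("index"))
    if not 1 <= index <= 60:  # same validation the extractor applies
        return None
    return match.group("bank"), index, Decimal(value)

assert parse_leaf("CollimationGroup/MLCGroup/MLCLeavesA/MLCLeaf11 [mm]", "0.1") == ("A", 11, Decimal("0.1"))
assert parse_leaf("MLCLeavesB/MLCLeaf99 [mm]", "0.2") is None
```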
@@ -71,9 +75,12 @@ def eModelExtraction(self, eBeam): """ Extract data for E-beam model from CSV file """ + import os + try: - # Get the path from the eBeam object - path = eBeam.get_path() + # Get the folder path and construct the CSV file path + folder_path = eBeam.get_path() + path = os.path.join(folder_path, "Results.csv") # Parse the CSV file with open(path, 'r', newline='', encoding='utf-8') as csvfile: @@ -82,53 +89,50 @@ def eModelExtraction(self, eBeam): # Read through the CSV rows for row in reader: name = row.get('Name [Unit]', '').strip() - value = row.get(' Value', '') + value = row.get(' Value', '').strip() + if not name or not value: + continue + + # Convert value to Decimal + try: + dec_val = Decimal(value) + except (ValueError, TypeError, decimal.InvalidOperation): + dec_val = Decimal(-1) + # Check for relative output (BeamOutputChange) if 'BeamOutputChange' in name: - try: - eBeam.set_relative_output(Decimal(value)) - except (ValueError, TypeError, decimal.InvalidOperation): - eBeam.set_relative_output(Decimal(-1)) + eBeam.set_relative_output(dec_val) # Check for relative uniformity (BeamUniformityChange) elif 'BeamUniformityChange' in name: - try: - eBeam.set_relative_uniformity(Decimal(value)) - except (ValueError, TypeError, decimal.InvalidOperation): - eBeam.set_relative_uniformity(Decimal(-1)) + eBeam.set_relative_uniformity(dec_val) except FileNotFoundError: - print(f"CSV file not found: {path}") + logger.error(f"CSV file not found: {path}") except csv.Error as e: - print(f"Error parsing CSV file: {e}") + logger.error(f"Error parsing CSV file: {e}") except Exception as e: - print(f"Error during extraction: {e}") + logger.error(f"Error during extraction: {e}", exc_info=True) def testeModelExtraction(self, eBeam): """ Test method for E model extraction. Runs eModelExtraction() and prints all values using getters. 
""" - print("\n--- Starting E Model Extraction Test ---") self.eModelExtraction(eBeam) - print("--- Extraction Complete ---\n") - - # Print the values - print(f"Date: {eBeam.get_date()}") - print(f"Machine SN: {eBeam.get_machine_SN()}") - print(f"Is Baseline: {eBeam.get_baseline()}") - print(f"Relative Uniformity: {eBeam.get_relative_uniformity()}") - print(f"Relative Output: {eBeam.get_relative_output()}") # --- X-BEAM --- def xModelExtraction(self, xBeam): """ - Extract data for X-beam model from CVS file + Extract data for X-beam model from CSV file """ + import os + try: - # Get the path from the eBeam object - path = xBeam.get_path() + # Get the folder path and construct the CSV file path + folder_path = xBeam.get_path() + path = os.path.join(folder_path, "Results.csv") # Parse the CSV file with open(path, 'r', newline='', encoding='utf-8') as csvfile: @@ -137,53 +141,42 @@ def xModelExtraction(self, xBeam): # Read through the CSV rows for row in reader: name = row.get('Name [Unit]', '').strip() - value = row.get(' Value', '') + value = row.get(' Value', '').strip() + if not name or not value: + continue + + # Convert value to Decimal + try: + dec_val = Decimal(value) + except (ValueError, TypeError, decimal.InvalidOperation): + dec_val = Decimal(-1) # Check for relative output (BeamOutputChange) if 'BeamOutputChange' in name: - try: - xBeam.set_relative_output(Decimal(value)) - except (ValueError, TypeError, decimal.InvalidOperation): - xBeam.set_relative_output(Decimal(-1)) + xBeam.set_relative_output(dec_val) # Check for relative uniformity (BeamUniformityChange) elif 'BeamUniformityChange' in name: - try: - xBeam.set_relative_uniformity(Decimal(value)) - except (ValueError, TypeError, decimal.InvalidOperation): - xBeam.set_relative_uniformity(Decimal(-1)) + xBeam.set_relative_uniformity(dec_val) # Check for Center Shift (BeamCenterShift) elif 'BeamCenterShift' in name: - try: - xBeam.set_center_shift(Decimal(value)) - except (ValueError, TypeError, decimal.InvalidOperation): - xBeam.set_center_shift(Decimal(-1)) + xBeam.set_center_shift(dec_val) except FileNotFoundError: - print(f"CSV file not found: {path}") + logger.error(f"CSV file not found: {path}") except csv.Error as e: - print(f"Error parsing CSV file: {e}") + logger.error(f"Error parsing CSV file: {e}") except Exception as e: - print(f"Error during extraction: {e}") + logger.error(f"Error during extraction: {e}", exc_info=True) def testxModelExtraction(self, xBeam): """ Test method for X model extraction. Runs xModelExtraction() and prints all values using getters. """ - print("\n--- Starting X Model Extraction Test ---") self.xModelExtraction(xBeam) - print("--- Extraction Complete ---\n") - - # Print the values - print(f"Date: {xBeam.get_date()}") - print(f"Machine SN: {xBeam.get_machine_SN()}") - print(f"Is Baseline: {xBeam.get_baseline()}") - print(f"Relative Uniformity: {xBeam.get_relative_uniformity()}") - print(f"Relative Output: {xBeam.get_relative_output()}") - print(f"Center Shift: {xBeam.get_center_shift()}") - + def geoModelExtraction(self, geoModel): """ @@ -191,10 +184,13 @@ def geoModelExtraction(self, geoModel): Reads each row and calls the appropriate setter. 
""" import csv + import os from decimal import Decimal, InvalidOperation try: - path = geoModel.get_path() + # Get the folder path and construct the CSV file path + folder_path = geoModel.get_path() + path = os.path.join(folder_path, "Results.csv") with open(path, 'r', newline='', encoding='utf-8') as csvfile: reader = csv.DictReader(csvfile) @@ -253,41 +249,85 @@ def geoModelExtraction(self, geoModel): geoModel.set_RotationInducedCouchShiftFullRange(dec_val) # ---- MLC Leaves ---- - elif 'MLCLeavesA/MLCLeaf' in name: + elif 'MLCLeavesA/MLCLeaf' in name or '/MLCLeavesA/MLCLeaf' in name: try: - index = int(name.split('MLCLeaf')[1].split()[0]) - geoModel.set_MLCLeafA(index, dec_val) - except Exception: + # Extract leaf number from patterns like: + # "CollimationGroup/MLCGroup/MLCLeavesA/MLCLeaf11 [mm]" + # or "MLCLeavesA/MLCLeaf11 [mm]" + parts = name.split('MLCLeaf') + if len(parts) > 1: + # Get the part after "MLCLeaf" and extract the number + leaf_part = parts[1].strip() + # Remove brackets and extract number: "11 [mm]" -> "11" + index_str = leaf_part.split()[0] if ' ' in leaf_part else leaf_part.split('[')[0] + index = int(index_str) + if 1 <= index <= 60: # Validate leaf number range (1-60) + geoModel.set_MLCLeafA(index, dec_val) + except (ValueError, IndexError, Exception) as e: + # Silently skip invalid entries pass - elif 'MLCLeavesB/MLCLeaf' in name: + elif 'MLCLeavesB/MLCLeaf' in name or '/MLCLeavesB/MLCLeaf' in name: try: - index = int(name.split('MLCLeaf')[1].split()[0]) - geoModel.set_MLCLeafB(index, dec_val) - except Exception: + # Extract leaf number from patterns like: + # "CollimationGroup/MLCGroup/MLCLeavesB/MLCLeaf11 [mm]" + # or "MLCLeavesB/MLCLeaf11 [mm]" + parts = name.split('MLCLeaf') + if len(parts) > 1: + # Get the part after "MLCLeaf" and extract the number + leaf_part = parts[1].strip() + # Remove brackets and extract number: "11 [mm]" -> "11" + index_str = leaf_part.split()[0] if ' ' in leaf_part else leaf_part.split('[')[0] + index = int(index_str) + if 1 <= index <= 60: # Validate leaf number range (1-60) + geoModel.set_MLCLeafB(index, dec_val) + except (ValueError, IndexError, Exception) as e: + # Silently skip invalid entries pass # ---- MLC Offsets ---- - elif 'MaxOffsetA' in name: + elif 'MLCMaxOffsetA' in name or 'MaxOffsetA' in name: geoModel.set_MaxOffsetA(dec_val) - elif 'MaxOffsetB' in name: + elif 'MLCMaxOffsetB' in name or 'MaxOffsetB' in name: geoModel.set_MaxOffsetB(dec_val) - elif 'MeanOffsetA' in name: + elif 'MLCMeanOffsetA' in name or 'MeanOffsetA' in name: geoModel.set_MeanOffsetA(dec_val) - elif 'MeanOffsetB' in name: + elif 'MLCMeanOffsetB' in name or 'MeanOffsetB' in name: geoModel.set_MeanOffsetB(dec_val) # ---- MLC Backlash ---- - elif 'MLCBacklashLeavesA/MLCBacklashLeaf' in name: + elif 'MLCBacklashLeavesA/MLCBacklashLeaf' in name or '/MLCBacklashLeavesA/MLCBacklashLeaf' in name: try: - index = int(name.split('MLCBacklashLeaf')[1].split()[0]) - geoModel.set_MLCBacklashA(index, dec_val) - except Exception: + # Extract leaf number from patterns like: + # "CollimationGroup/MLCBacklashGroup/MLCBacklashLeavesA/MLCBacklashLeaf11 [mm]" + # or "MLCBacklashLeavesA/MLCBacklashLeaf11 [mm]" + parts = name.split('MLCBacklashLeaf') + if len(parts) > 1: + # Get the part after "MLCBacklashLeaf" and extract the number + leaf_part = parts[1].strip() + # Remove brackets and extract number: "11 [mm]" -> "11" + index_str = leaf_part.split()[0] if ' ' in leaf_part else leaf_part.split('[')[0] + index = int(index_str) + if 1 <= index <= 60: # Validate leaf 
                    elif 'MaxOffsetA' in name:
                        geoModel.set_MaxOffsetA(dec_val)
                    elif 'MaxOffsetB' in name:
                        geoModel.set_MaxOffsetB(dec_val)
                    elif 'MeanOffsetA' in name:
                        geoModel.set_MeanOffsetA(dec_val)
                    elif 'MeanOffsetB' in name:
                        geoModel.set_MeanOffsetB(dec_val)

                    # ---- MLC Backlash ----
                    elif 'MLCBacklashLeavesA/MLCBacklashLeaf' in name:
                        try:
-                            index = int(name.split('MLCBacklashLeaf')[1].split()[0])
-                            geoModel.set_MLCBacklashA(index, dec_val)
-                        except Exception:
+                            # Extract the leaf number from names like
+                            # "CollimationGroup/MLCBacklashGroup/MLCBacklashLeavesA/MLCBacklashLeaf11 [mm]"
+                            parts = name.split('MLCBacklashLeaf')
+                            if len(parts) > 1:
+                                leaf_part = parts[1].strip()
+                                index_str = leaf_part.split()[0] if ' ' in leaf_part else leaf_part.split('[')[0]
+                                index = int(index_str)
+                                if 1 <= index <= 60:  # Validate leaf number range (1-60)
+                                    geoModel.set_MLCBacklashA(index, dec_val)
+                        except (ValueError, IndexError):
+                            # Skip rows with an unparseable leaf index
                            pass
                    elif 'MLCBacklashLeavesB/MLCBacklashLeaf' in name:
                        try:
-                            index = int(name.split('MLCBacklashLeaf')[1].split()[0])
-                            geoModel.set_MLCBacklashB(index, dec_val)
-                        except Exception:
+                            # Same pattern as bank A, applied to MLCBacklashLeavesB names
+                            parts = name.split('MLCBacklashLeaf')
+                            if len(parts) > 1:
+                                leaf_part = parts[1].strip()
+                                index_str = leaf_part.split()[0] if ' ' in leaf_part else leaf_part.split('[')[0]
+                                index = int(index_str)
+                                if 1 <= index <= 60:  # Validate leaf number range (1-60)
+                                    geoModel.set_MLCBacklashB(index, dec_val)
+                        except (ValueError, IndexError):
+                            # Skip rows with an unparseable leaf index
                            pass
                    elif 'MLCBacklashMaxA' in name:
                        geoModel.set_MLCBacklashMaxA(dec_val)
@@ -319,86 +359,16 @@
                        geoModel.set_JawParallelismY2(dec_val)

        except FileNotFoundError:
-            print(f"CSV file not found: {path}")
+            logger.error(f"CSV file not found: {path}")
        except csv.Error as e:
-            print(f"Error parsing CSV file: {e}")
+            logger.error(f"Error parsing CSV file: {e}")
        except Exception as e:
-            print(f"Error during extraction: {e}")
+            logger.error(f"Error during extraction: {e}", exc_info=True)

    def testGeoModelExtraction(self, geoModel):
        """
        Test method for Geo model extraction.
-        Runs geoModelExtraction() and prints all values using getters.
+        Runs geoModelExtraction(); extracted values remain available
+        through the model's getters.
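+        For a quick spot check after extraction, call e.g.
+        geoModel.get_IsoCenterSize() or geoModel.get_MLCLeafA(11).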
""" - print("\n--- Starting Geo Model Extraction Test ---") self.geoModelExtraction(geoModel) - print("--- Extraction Complete ---\n") - - try: - print(f"Date: {geoModel.get_date()}") - print(f"Machine SN: {geoModel.get_machine_SN()}") - print(f"Is Baseline: {geoModel.get_baseline()}") - # IsoCenterGroup - print(f"IsoCenterSize: {geoModel.get_IsoCenterSize()}") - print(f"IsoCenterMVOffset: {geoModel.get_IsoCenterMVOffset()}") - print(f"IsoCenterKVOffset: {geoModel.get_IsoCenterKVOffset()}") - - # BeamGroup - print(f"BeamOutputChange: {geoModel.get_relative_output()}") - print(f"BeamUniformityChange: {geoModel.get_relative_uniformity()}") - print(f"BeamCenterShift: {geoModel.get_center_shift()}") - - # CollimationGroup - print(f"CollimationRotationOffset: {geoModel.get_CollimationRotationOffset()}") - - # GantryGroup - print(f"GantryAbsolute: {geoModel.get_GantryAbsolute()}") - print(f"GantryRelative: {geoModel.get_GantryRelative()}") - - # EnhancedCouchGroup - print(f"CouchMaxPositionError: {geoModel.get_CouchMaxPositionError()}") - print(f"CouchLat: {geoModel.get_CouchLat()}") - print(f"CouchLng: {geoModel.get_CouchLng()}") - print(f"CouchVrt: {geoModel.get_CouchVrt()}") - print(f"CouchRtnFine: {geoModel.get_CouchRtnFine()}") - print(f"CouchRtnLarge: {geoModel.get_CouchRtnLarge()}") - print(f"RotationInducedCouchShiftFullRange: {geoModel.get_RotationInducedCouchShiftFullRange()}") - - # MLC Groups - print(f"MaxOffsetA: {geoModel.get_MaxOffsetA()}") - print(f"MaxOffsetB: {geoModel.get_MaxOffsetB()}") - print(f"MeanOffsetA: {geoModel.get_MeanOffsetA()}") - print(f"MeanOffsetB: {geoModel.get_MeanOffsetB()}") - print(f"MLCBacklashMaxA: {geoModel.get_MLCBacklashMaxA()}") - print(f"MLCBacklashMaxB: {geoModel.get_MLCBacklashMaxB()}") - print(f"MLCBacklashMeanA: {geoModel.get_MLCBacklashMeanA()}") - print(f"MLCBacklashMeanB: {geoModel.get_MLCBacklashMeanB()}") - - # Individual MLC leaf/backlash arrays (if implemented) - # if hasattr(geoModel, "get_MLCLeafA"): - # print(f"MLCLeafA: {geoModel.get_MLCLeafA()}") - # if hasattr(geoModel, "get_MLCLeafB"): - # print(f"MLCLeafB: {geoModel.get_MLCLeafB()}") - for i in range(11, 51): - print(f"MLCLeafA (Index {i}): {geoModel.get_MLCLeafA(i)}") - for i in range(11, 51): - print(f"MLCLeafB (Index {i}): {geoModel.get_MLCLeafB(i)}") - - - # Jaw Group - print(f"JawX1: {geoModel.get_JawX1()}") - print(f"JawX2: {geoModel.get_JawX2()}") - print(f"JawY1: {geoModel.get_JawY1()}") - print(f"JawY2: {geoModel.get_JawY2()}") - - # Jaw Parallelism - print(f"JawParallelismX1: {geoModel.get_JawParallelismX1()}") - print(f"JawParallelismX2: {geoModel.get_JawParallelismX2()}") - print(f"JawParallelismY1: {geoModel.get_JawParallelismY1()}") - print(f"JawParallelismY2: {geoModel.get_JawParallelismY2()}") - - except Exception as e: - print(f"Error printing Geo model data: {e}") - - print("\n--- End of Geo Model Test ---\n") diff --git a/src/data_manipulation/ETL/image_extractor.py b/src/data_manipulation/ETL/image_extractor.py index 8b894e7..5231394 100644 --- a/src/data_manipulation/ETL/image_extractor.py +++ b/src/data_manipulation/ETL/image_extractor.py @@ -17,4 +17,4 @@ def get_image(self, image): #xim_img = XIM("path/to/image.xim") tempImage = XIM(image.get_path()) image.set_image(tempImage) - print(tempImage.properties) \ No newline at end of file + # Image properties are stored but not printed to avoid verbose output \ No newline at end of file diff --git a/src/data_manipulation/file_monitoring/folder_monitor.py b/src/data_manipulation/file_monitoring/folder_monitor.py 
index bc6f544..536e738 100644 --- a/src/data_manipulation/file_monitoring/folder_monitor.py +++ b/src/data_manipulation/file_monitoring/folder_monitor.py @@ -19,11 +19,15 @@ from src.data_manipulation.ETL.DataProcessor import DataProcessor # Configure logging +# Ensure logs directory exists +logs_dir = Path(__file__).parent.parent.parent.parent / 'logs' +logs_dir.mkdir(exist_ok=True) + logging.basicConfig( level=logging.INFO, format='%(asctime)s - %(name)s - %(levelname)s - %(message)s', handlers=[ - logging.FileHandler('logs/folder_monitor.log'), + logging.FileHandler(logs_dir / 'folder_monitor.log'), logging.StreamHandler(sys.stdout) ] ) @@ -135,29 +139,42 @@ def __init__(self, idrive_path="iDrive"): Initialize the folder monitor Args: - idrive_path (str): Path to the iDrive folder to monitor - """ - self.idrive_path = os.path.abspath(idrive_path) - self.observer = Observer() + idrive_path (str or list): Path(s) to the folder(s) to monitor. + Can be a single path string or a list of paths. + """ + # Handle both single path and multiple paths + if isinstance(idrive_path, list): + self.idrive_paths = [os.path.abspath(p) for p in idrive_path] + else: + self.idrive_paths = [os.path.abspath(idrive_path)] + + self.observers = [] # List of observers for multiple paths self.handler = iDriveFolderHandler() self.is_running = False def start_monitoring(self): """ - Start monitoring the iDrive folder + Start monitoring the folder(s) """ try: - # Ensure iDrive folder exists - if not os.path.exists(self.idrive_path): - logger.warning(f"iDrive folder does not exist: {self.idrive_path}") - logger.info(f"Creating iDrive folder: {self.idrive_path}") - os.makedirs(self.idrive_path, exist_ok=True) + # Ensure all folders exist + for path in self.idrive_paths: + if not os.path.exists(path): + logger.warning(f"Folder does not exist: {path}") + logger.info(f"Creating folder: {path}") + os.makedirs(path, exist_ok=True) + + logger.info(f"Starting folder monitoring on {len(self.idrive_paths)} location(s):") + for path in self.idrive_paths: + logger.info(f" - {path}") - logger.info(f"Starting folder monitoring on: {self.idrive_path}") + # Set up observers for each path + for path in self.idrive_paths: + observer = Observer() + observer.schedule(self.handler, path, recursive=True) + observer.start() + self.observers.append(observer) - # Set up observer - self.observer.schedule(self.handler, self.idrive_path, recursive=True) - self.observer.start() self.is_running = True logger.info("Folder monitoring started successfully") @@ -177,12 +194,15 @@ def start_monitoring(self): def stop_monitoring(self): """ - Stop monitoring the folder + Stop monitoring the folder(s) """ if self.is_running: logger.info("Stopping folder monitoring...") - self.observer.stop() - self.observer.join() + for observer in self.observers: + observer.stop() + for observer in self.observers: + observer.join() + self.observers = [] self.is_running = False logger.info("Folder monitoring stopped") @@ -191,18 +211,20 @@ def scan_existing_folders(self): Scan for existing folders that might need processing """ try: - logger.info("Scanning for existing folders in iDrive...") + logger.info(f"Scanning for existing folders in {len(self.idrive_paths)} location(s)...") - if not os.path.exists(self.idrive_path): - logger.info("iDrive folder does not exist yet") - return - - for item in os.listdir(self.idrive_path): - item_path = os.path.join(self.idrive_path, item) - if os.path.isdir(item_path): - logger.info(f"Found existing folder: {item_path}") - # Process 
existing folder if it hasn't been processed
-                    self.handler._process_new_folder(item_path)
+            for idrive_path in self.idrive_paths:
+                if not os.path.exists(idrive_path):
+                    logger.info(f"Folder does not exist yet: {idrive_path}")
+                    continue
+
+                logger.info(f"Scanning: {idrive_path}")
+                for item in os.listdir(idrive_path):
+                    item_path = os.path.join(idrive_path, item)
+                    if os.path.isdir(item_path):
+                        logger.info(f"Found existing folder: {item_path}")
+                        # Process existing folder if it hasn't been processed
+                        self.handler._process_new_folder(item_path)

        except Exception as e:
            logger.error(f"Error scanning existing folders: {str(e)}")
diff --git a/src/data_manipulation/file_monitoring/main.py b/src/data_manipulation/file_monitoring/main.py
index 21391bb..bc34c82 100644
--- a/src/data_manipulation/file_monitoring/main.py
+++ b/src/data_manipulation/file_monitoring/main.py
@@ -13,11 +13,14 @@ import logging
 from pathlib import Path

-# Add the src directory to the Python path
-sys.path.insert(0, str(Path(__file__).parent))
+# Add the project root to the Python path
+# This file is at: MPC-Plus/src/data_manipulation/file_monitoring/main.py
+# We want to add: MPC-Plus/ to the path
+project_root = Path(__file__).parent.parent.parent.parent
+sys.path.insert(0, str(project_root))

-from folder_monitor import FolderMonitor
-from run_monitor_service import MonitorService, install_dependencies
+from src.data_manipulation.file_monitoring.folder_monitor import FolderMonitor
+from src.data_manipulation.file_monitoring.run_monitor_service import MonitorService, install_dependencies

 # Configure logging
 logging.basicConfig(
@@ -34,26 +37,55 @@ def print_banner():
     print("=" * 50)
     print()

-def start_monitor(idrive_path="iDrive", background=False):
+def start_monitor(idrive_path="iDrive", background=False, lexar=False):
     """
     Start the folder monitor

     Args:
-        idrive_path (str): Path to monitor
+        idrive_path (str): Path to monitor (or 'lexar' for Lexar drive)
         background (bool): Whether to run in background
+        lexar (bool): Whether to monitor Lexar drive locations
     """
     try:
-        print(f"Starting folder monitor for: {os.path.abspath(idrive_path)}")
-
-        if background:
-            # Use the service runner for background mode
-            service = MonitorService()
-            service.start_background()
-        else:
-            # Direct monitoring mode
-            monitor = FolderMonitor(idrive_path)
+        # Handle Lexar drive monitoring
+        if lexar or idrive_path.lower() == 'lexar':
+            lexar_base = "/Volumes/Lexar/MPC Data"
+            paths = [
+                os.path.join(lexar_base, "Arlington"),
+                os.path.join(lexar_base, "Weatherford")
+            ]
+
+            # Check if paths exist
+            existing_paths = [p for p in paths if os.path.exists(p)]
+            if not existing_paths:
+                logger.error(f"Lexar drive paths not found. Expected: {paths}")
+                logger.error("Please ensure the Lexar drive is mounted and contains 'MPC Data/Arlington' and 'MPC Data/Weatherford' folders")
+                sys.exit(1)
+
+            print("Starting folder monitor for Lexar drive locations:")
+            for path in existing_paths:
+                print(f" - {path}")
+
+            if background:
+                logger.warning("Background mode with multiple paths not fully supported. Using direct mode.")
+
+            # Direct monitoring mode for multiple paths
+            monitor = FolderMonitor(existing_paths)
             monitor.scan_existing_folders()
             monitor.start_monitoring()
+        else:
+            # Single path monitoring
+            print(f"Starting folder monitor for: {os.path.abspath(idrive_path)}")
+
+            if background:
+                # Use the service runner for background mode
+                service = MonitorService()
+                service.start_background()
+            else:
+                # Direct monitoring mode
+                monitor = FolderMonitor(idrive_path)
+                monitor.scan_existing_folders()
+                monitor.start_monitoring()

     except KeyboardInterrupt:
         print("\nShutting down...")
@@ -102,15 +134,18 @@ def main():
     python src/main.py setup          # Set up the system
     python src/main.py start          # Start folder monitoring
     python src/main.py start --path custom_folder  # Monitor custom folder
+    python src/main.py start --lexar  # Monitor Lexar drive (Arlington & Weatherford)
     python src/main.py start --background  # Run in background
-    python -m src.data_manipulation.file_monitoring.main start
+    python -m src.data_manipulation.file_monitoring.main start --lexar
        """
     )

     parser.add_argument('command', choices=['setup', 'start', 'status'],
                        help='Command to execute')
     parser.add_argument('--path', '-p', default='iDrive',
-                       help='Path to monitor (default: iDrive)')
+                       help='Path to monitor (default: iDrive, or use "lexar" for Lexar drive)')
+    parser.add_argument('--lexar', '-l', action='store_true',
+                       help='Monitor Lexar drive locations (Arlington and Weatherford)')
     parser.add_argument('--background', '-b', action='store_true',
                        help='Run in background mode')
     parser.add_argument('--verbose', '-v', action='store_true',
@@ -127,7 +162,7 @@ def main():
     if args.command == 'setup':
         setup_system()
     elif args.command == 'start':
-        start_monitor(args.path, args.background)
+        start_monitor(args.path, args.background, args.lexar)
     elif args.command == 'status':
         service = MonitorService()
         service.status()
diff --git a/src/data_manipulation/models/AbstractBeamModel.py b/src/data_manipulation/models/AbstractBeamModel.py
index 5af4244..7bfe980 100644
--- a/src/data_manipulation/models/AbstractBeamModel.py
+++ b/src/data_manipulation/models/AbstractBeamModel.py
@@ -11,7 +11,7 @@ def __init__(self):
         self._path = ""
         self._date = None
         self._machine_SN = None
-        self._baseline = False;
+        self._baseline = False

     # --- Getters ---
     def get_type(self):
@@ -64,17 +64,20 @@ def _getDateFromPathName(self, path: str) -> datetime:

     def _getSNFromPathName(self, path: str) -> str:
         """
-        Extracts a serial number from the given path.
+        Extracts a machine ID (serial number) from the given path.

         Example:
             '...NDS-WKS-SN6543-2025-09-19-07-41-49-0008-GeometryCheckTemplate6xMVkVEnhancedCouch'
-            → '6543'
+            → 'SN6543'
+            '/Users/alexandrem/Desktop/MPC Data/Arlington/NDS-WKS-SN5512-2025-09-17-07-08-59-0002-BeamCheckTemplate10x'
+            → 'SN5512'

         Raises:
             ValueError: if no valid serial number pattern is found in the path.
""" - match = re.search(r'SN(\d{4})', path) + match = re.search(r'SN(\d+)', path) if not match: - raise ValueError(f"Could not extract serial number from path: {path}") - return match.group(1) + raise ValueError(f"Could not extract machine ID from path: {path}") + # Return the full match including "SN" prefix + return match.group(0) def _getIsBaselineFromPathName(self, pathName: str) -> bool: """ diff --git a/src/data_manipulation/models/Geo6xfffModel.py b/src/data_manipulation/models/Geo6xfffModel.py index 019a09c..c681cbe 100644 --- a/src/data_manipulation/models/Geo6xfffModel.py +++ b/src/data_manipulation/models/Geo6xfffModel.py @@ -34,9 +34,9 @@ def __init__(self): self._RotationInducedCouchShiftFullRange = Decimal('0.0') # ---- CollimationGroup / MLCGroup ---- - # 40 leaves for A and B banks (11–50) - self._MLCLeavesA = {f"Leaf{i}": Decimal('0.0') for i in range(11, 51)} - self._MLCLeavesB = {f"Leaf{i}": Decimal('0.0') for i in range(11, 51)} + # 60 leaves for A and B banks (1–60) + self._MLCLeavesA = {f"Leaf{i}": Decimal('0.0') for i in range(1, 61)} + self._MLCLeavesB = {f"Leaf{i}": Decimal('0.0') for i in range(1, 61)} self._MaxOffsetA = Decimal('0.0') self._MaxOffsetB = Decimal('0.0') @@ -44,8 +44,9 @@ def __init__(self): self._MeanOffsetB = Decimal('0.0') # ---- CollimationGroup / MLCBacklashGroup ---- - self._MLCBacklashA = {f"Leaf{i}": Decimal('0.0') for i in range(11, 51)} - self._MLCBacklashB = {f"Leaf{i}": Decimal('0.0') for i in range(11, 51)} + # 60 leaves for A and B banks (1–60) + self._MLCBacklashA = {f"Leaf{i}": Decimal('0.0') for i in range(1, 61)} + self._MLCBacklashB = {f"Leaf{i}": Decimal('0.0') for i in range(1, 61)} self._MLCBacklashMaxA = Decimal('0.0') self._MLCBacklashMaxB = Decimal('0.0')