feat: Implement source management with file upload, display, and deletion functionality on a new Sources page, backed by new backend models and handlers.
This commit is contained in:
@@ -10,7 +10,9 @@ path = "src/main.rs"
|
||||
[dependencies]
|
||||
# Web Framework
|
||||
axum = "0.8"
|
||||
axum-extra = { version = "0.10", features = ["multipart"] }
|
||||
tokio = { version = "1", features = ["full"] }
|
||||
tokio-util = { version = "0.7", features = ["io"] }
|
||||
tower = "0.5"
|
||||
tower-http = { version = "0.6", features = ["cors", "trace"] }
|
||||
|
||||
|
||||
@@ -9,6 +9,7 @@ server:
|
||||
paths:
|
||||
database: "./data/zhealth.db"
|
||||
logs: "./logs"
|
||||
uploads: "./data/uploads"
|
||||
|
||||
logging:
|
||||
level: "info" # Options: trace | debug | info | warn | error
|
||||
|
||||
@@ -24,6 +24,7 @@ pub struct ServerConfig {
|
||||
pub struct PathsConfig {
|
||||
pub database: String,
|
||||
pub logs: String,
|
||||
pub uploads: String,
|
||||
}
|
||||
|
||||
#[derive(Debug, Deserialize)]
|
||||
|
||||
@@ -4,7 +4,7 @@ use sea_orm::{ConnectionTrait, Database, DatabaseConnection, DbBackend, DbErr, S
|
||||
use sea_orm::sea_query::SqliteQueryBuilder;
|
||||
|
||||
use crate::config::Config;
|
||||
use crate::models::bio::{biomarker, biomarker_category, biomarker_entry, biomarker_reference_rule};
|
||||
use crate::models::bio::{biomarker, biomarker_category, biomarker_entry, biomarker_reference_rule, source};
|
||||
use crate::models::user::{diet, role, session, user};
|
||||
|
||||
/// Connect to the SQLite database.
|
||||
@@ -33,6 +33,7 @@ pub async fn run_migrations(db: &DatabaseConnection) -> Result<(), DbErr> {
|
||||
schema.create_table_from_entity(biomarker_category::Entity),
|
||||
schema.create_table_from_entity(biomarker::Entity),
|
||||
schema.create_table_from_entity(biomarker_reference_rule::Entity),
|
||||
schema.create_table_from_entity(source::Entity),
|
||||
schema.create_table_from_entity(biomarker_entry::Entity),
|
||||
];
|
||||
|
||||
|
||||
@@ -60,6 +60,7 @@ pub async fn create_entry(
|
||||
value: Set(req.value),
|
||||
measured_at: Set(measured_at),
|
||||
notes: Set(req.notes.clone()),
|
||||
source_id: Set(None),
|
||||
created_at: Set(now),
|
||||
};
|
||||
|
||||
|
||||
@@ -5,4 +5,5 @@ pub mod biomarkers;
|
||||
pub mod categories;
|
||||
pub mod diets;
|
||||
pub mod entries;
|
||||
pub mod sources;
|
||||
pub mod users;
|
||||
|
||||
263
backend/src/handlers/sources.rs
Normal file
263
backend/src/handlers/sources.rs
Normal file
@@ -0,0 +1,263 @@
|
||||
//! Sources API handlers - file upload and management.
|
||||
|
||||
use axum::{
|
||||
extract::{Path, State},
|
||||
http::StatusCode,
|
||||
Json,
|
||||
};
|
||||
use axum_extra::extract::Multipart;
|
||||
use chrono::Utc;
|
||||
use sea_orm::{ActiveModelTrait, DatabaseConnection, EntityTrait, Set};
|
||||
use serde::{Deserialize, Serialize};
|
||||
use std::path::PathBuf;
|
||||
use tokio::fs;
|
||||
use tokio::io::AsyncWriteExt;
|
||||
|
||||
use crate::models::bio::source;
|
||||
|
||||
/// Response for a source.
///
/// JSON shape returned by every handler in this module; mirrors the
/// `source` entity with `uploaded_at` rendered as a string.
#[derive(Serialize)]
pub struct SourceResponse {
    pub id: i32,
    /// Owner of the source (foreign key to the users table).
    pub user_id: i32,
    /// Display name (client-provided, or the original filename).
    pub name: String,
    /// Path of the stored file on the server's disk.
    pub file_path: String,
    /// MIME type reported at upload time (e.g. "application/pdf").
    pub file_type: String,
    /// File size in bytes.
    pub file_size: i64,
    /// OCR output, if any has been stored via the OCR endpoint.
    pub ocr_data: Option<String>,
    /// Optional free-form description.
    pub description: Option<String>,
    /// Upload timestamp, stringified from the entity's DateTime.
    pub uploaded_at: String,
}
|
||||
|
||||
/// State that includes config for upload path.
///
/// Built in the router setup from the shared `DatabaseConnection` and
/// the configured `paths.uploads` directory; cloned per request by axum.
#[derive(Clone)]
pub struct SourcesState {
    /// Database handle used by all source handlers.
    pub db: DatabaseConnection,
    /// Directory where uploaded files are written (config `paths.uploads`).
    pub uploads_path: PathBuf,
}
|
||||
|
||||
/// GET /api/sources - List all sources for current user.
|
||||
pub async fn list_sources(
|
||||
State(state): State<SourcesState>,
|
||||
// TODO: Get user_id from session
|
||||
) -> Result<Json<Vec<SourceResponse>>, StatusCode> {
|
||||
let sources = source::Entity::find()
|
||||
.all(&state.db)
|
||||
.await
|
||||
.map_err(|_| StatusCode::INTERNAL_SERVER_ERROR)?;
|
||||
|
||||
let items: Vec<SourceResponse> = sources
|
||||
.into_iter()
|
||||
.map(|s| SourceResponse {
|
||||
id: s.id,
|
||||
user_id: s.user_id,
|
||||
name: s.name,
|
||||
file_path: s.file_path,
|
||||
file_type: s.file_type,
|
||||
file_size: s.file_size,
|
||||
ocr_data: s.ocr_data,
|
||||
description: s.description,
|
||||
uploaded_at: s.uploaded_at.to_string(),
|
||||
})
|
||||
.collect();
|
||||
|
||||
Ok(Json(items))
|
||||
}
|
||||
|
||||
/// GET /api/sources/:id - Get a source by ID.
|
||||
pub async fn get_source(
|
||||
State(state): State<SourcesState>,
|
||||
Path(id): Path<i32>,
|
||||
) -> Result<Json<SourceResponse>, StatusCode> {
|
||||
let s = source::Entity::find_by_id(id)
|
||||
.one(&state.db)
|
||||
.await
|
||||
.map_err(|_| StatusCode::INTERNAL_SERVER_ERROR)?
|
||||
.ok_or(StatusCode::NOT_FOUND)?;
|
||||
|
||||
Ok(Json(SourceResponse {
|
||||
id: s.id,
|
||||
user_id: s.user_id,
|
||||
name: s.name,
|
||||
file_path: s.file_path,
|
||||
file_type: s.file_type,
|
||||
file_size: s.file_size,
|
||||
ocr_data: s.ocr_data,
|
||||
description: s.description,
|
||||
uploaded_at: s.uploaded_at.to_string(),
|
||||
}))
|
||||
}
|
||||
|
||||
/// POST /api/sources - Upload a new source file.
|
||||
pub async fn upload_source(
|
||||
State(state): State<SourcesState>,
|
||||
mut multipart: Multipart,
|
||||
) -> Result<Json<SourceResponse>, StatusCode> {
|
||||
let mut file_name: Option<String> = None;
|
||||
let mut file_type: Option<String> = None;
|
||||
let mut file_data: Option<Vec<u8>> = None;
|
||||
let mut name: Option<String> = None;
|
||||
let mut description: Option<String> = None;
|
||||
let mut user_id: Option<i32> = None;
|
||||
|
||||
while let Some(field) = multipart.next_field().await.map_err(|e| {
|
||||
tracing::error!("Multipart error: {:?}", e);
|
||||
StatusCode::BAD_REQUEST
|
||||
})? {
|
||||
let field_name = field.name().unwrap_or("").to_string();
|
||||
|
||||
match field_name.as_str() {
|
||||
"file" => {
|
||||
file_name = field.file_name().map(|s| s.to_string());
|
||||
file_type = field.content_type().map(|s| s.to_string());
|
||||
file_data = Some(field.bytes().await.map_err(|e| {
|
||||
tracing::error!("Failed to read file data: {:?}", e);
|
||||
StatusCode::BAD_REQUEST
|
||||
})?.to_vec());
|
||||
}
|
||||
"name" => {
|
||||
name = Some(field.text().await.map_err(|_| StatusCode::BAD_REQUEST)?);
|
||||
}
|
||||
"description" => {
|
||||
description = Some(field.text().await.map_err(|_| StatusCode::BAD_REQUEST)?);
|
||||
}
|
||||
"user_id" => {
|
||||
let text = field.text().await.map_err(|_| StatusCode::BAD_REQUEST)?;
|
||||
user_id = Some(text.parse().map_err(|_| StatusCode::BAD_REQUEST)?);
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
|
||||
let file_data = file_data.ok_or(StatusCode::BAD_REQUEST)?;
|
||||
let user_id = user_id.ok_or(StatusCode::BAD_REQUEST)?;
|
||||
let original_name = file_name.unwrap_or_else(|| "upload".to_string());
|
||||
let display_name = name.unwrap_or_else(|| original_name.clone());
|
||||
let content_type = file_type.unwrap_or_else(|| "application/octet-stream".to_string());
|
||||
let file_size = file_data.len() as i64;
|
||||
|
||||
// Generate unique filename
|
||||
let timestamp = Utc::now().timestamp_millis();
|
||||
let safe_name = original_name.replace(['/', '\\', ':', '*', '?', '"', '<', '>', '|'], "_");
|
||||
let stored_name = format!("{}_{}", timestamp, safe_name);
|
||||
|
||||
// Ensure uploads directory exists
|
||||
fs::create_dir_all(&state.uploads_path).await.map_err(|e| {
|
||||
tracing::error!("Failed to create uploads dir: {:?}", e);
|
||||
StatusCode::INTERNAL_SERVER_ERROR
|
||||
})?;
|
||||
|
||||
// Write file
|
||||
let file_path = state.uploads_path.join(&stored_name);
|
||||
let mut file = fs::File::create(&file_path).await.map_err(|e| {
|
||||
tracing::error!("Failed to create file: {:?}", e);
|
||||
StatusCode::INTERNAL_SERVER_ERROR
|
||||
})?;
|
||||
file.write_all(&file_data).await.map_err(|e| {
|
||||
tracing::error!("Failed to write file: {:?}", e);
|
||||
StatusCode::INTERNAL_SERVER_ERROR
|
||||
})?;
|
||||
|
||||
let now = Utc::now().naive_utc();
|
||||
|
||||
let new_source = source::ActiveModel {
|
||||
user_id: Set(user_id),
|
||||
name: Set(display_name.clone()),
|
||||
file_path: Set(file_path.to_string_lossy().to_string()),
|
||||
file_type: Set(content_type.clone()),
|
||||
file_size: Set(file_size),
|
||||
ocr_data: Set(None),
|
||||
description: Set(description.clone()),
|
||||
uploaded_at: Set(now),
|
||||
..Default::default()
|
||||
};
|
||||
|
||||
let inserted = new_source
|
||||
.insert(&state.db)
|
||||
.await
|
||||
.map_err(|e| {
|
||||
tracing::error!("Failed to insert source: {:?}", e);
|
||||
StatusCode::INTERNAL_SERVER_ERROR
|
||||
})?;
|
||||
|
||||
Ok(Json(SourceResponse {
|
||||
id: inserted.id,
|
||||
user_id: inserted.user_id,
|
||||
name: inserted.name,
|
||||
file_path: inserted.file_path,
|
||||
file_type: inserted.file_type,
|
||||
file_size: inserted.file_size,
|
||||
ocr_data: inserted.ocr_data,
|
||||
description: inserted.description,
|
||||
uploaded_at: inserted.uploaded_at.to_string(),
|
||||
}))
|
||||
}
|
||||
|
||||
/// DELETE /api/sources/:id - Delete a source.
|
||||
pub async fn delete_source(
|
||||
State(state): State<SourcesState>,
|
||||
Path(id): Path<i32>,
|
||||
) -> Result<StatusCode, StatusCode> {
|
||||
// Get the source first to delete the file
|
||||
let s = source::Entity::find_by_id(id)
|
||||
.one(&state.db)
|
||||
.await
|
||||
.map_err(|_| StatusCode::INTERNAL_SERVER_ERROR)?
|
||||
.ok_or(StatusCode::NOT_FOUND)?;
|
||||
|
||||
// Delete file from disk
|
||||
if let Err(e) = fs::remove_file(&s.file_path).await {
|
||||
tracing::warn!("Failed to delete file {}: {:?}", s.file_path, e);
|
||||
}
|
||||
|
||||
// Delete from database
|
||||
let result = source::Entity::delete_by_id(id)
|
||||
.exec(&state.db)
|
||||
.await
|
||||
.map_err(|_| StatusCode::INTERNAL_SERVER_ERROR)?;
|
||||
|
||||
if result.rows_affected == 0 {
|
||||
return Err(StatusCode::NOT_FOUND);
|
||||
}
|
||||
|
||||
Ok(StatusCode::NO_CONTENT)
|
||||
}
|
||||
|
||||
/// Request to update OCR data for a source.
#[derive(Deserialize)]
pub struct UpdateOcrRequest {
    /// OCR output stored verbatim in the source's `ocr_data` column.
    pub ocr_data: String,
}
|
||||
|
||||
/// PUT /api/sources/:id/ocr - Update OCR data for a source.
|
||||
pub async fn update_ocr(
|
||||
State(state): State<SourcesState>,
|
||||
Path(id): Path<i32>,
|
||||
Json(req): Json<UpdateOcrRequest>,
|
||||
) -> Result<Json<SourceResponse>, StatusCode> {
|
||||
let existing = source::Entity::find_by_id(id)
|
||||
.one(&state.db)
|
||||
.await
|
||||
.map_err(|_| StatusCode::INTERNAL_SERVER_ERROR)?
|
||||
.ok_or(StatusCode::NOT_FOUND)?;
|
||||
|
||||
let mut active: source::ActiveModel = existing.into();
|
||||
active.ocr_data = Set(Some(req.ocr_data));
|
||||
|
||||
let updated = active
|
||||
.update(&state.db)
|
||||
.await
|
||||
.map_err(|_| StatusCode::INTERNAL_SERVER_ERROR)?;
|
||||
|
||||
Ok(Json(SourceResponse {
|
||||
id: updated.id,
|
||||
user_id: updated.user_id,
|
||||
name: updated.name,
|
||||
file_path: updated.file_path,
|
||||
file_type: updated.file_type,
|
||||
file_size: updated.file_size,
|
||||
ocr_data: updated.ocr_data,
|
||||
description: updated.description,
|
||||
uploaded_at: updated.uploaded_at.to_string(),
|
||||
}))
|
||||
}
|
||||
@@ -17,6 +17,7 @@ use axum_login::{
|
||||
};
|
||||
use sea_orm::DatabaseConnection;
|
||||
use std::net::SocketAddr;
|
||||
use std::path::PathBuf;
|
||||
use time::Duration;
|
||||
use tracing_subscriber::{layer::SubscriberExt, util::SubscriberInitExt};
|
||||
|
||||
@@ -144,9 +145,24 @@ fn create_router(db: DatabaseConnection, config: &config::Config) -> Router {
|
||||
.route("/api/users/{user_id}/entries", get(handlers::entries::list_user_entries))
|
||||
.route_layer(middleware::from_fn(require_auth));
|
||||
|
||||
// Sources routes (need separate state for uploads path)
|
||||
let sources_state = handlers::sources::SourcesState {
|
||||
db: db.clone(),
|
||||
uploads_path: PathBuf::from(&config.paths.uploads),
|
||||
};
|
||||
let sources_routes = Router::new()
|
||||
.route("/api/sources", get(handlers::sources::list_sources)
|
||||
.post(handlers::sources::upload_source))
|
||||
.route("/api/sources/{id}", get(handlers::sources::get_source)
|
||||
.delete(handlers::sources::delete_source))
|
||||
.route("/api/sources/{id}/ocr", put(handlers::sources::update_ocr))
|
||||
.route_layer(middleware::from_fn(require_auth))
|
||||
.with_state(sources_state);
|
||||
|
||||
Router::new()
|
||||
.merge(public_routes)
|
||||
.merge(protected_routes)
|
||||
.merge(sources_routes)
|
||||
.layer(auth_layer)
|
||||
.with_state(db)
|
||||
}
|
||||
|
||||
@@ -26,6 +26,9 @@ pub struct Model {
|
||||
#[sea_orm(column_type = "Text", nullable)]
|
||||
pub notes: Option<String>,
|
||||
|
||||
/// Optional foreign key to source document
|
||||
pub source_id: Option<i32>,
|
||||
|
||||
pub created_at: DateTime,
|
||||
}
|
||||
|
||||
@@ -44,6 +47,13 @@ pub enum Relation {
|
||||
to = "crate::models::user::user::Column::Id"
|
||||
)]
|
||||
User,
|
||||
|
||||
#[sea_orm(
|
||||
belongs_to = "super::source::Entity",
|
||||
from = "Column::SourceId",
|
||||
to = "super::source::Column::Id"
|
||||
)]
|
||||
Source,
|
||||
}
|
||||
|
||||
impl Related<super::biomarker::Entity> for Entity {
|
||||
@@ -58,4 +68,10 @@ impl Related<crate::models::user::user::Entity> for Entity {
|
||||
}
|
||||
}
|
||||
|
||||
impl Related<super::source::Entity> for Entity {
|
||||
fn to() -> RelationDef {
|
||||
Relation::Source.def()
|
||||
}
|
||||
}
|
||||
|
||||
impl ActiveModelBehavior for ActiveModel {}
|
||||
|
||||
@@ -4,8 +4,10 @@ pub mod biomarker;
|
||||
pub mod biomarker_category;
|
||||
pub mod biomarker_entry;
|
||||
pub mod biomarker_reference_rule;
|
||||
pub mod source;
|
||||
|
||||
pub use biomarker::Entity as Biomarker;
|
||||
pub use biomarker_category::Entity as BiomarkerCategory;
|
||||
pub use biomarker_entry::Entity as BiomarkerEntry;
|
||||
pub use biomarker_reference_rule::Entity as BiomarkerReferenceRule;
|
||||
pub use source::Entity as Source;
|
||||
|
||||
67
backend/src/models/bio/source.rs
Normal file
67
backend/src/models/bio/source.rs
Normal file
@@ -0,0 +1,67 @@
|
||||
//! Source entity - user-uploaded documents with OCR data.
|
||||
|
||||
use sea_orm::entity::prelude::*;
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
/// A user-uploaded source document (`sources` table).
#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Serialize, Deserialize)]
#[sea_orm(table_name = "sources")]
pub struct Model {
    /// Auto-incrementing primary key.
    #[sea_orm(primary_key)]
    pub id: i32,

    /// Foreign key to users table
    pub user_id: i32,

    /// Display name for the source
    #[sea_orm(column_type = "Text")]
    pub name: String,

    /// Path to stored file
    #[sea_orm(column_type = "Text")]
    pub file_path: String,

    /// MIME type (e.g., "application/pdf", "image/jpeg")
    #[sea_orm(column_type = "Text")]
    pub file_type: String,

    /// File size in bytes
    pub file_size: i64,

    /// OCR parsed data as JSON
    #[sea_orm(column_type = "Text", nullable)]
    pub ocr_data: Option<String>,

    /// Optional description/notes
    #[sea_orm(column_type = "Text", nullable)]
    pub description: Option<String>,

    /// When the file was uploaded.
    /// Naive (timezone-less) timestamp; the upload handler populates it
    /// with `Utc::now().naive_utc()`, so values are UTC by convention.
    pub uploaded_at: DateTime,
}
|
||||
|
||||
/// Relations of the `sources` table to other entities.
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
pub enum Relation {
    /// Owning user: `sources.user_id` -> `users.id`.
    #[sea_orm(
        belongs_to = "crate::models::user::user::Entity",
        from = "Column::UserId",
        to = "crate::models::user::user::Column::Id"
    )]
    User,

    /// Biomarker entries that reference this source via their `source_id`.
    #[sea_orm(has_many = "super::biomarker_entry::Entity")]
    BiomarkerEntries,
}
|
||||
|
||||
// Wires Source -> User so related-entity queries can follow Relation::User.
impl Related<crate::models::user::user::Entity> for Entity {
    fn to() -> RelationDef {
        Relation::User.def()
    }
}
|
||||
|
||||
// Wires Source -> BiomarkerEntry so related-entity queries can follow
// Relation::BiomarkerEntries.
impl Related<super::biomarker_entry::Entity> for Entity {
    fn to() -> RelationDef {
        Relation::BiomarkerEntries.def()
    }
}
|
||||
|
||||
// Default behavior: no custom save/delete hooks for this entity.
impl ActiveModelBehavior for ActiveModel {}
|
||||
Reference in New Issue
Block a user