diff --git a/sql/unit/delete_all_from_project.sql b/sql/unit/delete_all_from_project.sql
new file mode 100644
index 0000000..b2eb937
--- /dev/null
+++ b/sql/unit/delete_all_from_project.sql
@@ -0,0 +1 @@
+DELETE FROM unit WHERE project_id = $1;
\ No newline at end of file
diff --git a/sql/unit/get_with_id.sql b/sql/unit/get_with_id.sql
new file mode 100644
index 0000000..8e42dde
--- /dev/null
+++ b/sql/unit/get_with_id.sql
@@ -0,0 +1,14 @@
+SELECT
+id,
+project_id,
+price_usd,
+unit_type as "unit_type: _",
+rooms,
+bathrooms,
+area,
+description,
+media as "media: _",
+admin_tag,
+time_created,
+last_updated
+FROM unit WHERE id = $1;
\ No newline at end of file
diff --git a/sql/unit/insert.sql b/sql/unit/insert.sql
index 72d6e6b..a376f29 100644
--- a/sql/unit/insert.sql
+++ b/sql/unit/insert.sql
@@ -26,15 +26,15 @@ INSERT INTO unit (
     $11
 )
 RETURNING
-id,
-project_id,
-price_usd,
-unit_type as "unit_type: _",
-rooms,
-bathrooms,
-area,
-description,
-media as "media: _",
-admin_tag,
-time_created,
-last_updated;
\ No newline at end of file
+    id,
+    project_id,
+    price_usd,
+    unit_type as "unit_type: _",
+    rooms,
+    bathrooms,
+    area,
+    description,
+    media as "media: _",
+    admin_tag,
+    time_created,
+    last_updated;
\ No newline at end of file
diff --git a/sqlx-data.json b/sqlx-data.json
index bc35a62..490e358 100644
--- a/sqlx-data.json
+++ b/sqlx-data.json
@@ -12,6 +12,92 @@
     },
     "query": "DELETE FROM agent WHERE id = $1;"
   },
+  "19a20556f5e3621438cd583aae4984c8cf510f359f65ec599f8a6b46d9153ec4": {
+    "describe": {
+      "columns": [
+        {
+          "name": "id",
+          "ordinal": 0,
+          "type_info": "Uuid"
+        },
+        {
+          "name": "project_id",
+          "ordinal": 1,
+          "type_info": "Uuid"
+        },
+        {
+          "name": "price_usd",
+          "ordinal": 2,
+          "type_info": "Float8"
+        },
+        {
+          "name": "unit_type: _",
+          "ordinal": 3,
+          "type_info": "Varchar"
+        },
+        {
+          "name": "rooms",
+          "ordinal": 4,
+          "type_info": "Int2"
+        },
+        {
+          "name": "bathrooms",
+          "ordinal": 5,
+          "type_info": "Int2"
+        },
+        {
+          "name": "area",
+          "ordinal": 6,
+          "type_info": "Float4"
+        },
+        {
+          "name": "description",
+          "ordinal": 7,
+          "type_info": "Text"
+        },
+        {
+          "name": "media: _",
+          "ordinal": 8,
+          "type_info": "Text"
+        },
+        {
+          "name": "admin_tag",
+          "ordinal": 9,
+          "type_info": "Varchar"
+        },
+        {
+          "name": "time_created",
+          "ordinal": 10,
+          "type_info": "Timestamptz"
+        },
+        {
+          "name": "last_updated",
+          "ordinal": 11,
+          "type_info": "Timestamptz"
+        }
+      ],
+      "nullable": [
+        false,
+        false,
+        false,
+        false,
+        false,
+        false,
+        false,
+        false,
+        false,
+        true,
+        false,
+        false
+      ],
+      "parameters": {
+        "Left": [
+          "Uuid"
+        ]
+      }
+    },
+    "query": "SELECT \nid,\nproject_id,\nprice_usd,\nunit_type as \"unit_type: _\",\nrooms,\nbathrooms,\narea,\ndescription,\nmedia as \"media: _\",\nadmin_tag,\ntime_created,\nlast_updated\nFROM unit WHERE id = $1;"
+  },
   "20a9f804be4c73f4cbbeb063dbcd7e4f848c532f4d19583857a19f4fdaa65107": {
     "describe": {
       "columns": [
@@ -363,102 +449,6 @@
     },
     "query": "INSERT INTO agent (\n    id, full_name, credential, credential_type, profile_picture_url, time_created, last_updated\n) VALUES (\n    $1, $2, $3, $4, $5, $6, $6\n) RETURNING\nid,\nfull_name,\ncredential,\ncredential_type as \"credential_type: _\",\nprofile_picture_url,\ntime_created,\nlast_updated;"
   },
-  "992569b87c0b84f2d99c58a3991d794274a543ea4cb43270f6f93a80a93458b8": {
-    "describe": {
-      "columns": [
-        {
-          "name": "id",
-          "ordinal": 0,
-          "type_info": "Uuid"
-        },
-        {
-          "name": "project_id",
-          "ordinal": 1,
-          "type_info": "Uuid"
-        },
-        {
-          "name": "price_usd",
-          "ordinal": 2,
-          "type_info": "Float8"
-        },
-        {
-          "name": "unit_type: _",
-          "ordinal": 3,
-          "type_info": "Varchar"
-        },
-        {
-          "name": "rooms",
-          "ordinal": 4,
-          "type_info": "Int2"
-        },
-        {
-          "name": "bathrooms",
-          "ordinal": 5,
-          "type_info": "Int2"
-        },
-        {
-          "name": "area",
-          "ordinal": 6,
-          "type_info": "Float4"
-        },
-        {
-          "name": "description",
-          "ordinal": 7,
-          "type_info": "Text"
-        },
-        {
-          "name": "media: _",
-          "ordinal": 8,
-          "type_info": "Text"
-        },
-        {
-          "name": "admin_tag",
-          "ordinal": 9,
-          "type_info": "Varchar"
-        },
-        {
-          "name": "time_created",
-          "ordinal": 10,
-          "type_info": "Timestamptz"
-        },
-        {
-          "name": "last_updated",
-          "ordinal": 11,
-          "type_info": "Timestamptz"
-        }
-      ],
-      "nullable": [
-        false,
-        false,
-        false,
-        false,
-        false,
-        false,
-        false,
-        false,
-        false,
-        true,
-        false,
-        false
-      ],
-      "parameters": {
-        "Left": [
-          "Uuid",
-          "Uuid",
-          "Float8",
-          "Varchar",
-          "Int2",
-          "Int2",
-          "Float4",
-          "Text",
-          "Text",
-          "Varchar",
-          "Timestamptz"
-        ]
-      }
-    },
-    "query": "INSERT INTO unit (\n    id,\n    project_id,\n    price_usd,\n    unit_type,\n    rooms,\n    bathrooms,\n    area,\n    description,\n    media,\n    admin_tag,\n    time_created,\n    last_updated\n) VALUES (\n    $1,\n    $2,\n    $3,\n    $4,\n    $5,\n    $6,\n    $7,\n    $8,\n    $9,\n    $10,\n    $11,\n    $11\n)\nRETURNING \nid,\nproject_id,\nprice_usd,\nunit_type as \"unit_type: _\",\nrooms,\nbathrooms,\narea,\ndescription,\nmedia as \"media: _\",\nadmin_tag,\ntime_created,\nlast_updated;"
-  },
   "a4b4bff48a573996a661d52576b051edc19b17de90d0823b87057e361004ecf8": {
     "describe": {
       "columns": [
@@ -1130,6 +1120,18 @@
     },
     "query": "SELECT \nid,\nfull_name,\ncredential,\ncredential_type as \"credential_type: _\",\nprofile_picture_url,\ntime_created,\nlast_updated\nFROM agent WHERE id = $1"
   },
+  "e15c1255cfa5d8075536949f04cb5bc5cc03336dee8cc417297ba02e7c8bff77": {
+    "describe": {
+      "columns": [],
+      "nullable": [],
+      "parameters": {
+        "Left": [
+          "Uuid"
+        ]
+      }
+    },
+    "query": "DELETE FROM unit WHERE project_id = $1;"
+  },
   "e249fa71f1bfd550a78b5838b9f393bd0d1d062291c8ea0fdcec61388a8712b8": {
     "describe": {
       "columns": [
@@ -1259,5 +1261,101 @@
       }
     },
     "query": "SELECT * FROM location WHERE city = $1 ORDER BY district DESC;"
+  },
+  "ef16e3c3ba17670dafc6c807cbf77c604954c292afc6c6598d8cbe62c570b625": {
+    "describe": {
+      "columns": [
+        {
+          "name": "id",
+          "ordinal": 0,
+          "type_info": "Uuid"
+        },
+        {
+          "name": "project_id",
+          "ordinal": 1,
+          "type_info": "Uuid"
+        },
+        {
+          "name": "price_usd",
+          "ordinal": 2,
+          "type_info": "Float8"
+        },
+        {
+          "name": "unit_type: _",
+          "ordinal": 3,
+          "type_info": "Varchar"
+        },
+        {
+          "name": "rooms",
+          "ordinal": 4,
+          "type_info": "Int2"
+        },
+        {
+          "name": "bathrooms",
+          "ordinal": 5,
+          "type_info": "Int2"
+        },
+        {
+          "name": "area",
+          "ordinal": 6,
+          "type_info": "Float4"
+        },
+        {
+          "name": "description",
+          "ordinal": 7,
+          "type_info": "Text"
+        },
+        {
+          "name": "media: _",
+          "ordinal": 8,
+          "type_info": "Text"
+        },
+        {
+          "name": "admin_tag",
+          "ordinal": 9,
+          "type_info": "Varchar"
+        },
+        {
+          "name": "time_created",
+          "ordinal": 10,
+          "type_info": "Timestamptz"
+        },
+        {
+          "name": "last_updated",
+          "ordinal": 11,
+          "type_info": "Timestamptz"
+        }
+      ],
+      "nullable": [
+        false,
+        false,
+        false,
+        false,
+        false,
+        false,
+        false,
+        false,
+        false,
+        true,
+        false,
+        false
+      ],
+      "parameters": {
+        "Left": [
+          "Uuid",
+          "Uuid",
+          "Float8",
+          "Varchar",
+          "Int2",
+          "Int2",
+          "Float4",
+          "Text",
+          "Text",
+          "Varchar",
+          "Timestamptz"
+        ]
+      }
+    },
+    "query": "INSERT INTO unit (\n    id,\n    project_id,\n    price_usd,\n    unit_type,\n    rooms,\n    bathrooms,\n    area,\n    description,\n    media,\n    admin_tag,\n    time_created,\n    last_updated\n) VALUES (\n    $1,\n    $2,\n    $3,\n    $4,\n    $5,\n    $6,\n    $7,\n    $8,\n    $9,\n    $10,\n    $11,\n    $11\n)\nRETURNING \n    id,\n    project_id,\n    price_usd,\n    unit_type as \"unit_type: _\",\n    rooms,\n    bathrooms,\n    area,\n    description,\n    media as \"media: _\",\n    admin_tag,\n    time_created,\n    last_updated;"
   }
 }
\ No newline at end of file
diff --git a/src/dao/location.rs b/src/dao/location.rs
index 8a0400c..8fb758e 100644
--- a/src/dao/location.rs
+++ b/src/dao/location.rs
@@ -35,9 +35,7 @@ pub async fn get_locations_in_city(
     .await
 }
 
-pub async fn fetch_all_locations(
-    conn: &PgPool,
-) -> Result<Vec<Location>, sqlx::Error> {
+pub async fn fetch_all_locations(conn: &PgPool) -> Result<Vec<Location>, sqlx::Error> {
     sqlx::query_file_as!(Location, "sql/location/fetch_all.sql")
         .fetch_all(conn)
         .await
@@ -45,7 +43,7 @@ pub async fn fetch_all_locations(
 
 pub async fn delete_location(
     tx: &mut Transaction<'_, Postgres>,
-    location_id: Uuid,
+    location_id: &Uuid,
 ) -> Result<PgQueryResult, sqlx::Error> {
     sqlx::query_file!("sql/location/delete.sql", location_id)
         .execute(tx)
diff --git a/src/dao/unit.rs b/src/dao/unit.rs
index 9bda472..2b4bc2a 100644
--- a/src/dao/unit.rs
+++ b/src/dao/unit.rs
@@ -2,6 +2,12 @@ use jl_types::domain::unit::Unit;
 use sqlx::{postgres::PgQueryResult, PgPool, Postgres, Transaction};
 use uuid::Uuid;
 
+pub async fn get_with_id(conn: &PgPool, unit_id: &Uuid) -> Result<Option<Unit>, sqlx::Error> {
+    sqlx::query_file_as!(Unit, "sql/unit/get_with_id.sql", unit_id)
+        .fetch_optional(conn)
+        .await
+}
+
 pub async fn fetch_with_project_id(
     conn: &PgPool,
     project_id: &Uuid,
@@ -20,6 +26,15 @@ pub async fn delete(
         .await
 }
 
+pub async fn delete_all_from_project(
+    tx: &mut Transaction<'_, Postgres>,
+    project_id: &Uuid,
+) -> Result<PgQueryResult, sqlx::Error> {
+    sqlx::query_file!("sql/unit/delete_all_from_project.sql", project_id)
+        .execute(tx)
+        .await
+}
+
 pub async fn insert(tx: &mut Transaction<'_, Postgres>, unit: &Unit) -> Result<Unit, sqlx::Error> {
     sqlx::query_file_as!(
         Unit,
@@ -40,7 +55,7 @@ pub async fn insert(tx: &mut Transaction<'_, Postgres>, unit: &Unit) -> Result<Unit, sqlx::Error> {
     .await
 }
 
-pub async fn update(conn: &PgPool, unit: &Unit) -> Result<Unit, sqlx::Error> {
+pub async fn update(tx: &mut Transaction<'_, Postgres>, unit: &Unit) -> Result<Unit, sqlx::Error> {
     sqlx::query_file_as!(
         Unit,
         "sql/unit/update.sql",
@@ -55,6 +70,6 @@ pub async fn update(conn: &PgPool, unit: &Unit) -> Result<Unit, sqlx::Error> {
         unit.last_updated,
         unit.id
     )
-    .fetch_one(conn)
+    .fetch_one(tx)
     .await
 }
diff --git a/src/routes/admin.rs b/src/routes/admin.rs
new file mode 100644
index 0000000..d3086c3
--- /dev/null
+++ b/src/routes/admin.rs
@@ -0,0 +1,108 @@
+use std::sync::Arc;
+
+use actix_web::{
+    delete, post, put,
+    web::{self, Path},
+};
+use actix_web_utils::extensions::typed_response::TypedHttpResponse;
+use jl_types::{
+    domain::{agent::Agent, location::Location, project::Project, unit::Unit},
+    dto::payloads::{
+        agent::{NewAgentPayload, UpdateAgentPayload},
+        location::NewLocationPayload,
+        project::{NewProjectPayload, UpdateProjectPayload},
+        unit::{NewUnitPayload, UpdateUnitPayload},
+    },
+};
+use sqlx::PgPool;
+use uuid::Uuid;
+
+use crate::services;
+
+#[post("/agent")]
+pub async fn create_new_agent_profile(
+    db_conn: web::Data<Arc<PgPool>>,
+    new_agent_payload: web::Json<NewAgentPayload>,
+) -> TypedHttpResponse<Agent> {
+    services::admin::create_new_agent_profile(&db_conn, new_agent_payload.0).await
+}
+
+#[post("/location")]
+pub async fn create_new_location(
+    db_conn: web::Data<Arc<PgPool>>,
+    new_location_payload: web::Json<NewLocationPayload>,
+) -> TypedHttpResponse<Location> {
+    services::admin::create_new_location(&db_conn, new_location_payload.0).await
+}
+
+#[post("/project")]
+pub async fn create_new_project(
+    db_conn: web::Data<Arc<PgPool>>,
+    new_project_payload: web::Json<NewProjectPayload>,
+) -> TypedHttpResponse<Project> {
+    services::admin::create_new_project(&db_conn, new_project_payload.0).await
+}
+
+#[post("/unit")]
+pub async fn create_new_unit(
+    db_conn: web::Data<Arc<PgPool>>,
+    new_unit_payload: web::Json<NewUnitPayload>,
+) -> TypedHttpResponse<Unit> {
+    services::admin::create_new_unit(&db_conn, new_unit_payload.0).await
+}
+
+#[put("/agent")]
+pub async fn update_agent(
+    db_conn: web::Data<Arc<PgPool>>,
+    update_agent_payload: web::Json<UpdateAgentPayload>,
+) -> TypedHttpResponse<Agent> {
+    services::admin::update_agent(&db_conn, update_agent_payload.0).await
+}
+
+#[put("/project")]
+pub async fn update_project(
+    db_conn: web::Data<Arc<PgPool>>,
+    update_project_payload: web::Json<UpdateProjectPayload>,
+) -> TypedHttpResponse<Project> {
+    services::admin::update_project(&db_conn, update_project_payload.0).await
+}
+
+#[put("/unit")]
+pub async fn update_unit(
+    db_conn: web::Data<Arc<PgPool>>,
+    update_unit_payload: web::Json<UpdateUnitPayload>,
+) -> TypedHttpResponse<Unit> {
+    services::admin::update_unit(&db_conn, update_unit_payload.0).await
+}
+
+#[delete("agent/{agent_id}")]
+pub async fn delete_agent(
+    db_conn: web::Data<Arc<PgPool>>,
+    agent_id: Path<Uuid>,
+) -> TypedHttpResponse<()> {
+    services::admin::delete_agent(&db_conn, &agent_id).await
+}
+
+#[delete("location/{location_id}")]
+pub async fn delete_location(
+    db_conn: web::Data<Arc<PgPool>>,
+    location_id: Path<Uuid>,
+) -> TypedHttpResponse<()> {
+    services::admin::delete_location(&db_conn, &location_id).await
+}
+
+#[delete("project/{project_id}")]
+pub async fn delete_project(
+    db_conn: web::Data<Arc<PgPool>>,
+    project_id: Path<Uuid>,
+) -> TypedHttpResponse<()> {
+    services::admin::delete_project(&db_conn, &project_id).await
+}
+
+#[delete("unit/{unit_id}")]
+pub async fn delete_unit(
+    db_conn: web::Data<Arc<PgPool>>,
+    unit_id: Path<Uuid>,
+) -> TypedHttpResponse<()> {
+    services::admin::delete_unit(&db_conn, &unit_id).await
+}
diff --git a/src/routes/mod.rs b/src/routes/mod.rs
index abbbe10..026a1ab 100644
--- a/src/routes/mod.rs
+++ b/src/routes/mod.rs
@@ -1 +1,4 @@
 pub mod main_router;
+
+pub mod admin;
+pub mod read;
diff --git a/src/routes/read.rs b/src/routes/read.rs
new file mode 100644
index 0000000..9a2fb84
--- /dev/null
+++ b/src/routes/read.rs
@@ -0,0 +1,97 @@
+use std::{collections::HashMap, str::FromStr, sync::Arc};
+
+use actix_web::{
+    get,
+    web::{self, Path},
+};
+use actix_web_utils::extensions::typed_response::TypedHttpResponse;
+use err::MessageResource;
+use jl_types::{
+    domain::{
+        agent::Agent, location::Location, project::Project, project_condition::ProjectCondition,
+        project_type::ProjectType,
+    },
+    dto::{filters::Filter, listing::Listing},
+};
+use sqlx::PgPool;
+use uuid::Uuid;
+
+use crate::services;
+
+#[get("/agent")]
+pub async fn get_all_agents(db_conn: web::Data<Arc<PgPool>>) -> TypedHttpResponse<Vec<Agent>> {
+    services::read::get_all_agents(&db_conn).await
+}
+
+#[get("/locations")]
+pub async fn get_all_locations(
+    db_conn: web::Data<Arc<PgPool>>,
+) -> TypedHttpResponse<Vec<Location>> {
+    services::read::get_all_locations(&db_conn).await
+}
+
+#[get("/locations/{city}")]
+pub async fn get_locations_in_city(
+    db_conn: web::Data<Arc<PgPool>>,
+    city: Path<String>,
+) -> TypedHttpResponse<Vec<Location>> {
+    services::read::get_all_locations_in_city(&db_conn, &city).await
+}
+
+#[get("/projects/{page}")]
+pub async fn get_projects_paged(
+    db_conn: web::Data<Arc<PgPool>>,
+    page: Path<i64>,
+    query_params: web::Query<HashMap<String, String>>,
+) -> TypedHttpResponse<Vec<Project>> {
+    let filters = match parse_params_into_filters(query_params.0) {
+        Ok(filters) => filters,
+        Err(msg) => return TypedHttpResponse::return_standard_error(400, msg),
+    };
+    services::read::get_projects_paged(&db_conn, &page, &filters).await
+}
+
+#[get("/projects/{project_id}")]
+pub async fn get_project_data(
+    db_conn: web::Data<Arc<PgPool>>,
+    project_id: Path<Uuid>,
+) -> TypedHttpResponse<Listing> {
+    services::read::get_project_data(&db_conn, &project_id).await
+}
+
+fn parse_params_into_filters(
+    params: HashMap<String, String>,
+) -> Result<Vec<Filter>, MessageResource> {
+    let mut filters: Vec<Filter> = Vec::new();
+    for key in params.keys() {
+        match key.as_str() {
+            "incity" => {
+                let city = params.get(key).unwrap();
+                filters.push(Filter::InCity(city.clone()));
+            }
+            "indistrict" => {
+                let district = params.get(key).unwrap();
+                filters.push(Filter::InDistrict(district.clone()));
+            }
+            "finished" => {
+                filters.push(Filter::Finished);
+            }
+            "byprojecttype" => {
+                let project_type = params.get(key).unwrap();
+                match ProjectType::from_str(project_type) {
+                    Ok(project_type) => filters.push(Filter::ByProjectType(project_type)),
+                    Err(_) => {}
+                }
+            }
+            "byprojectcondition" => {
+                let condition = params.get(key).unwrap();
+                match ProjectCondition::from_str(condition) {
+                    Ok(condition) => filters.push(Filter::ByProjectCondition(condition)),
+                    Err(_) => {}
+                }
+            }
+            _ => {}
+        };
+    }
+    Ok(filters)
+}
diff --git a/src/services/admin.rs b/src/services/admin.rs
index 717ca54..aa5eb49 100644
--- a/src/services/admin.rs
+++ b/src/services/admin.rs
@@ -1,10 +1,16 @@
 use actix_web_utils::extensions::typed_response::TypedHttpResponse;
 use jl_types::{
-    domain::{agent::Agent, project::Project, location::Location},
-    dto::payloads::{agent::{NewAgentPayload, UpdateAgentPayload}, project::{NewProjectPayload, UpdateProjectPayload}, location::NewLocationPayload},
+    domain::{agent::Agent, location::Location, project::Project, unit::Unit},
+    dto::payloads::{
+        agent::{NewAgentPayload, UpdateAgentPayload},
+        location::NewLocationPayload,
+        project::{NewProjectPayload, UpdateProjectPayload},
+        unit::{NewUnitPayload, UpdateUnitPayload},
+    },
 };
 use sqlx::PgPool;
+use uuid::Uuid;
 
 use crate::{dao, handle_db_read_op, handle_db_write_op, handle_tx, success, unwrap_or_not_found};
 
@@ -29,7 +35,10 @@ pub async fn create_new_location(
     new_location: NewLocationPayload,
 ) -> TypedHttpResponse<Location> {
     let mut tx = handle_tx!(conn.begin());
-    let persisted_location = handle_db_write_op!(dao::location::insert_location(&mut tx, &new_location.into()), tx);
+    let persisted_location = handle_db_write_op!(
+        dao::location::insert_location(&mut tx, &new_location.into()),
+        tx
+    );
     handle_tx!(tx.commit());
     success!(persisted_location)
 }
@@ -39,13 +48,33 @@ pub async fn create_new_project(
     new_project: NewProjectPayload,
 ) -> TypedHttpResponse<Project> {
     let mut tx = handle_tx!(conn.begin());
-    unwrap_or_not_found!(handle_db_read_op!(dao::agent::get_agent_with_id(conn, &new_project.agent_id)), "agents");
-    unwrap_or_not_found!(handle_db_read_op!(dao::location::get_location_with_id(conn, &new_project.location_id)), "locations");
-    let persisted_project = handle_db_write_op!(dao::project::insert(&mut tx, &new_project.into()), tx);
+    unwrap_or_not_found!(
+        handle_db_read_op!(dao::agent::get_agent_with_id(conn, &new_project.agent_id)),
+        "agents"
+    );
+    unwrap_or_not_found!(
+        handle_db_read_op!(dao::location::get_location_with_id(
+            conn,
+            &new_project.location_id
+        )),
+        "locations"
+    );
+    let persisted_project =
+        handle_db_write_op!(dao::project::insert(&mut tx, &new_project.into()), tx);
     handle_tx!(tx.commit());
     success!(persisted_project)
 }
 
+pub async fn create_new_unit(conn: &PgPool, new_unit: NewUnitPayload) -> TypedHttpResponse<Unit> {
+    let mut tx = handle_tx!(conn.begin());
+    unwrap_or_not_found!(
+        handle_db_read_op!(dao::project::get_with_id(conn, &new_unit.project_id)),
+        "projects"
+    );
+    let persisted_unit = handle_db_write_op!(dao::unit::insert(&mut tx, &new_unit.into()), tx);
+    handle_tx!(tx.commit());
+    success!(persisted_unit)
+}
 //
 // Update Methods
 //
@@ -71,11 +100,72 @@ pub async fn update_agent(
     success!(updated_agent)
 }
 
-pub async fn update_project(conn: &PgPool, update_project_payload: UpdateProjectPayload) -> TypedHttpResponse<Project> {
+pub async fn update_project(
+    conn: &PgPool,
+    update_project_payload: UpdateProjectPayload,
+) -> TypedHttpResponse<Project> {
     let mut tx = handle_tx!(conn.begin());
-    let mut persisted_project = unwrap_or_not_found!(handle_db_read_op!(dao::project::get_with_id(conn, &update_project_payload.id)), "projects");
+    let mut persisted_project = unwrap_or_not_found!(
+        handle_db_read_op!(dao::project::get_with_id(conn, &update_project_payload.id)),
+        "projects"
+    );
     update_project_payload.update_project(&mut persisted_project);
-    let updated_project = handle_db_write_op!(dao::project::update(&mut tx, &persisted_project), tx);
+    let updated_project =
+        handle_db_write_op!(dao::project::update(&mut tx, &persisted_project), tx);
     handle_tx!(tx.commit());
     success!(updated_project)
 }
+
+pub async fn update_unit(
+    conn: &PgPool,
+    update_unit_payload: UpdateUnitPayload,
+) -> TypedHttpResponse<Unit> {
+    let mut tx = handle_tx!(conn.begin());
+    let mut persisted_unit = unwrap_or_not_found!(
+        handle_db_read_op!(dao::unit::get_with_id(conn, &update_unit_payload.id)),
+        "units"
+    );
+    update_unit_payload.update_unit(&mut persisted_unit);
+    let updated_unit = handle_db_write_op!(dao::unit::update(&mut tx, &persisted_unit), tx);
+    handle_tx!(tx.commit());
+    success!(updated_unit)
+}
+
+//
+// Delete methods
+//
+
+pub async fn delete_project(conn: &PgPool, project_id: &Uuid) -> TypedHttpResponse<()> {
+    let mut tx = handle_tx!(conn.begin());
+    //let persisted_project = unwrap_or_not_found!(handle_db_read_op!(dao::project::get_with_id(conn, project_id)), "projects");
+    // TODO: Test to see if this gives an error if no units are found
+    handle_db_write_op!(dao::unit::delete_all_from_project(&mut tx, project_id), tx);
+    // handle_db_write_op!(dao::location::delete_location(&mut tx, persisted_project.location_id));
+    handle_db_write_op!(dao::project::delete(&mut tx, project_id), tx);
+    handle_tx!(tx.commit());
+    success!(())
+}
+
+pub async fn delete_location(conn: &PgPool, location_id: &Uuid) -> TypedHttpResponse<()> {
+    let mut tx = handle_tx!(conn.begin());
+    // TODO: Test to see if this gives an error if no locations are found
+    handle_db_write_op!(dao::location::delete_location(&mut tx, location_id), tx);
+    handle_tx!(tx.commit());
+    success!(())
+}
+
+pub async fn delete_agent(conn: &PgPool, agent_id: &Uuid) -> TypedHttpResponse<()> {
+    let mut tx = handle_tx!(conn.begin());
+    // TODO: Test to see if this gives an error if no agents are found
+    handle_db_write_op!(dao::agent::delete_agent(&mut tx, agent_id), tx);
+    handle_tx!(tx.commit());
+    success!(())
+}
+
+pub async fn delete_unit(conn: &PgPool, unit_id: &Uuid) -> TypedHttpResponse<()> {
+    let mut tx = handle_tx!(conn.begin());
+    // TODO: Test to see if this gives an error if no units are found
+    handle_db_write_op!(dao::unit::delete(&mut tx, unit_id), tx);
+    handle_tx!(tx.commit());
+    success!(())
+}
diff --git a/src/services/read.rs b/src/services/read.rs
index 8b13789..958cf40 100644
--- a/src/services/read.rs
+++ b/src/services/read.rs
@@ -1 +1,57 @@
+use actix_web_utils::extensions::typed_response::TypedHttpResponse;
+use jl_types::{
+    domain::{agent::Agent, location::Location, project::Project},
+    dto::{filters::Filter, listing::Listing},
+};
+use sqlx::PgPool;
+use uuid::Uuid;
 
+use crate::{dao, handle_db_read_op, success, unwrap_or_not_found};
+
+pub async fn get_all_agents(conn: &PgPool) -> TypedHttpResponse<Vec<Agent>> {
+    success!(handle_db_read_op!(dao::agent::fetch_all(conn)))
+}
+
+pub async fn get_all_locations(conn: &PgPool) -> TypedHttpResponse<Vec<Location>> {
+    success!(handle_db_read_op!(dao::location::fetch_all_locations(conn)))
+}
+
+pub async fn get_all_locations_in_city(
+    conn: &PgPool,
+    city: &String,
+) -> TypedHttpResponse<Vec<Location>> {
+    success!(handle_db_read_op!(dao::location::get_locations_in_city(
+        conn, city
+    )))
+}
+
+pub async fn get_projects_paged(
+    conn: &PgPool,
+    page: &i64,
+    filters: &Vec<Filter>,
+) -> TypedHttpResponse<Vec<Project>> {
+    success!(handle_db_read_op!(dao::project::fetch_with_filters_paged(
+        conn, filters, page
+    )))
+}
+
+/// Return Units, and agent
+pub async fn get_project_data(conn: &PgPool, project_id: &Uuid) -> TypedHttpResponse<Listing> {
+    let project = unwrap_or_not_found!(
+        handle_db_read_op!(dao::project::get_with_id(conn, project_id)),
+        "projects"
+    );
+    let units = handle_db_read_op!(dao::unit::fetch_with_project_id(conn, project_id));
+    let agent = unwrap_or_not_found!(
+        handle_db_read_op!(dao::agent::get_agent_with_id(conn, &project.agent_id)),
+        "agents"
+    );
+    let location = unwrap_or_not_found!(
+        handle_db_read_op!(dao::location::get_location_with_id(
+            conn,
+            &project.location_id
+        )),
+        "locations"
+    );
+    success!(Listing::new(project, units, location, agent))
+}
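The diff declares the new `admin` and `read` modules in `src/routes/mod.rs` but does not touch `src/routes/main_router.rs`, so the handler wiring is not shown. A minimal sketch of how the new handlers could be mounted follows; the `configure_routes` name, the `/admin` and `/read` scope prefixes, and the use of `web::ServiceConfig` are assumptions for illustration, not part of this change:

```rust
// Hypothetical wiring for src/routes/main_router.rs (not shown in this diff).
use actix_web::web;

use crate::routes::{admin, read};

// Registers every handler added in routes/admin.rs and routes/read.rs under
// two scopes; hook this into the App builder with `.configure(configure_routes)`.
pub fn configure_routes(cfg: &mut web::ServiceConfig) {
    cfg.service(
        web::scope("/admin")
            .service(admin::create_new_agent_profile)
            .service(admin::create_new_location)
            .service(admin::create_new_project)
            .service(admin::create_new_unit)
            .service(admin::update_agent)
            .service(admin::update_project)
            .service(admin::update_unit)
            .service(admin::delete_agent)
            .service(admin::delete_location)
            .service(admin::delete_project)
            .service(admin::delete_unit),
    )
    .service(
        web::scope("/read")
            .service(read::get_all_agents)
            .service(read::get_all_locations)
            .service(read::get_locations_in_city)
            .service(read::get_projects_paged)
            .service(read::get_project_data),
    );
}
```

Two things are worth double-checking when this wiring lands: the delete attributes (`#[delete("agent/{agent_id}")]`, etc.) omit the leading slash that the POST/PUT routes use, and `#[get("/projects/{page}")]` and `#[get("/projects/{project_id}")]` match the same path shape, so whichever of the two is registered second will be shadowed by the other.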