Added all routes

Franklin 2023-03-19 16:34:58 -04:00
parent 30c9f86edb
commit 2c3a752130
11 changed files with 603 additions and 123 deletions

sql/unit/delete_all_from_project.sql Normal file (+1 line)
@@ -0,0 +1 @@
DELETE FROM unit WHERE project_id = $1;

sql/unit/get_with_id.sql Normal file (+14 lines)
@@ -0,0 +1,14 @@
SELECT
id,
project_id,
price_usd,
unit_type as "unit_type: _",
rooms,
bathrooms,
area,
description,
media as "media: _",
admin_tag,
time_created,
last_updated
FROM unit WHERE id = $1;

@@ -26,15 +26,15 @@ INSERT INTO unit (
$11
)
RETURNING
id,
project_id,
price_usd,
unit_type as "unit_type: _",
rooms,
bathrooms,
area,
description,
media as "media: _",
admin_tag,
time_created,
last_updated;
id,
project_id,
price_usd,
unit_type as "unit_type: _",
rooms,
bathrooms,
area,
description,
media as "media: _",
admin_tag,
time_created,
last_updated;

@@ -12,6 +12,92 @@
},
"query": "DELETE FROM agent WHERE id = $1;"
},
"19a20556f5e3621438cd583aae4984c8cf510f359f65ec599f8a6b46d9153ec4": {
"describe": {
"columns": [
{
"name": "id",
"ordinal": 0,
"type_info": "Uuid"
},
{
"name": "project_id",
"ordinal": 1,
"type_info": "Uuid"
},
{
"name": "price_usd",
"ordinal": 2,
"type_info": "Float8"
},
{
"name": "unit_type: _",
"ordinal": 3,
"type_info": "Varchar"
},
{
"name": "rooms",
"ordinal": 4,
"type_info": "Int2"
},
{
"name": "bathrooms",
"ordinal": 5,
"type_info": "Int2"
},
{
"name": "area",
"ordinal": 6,
"type_info": "Float4"
},
{
"name": "description",
"ordinal": 7,
"type_info": "Text"
},
{
"name": "media: _",
"ordinal": 8,
"type_info": "Text"
},
{
"name": "admin_tag",
"ordinal": 9,
"type_info": "Varchar"
},
{
"name": "time_created",
"ordinal": 10,
"type_info": "Timestamptz"
},
{
"name": "last_updated",
"ordinal": 11,
"type_info": "Timestamptz"
}
],
"nullable": [
false,
false,
false,
false,
false,
false,
false,
false,
false,
true,
false,
false
],
"parameters": {
"Left": [
"Uuid"
]
}
},
"query": "SELECT \nid,\nproject_id,\nprice_usd,\nunit_type as \"unit_type: _\",\nrooms,\nbathrooms,\narea,\ndescription,\nmedia as \"media: _\",\nadmin_tag,\ntime_created,\nlast_updated\nFROM unit WHERE id = $1;"
},
"20a9f804be4c73f4cbbeb063dbcd7e4f848c532f4d19583857a19f4fdaa65107": {
"describe": {
"columns": [
@@ -363,102 +449,6 @@
},
"query": "INSERT INTO agent (\n id, full_name, credential, credential_type, profile_picture_url, time_created, last_updated\n) VALUES (\n $1, $2, $3, $4, $5, $6, $6\n) RETURNING\nid,\nfull_name,\ncredential,\ncredential_type as \"credential_type: _\",\nprofile_picture_url,\ntime_created,\nlast_updated;"
},
"992569b87c0b84f2d99c58a3991d794274a543ea4cb43270f6f93a80a93458b8": {
"describe": {
"columns": [
{
"name": "id",
"ordinal": 0,
"type_info": "Uuid"
},
{
"name": "project_id",
"ordinal": 1,
"type_info": "Uuid"
},
{
"name": "price_usd",
"ordinal": 2,
"type_info": "Float8"
},
{
"name": "unit_type: _",
"ordinal": 3,
"type_info": "Varchar"
},
{
"name": "rooms",
"ordinal": 4,
"type_info": "Int2"
},
{
"name": "bathrooms",
"ordinal": 5,
"type_info": "Int2"
},
{
"name": "area",
"ordinal": 6,
"type_info": "Float4"
},
{
"name": "description",
"ordinal": 7,
"type_info": "Text"
},
{
"name": "media: _",
"ordinal": 8,
"type_info": "Text"
},
{
"name": "admin_tag",
"ordinal": 9,
"type_info": "Varchar"
},
{
"name": "time_created",
"ordinal": 10,
"type_info": "Timestamptz"
},
{
"name": "last_updated",
"ordinal": 11,
"type_info": "Timestamptz"
}
],
"nullable": [
false,
false,
false,
false,
false,
false,
false,
false,
false,
true,
false,
false
],
"parameters": {
"Left": [
"Uuid",
"Uuid",
"Float8",
"Varchar",
"Int2",
"Int2",
"Float4",
"Text",
"Text",
"Varchar",
"Timestamptz"
]
}
},
"query": "INSERT INTO unit (\n id,\n project_id,\n price_usd,\n unit_type,\n rooms,\n bathrooms,\n area,\n description,\n media,\n admin_tag,\n time_created,\n last_updated\n) VALUES (\n $1,\n $2,\n $3,\n $4,\n $5,\n $6,\n $7,\n $8,\n $9,\n $10,\n $11,\n $11\n)\nRETURNING \nid,\nproject_id,\nprice_usd,\nunit_type as \"unit_type: _\",\nrooms,\nbathrooms,\narea,\ndescription,\nmedia as \"media: _\",\nadmin_tag,\ntime_created,\nlast_updated;"
},
"a4b4bff48a573996a661d52576b051edc19b17de90d0823b87057e361004ecf8": {
"describe": {
"columns": [
@@ -1130,6 +1120,18 @@
},
"query": "SELECT \nid,\nfull_name,\ncredential,\ncredential_type as \"credential_type: _\",\nprofile_picture_url,\ntime_created,\nlast_updated\nFROM agent WHERE id = $1"
},
"e15c1255cfa5d8075536949f04cb5bc5cc03336dee8cc417297ba02e7c8bff77": {
"describe": {
"columns": [],
"nullable": [],
"parameters": {
"Left": [
"Uuid"
]
}
},
"query": "DELETE FROM unit WHERE project_id = $1;"
},
"e249fa71f1bfd550a78b5838b9f393bd0d1d062291c8ea0fdcec61388a8712b8": {
"describe": {
"columns": [
@@ -1259,5 +1261,101 @@
}
},
"query": "SELECT * FROM location WHERE city = $1 ORDER BY district DESC;"
},
"ef16e3c3ba17670dafc6c807cbf77c604954c292afc6c6598d8cbe62c570b625": {
"describe": {
"columns": [
{
"name": "id",
"ordinal": 0,
"type_info": "Uuid"
},
{
"name": "project_id",
"ordinal": 1,
"type_info": "Uuid"
},
{
"name": "price_usd",
"ordinal": 2,
"type_info": "Float8"
},
{
"name": "unit_type: _",
"ordinal": 3,
"type_info": "Varchar"
},
{
"name": "rooms",
"ordinal": 4,
"type_info": "Int2"
},
{
"name": "bathrooms",
"ordinal": 5,
"type_info": "Int2"
},
{
"name": "area",
"ordinal": 6,
"type_info": "Float4"
},
{
"name": "description",
"ordinal": 7,
"type_info": "Text"
},
{
"name": "media: _",
"ordinal": 8,
"type_info": "Text"
},
{
"name": "admin_tag",
"ordinal": 9,
"type_info": "Varchar"
},
{
"name": "time_created",
"ordinal": 10,
"type_info": "Timestamptz"
},
{
"name": "last_updated",
"ordinal": 11,
"type_info": "Timestamptz"
}
],
"nullable": [
false,
false,
false,
false,
false,
false,
false,
false,
false,
true,
false,
false
],
"parameters": {
"Left": [
"Uuid",
"Uuid",
"Float8",
"Varchar",
"Int2",
"Int2",
"Float4",
"Text",
"Text",
"Varchar",
"Timestamptz"
]
}
},
"query": "INSERT INTO unit (\n id,\n project_id,\n price_usd,\n unit_type,\n rooms,\n bathrooms,\n area,\n description,\n media,\n admin_tag,\n time_created,\n last_updated\n) VALUES (\n $1,\n $2,\n $3,\n $4,\n $5,\n $6,\n $7,\n $8,\n $9,\n $10,\n $11,\n $11\n)\nRETURNING \n id,\n project_id,\n price_usd,\n unit_type as \"unit_type: _\",\n rooms,\n bathrooms,\n area,\n description,\n media as \"media: _\",\n admin_tag,\n time_created,\n last_updated;"
}
}
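
These hunks touch sqlx's offline query metadata (sqlx-data.json): the new 19a20556, e15c1255, and ef16e3c3 entries describe the unit get_with_id, delete_all_from_project, and insert queries added in this commit, while the old 992569b8 insert entry is removed. Assuming offline mode is in use, this file is normally regenerated with cargo sqlx prepare rather than edited by hand.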

@@ -35,9 +35,7 @@ pub async fn get_locations_in_city(
.await
}
pub async fn fetch_all_locations(
conn: &PgPool,
) -> Result<Vec<Location>, sqlx::Error> {
pub async fn fetch_all_locations(conn: &PgPool) -> Result<Vec<Location>, sqlx::Error> {
sqlx::query_file_as!(Location, "sql/location/fetch_all.sql")
.fetch_all(conn)
.await
@@ -45,7 +43,7 @@ pub async fn fetch_all_locations(
pub async fn delete_location(
tx: &mut Transaction<'_, Postgres>,
location_id: Uuid,
location_id: &Uuid,
) -> Result<PgQueryResult, sqlx::Error> {
sqlx::query_file!("sql/location/delete.sql", location_id)
.execute(tx)

@@ -2,6 +2,12 @@ use jl_types::domain::unit::Unit;
use sqlx::{postgres::PgQueryResult, PgPool, Postgres, Transaction};
use uuid::Uuid;
pub async fn get_with_id(conn: &PgPool, unit_id: &Uuid) -> Result<Option<Unit>, sqlx::Error> {
sqlx::query_file_as!(Unit, "sql/unit/get_with_id.sql", unit_id)
.fetch_optional(conn)
.await
}
pub async fn fetch_with_project_id(
conn: &PgPool,
project_id: &Uuid,
@@ -20,6 +26,15 @@ pub async fn delete(
.await
}
pub async fn delete_all_from_project(
tx: &mut Transaction<'_, Postgres>,
project_id: &Uuid,
) -> Result<PgQueryResult, sqlx::Error> {
sqlx::query_file!("sql/unit/delete_all_from_project.sql", project_id)
.execute(tx)
.await
}
pub async fn insert(tx: &mut Transaction<'_, Postgres>, unit: &Unit) -> Result<Unit, sqlx::Error> {
sqlx::query_file_as!(
Unit,
@@ -40,7 +55,7 @@ pub async fn insert(tx: &mut Transaction<'_, Postgres>, unit: &Unit) -> Result<U
.await
}
pub async fn update(conn: &PgPool, unit: &Unit) -> Result<Unit, sqlx::Error> {
pub async fn update(tx: &mut Transaction<'_, Postgres>, unit: &Unit) -> Result<Unit, sqlx::Error> {
sqlx::query_file_as!(
Unit,
"sql/unit/update.sql",
@@ -55,6 +70,6 @@ pub async fn update(conn: &PgPool, unit: &Unit) -> Result<Unit, sqlx::Error> {
unit.last_updated,
unit.id
)
.fetch_one(conn)
.fetch_one(tx)
.await
}

src/routes/admin.rs Normal file (+108 lines)
@@ -0,0 +1,108 @@
use std::sync::Arc;
use actix_web::{
delete, post, put,
web::{self, Path},
};
use actix_web_utils::extensions::typed_response::TypedHttpResponse;
use jl_types::{
domain::{agent::Agent, location::Location, project::Project, unit::Unit},
dto::payloads::{
agent::{NewAgentPayload, UpdateAgentPayload},
location::NewLocationPayload,
project::{NewProjectPayload, UpdateProjectPayload},
unit::{NewUnitPayload, UpdateUnitPayload},
},
};
use sqlx::PgPool;
use uuid::Uuid;
use crate::services;
#[post("/agent")]
pub async fn create_new_agent_profile(
db_conn: web::Data<Arc<PgPool>>,
new_agent_payload: web::Json<NewAgentPayload>,
) -> TypedHttpResponse<Agent> {
services::admin::create_new_agent_profile(&db_conn, new_agent_payload.0).await
}
#[post("/location")]
pub async fn create_new_location(
db_conn: web::Data<Arc<PgPool>>,
new_location_payload: web::Json<NewLocationPayload>,
) -> TypedHttpResponse<Location> {
services::admin::create_new_location(&db_conn, new_location_payload.0).await
}
#[post("/project")]
pub async fn create_new_project(
db_conn: web::Data<Arc<PgPool>>,
new_project_payload: web::Json<NewProjectPayload>,
) -> TypedHttpResponse<Project> {
services::admin::create_new_project(&db_conn, new_project_payload.0).await
}
#[post("/unit")]
pub async fn create_new_unit(
db_conn: web::Data<Arc<PgPool>>,
new_unit_payload: web::Json<NewUnitPayload>,
) -> TypedHttpResponse<Unit> {
services::admin::create_new_unit(&db_conn, new_unit_payload.0).await
}
#[put("/agent")]
pub async fn update_agent(
db_conn: web::Data<Arc<PgPool>>,
update_agent_payload: web::Json<UpdateAgentPayload>,
) -> TypedHttpResponse<Agent> {
services::admin::update_agent(&db_conn, update_agent_payload.0).await
}
#[put("/project")]
pub async fn update_project(
db_conn: web::Data<Arc<PgPool>>,
update_project_payload: web::Json<UpdateProjectPayload>,
) -> TypedHttpResponse<Project> {
services::admin::update_project(&db_conn, update_project_payload.0).await
}
#[put("/unit")]
pub async fn update_unit(
db_conn: web::Data<Arc<PgPool>>,
update_unit_payload: web::Json<UpdateUnitPayload>,
) -> TypedHttpResponse<Unit> {
services::admin::update_unit(&db_conn, update_unit_payload.0).await
}
#[delete("agent/{agent_id}")]
pub async fn delete_agent(
db_conn: web::Data<Arc<PgPool>>,
agent_id: Path<Uuid>,
) -> TypedHttpResponse<()> {
services::admin::delete_agent(&db_conn, &agent_id).await
}
#[delete("location/{location_id}")]
pub async fn delete_location(
db_conn: web::Data<Arc<PgPool>>,
location_id: Path<Uuid>,
) -> TypedHttpResponse<()> {
services::admin::delete_location(&db_conn, &location_id).await
}
#[delete("project/{project_id}")]
pub async fn delete_project(
db_conn: web::Data<Arc<PgPool>>,
project_id: Path<Uuid>,
) -> TypedHttpResponse<()> {
services::admin::delete_project(&db_conn, &project_id).await
}
#[delete("unit/{unit_id}")]
pub async fn delete_unit(
db_conn: web::Data<Arc<PgPool>>,
unit_id: Path<Uuid>,
) -> TypedHttpResponse<()> {
services::admin::delete_unit(&db_conn, &unit_id).await
}
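
None of these handlers are mounted in this file; main_router.rs is not included in this diff, so the snippet below is only a sketch of how the admin routes might be registered. The configure() helper and the "/admin" scope prefix are assumptions, not part of this commit.

use actix_web::web;

use crate::routes::admin;

// Hypothetical route registration for the handlers above; only the
// handler names are taken from this commit.
pub fn configure(cfg: &mut web::ServiceConfig) {
    cfg.service(
        web::scope("/admin")
            .service(admin::create_new_agent_profile)
            .service(admin::create_new_location)
            .service(admin::create_new_project)
            .service(admin::create_new_unit)
            .service(admin::update_agent)
            .service(admin::update_project)
            .service(admin::update_unit)
            .service(admin::delete_agent)
            .service(admin::delete_location)
            .service(admin::delete_project)
            .service(admin::delete_unit),
    );
}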

@@ -1 +1,4 @@
pub mod main_router;
pub mod admin;
pub mod read;

src/routes/read.rs Normal file (+97 lines)
@@ -0,0 +1,97 @@
use std::{collections::HashMap, str::FromStr, sync::Arc};
use actix_web::{
get,
web::{self, Path},
};
use actix_web_utils::extensions::typed_response::TypedHttpResponse;
use err::MessageResource;
use jl_types::{
domain::{
agent::Agent, location::Location, project::Project, project_condition::ProjectCondition,
project_type::ProjectType,
},
dto::{filters::Filter, listing::Listing},
};
use sqlx::PgPool;
use uuid::Uuid;
use crate::services;
#[get("/agent")]
pub async fn get_all_agents(db_conn: web::Data<Arc<PgPool>>) -> TypedHttpResponse<Vec<Agent>> {
services::read::get_all_agents(&db_conn).await
}
#[get("/locations")]
pub async fn get_all_locations(
db_conn: web::Data<Arc<PgPool>>,
) -> TypedHttpResponse<Vec<Location>> {
services::read::get_all_locations(&db_conn).await
}
#[get("/locations/{city}")]
pub async fn get_locations_in_city(
db_conn: web::Data<Arc<PgPool>>,
city: Path<String>,
) -> TypedHttpResponse<Vec<Location>> {
services::read::get_all_locations_in_city(&db_conn, &city).await
}
#[get("/projects/{page}")]
pub async fn get_projects_paged(
db_conn: web::Data<Arc<PgPool>>,
page: Path<i64>,
query_params: web::Query<HashMap<String, String>>,
) -> TypedHttpResponse<Vec<Project>> {
let filters = match parse_params_into_filters(query_params.0) {
Ok(filters) => filters,
Err(msg) => return TypedHttpResponse::return_standard_error(400, msg),
};
services::read::get_projects_paged(&db_conn, &page, &filters).await
}
#[get("/projects/{project_id}")]
pub async fn get_project_data(
db_conn: web::Data<Arc<PgPool>>,
project_id: Path<Uuid>,
) -> TypedHttpResponse<Listing> {
services::read::get_project_data(&db_conn, &project_id).await
}
fn parse_params_into_filters(
params: HashMap<String, String>,
) -> Result<Vec<Filter>, MessageResource> {
let mut filters: Vec<Filter> = Vec::new();
for key in params.keys() {
match key.as_str() {
"incity" => {
let city = params.get(key).unwrap();
filters.push(Filter::InCity(city.clone()));
}
"indistrict" => {
let district = params.get(key).unwrap();
filters.push(Filter::InDistrict(district.clone()));
}
"finished" => {
filters.push(Filter::Finished);
}
"byprojecttype" => {
let project_type = params.get(key).unwrap();
match ProjectType::from_str(project_type) {
Ok(project_type) => filters.push(Filter::ByProjectType(project_type)),
Err(_) => {}
}
}
"byprojectcondition" => {
let condition = params.get(key).unwrap();
match ProjectCondition::from_str(condition) {
Ok(condition) => filters.push(Filter::ByProjectCondition(condition)),
Err(_) => {}
}
}
_ => {}
};
}
Ok(filters)
}
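
A quick illustration of parse_params_into_filters above, as a test sketch (not part of this commit; it relies only on unknown keys being ignored):

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn skips_unknown_query_params() {
        // Roughly what GET /projects/1?incity=Punta%20Cana&finished=true&bogus=1
        // would arrive as after query deserialization.
        let mut params = HashMap::new();
        params.insert("incity".to_owned(), "Punta Cana".to_owned());
        params.insert("finished".to_owned(), "true".to_owned());
        params.insert("bogus".to_owned(), "1".to_owned());

        let filters = parse_params_into_filters(params).unwrap();
        // InCity and Finished are kept; the unknown "bogus" key is dropped.
        assert_eq!(filters.len(), 2);
    }
}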

@@ -1,10 +1,16 @@
use actix_web_utils::extensions::typed_response::TypedHttpResponse;
use jl_types::{
domain::{agent::Agent, project::Project, location::Location},
dto::payloads::{agent::{NewAgentPayload, UpdateAgentPayload}, project::{NewProjectPayload, UpdateProjectPayload}, location::NewLocationPayload},
domain::{agent::Agent, location::Location, project::Project, unit::Unit},
dto::payloads::{
agent::{NewAgentPayload, UpdateAgentPayload},
location::NewLocationPayload,
project::{NewProjectPayload, UpdateProjectPayload},
unit::{NewUnitPayload, UpdateUnitPayload},
},
};
use sqlx::PgPool;
use uuid::Uuid;
use crate::{dao, handle_db_read_op, handle_db_write_op, handle_tx, success, unwrap_or_not_found};
@@ -29,7 +35,10 @@ pub async fn create_new_location(
new_location: NewLocationPayload,
) -> TypedHttpResponse<Location> {
let mut tx = handle_tx!(conn.begin());
let persisted_location = handle_db_write_op!(dao::location::insert_location(&mut tx, &new_location.into()), tx);
let persisted_location = handle_db_write_op!(
dao::location::insert_location(&mut tx, &new_location.into()),
tx
);
handle_tx!(tx.commit());
success!(persisted_location)
}
@@ -39,13 +48,33 @@ pub async fn create_new_project(
new_project: NewProjectPayload,
) -> TypedHttpResponse<Project> {
let mut tx = handle_tx!(conn.begin());
unwrap_or_not_found!(handle_db_read_op!(dao::agent::get_agent_with_id(conn, &new_project.agent_id)), "agents");
unwrap_or_not_found!(handle_db_read_op!(dao::location::get_location_with_id(conn, &new_project.location_id)), "locations");
let persisted_project = handle_db_write_op!(dao::project::insert(&mut tx, &new_project.into()), tx);
unwrap_or_not_found!(
handle_db_read_op!(dao::agent::get_agent_with_id(conn, &new_project.agent_id)),
"agents"
);
unwrap_or_not_found!(
handle_db_read_op!(dao::location::get_location_with_id(
conn,
&new_project.location_id
)),
"locations"
);
let persisted_project =
handle_db_write_op!(dao::project::insert(&mut tx, &new_project.into()), tx);
handle_tx!(tx.commit());
success!(persisted_project)
}
pub async fn create_new_unit(conn: &PgPool, new_unit: NewUnitPayload) -> TypedHttpResponse<Unit> {
let mut tx = handle_tx!(conn.begin());
unwrap_or_not_found!(
handle_db_read_op!(dao::project::get_with_id(conn, &new_unit.project_id)),
"projects"
);
let persisted_unit = handle_db_write_op!(dao::unit::insert(&mut tx, &new_unit.into()), tx);
handle_tx!(tx.commit());
success!(persisted_unit)
}
//
// Update Methods
//
@@ -71,11 +100,72 @@ pub async fn update_agent(
success!(updated_agent)
}
pub async fn update_project(conn: &PgPool, update_project_payload: UpdateProjectPayload) -> TypedHttpResponse<Project> {
pub async fn update_project(
conn: &PgPool,
update_project_payload: UpdateProjectPayload,
) -> TypedHttpResponse<Project> {
let mut tx = handle_tx!(conn.begin());
let mut persisted_project = unwrap_or_not_found!(handle_db_read_op!(dao::project::get_with_id(conn, &update_project_payload.id)), "projects");
let mut persisted_project = unwrap_or_not_found!(
handle_db_read_op!(dao::project::get_with_id(conn, &update_project_payload.id)),
"projects"
);
update_project_payload.update_project(&mut persisted_project);
let updated_project = handle_db_write_op!(dao::project::update(&mut tx, &persisted_project), tx);
let updated_project =
handle_db_write_op!(dao::project::update(&mut tx, &persisted_project), tx);
handle_tx!(tx.commit());
success!(updated_project)
}
pub async fn update_unit(
conn: &PgPool,
update_unit_payload: UpdateUnitPayload,
) -> TypedHttpResponse<Unit> {
let mut tx = handle_tx!(conn.begin());
let mut persisted_unit = unwrap_or_not_found!(
handle_db_read_op!(dao::unit::get_with_id(conn, &update_unit_payload.id)),
"units"
);
update_unit_payload.update_unit(&mut persisted_unit);
let updated_unit = handle_db_write_op!(dao::unit::update(&mut tx, &persisted_unit), tx);
handle_tx!(tx.commit());
success!(updated_unit)
}
//
// Delete methods
//
pub async fn delete_project(conn: &PgPool, project_id: &Uuid) -> TypedHttpResponse<()> {
let mut tx = handle_tx!(conn.begin());
//let persisted_project = unwrap_or_not_found!(handle_db_read_op!(dao::project::get_with_id(conn, project_id)), "projects");
// TODO: Test to see if this gives an error if no units are found
handle_db_write_op!(dao::unit::delete_all_from_project(&mut tx, project_id), tx);
// handle_db_write_op!(dao::location::delete_location(&mut tx, persisted_project.location_id));
handle_db_write_op!(dao::project::delete(&mut tx, project_id), tx);
handle_tx!(tx.commit());
success!(())
}
pub async fn delete_location(conn: &PgPool, location_id: &Uuid) -> TypedHttpResponse<()> {
let mut tx = handle_tx!(conn.begin());
// TODO: Test to see if this gives an error if no locations are found
handle_db_write_op!(dao::location::delete_location(&mut tx, location_id), tx);
handle_tx!(tx.commit());
success!(())
}
pub async fn delete_agent(conn: &PgPool, agent_id: &Uuid) -> TypedHttpResponse<()> {
let mut tx = handle_tx!(conn.begin());
// TODO: Test to see if this gives an error if no agents are found
handle_db_write_op!(dao::agent::delete_agent(&mut tx, agent_id), tx);
handle_tx!(tx.commit());
success!(())
}
pub async fn delete_unit(conn: &PgPool, unit_id: &Uuid) -> TypedHttpResponse<()> {
let mut tx = handle_tx!(conn.begin());
// TODO: Test to see if this gives an error if no units are found
handle_db_write_op!(dao::unit::delete(&mut tx, unit_id), tx);
handle_tx!(tx.commit());
success!(())
}
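
On the TODOs above: in Postgres a DELETE that matches no rows is not an error, so these calls succeed and simply report zero affected rows. If a 404 is wanted in that case, one option is to inspect the query result, sketched below under the assumption (not verified here) that handle_db_write_op! evaluates to the PgQueryResult on success:

let result = handle_db_write_op!(dao::unit::delete(&mut tx, unit_id), tx);
if result.rows_affected() == 0 {
    // No unit matched unit_id; decide whether this should become a 404
    // instead of committing and returning success.
}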

@@ -1 +1,57 @@
use actix_web_utils::extensions::typed_response::TypedHttpResponse;
use jl_types::{
domain::{agent::Agent, location::Location, project::Project},
dto::{filters::Filter, listing::Listing},
};
use sqlx::PgPool;
use uuid::Uuid;
use crate::{dao, handle_db_read_op, success, unwrap_or_not_found};
pub async fn get_all_agents(conn: &PgPool) -> TypedHttpResponse<Vec<Agent>> {
success!(handle_db_read_op!(dao::agent::fetch_all(conn)))
}
pub async fn get_all_locations(conn: &PgPool) -> TypedHttpResponse<Vec<Location>> {
success!(handle_db_read_op!(dao::location::fetch_all_locations(conn)))
}
pub async fn get_all_locations_in_city(
conn: &PgPool,
city: &String,
) -> TypedHttpResponse<Vec<Location>> {
success!(handle_db_read_op!(dao::location::get_locations_in_city(
conn, city
)))
}
pub async fn get_projects_paged(
conn: &PgPool,
page: &i64,
filters: &Vec<Filter>,
) -> TypedHttpResponse<Vec<Project>> {
success!(handle_db_read_op!(dao::project::fetch_with_filters_paged(
conn, filters, page
)))
}
/// Returns the project's listing data: the project itself, its units, its location, and its agent.
pub async fn get_project_data(conn: &PgPool, project_id: &Uuid) -> TypedHttpResponse<Listing> {
let project = unwrap_or_not_found!(
handle_db_read_op!(dao::project::get_with_id(conn, project_id)),
"projects"
);
let units = handle_db_read_op!(dao::unit::fetch_with_project_id(conn, project_id));
let agent = unwrap_or_not_found!(
handle_db_read_op!(dao::agent::get_agent_with_id(conn, &project.agent_id)),
"agents"
);
let location = unwrap_or_not_found!(
handle_db_read_op!(dao::location::get_location_with_id(
conn,
&project.location_id
)),
"locations"
);
success!(Listing::new(project, units, location, agent))
}