From 227b0b5372f03d6726e47ed0c8765e6d5c400e2e Mon Sep 17 00:00:00 2001
From: Franklin
Date: Fri, 10 Mar 2023 19:32:27 -0400
Subject: [PATCH] Finished service layer and dao layer. V1 coming soon

---
 Cargo.lock                                |   1 +
 migrations/3_property_details.sql         |   2 +-
 sql/agent/get.sql                         |   1 +
 sql/property/fetch_paged_with_filters.sql |  21 +++
 sql/property/fetch_with_ids.sql           |   5 +-
 sqlx-data.json                            | 162 ++++++++++++++++++----
 src/dao/agent.rs                          |  10 ++
 src/dao/property.rs                       |  76 +++++++++-
 src/routes/admin.rs                       |   9 +-
 src/routes/mod.rs                         |   2 +-
 src/routes/{customer.rs => read.rs}       |   0
 src/service/admin.rs                      |   2 +-
 src/service/read.rs                       | 130 +++++++++++++++--
 13 files changed, 367 insertions(+), 54 deletions(-)
 create mode 100644 sql/agent/get.sql
 create mode 100644 sql/property/fetch_paged_with_filters.sql
 rename src/routes/{customer.rs => read.rs} (100%)

diff --git a/Cargo.lock b/Cargo.lock
index 55ebc76..f5d1897 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -1473,6 +1473,7 @@ dependencies = [
  "chrono-tz",
  "format_num",
  "serde",
+ "serde_json",
  "sqlx",
  "uuid",
 ]
diff --git a/migrations/3_property_details.sql b/migrations/3_property_details.sql
index 94e5e22..3f77ed3 100644
--- a/migrations/3_property_details.sql
+++ b/migrations/3_property_details.sql
@@ -1,7 +1,7 @@
 CREATE TABLE IF NOT EXISTS "property_details" (
     property_id UUID PRIMARY KEY,
     meters FLOAT4 NOT NULL,
-    listing_type BYTEA NOT NULL,
+    listing_type VARCHAR NOT NULL,
     photo_urls BYTEA NOT NULL,
     location_id UUID NOT NULL
 );
\ No newline at end of file
diff --git a/sql/agent/get.sql b/sql/agent/get.sql
new file mode 100644
index 0000000..d512775
--- /dev/null
+++ b/sql/agent/get.sql
@@ -0,0 +1 @@
+SELECT * FROM agent ORDER BY time_created DESC LIMIT 50 OFFSET $1;
\ No newline at end of file
diff --git a/sql/property/fetch_paged_with_filters.sql b/sql/property/fetch_paged_with_filters.sql
new file mode 100644
index 0000000..97f7c6d
--- /dev/null
+++ b/sql/property/fetch_paged_with_filters.sql
@@ -0,0 +1,21 @@
+SELECT
+p.id,
+p.title,
+p.description,
+p.agent_id,
+p.state as "state: _",
+p.time_created,
+p.last_updated
+FROM property p, property_details pd, location l where pd.property_id = p.id AND pd.location_id = l.id
+AND (p.time_created <= $1 OR $1 IS NULL) -- Before Filter
+AND (p.time_created >= $2 OR $2 IS NULL) -- After Filter
+AND (LOWER(p.title) LIKE ('%' || LOWER($3) || '%') OR $3 IS NULL) -- Title filter (like)
+AND (LOWER(p.description) LIKE ('%' || LOWER($4) || '%') OR $4 IS NULL) -- Description Filter (like)
+AND (pd.meters >= $5 OR $5 IS NULL) -- Bigger than or equal to filter
+AND (pd.meters <= $6 OR $6 IS NULL) -- Smaller than or equal to filter
+AND (pd.location_id = $7 OR $7 IS NULL) -- Location Filter
+AND (pd.listing_type LIKE $8 || '%' OR $8 IS NULL) -- Listing type filter
+AND (split_part(pd.listing_type, ' ', 2)::FLOAT8 >= $9::FLOAT8 OR $9 IS NULL) -- More expensive than filter
+AND (split_part(pd.listing_type, ' ', 2)::FLOAT8 <= $10::FLOAT8 OR $10 IS NULL) -- Cheaper than filter
+ORDER BY p.time_created DESC
+LIMIT 25 OFFSET $11
\ No newline at end of file
diff --git a/sql/property/fetch_with_ids.sql b/sql/property/fetch_with_ids.sql
index eacf2a2..554d63f 100644
--- a/sql/property/fetch_with_ids.sql
+++ b/sql/property/fetch_with_ids.sql
@@ -6,4 +6,7 @@ agent_id,
 state as "state: _",
 time_created,
 last_updated
-FROM property where id = ANY($1);
\ No newline at end of file
+FROM property
+WHERE id = ANY($1)
+ORDER BY time_created DESC
+LIMIT 50
\ No newline at end of file
diff --git a/sqlx-data.json b/sqlx-data.json
index 26b4fbc..55145db 100644
---
a/sqlx-data.json +++ b/sqlx-data.json @@ -1,5 +1,61 @@ { "db": "PostgreSQL", + "0370da7558790827f09104791a6a15be8b0e9580da39d5f1e8dd2e47a48a692f": { + "describe": { + "columns": [ + { + "name": "id", + "ordinal": 0, + "type_info": "Uuid" + }, + { + "name": "title", + "ordinal": 1, + "type_info": "Varchar" + }, + { + "name": "description", + "ordinal": 2, + "type_info": "Varchar" + }, + { + "name": "agent_id", + "ordinal": 3, + "type_info": "Uuid" + }, + { + "name": "state: _", + "ordinal": 4, + "type_info": "Bytea" + }, + { + "name": "time_created", + "ordinal": 5, + "type_info": "Timestamptz" + }, + { + "name": "last_updated", + "ordinal": 6, + "type_info": "Timestamptz" + } + ], + "nullable": [ + false, + false, + false, + false, + false, + false, + false + ], + "parameters": { + "Left": [ + "UuidArray" + ] + } + }, + "query": "SELECT \nid,\ntitle,\ndescription,\nagent_id,\nstate as \"state: _\",\ntime_created,\nlast_updated\nFROM property\nWHERE id = ANY($1)\nORDER BY time_created DESC\nLIMIT 50" + }, "044ecd955f880a5d7e3185b6dc5217c01cb88100d2e94dea5323ee0a10a45fc0": { "describe": { "columns": [ @@ -211,6 +267,72 @@ }, "query": "INSERT INTO property (\n id,\n title,\n description,\n agent_id,\n state,\n time_created,\n last_updated\n) VALUES (\n $1,\n $2,\n $3,\n $4,\n $5,\n $6,\n $6\n) RETURNING\nid,\ntitle,\ndescription,\nagent_id,\nstate as \"state: _\",\ntime_created,\nlast_updated;" }, + "2bcf024ea2185368a01e109b697bb69564f96b213595d16d0bcde240c2a21b06": { + "describe": { + "columns": [ + { + "name": "id", + "ordinal": 0, + "type_info": "Uuid" + }, + { + "name": "title", + "ordinal": 1, + "type_info": "Varchar" + }, + { + "name": "description", + "ordinal": 2, + "type_info": "Varchar" + }, + { + "name": "agent_id", + "ordinal": 3, + "type_info": "Uuid" + }, + { + "name": "state: _", + "ordinal": 4, + "type_info": "Bytea" + }, + { + "name": "time_created", + "ordinal": 5, + "type_info": "Timestamptz" + }, + { + "name": "last_updated", + "ordinal": 6, + "type_info": "Timestamptz" + } + ], + "nullable": [ + false, + false, + false, + false, + false, + false, + false + ], + "parameters": { + "Left": [ + "Timestamptz", + "Timestamptz", + "Text", + "Text", + "Float4", + "Float4", + "Uuid", + "Text", + "Float8", + "Float8", + "Int8" + ] + } + }, + "query": "SELECT \np.id,\np.title,\np.description,\np.agent_id,\np.state as \"state: _\",\np.time_created,\np.last_updated\nFROM property p, property_details pd, location l where pd.property_id = p.id AND pd.location_id = l.id\nAND (p.time_created <= $1 OR $1 = null) -- Before Filter\nAND (p.time_created >= $2 OR $2 = null) -- After Filter\nAND (p.title LIKE ('%' || LOWER($3) || '%') OR $3 = null) -- Title filter (like)\nAND (p.description LIKE ('%' || LOWER($4) || '%') OR $4 = null) -- Description Filter (like)\nAND (pd.meters > $5 OR $5 = null) -- Bigger than or equal to filter\nAND (pd.meters < $6 OR $6 = null) -- Smaller than or equal to filter\nAND (pd.location_id = $7 OR $7 = null) -- Location Filter\nAND (pd.listing_type LIKE $8 || '%' OR $8 = null) -- Listing type filter\nAND (split_part(pd.listing_type, ' ', 2)::FLOAT8 >= $9::FLOAT8 OR $9 = null) -- More expensive than filter\nAND (split_part(pd.listing_type, ' ', 2)::FLOAT8 <= $10::FLOAT8 OR $10 = null) -- More expensive than filter\nORDER BY p.time_created DESC\nLIMIT 25 OFFSET $11" + }, "403be6e4c7b32698eb9dd9c43f9c13f7f03dc95671f1c81a5391df05c51a92fc": { "describe": { "columns": [ @@ -361,7 +483,7 @@ }, "query": "SELECT * FROM location WHERE id = ANY($1);" }, - 
"75508bc7b114d6f9915cadf9708c376d2ca1a6dcdd6bff24b1a717cba0ec2fca": { + "9656e56cd7da8d41f98e7746421101900d4926a5ad5aa6dc21da39d227fed28a": { "describe": { "columns": [ { @@ -370,40 +492,22 @@ "type_info": "Uuid" }, { - "name": "title", + "name": "full_name", "ordinal": 1, "type_info": "Varchar" }, - { - "name": "description", - "ordinal": 2, - "type_info": "Varchar" - }, - { - "name": "agent_id", - "ordinal": 3, - "type_info": "Uuid" - }, - { - "name": "state: _", - "ordinal": 4, - "type_info": "Bytea" - }, { "name": "time_created", - "ordinal": 5, + "ordinal": 2, "type_info": "Timestamptz" }, { "name": "last_updated", - "ordinal": 6, + "ordinal": 3, "type_info": "Timestamptz" } ], "nullable": [ - false, - false, - false, false, false, false, @@ -411,11 +515,11 @@ ], "parameters": { "Left": [ - "UuidArray" + "Int8" ] } }, - "query": "SELECT \nid,\ntitle,\ndescription,\nagent_id,\nstate as \"state: _\",\ntime_created,\nlast_updated\nFROM property where id = ANY($1);" + "query": "SELECT * FROM agent ORDER BY time_created DESC LIMIT 50 OFFSET $1;" }, "a05953d6326a1fa6da0444234846b6ee761c8c38c69145b84a2f8d321d879289": { "describe": { @@ -433,7 +537,7 @@ { "name": "listing_type: _", "ordinal": 2, - "type_info": "Bytea" + "type_info": "Varchar" }, { "name": "photo_urls: _", @@ -457,7 +561,7 @@ "Left": [ "Uuid", "Float4", - "Bytea", + "Varchar", "Bytea", "Uuid" ] @@ -481,7 +585,7 @@ { "name": "listing_type: _", "ordinal": 2, - "type_info": "Bytea" + "type_info": "Varchar" }, { "name": "photo_urls: _", @@ -525,7 +629,7 @@ { "name": "listing_type: _", "ordinal": 2, - "type_info": "Bytea" + "type_info": "Varchar" }, { "name": "photo_urls: _", @@ -548,7 +652,7 @@ "parameters": { "Left": [ "Float4", - "Bytea", + "Varchar", "Bytea", "Uuid" ] diff --git a/src/dao/agent.rs b/src/dao/agent.rs index b883363..8a9dae7 100644 --- a/src/dao/agent.rs +++ b/src/dao/agent.rs @@ -25,6 +25,16 @@ pub async fn get_agents_with_ids( .await } +pub async fn get_agents_paged( + conn: &PgPool, + page: &i64, +) -> Result, sqlx::error::Error> { + let offset = (page - 1) * 50; + sqlx::query_file_as!(Agent, "sql/agent/get.sql", offset) + .fetch_all(conn) + .await +} + pub async fn update_agent( conn: &mut Transaction<'_, Postgres>, agent: &Agent, diff --git a/src/dao/property.rs b/src/dao/property.rs index b9bbb3a..345b4c1 100644 --- a/src/dao/property.rs +++ b/src/dao/property.rs @@ -1,4 +1,4 @@ -use remax_types::domain::property::Property; +use remax_types::domain::{filters::property::PropertyFilter, property::Property}; use sqlx::{PgPool, Postgres, Transaction}; use uuid::Uuid; @@ -19,6 +19,7 @@ pub async fn insert_property( .fetch_one(conn) .await } + pub async fn get_properties_with_ids( conn: &PgPool, ids: &Vec, @@ -28,6 +29,79 @@ pub async fn get_properties_with_ids( .await } +pub async fn fetch_properties_paged_with_filters( + conn: &PgPool, + filters: &Vec, + page: &i64, +) -> Result, sqlx::error::Error> { + let offset = (page - 1) * 25; + let mut before_filter = None; + let mut after_filter = None; + let mut title_filter = None; + let mut description_filter = None; + let mut bigger_filter = None; + let mut smaller_filter = None; + let mut location_filter = None; + let mut listing_type_filter = None; + let mut more_expensive_than_filter = None; + let mut cheaper_than_filter = None; + + for filter in filters { + match filter { + PropertyFilter::Before(before_time) => before_filter = Some(before_time), + PropertyFilter::After(after_time) => after_filter = Some(after_time), + PropertyFilter::Title(title_like) => 
title_filter = Some(title_like),
+            PropertyFilter::BiggerOrEqualTo(meters) => bigger_filter = Some(meters),
+            PropertyFilter::SmallerOrEqualTo(meters) => smaller_filter = Some(meters),
+            PropertyFilter::Description(description_like) => {
+                description_filter = Some(description_like)
+            }
+            PropertyFilter::Location(location_id) => location_filter = Some(location_id),
+            PropertyFilter::MoreExpensiveThan(listing_type) => {
+                listing_type_filter = Some(listing_type.to_string().split(" ").collect::<String>());
+                more_expensive_than_filter = match listing_type {
+                    remax_types::domain::property_details::ListingType::Rent(amount) => {
+                        Some(amount)
+                    }
+                    remax_types::domain::property_details::ListingType::Sale(amount) => {
+                        Some(amount)
+                    }
+                    _ => None,
+                }
+            }
+            PropertyFilter::CheaperThan(listing_type) => {
+                listing_type_filter = Some(listing_type.to_string().split(" ").collect::<String>());
+                cheaper_than_filter = match listing_type {
+                    remax_types::domain::property_details::ListingType::Rent(amount) => {
+                        Some(amount)
+                    }
+                    remax_types::domain::property_details::ListingType::Sale(amount) => {
+                        Some(amount)
+                    }
+                    _ => None,
+                }
+            }
+        };
+    }
+    sqlx::query_file_as!(
+        Property,
+        "sql/property/fetch_paged_with_filters.sql",
+        before_filter,
+        after_filter,
+        title_filter,
+        description_filter,
+        bigger_filter,
+        smaller_filter,
+        location_filter,
+        listing_type_filter,
+        more_expensive_than_filter,
+        cheaper_than_filter,
+        offset
+    )
+    .fetch_all(conn)
+    .await
+}
+
 pub async fn update_property(
     conn: &mut Transaction<'_, Postgres>,
     property: &Property,
diff --git a/src/routes/admin.rs b/src/routes/admin.rs
index 1c231a2..139597f 100644
--- a/src/routes/admin.rs
+++ b/src/routes/admin.rs
@@ -1,9 +1,2 @@
-// TODO: Property
-//  - Add property
-//  - Remove Properties by ids
-//  - Update Property
-// TODO: Agent
-//  - Add agent
-//  - Remove agents
-//  - Update agents
+
 
diff --git a/src/routes/mod.rs b/src/routes/mod.rs
index 5ae94f8..ab2965e 100644
--- a/src/routes/mod.rs
+++ b/src/routes/mod.rs
@@ -1,3 +1,3 @@
 pub mod admin;
-pub mod customer;
+pub mod read;
 pub mod main_router;
diff --git a/src/routes/customer.rs b/src/routes/read.rs
similarity index 100%
rename from src/routes/customer.rs
rename to src/routes/read.rs
diff --git a/src/service/admin.rs b/src/service/admin.rs
index f6cab47..d81c639 100644
--- a/src/service/admin.rs
+++ b/src/service/admin.rs
@@ -127,7 +127,7 @@ pub async fn update_listing(
     let mut tx = handle_tx!(conn.begin());
     let mut persisted_properties = handle_db_read_op!(dao::property::get_properties_with_ids(
         conn,
-        &vec![update_property_payload.property_id]
+        &vec![update_property_payload.property_id],
     ));
     let mut persisted_properties_details =
         handle_db_read_op!(dao::property_details::get_properties_with_ids(
diff --git a/src/service/read.rs b/src/service/read.rs
index ae5d015..04cffd2 100644
--- a/src/service/read.rs
+++ b/src/service/read.rs
@@ -1,21 +1,127 @@
-// TODO: Most important method: Get First page of property listings (Filters)
-// TODO: Get individual property listing with all the info (contact, location, property, details)
-// TODO: Get all agents
-
 use actix_web_utils::extensions::typed_response::TypedHttpResponse;
 use remax_types::{
-    domain::agent::Agent,
-    dto::property::{ListingContainer, PropertyContainer},
+    domain::{agent::Agent, filters::property::PropertyFilter},
+    dto::{
+        agent::AgentContainer,
+        property::{ListingContainer, PropertyContainer},
+    },
+};
+use sqlx::PgPool;
+use uuid::Uuid;
+
+use crate::{
+    dao,
+    handle_db_read_op, success, unwrap_or_not_found,
 };
 
-pub async fn get_property_listings_paged() -> TypedHttpResponse<PropertyContainer> {
-    todo!()
+/// Method to call when loading in to the homepage or searching anything
+/// Pending tests
+pub async fn get_property_listings_paged(
+    conn: &PgPool,
+    filters: &Vec<PropertyFilter>,
+    page: &i64,
+) -> TypedHttpResponse<Vec<PropertyContainer>> {
+    let properties_filtered = handle_db_read_op!(
+        dao::property::fetch_properties_paged_with_filters(conn, filters, page)
+    );
+    let property_ids: Vec<Uuid> = properties_filtered
+        .iter()
+        .map(|property| property.id)
+        .collect();
+    let property_details = handle_db_read_op!(dao::property_details::get_properties_with_ids(
+        conn,
+        &property_ids
+    ));
+    let location_ids: Vec<Uuid> = property_details
+        .iter()
+        .map(|property_details| property_details.location_id)
+        .collect();
+    let locations = handle_db_read_op!(dao::location::get_locations_with_ids(conn, &location_ids));
+
+    let mut property_containers: Vec<PropertyContainer> = Vec::new();
+    for property in properties_filtered {
+        let property_details = unwrap_or_not_found!(
+            property_details
+                .iter()
+                .find(|property_detail| property_detail.property_id == property.id),
+            "filtered property details"
+        )
+        .clone();
+        let location = unwrap_or_not_found!(
+            locations
+                .iter()
+                .find(|loc| loc.id == property_details.location_id),
+            "filtered location"
+        )
+        .clone();
+        property_containers.push(PropertyContainer {
+            property,
+            details: property_details,
+            location,
+        });
+    }
+    success!(property_containers)
 }
 
-pub async fn get_listing_container() -> TypedHttpResponse<ListingContainer> {
-    todo!()
+pub async fn get_listing_container(
+    conn: &PgPool,
+    listing_id: &Uuid,
+) -> TypedHttpResponse<ListingContainer> {
+    let property = unwrap_or_not_found!(
+        handle_db_read_op!(dao::property::get_properties_with_ids(
+            conn,
+            &vec![*listing_id]
+        ))
+        .first(),
+        "properties"
+    )
+    .clone();
+    let details = unwrap_or_not_found!(
+        handle_db_read_op!(dao::property_details::get_properties_with_ids(
+            conn,
+            &vec![*listing_id]
+        ))
+        .first(),
+        "property_details"
+    )
+    .clone();
+    let location = unwrap_or_not_found!(
+        handle_db_read_op!(dao::location::get_location_with_id(
+            conn,
+            &details.location_id
+        )),
+        "locations"
+    );
+    let agent = unwrap_or_not_found!(
+        handle_db_read_op!(dao::agent::get_agents_with_ids(
+            conn,
+            &vec![property.agent_id]
+        ))
+        .first(),
+        "agents"
+    )
+    .clone();
+    let contact = unwrap_or_not_found!(
+        handle_db_read_op!(dao::contact_info::get_contact_infos_with_ids(
+            conn,
+            &vec![agent.id]
+        ))
+        .first(),
+        "contact_infos"
+    )
+    .clone();
+
+    success!(ListingContainer {
+        property: PropertyContainer {
+            property,
+            details,
+            location,
+        },
+        agent: AgentContainer { agent, contact },
+    })
 }
 
-pub async fn get_all_agents() -> TypedHttpResponse<Vec<Agent>> {
-    todo!()
+pub async fn get_all_agents(conn: &PgPool, page: &i64) -> TypedHttpResponse<Vec<Agent>> {
+    let agents = handle_db_read_op!(dao::agent::get_agents_paged(conn, page));
+    success!(agents)
 }
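
src/routes/read.rs is carried over as a 100% rename, so the diff above does not show how the new read service is exposed over HTTP. A rough sketch of what that wiring could look like follows. It is illustrative only and not part of this patch: it assumes TypedHttpResponse implements actix-web's Responder, that PropertyFilter derives serde's Deserialize, that uuid's serde feature is enabled, and that the PgPool is registered as application data; the handler names, paths, and extractors are hypothetical, and registration in main_router is omitted.

// Hypothetical sketch for src/routes/read.rs -- illustrative only.
use actix_web::web::{Data, Json, Path};
use actix_web_utils::extensions::typed_response::TypedHttpResponse;
use remax_types::{
    domain::{agent::Agent, filters::property::PropertyFilter},
    dto::property::{ListingContainer, PropertyContainer},
};
use sqlx::PgPool;
use uuid::Uuid;

use crate::service;

/// POST /listings/search/{page} with a JSON array of filters in the body (path is illustrative).
pub async fn search_listings(
    conn: Data<PgPool>,
    filters: Json<Vec<PropertyFilter>>,
    page: Path<i64>,
) -> TypedHttpResponse<Vec<PropertyContainer>> {
    // Data<PgPool>, Json<..> and Path<..> deref-coerce to the &-parameters the service layer takes.
    service::read::get_property_listings_paged(&conn, &filters, &page).await
}

/// GET /listings/{listing_id} (path is illustrative).
pub async fn get_listing(
    conn: Data<PgPool>,
    listing_id: Path<Uuid>,
) -> TypedHttpResponse<ListingContainer> {
    service::read::get_listing_container(&conn, &listing_id).await
}

/// GET /agents/{page} (path is illustrative).
pub async fn get_agents(conn: Data<PgPool>, page: Path<i64>) -> TypedHttpResponse<Vec<Agent>> {
    service::read::get_all_agents(&conn, &page).await
}

Because the service functions already return TypedHttpResponse, the handlers stay thin: error mapping and not-found handling happen once in the service layer through the handle_db_read_op! and unwrap_or_not_found! macros.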