Files
huike-back/htyws/src/ws_daka.rs
T
weli 07ee6e7cc6 feat(ws-org): add org_id scoping across ws entities
Add org_id fields and migrations for course, section, lianxi, piyue, and jihua/daka data paths, and enforce organization-scoped filtering in ws service queries.

Made-with: Cursor
2026-04-27 23:06:58 +08:00

981 lines
36 KiB
Rust

use crate::ws_all::{
convert_course_sections_to_req_course_sections_for_jihua_daka_scenario,
convert_course_sections_to_req_course_sections_for_jihua_daka_scenario2, find_hty_user_by_host,
find_hty_user_group_by_id,
};
use crate::ws_lianxi::count_lianxi_and_piyue_on_the_fly;
use anyhow::anyhow;
use axum::extract::{Path, Query, State};
use axum::Json;
use chrono::NaiveDateTime;
use diesel::PgConnection;
use htycommons::common::{
current_local_datetime, get_page_and_page_size, parse_date_time, strip_result_vec, HtyErr,
HtyErrCode, HtyResponse,
};
use htycommons::db::{exec_read_write_task, extract_conn, fetch_db_conn, DbState};
use htycommons::jwt::jwt_decode_token;
use htycommons::uuid;
use htycommons::web::{
wrap_json_anyhow_err, wrap_json_ok_resp, AuthorizationHeader, HtyHostHeader,
HtySudoerTokenHeader,
};
use htyuc_remote::remote_calls::find_hty_user_with_info_by_id;
use htyws_models::models::{
Daka, DakaCourseSection, JihuaCourseSectionMeta, Course, ReqDakaCourseSection,
ReqDakaCourseSectionsRelation, ReqDakaWithCourseSectionIds, ReqDakaWithCourseSections,
ReqDakaWithCourseSections2,
};
use std::collections::HashMap;
use std::ops::DerefMut;
use std::sync::Arc;
use tracing::{debug, error};
/// Decodes a sudoer JWT token string and returns its `current_org_id` claim.
///
/// Errors if the token cannot be decoded or the claim is absent. Used by the
/// org-scoped find/delete paths in this module to enforce organization
/// filtering.
fn required_current_org_id_from_sudoer_token_str(token_str: &String) -> anyhow::Result<String> {
    jwt_decode_token(token_str)?
        .current_org_id
        .ok_or_else(|| anyhow!("current_org_id is required"))
}
pub async fn create_daka(
root: HtySudoerTokenHeader,
host: HtyHostHeader,
auth: AuthorizationHeader,
State(db_pool): State<Arc<DbState>>,
Json(in_daka): Json<ReqDakaWithCourseSectionIds>,
) -> Json<HtyResponse<String>> {
debug!("create_daka -> starts");
match raw_create_daka(auth, root, host, db_pool, &in_daka).await {
Ok(ok) => wrap_json_ok_resp(ok),
Err(e) => {
error!("create_daka -> failed to create daka, e: {}", e);
wrap_json_anyhow_err(e)
}
}
}
/// Creates a daka from the request payload.
///
/// Validates mandatory fields, resolves the teacher's display name via the
/// user service, scopes the new record to the caller's `current_org_id`
/// (taken from the authorization token), and persists the daka plus its
/// course-section relations in a single transaction.
///
/// Returns the id of the newly created daka.
pub async fn raw_create_daka(
    _token: AuthorizationHeader,
    root: HtySudoerTokenHeader,
    host: HtyHostHeader,
    db_pool: Arc<DbState>,
    in_req_daka: &ReqDakaWithCourseSectionIds,
) -> anyhow::Result<String> {
    // Org scoping comes from the caller's auth token, not the request body.
    let current_org_id = jwt_decode_token(&(*_token).clone())?
        .current_org_id
        .ok_or_else(|| anyhow!("current_org_id is required"))?;
    let req_daka = in_req_daka.clone();
    // Mandatory-field validation; group_id is intentionally optional.
    if req_daka.start_date.is_none()
        || req_daka.duration_days.is_none()
        || req_daka.name.is_none()
        || req_daka.teacher_id.is_none()
    {
        return Err(anyhow!(HtyErr {
            code: HtyErrCode::WebErr,
            // Message now matches exactly the fields validated above
            // (previously it also mentioned `teacher_name`, which is not
            // a request field — it is resolved below).
            reason: Some("start_date or duration_days or name or teacher_id is none".into()),
        }));
    }
    // Resolve the teacher's display name from the user service.
    let teacher_name = find_hty_user_by_host(
        &req_daka
            .teacher_id
            .clone()
            .ok_or_else(|| anyhow!("teacher_id is required"))?,
        &host,
        &root,
    )
    .await?
    .real_name;
    let now = current_local_datetime();
    // Build the row to insert; `updated_*` mirror the creation metadata.
    let insert_daka = Daka {
        id: uuid(),
        desc: req_daka.desc.clone(),
        start_date: req_daka
            .start_date
            .clone()
            .ok_or_else(|| anyhow!("start_date is required"))?,
        duration_days: req_daka
            .duration_days
            .clone()
            .ok_or_else(|| anyhow!("duration_days is required"))?,
        name: req_daka
            .name
            .clone()
            .ok_or_else(|| anyhow!("name is required"))?,
        created_at: now.clone(),
        created_by: req_daka
            .created_by
            .clone()
            .ok_or_else(|| anyhow!("created_by is required"))?,
        teacher_id: req_daka
            .teacher_id
            .clone()
            .ok_or_else(|| anyhow!("teacher_id is required"))?,
        group_id: req_daka.group_id.clone(),
        is_delete: false,
        teacher_name,
        group_name: req_daka.group_name.clone(),
        lianxi_count: req_daka.lianxi_count.clone(),
        teachers: req_daka.teachers.clone(),
        updated_at: Some(now.clone()),
        updated_by: Some(
            req_daka
                .created_by
                .clone()
                .ok_or_else(|| anyhow!("created_by is required"))?,
        ),
        students: req_daka.students.clone(),
        course_sections: req_daka.course_sections.clone(),
        is_yanqi: req_daka.is_yanqi.clone(),
        org_id: Some(current_org_id),
    };
    let mut params = HashMap::new();
    params.insert("params".to_string(), (req_daka, insert_daka.clone()));
    // Persist daka + section relations transactionally.
    let created_daka_result =
        raw_create_daka_tx(params, extract_conn(fetch_db_conn(&db_pool)?).deref_mut());
    match created_daka_result {
        Ok(create_daka) => Ok(create_daka.id),
        Err(e) => Err(anyhow!(HtyErr {
            code: HtyErrCode::WebErr,
            reason: Some("fail to create daka e: ".to_string() + &e.to_string()),
        })),
    }
}
/// Transactionally inserts the daka row, then one `DakaCourseSection` link
/// per requested course-section id; every link inherits the daka's `org_id`.
///
/// `params` must contain a `"params"` entry holding the original request and
/// the prepared `Daka` row. Returns the created daka on success; any failure
/// aborts the whole transaction via `exec_read_write_task`.
pub fn raw_create_daka_tx(
    params: HashMap<String, (ReqDakaWithCourseSectionIds, Daka)>,
    conn: &mut PgConnection,
) -> anyhow::Result<Daka> {
    let task = move |in_params: Option<HashMap<String, (ReqDakaWithCourseSectionIds, Daka)>>,
                     conn: &mut PgConnection|
          -> anyhow::Result<Daka> {
        let the_params = in_params.ok_or_else(|| anyhow!("params is required"))?;
        // Borrow the entry directly instead of cloning the whole map first.
        let (req_daka_with_section_ids, insert_daka) = the_params
            .get("params")
            .ok_or_else(|| anyhow!("params key not found"))?
            .clone();
        // The tuple is already owned here, so no extra `to_owned` is needed.
        let sections_ids = req_daka_with_section_ids
            .course_section_ids
            .unwrap_or_default();
        debug!("START CREATE DAKA -> {:?}", &the_params);
        let result_daka = Daka::create(&insert_daka, conn)?;
        debug!("DAKA CREATED -> {:?}", &result_daka);
        let meta_data = JihuaCourseSectionMeta { meta: None };
        for in_section_id in sections_ids {
            let section = DakaCourseSection {
                id: uuid(),
                daka_id: result_daka.id.clone(),
                course_section_id: in_section_id,
                meta: Some(meta_data.clone()),
                is_delete: false,
                // Link rows carry the same org scope as the daka itself.
                org_id: result_daka.org_id.clone(),
            };
            debug!("START CREATE DAKA QUMU SECTION -> {:?}", &section);
            let created_section = DakaCourseSection::create(&section, conn)?;
            debug!("CREATED DAKA QUMU SECTION -> {:?}", &created_section);
        }
        Ok(result_daka)
    };
    exec_read_write_task(Box::new(task), Some(params), conn)
}
pub async fn find_daka_by_id(
root: HtySudoerTokenHeader,
Path(id): Path<String>,
State(db_pool): State<Arc<DbState>>,
) -> Json<HtyResponse<ReqDakaWithCourseSections>> {
debug!("find_daka_by_id -> start here");
match raw_find_daka_by_id(&root, &id, db_pool).await {
Ok(res) => {
debug!("find_daka_by_id -> successfully find daka: {:?}", res);
wrap_json_ok_resp(res)
}
Err(e) => {
error!("find_daka_by_id -> failed to find daka, e: {}", e);
wrap_json_anyhow_err(e)
}
}
}
pub async fn find_daka_by_id2(
root: HtySudoerTokenHeader,
Path(id): Path<String>,
State(db_pool): State<Arc<DbState>>,
) -> Json<HtyResponse<ReqDakaWithCourseSections2>> {
debug!("find_daka_by_id2 -> start here");
match raw_find_daka_by_id2(&root, &id, db_pool).await {
Ok(res) => {
debug!("find_daka_by_id2 -> successfully find daka: {:?}", res);
wrap_json_ok_resp(res)
}
Err(e) => {
error!("find_daka_by_id2 -> failed to find daka, e: {}", e);
wrap_json_anyhow_err(e)
}
}
}
/// Org-scoped lookup of a single daka, assembled into
/// `ReqDakaWithCourseSections2`: linked course sections, relation DTOs,
/// on-the-fly lianxi/piyue counts, and teacher/group names resolved via
/// remote user-service calls.
///
/// NOTE(review): the two response literals below duplicate every field;
/// only `group_id`/`group_name` differ between the branches.
pub async fn raw_find_daka_by_id2(
    root: &HtySudoerTokenHeader,
    id: &String,
    db_pool: Arc<DbState>,
) -> anyhow::Result<ReqDakaWithCourseSections2> {
    debug!("raw_find_daka_by_id2 -> {:?}", id);
    // Org id comes from the sudoer token; the daka lookup below is org-bound.
    let current_org_id = required_current_org_id_from_sudoer_token_str(&root.0)?;
    let daka = Daka::find_by_id_in_org(
        &id,
        &current_org_id,
        extract_conn(fetch_db_conn(&db_pool)?).deref_mut(),
    )?;
    // NOTE(review): this logs the id, not the daka, despite the label.
    debug!("raw_find_daka_by_id2 -> daka: {:?}", id);
    // Counts are computed on the fly from the daka's active lianxis.
    let all_lianxis =
        daka.find_active_belonging_lianxis(extract_conn(fetch_db_conn(&db_pool)?).deref_mut())?;
    debug!("raw_find_daka_by_id2 -> all_lianxis: {:?}", all_lianxis);
    let (count_lianxi, not_piyue_count) = count_lianxi_and_piyue_on_the_fly(&all_lianxis);
    let the_course_sections =
        daka.find_active_linked_course_sections(extract_conn(fetch_db_conn(&db_pool)?).deref_mut())?;
    debug!(
        "raw_find_daka_by_id2 -> the_course_sections: {:?}",
        the_course_sections
    );
    // Convert sections to request DTOs for the "DAKA" scenario (v2 shape).
    let any_req_course_sections = convert_course_sections_to_req_course_sections_for_jihua_daka_scenario2(
        &String::from("DAKA"),
        &daka.id,
        the_course_sections.clone(),
        extract_conn(fetch_db_conn(&db_pool)?).deref_mut(),
    );
    debug!(
        "raw_find_daka_by_id2 -> any_req_course_sections: {:?}",
        any_req_course_sections
    );
    // Fail the whole request if any per-section conversion failed.
    let out_req_course_sections = strip_result_vec(any_req_course_sections)?;
    debug!(
        "raw_find_daka_by_id2 -> out_req_course_sections: {:?}",
        out_req_course_sections
    );
    // Build one relation DTO per linked course section (org-scoped lookup).
    let any_relations: Vec<anyhow::Result<ReqDakaCourseSection>> = the_course_sections
        .iter()
        .map(|course_section| {
            let daka_org_id = daka
                .org_id
                .as_ref()
                .ok_or_else(|| anyhow!("daka.org_id is required"))?;
            let in_jihua_course_section =
                DakaCourseSection::find_by_daka_id_and_course_section_id_in_org(
                    &daka.id,
                    &course_section.id,
                    daka_org_id,
                    extract_conn(fetch_db_conn(&db_pool)?).deref_mut(),
                )?;
            Ok(ReqDakaCourseSection {
                id: Some(in_jihua_course_section.id.clone()),
                daka_id: None,
                course_section_id: Some(in_jihua_course_section.course_section_id.clone()),
                meta: None,
                is_delete: Some(in_jihua_course_section.is_delete.clone()),
            })
        })
        .collect();
    debug!(
        "raw_find_daka_by_id2 -> daka.teacher_id {:?}",
        &daka.teacher_id
    );
    // Resolve the teacher's display name remotely.
    let user_teacher = find_hty_user_with_info_by_id(&daka.teacher_id, root).await?;
    let user_group;
    // With a group: resolve its name remotely; without: leave group fields empty.
    if let Some(group_id) = &daka.group_id {
        let c_group_id = group_id.clone();
        user_group = find_hty_user_group_by_id(&c_group_id.clone(), root).await?;
        Ok(ReqDakaWithCourseSections2 {
            all_lianxis: None,
            created_at: Some(daka.created_at.clone()),
            created_by: Some(daka.created_by.clone()),
            desc: daka.desc.clone(),
            duration_days: Some(daka.duration_days.clone()),
            group_id: Some(c_group_id.clone()),
            group_name: user_group.group_name,
            id: Some(daka.id.clone()),
            is_delete: Some(daka.is_delete.clone()),
            is_yanqi: daka.is_yanqi.clone(),
            lianxi_count: Some(count_lianxi),
            name: Some(daka.name.clone()),
            not_piyue_count: Some(not_piyue_count),
            course_sections2: daka.course_sections.clone(),
            course_sections: Some(out_req_course_sections),
            relations: Some(strip_result_vec(any_relations)?),
            start_date: Some(daka.start_date.clone()),
            students: daka.students.clone(),
            teacher_id: Some(daka.teacher_id.clone()),
            teacher_name: user_teacher.real_name,
            teachers: daka.teachers.clone(),
            updated_at: daka.updated_at.clone(),
            updated_by: daka.updated_by.clone(),
        })
    } else {
        Ok(ReqDakaWithCourseSections2 {
            id: Some(daka.id.clone()),
            desc: daka.desc.clone(),
            start_date: Some(daka.start_date.clone()),
            duration_days: Some(daka.duration_days.clone()),
            name: Some(daka.name.clone()),
            created_at: Some(daka.created_at.clone()),
            created_by: Some(daka.created_by.clone()),
            teacher_id: Some(daka.teacher_id.clone()),
            group_id: None,
            is_delete: Some(daka.is_delete.clone()),
            teacher_name: user_teacher.real_name,
            group_name: None,
            course_sections: Some(out_req_course_sections),
            lianxi_count: Some(count_lianxi),
            teachers: daka.teachers.clone(),
            updated_at: daka.updated_at.clone(),
            all_lianxis: None,
            relations: Some(strip_result_vec(any_relations)?),
            not_piyue_count: Some(not_piyue_count),
            updated_by: daka.updated_by.clone(),
            students: daka.students.clone(),
            course_sections2: daka.course_sections.clone(),
            is_yanqi: daka.is_yanqi.clone(),
        })
    }
}
/// Org-scoped lookup of a single daka, assembled into
/// `ReqDakaWithCourseSections` (v1 shape): linked course sections, relation
/// DTOs, on-the-fly lianxi/piyue counts, and teacher/group names resolved
/// via remote user-service calls.
///
/// NOTE(review): the two response literals below duplicate every field;
/// only `group_id`/`group_name` differ between the branches.
pub async fn raw_find_daka_by_id(
    root: &HtySudoerTokenHeader,
    id: &String,
    db_pool: Arc<DbState>,
) -> anyhow::Result<ReqDakaWithCourseSections> {
    // Org id comes from the sudoer token; the daka lookup below is org-bound.
    let current_org_id = required_current_org_id_from_sudoer_token_str(&root.0)?;
    let daka = Daka::find_by_id_in_org(
        &id,
        &current_org_id,
        extract_conn(fetch_db_conn(&db_pool)?).deref_mut(),
    )?;
    // Counts are computed on the fly from the daka's active lianxis.
    let all_lianxis =
        daka.find_active_belonging_lianxis(extract_conn(fetch_db_conn(&db_pool)?).deref_mut())?;
    let (count_lianxi, not_piyue_count) = count_lianxi_and_piyue_on_the_fly(&all_lianxis);
    let the_course_sections =
        daka.find_active_linked_course_sections(extract_conn(fetch_db_conn(&db_pool)?).deref_mut())?;
    // Convert sections to request DTOs for the "DAKA" scenario.
    let any_req_course_sections = convert_course_sections_to_req_course_sections_for_jihua_daka_scenario(
        &String::from("DAKA"),
        &daka.id,
        the_course_sections.clone(),
        extract_conn(fetch_db_conn(&db_pool)?).deref_mut(),
    );
    // Fail the whole request if any per-section conversion failed.
    let out_req_course_sections = strip_result_vec(any_req_course_sections)?;
    // Build one relation DTO per linked course section (org-scoped lookup).
    let any_relations: Vec<anyhow::Result<ReqDakaCourseSection>> = the_course_sections
        .iter()
        .map(|course_section| {
            let daka_org_id = daka
                .org_id
                .as_ref()
                .ok_or_else(|| anyhow!("daka.org_id is required"))?;
            let in_jihua_course_section =
                DakaCourseSection::find_by_daka_id_and_course_section_id_in_org(
                    &daka.id,
                    &course_section.id,
                    daka_org_id,
                    extract_conn(fetch_db_conn(&db_pool)?).deref_mut(),
                )?;
            Ok(ReqDakaCourseSection {
                id: Some(in_jihua_course_section.id.clone()),
                daka_id: None,
                course_section_id: Some(in_jihua_course_section.course_section_id.clone()),
                meta: None,
                is_delete: Some(in_jihua_course_section.is_delete.clone()),
            })
        })
        .collect();
    debug!(
        "raw_find_daka_by_id -> daka.teacher_id {:?}",
        &daka.teacher_id
    );
    // Resolve the teacher's display name remotely.
    let user_teacher = find_hty_user_with_info_by_id(&daka.teacher_id, root).await?;
    // With a group: resolve its name remotely; without: leave group fields empty.
    if let Some(group_id) = &daka.group_id {
        let c_group_id = group_id.clone();
        let user_group = find_hty_user_group_by_id(&c_group_id, root).await?;
        Ok(ReqDakaWithCourseSections {
            id: Some(daka.id.clone()),
            desc: daka.desc.clone(),
            start_date: Some(daka.start_date.clone()),
            duration_days: Some(daka.duration_days.clone()),
            name: Some(daka.name.clone()),
            created_at: Some(daka.created_at.clone()),
            created_by: Some(daka.created_by.clone()),
            teacher_id: Some(daka.teacher_id.clone()),
            group_id: Some(c_group_id),
            is_delete: Some(daka.is_delete.clone()),
            teacher_name: user_teacher.real_name,
            group_name: user_group.group_name,
            course_sections: Some(out_req_course_sections),
            lianxi_count: Some(count_lianxi),
            all_lianxis: None,
            relations: Some(strip_result_vec(any_relations)?),
            not_piyue_count: Some(not_piyue_count),
            teachers: daka.teachers.clone(),
            updated_at: daka.updated_at.clone(),
            updated_by: daka.updated_by.clone(),
            students: daka.students.clone(),
            course_sections2: daka.course_sections.clone(),
            is_yanqi: daka.is_yanqi.clone(),
        })
    } else {
        Ok(ReqDakaWithCourseSections {
            id: Some(daka.id.clone()),
            desc: daka.desc.clone(),
            start_date: Some(daka.start_date.clone()),
            duration_days: Some(daka.duration_days.clone()),
            name: Some(daka.name.clone()),
            created_at: Some(daka.created_at.clone()),
            created_by: Some(daka.created_by.clone()),
            teacher_id: Some(daka.teacher_id.clone()),
            group_id: None,
            is_delete: Some(daka.is_delete.clone()),
            teacher_name: user_teacher.real_name,
            group_name: None,
            course_sections: Some(out_req_course_sections),
            lianxi_count: Some(count_lianxi),
            all_lianxis: None,
            relations: Some(strip_result_vec(any_relations)?),
            not_piyue_count: Some(not_piyue_count),
            teachers: daka.teachers.clone(),
            updated_at: daka.updated_at.clone(),
            updated_by: daka.updated_by.clone(),
            students: daka.students.clone(),
            course_sections2: daka.course_sections.clone(),
            is_yanqi: daka.is_yanqi.clone(),
        })
    }
}
pub async fn update_daka(
_root: HtySudoerTokenHeader,
auth: AuthorizationHeader,
State(db_pool): State<Arc<DbState>>,
Json(in_daka): Json<ReqDakaWithCourseSectionIds>,
) -> Json<HtyResponse<String>> {
debug!("update_daka_by_id -> starts");
debug!("update_daka_by_id -> in_daka / {:?}", &in_daka);
match raw_update_daka(auth, db_pool, &in_daka).await {
Ok(ok) => wrap_json_ok_resp(ok),
Err(e) => {
error!("update_jihua -> failed to update jihua, e: {}", e);
wrap_json_anyhow_err(e)
}
}
}
/// Updates an existing daka's fields and re-syncs its course-section
/// relations inside a single read/write transaction.
///
/// The record is re-scoped to the caller's `current_org_id` (from the auth
/// token). Returns the id of the updated daka.
///
/// Bug fix: the transaction result was previously bound to `let _ = match
/// ...`, which silently discarded failures and reported success; relation
/// sync errors inside the transaction were likewise swallowed. Both now
/// propagate (the latter also rolls the transaction back).
pub async fn raw_update_daka(
    _token: AuthorizationHeader,
    db_pool: Arc<DbState>,
    in_req_daka: &ReqDakaWithCourseSectionIds,
) -> anyhow::Result<String> {
    let current_org_id = jwt_decode_token(&(*_token).clone())?
        .current_org_id
        .ok_or_else(|| anyhow!("current_org_id is required"))?;
    let the_req_daka = in_req_daka.clone();
    // `id` is mandatory; extracting it here also serves as the null check,
    // so the validation below no longer re-tests `id.is_none()` (dead code).
    let id_daka = the_req_daka
        .clone()
        .id
        .ok_or_else(|| anyhow!("id is required"))?;
    if the_req_daka.start_date.is_none()
        || the_req_daka.duration_days.is_none()
        || the_req_daka.name.is_none()
    {
        return Err(anyhow!(HtyErr {
            code: HtyErrCode::NullErr,
            reason: Some(
                "missing mandatory fields for Daka: id / start_date / duration_days / name".into()
            ),
        }));
    }
    let mut params = HashMap::new();
    params.insert("c_req_daka".to_string(), the_req_daka.clone());
    debug!("raw_update_daka -> update_daka -> {:?}", &params);
    // Keep a copy for the return value; the closure moves `id_daka`.
    let ret_id = id_daka.clone();
    let task_update_daka = move |in_params: Option<HashMap<String, ReqDakaWithCourseSectionIds>>,
                                 conn: &mut PgConnection|
          -> anyhow::Result<()> {
        let the_params = in_params.ok_or_else(|| anyhow!("params is required"))?;
        debug!("task_update_daka params -> {:?}", &the_params);
        let c_req_daka = the_params
            .get("c_req_daka")
            .ok_or_else(|| anyhow!("c_req_daka key not found"))?
            .clone();
        // Load the current row and overwrite the updatable fields.
        let mut db_daka = Daka::find_by_id(&id_daka, conn)?;
        db_daka.desc = c_req_daka.desc.clone();
        db_daka.start_date = c_req_daka
            .start_date
            .clone()
            .ok_or_else(|| anyhow!("start_date is required"))?;
        db_daka.duration_days = c_req_daka
            .duration_days
            .clone()
            .ok_or_else(|| anyhow!("duration_days is required"))?;
        db_daka.name = c_req_daka
            .name
            .clone()
            .ok_or_else(|| anyhow!("name is required"))?;
        db_daka.teacher_id = c_req_daka
            .teacher_id
            .clone()
            .ok_or_else(|| anyhow!("teacher_id is required"))?;
        db_daka.group_id = c_req_daka.group_id.clone();
        db_daka.teacher_name = c_req_daka.teacher_name.clone();
        db_daka.group_name = c_req_daka.group_name.clone();
        db_daka.teachers = c_req_daka.teachers.clone();
        db_daka.students = c_req_daka.students.clone();
        db_daka.updated_by = c_req_daka.updated_by.clone();
        db_daka.updated_at = c_req_daka.updated_at.clone();
        db_daka.course_sections = c_req_daka.course_sections.clone();
        db_daka.is_yanqi = c_req_daka.is_yanqi.clone();
        // Re-assert org scoping from the caller's token.
        db_daka.org_id = Some(current_org_id.clone());
        debug!("raw_update_daka -> update_daka -> {:?}", &db_daka);
        Daka::update(&db_daka, conn)?;
        // Propagate relation-sync failures so the transaction rolls back.
        raw_update_daka_course_section_relations(
            &c_req_daka.to_req_daka_course_sections_relation()?,
            conn,
        )?;
        Ok(())
    };
    // Propagate task failures to the caller (previously swallowed).
    exec_read_write_task(
        Box::new(task_update_daka),
        Some(params),
        extract_conn(fetch_db_conn(&db_pool)?).deref_mut(),
    )
    .map_err(|e| {
        anyhow!(HtyErr {
            code: HtyErrCode::NullErr,
            reason: Some(format!("raw_update_daka -> updated error: {}", e)),
        })
    })?;
    Ok(ret_id)
}
/// Synchronizes daka ↔ course-section relation rows with the incoming set
/// of `course_section_ids`.
///
/// Semantics:
/// - `daka_id` is mandatory and the daka must exist.
/// - `course_section_ids == None` means "leave relations untouched".
/// - Otherwise: existing relations absent from the incoming set are
///   soft-deleted; incoming ids without a row get one created (inheriting
///   the daka's `org_id`); existing rows — possibly soft-deleted earlier —
///   are revived by setting `is_delete = false`.
pub fn raw_update_daka_course_section_relations(
    in_daka_course_sections: &ReqDakaCourseSectionsRelation,
    conn: &mut PgConnection,
) -> anyhow::Result<()> {
    debug!("in_daka_course_sections -> {:?}", in_daka_course_sections);
    // Validate the payload before touching anything.
    let c_in_daka_course_sections = in_daka_course_sections.clone();
    let in_daka_id = c_in_daka_course_sections.daka_id.clone();
    if let Some(daka_id) = &in_daka_id {
        // Confirm the daka exists.
        let in_daka = Daka::find_by_id(daka_id, conn)?;
        debug!(
            "raw_update_daka_course_section_relations -> in_daka: {:?}",
            &in_daka
        );
        if c_in_daka_course_sections.course_section_ids.is_none() {
            // No relations supplied — leave everything untouched.
            debug!("raw_update_daka_course_section_relations -> no incoming `daka_course_sections` to modify");
            Ok(())
        } else {
            // The ids to keep or add.
            let in_course_section_ids = c_in_daka_course_sections
                .course_section_ids
                .clone()
                .ok_or_else(|| anyhow!("course_section_ids is required"))?;
            // Update logic:
            // 1. soft-delete existing relations missing from the incoming set;
            // 2. create/revive a relation for every incoming id.
            // todo: soft-deleted relations are re-queried here; optimize later.
            let current_org_id = in_daka
                .org_id
                .clone()
                .ok_or_else(|| anyhow!("daka.org_id is required"))?;
            let mut to_delete_relation_ids: Vec<String> = Vec::new();
            // Compare all of this daka's existing relations against the
            // incoming ids; collect the ones to soft-delete.
            let existing_daka_course_sections =
                DakaCourseSection::find_by_daka_id_in_org(daka_id, &current_org_id, conn)?;
            debug!(
                "raw_update_daka_course_section_relations -> existing_daka_course_sections -> {:?}",
                &existing_daka_course_sections
            );
            if !existing_daka_course_sections.is_empty() {
                for rel in existing_daka_course_sections {
                    let id_section = rel.course_section_id;
                    if !in_course_section_ids.contains(&id_section) {
                        // Existing relation not in the incoming set — schedule
                        // it for soft deletion.
                        to_delete_relation_ids.push(rel.id.clone());
                    }
                }
            }
            debug!(
                // Log label fixed: previously said "jihua" (copy-paste).
                "raw_update_daka_course_section_relations -> to_delete_relation_ids -> {:?}",
                &to_delete_relation_ids
            );
            // Step 1: soft-delete relations that are no longer wanted.
            for to_delete_rel_id in to_delete_relation_ids {
                let _ = DakaCourseSection::logic_delete_by_id(&to_delete_rel_id, conn)?;
            }
            // Step 2: create or revive every incoming relation.
            for in_course_section_id in in_course_section_ids.clone() {
                match DakaCourseSection::find_by_daka_id_and_course_section_id_in_org(
                    &in_daka.id,
                    &in_course_section_id,
                    &current_org_id,
                    conn,
                ) {
                    Ok(rel) => {
                        debug!("find_by_daka_id_and_course_section_id -> FOUND RELATION -> {:?} / UPDATING STATUS", & rel);
                        let mut c_rel = rel.clone();
                        // Revive: the row may have been soft-deleted before.
                        c_rel.is_delete = false;
                        debug!(
                            "find_by_daka_id_and_course_section_id -> STATUS TO UPDATE {:?}",
                            &c_rel
                        );
                        let updated = DakaCourseSection::update(&c_rel, conn)?;
                        debug!(
                            "find_by_daka_id_and_course_section_id -> UPDATED ITEM {:?}",
                            &updated
                        );
                    }
                    Err(_err) => {
                        debug!(
                            "find_by_daka_id_and_course_section_id -> NOT FOUND RELATION / ADDING"
                        );
                        // No such relation yet — create it with the daka's org.
                        let to_add_rel = DakaCourseSection {
                            id: uuid(),
                            daka_id: daka_id.clone(),
                            course_section_id: in_course_section_id.clone(),
                            meta: None,
                            is_delete: false,
                            org_id: in_daka.org_id.clone(),
                        };
                        debug!("raw_update_daka_course_section_relations -> ADDING THIS RELATION -> {:?}", & to_add_rel);
                        let _ = DakaCourseSection::create(&to_add_rel, conn)?;
                    }
                }
            }
            Ok(())
        }
    } else {
        return Err(anyhow!(HtyErr {
            code: HtyErrCode::NullErr,
            reason: Some("Null Daka ID".into()),
        }));
    }
}
pub async fn delete_daka_by_id(
root: HtySudoerTokenHeader,
Path(id_delete): Path<String>,
State(db_pool): State<Arc<DbState>>,
) -> Json<HtyResponse<()>> {
debug!("delete_daka_by_id -> start here");
match raw_delete_daka_by_id(&root, &id_delete, db_pool).await {
Ok(ok) => wrap_json_ok_resp(ok),
Err(e) => {
error!("delete_daka_by_id -> failed to delete daka, e: {}", e);
wrap_json_anyhow_err(e)
}
}
}
/// Soft-deletes a daka after confirming it exists within the caller's
/// organization (the org id is taken from the sudoer token).
pub async fn raw_delete_daka_by_id(
    root: &HtySudoerTokenHeader,
    id_delete: &String,
    db_pool: Arc<DbState>,
) -> anyhow::Result<()> {
    let current_org_id = required_current_org_id_from_sudoer_token_str(&root.0)?;
    // Existence + org-scope check: errors out if the daka is not visible
    // to this organization.
    Daka::find_by_id_in_org(
        id_delete,
        &current_org_id,
        extract_conn(fetch_db_conn(&db_pool)?).deref_mut(),
    )
    .map(|_| ())?;
    // Soft delete only — the row is flagged, never physically removed.
    Daka::logic_delete_by_id(id_delete, extract_conn(fetch_db_conn(&db_pool)?).deref_mut())
        .map(|_| ())?;
    Ok(())
}
/// Axum handler: paginated daka listing for either a teacher (with a
/// `scope` of ALL / CREATED / ASSIGNED) or a student, optionally filtered
/// by a `start_date` lower bound.
pub async fn find_dakas_with_sections_by_user_id(
    Query(params): Query<HashMap<String, String>>,
    sudoer: HtySudoerTokenHeader,
    host: HtyHostHeader,
    _auth: AuthorizationHeader,
    State(db_pool): State<Arc<DbState>>,
) -> Json<HtyResponse<(Vec<ReqDakaWithCourseSections>, i64, i64)>> {
    debug!(
        "find_dakas_with_sections_by_user_id -> starts / params: {:?}",
        params
    );
    let teacher_id = params.get("teacher_id");
    let student_id = params.get("student_id");
    let scope = params.get("scope");
    // Both pagination params must be present before we try to parse them.
    if params.get("page").is_none() || params.get("page_size").is_none() {
        return wrap_json_anyhow_err(anyhow!("page or page_size not set!"));
    }
    let (some_page, some_page_size) = get_page_and_page_size(&params);
    let page = match some_page {
        Some(value) => value,
        None => return wrap_json_anyhow_err(anyhow!("page is required")),
    };
    let page_size = match some_page_size {
        Some(value) => value,
        None => return wrap_json_anyhow_err(anyhow!("page_size is required")),
    };
    if page < 1 || page_size < 1 {
        return wrap_json_anyhow_err(anyhow!("page or page_size not valid!"));
    }
    // Optional lower bound on daka start dates.
    let start_from: Option<NaiveDateTime> = match params.get("start_date") {
        Some(raw) => match parse_date_time(raw) {
            Ok(parsed) => Some(parsed),
            Err(e) => return wrap_json_anyhow_err(e),
        },
        None => None,
    };
    let found = raw_find_dakas_with_sections_by_user_id(
        &teacher_id,
        &student_id,
        &scope,
        &start_from,
        page,
        page_size,
        sudoer,
        &host,
        db_pool,
    )
    .await;
    match found {
        Ok(ok) => wrap_json_ok_resp(ok),
        Err(e) => {
            error!(
                "find_dakas_with_sections_by_user_id -> failed to find, e: {}",
                e
            );
            wrap_json_anyhow_err(e)
        }
    }
}
/// Paginated, org-scoped daka listing for exactly one of teacher or student.
///
/// Teacher queries require a `scope`: "ALL" (owner + assigned), "CREATED"
/// (owner only, honoring `start_from`), or "ASSIGNED". Each returned daka is
/// enriched with its active course sections (with course names) and with
/// lianxi/piyue counts computed on the fly.
///
/// Returns `(dakas, total_pages, total_count)` — the two `i64`s are whatever
/// the pagination queries return in positions 1 and 2.
pub async fn raw_find_dakas_with_sections_by_user_id(
    teacher_id: &Option<&String>,
    student_id: &Option<&String>,
    scope: &Option<&String>,
    start_from: &Option<NaiveDateTime>,
    page: i64,
    page_size: i64,
    _root: HtySudoerTokenHeader,
    _host: &HtyHostHeader,
    db_pool: Arc<DbState>,
) -> anyhow::Result<(Vec<ReqDakaWithCourseSections>, i64, i64)> {
    debug!("raw_find_dakas_with_sections_by_user_id -> START");
    debug!("raw_find_dakas_with_sections_by_user_id -> teacher_id: {:?} / student_id: {:?} / scope: {:?}", teacher_id, student_id, scope);
    // All queries below are scoped to the caller's organization.
    let current_org_id = required_current_org_id_from_sudoer_token_str(&_root.0)?;
    // Exactly one of teacher_id / student_id must be provided.
    // NOTE(review): the error message has a typo ("exits" for "exist").
    if (teacher_id.is_some() && student_id.is_some())
        || (teacher_id.is_none() && student_id.is_none())
    {
        return Err(anyhow!(HtyErr {
            code: HtyErrCode::WebErr,
            reason: Some("teacher id and student id can only exits one".into()),
        }));
    }
    let dakas_with_pages: (Vec<Daka>, i64, i64);
    let dakas: Vec<Daka>;
    if teacher_id.is_some() {
        // Teacher path: `scope` selects which pagination query to run.
        match scope {
            &Some(string_ref) => match string_ref.as_str() {
                "ALL" => {
                    dakas_with_pages = Daka::find_active_dakas_by_all_teachers_with_pagination(
                        teacher_id
                            .as_ref()
                            .ok_or_else(|| anyhow!("teacher_id is required"))?,
                        &current_org_id,
                        page,
                        page_size,
                        extract_conn(fetch_db_conn(&db_pool)?).deref_mut(),
                    )?;
                }
                "CREATED" => {
                    // Only the CREATED scope honors the optional start_from bound.
                    dakas_with_pages = Daka::find_active_dakas_by_owner_teacher_with_pagination(
                        teacher_id
                            .as_ref()
                            .ok_or_else(|| anyhow!("teacher_id is required"))?,
                        &current_org_id,
                        page,
                        page_size,
                        start_from,
                        extract_conn(fetch_db_conn(&db_pool)?).deref_mut(),
                    )?;
                }
                "ASSIGNED" => {
                    dakas_with_pages =
                        Daka::find_active_dakas_by_assigned_teachers_with_pagination(
                            teacher_id
                                .as_ref()
                                .ok_or_else(|| anyhow!("teacher_id is required"))?,
                            &current_org_id,
                            page,
                            page_size,
                            extract_conn(fetch_db_conn(&db_pool)?).deref_mut(),
                        )?;
                }
                _ => {
                    return Err(anyhow!(HtyErr {
                        code: HtyErrCode::WebErr,
                        reason: Some("invalid scope".into()),
                    }));
                }
            },
            &None => {
                return Err(anyhow!(HtyErr {
                    code: HtyErrCode::WebErr,
                    reason: Some("scope can not be none when teacher_id exist".into()),
                }));
            }
        }
        dakas = dakas_with_pages.0;
    } else {
        // student_id path: no scope needed.
        let id_student = student_id
            .as_ref()
            .ok_or_else(|| anyhow!("student_id is required"))?;
        dakas_with_pages = Daka::find_active_dakas_by_student_id_with_pagination(
            &id_student,
            &current_org_id,
            page,
            page_size,
            extract_conn(fetch_db_conn(&db_pool)?).deref_mut(),
        )?;
        dakas = dakas_with_pages.0;
    }
    // Enrich each daka with sections (+ course names) and on-the-fly counts.
    let res: anyhow::Result<Vec<ReqDakaWithCourseSections>> = dakas
        .iter()
        .map(|daka| {
            let mut req_daka = daka.to_req();
            //---------------------------------------------------------------------------------------------------------
            let linked_course_section = daka.find_active_linked_course_sections(
                extract_conn(fetch_db_conn(&db_pool)?).deref_mut(),
            )?;
            let req_course_sections: anyhow::Result<Vec<_>> = linked_course_section
                .iter()
                .map(|course_section| {
                    let mut req_course_section = course_section.to_req_with_ref_resource(
                        extract_conn(fetch_db_conn(&db_pool)?).deref_mut(),
                    )?;
                    // Attach the parent course's name; drop the meta payload.
                    let linked_course = Course::find_by_id(
                        course_section
                            .course_id
                            .as_ref()
                            .ok_or_else(|| anyhow!("course_id is required"))?,
                        extract_conn(fetch_db_conn(&db_pool)?).deref_mut(),
                    )?;
                    req_course_section.course_name = Some(linked_course.name);
                    req_course_section.meta = None;
                    Ok(req_course_section)
                })
                .collect();
            let req_course_sections = req_course_sections?;
            req_daka.course_sections = Some(req_course_sections);
            //---------------------------------------------------------------------------------------------------------
            // todo: once the data is backfilled, switch to the stored field values
            // // req_jihua.lianxi_count = Some(count_lianxi as i32);
            // let c_ret_count = jihua.lianxi_count.clone();
            // debug!("return lianxi_count: {:?}", c_ret_count);
            // if c_ret_count.is_none() {
            // req_jihua.lianxi_count = Some(0);
            // } else {
            // req_jihua.lianxi_count = c_ret_count;
            // }
            // fixme: computed on the fly for now
            // fix: switch to using database fields
            // todo: update both count fields when a lianxi is created
            // todo: add a `not_piyue_count` field to jihua as well
            let all_lianxis = daka.find_active_belonging_lianxis(
                extract_conn(fetch_db_conn(&db_pool)?).deref_mut(),
            )?;
            let (count_lianxi, not_piyue_count) = count_lianxi_and_piyue_on_the_fly(&all_lianxis);
            req_daka.lianxi_count = Some(count_lianxi);
            req_daka.not_piyue_count = Some(not_piyue_count);
            Ok(req_daka)
        })
        .collect();
    match res {
        Ok(res) => Ok((res, dakas_with_pages.1, dakas_with_pages.2)),
        Err(e) => {
            debug!("err -> {:?}", e);
            return Err(anyhow!(e));
        }
    }
}