feat(ws-org): add org_id scoping across ws entities

Add org_id fields and migrations for course, section, lianxi, piyue, and jihua/daka data paths, and enforce organization-scoped filtering in ws service queries.

Made-with: Cursor
This commit is contained in:
2026-04-27 23:06:58 +08:00
parent 83f657388c
commit 07ee6e7cc6
13 changed files with 675 additions and 59 deletions
+150
View File
@@ -81,11 +81,117 @@ pub async fn run_watermark_task(
anyhow::bail!("watermark AI FAILED");
}
payload.task_result = Some(updated);
persist_watermark_result_to_ref_resource(rt, sudo, host, &payload, req)
.await
.context("WATERMARK: persist task_result to ws.ref_resources.tasks failed")?;
req.payload = Some(serde_json::to_value(&payload)?);
tracing::debug!(ts_task_id = %ts_tid, "WATERMARK: pipeline ok");
Ok(())
}
/// Writes the finished watermark AI result back into the owning
/// `ws.ref_resources.tasks` entry via the ws HTTP API.
///
/// Flow: fetch the ref_resource by `payload.ref_resource_id`, patch the
/// matching WATERMARK task in place, stamp `updated_by`, then post the
/// whole resource back. Errors when the ts task id is missing, the AI
/// result map is absent, either HTTP round-trip fails, or no matching
/// task exists inside the fetched resource.
async fn persist_watermark_result_to_ref_resource(
    rt: &Arc<ProcessorRuntime>,
    sudo: &str,
    host: &str,
    payload: &htyts_models::WatermarkPayload,
    req: &ReqTask,
) -> anyhow::Result<()> {
    // The ts-side task id is the key used to locate the matching entry
    // inside ref_resource.tasks.vals — refuse to proceed without it.
    let task_id = match req.task_id.as_deref() {
        Some(id) => id,
        None => anyhow::bail!("WATERMARK: missing ts task_id"),
    };
    // The AI result map must be present before anything is persisted.
    let result_map = payload
        .task_result
        .as_ref()
        .and_then(|outer| outer.task_result.as_ref())
        .ok_or_else(|| anyhow::anyhow!("WATERMARK: missing AI task_result map"))?;
    let base = rt.htyws_url.trim_end_matches('/');

    // Step 1: fetch the current ref_resource from the ws service.
    let lookup_url = format!(
        "{base}/api/v1/ws/find_ref_resource_by_id/{}",
        payload.ref_resource_id
    );
    let lookup_resp = rt
        .http
        .get(&lookup_url)
        .header("HtySudoerToken", sudo)
        .header("Authorization", sudo)
        .header("HtyHost", host)
        .send()
        .await?;
    let mut ref_resource = parse_hty_response(lookup_resp)
        .await
        .context("WATERMARK: find_ref_resource_by_id failed")?;

    // Step 2: patch the matching WATERMARK task with the AI result map.
    let result_object: serde_json::Map<String, Value> = result_map
        .iter()
        .map(|(k, v)| (k.clone(), v.clone()))
        .collect();
    if !update_ref_resource_watermark_task_result(
        &mut ref_resource,
        task_id,
        Value::Object(result_object),
    ) {
        anyhow::bail!(
            "WATERMARK: target task not found in ref_resource tasks, ref_resource_id={}, task_id={}",
            payload.ref_resource_id,
            task_id
        );
    }
    // Record which ts instance performed this update.
    ref_resource["updated_by"] = json!(rt.ts_url.trim_end_matches('/'));

    // Step 3: post the patched resource back to the ws service.
    let save_url = format!("{base}/api/v1/ws/update_ref_resource");
    let save_resp = rt
        .http
        .post(&save_url)
        .header("HtySudoerToken", sudo)
        .header("Authorization", sudo)
        .header("HtyHost", host)
        .json(&ref_resource)
        .send()
        .await?;
    let _ = parse_hty_response(save_resp)
        .await
        .context("WATERMARK: update_ref_resource failed")?;
    Ok(())
}
/// Locates the WATERMARK task with the given `task_id` inside
/// `ref_resource.tasks.vals` and overwrites its result fields.
///
/// On a match, replaces `task_result`, forces `task_status` to "DONE",
/// and stamps `task_from` with `TASK_FROM_TS`, reporting `true`.
/// Returns `false` when the tasks container is missing/malformed or no
/// entry matches both the id and the WATERMARK type.
fn update_ref_resource_watermark_task_result(
    ref_resource: &mut Value,
    task_id: &str,
    task_result: Value,
) -> bool {
    // Navigate tasks -> vals; any structural mismatch means nothing to update.
    let vals = match ref_resource
        .get_mut("tasks")
        .and_then(Value::as_object_mut)
        .and_then(|tasks| tasks.get_mut("vals"))
        .and_then(Value::as_array_mut)
    {
        Some(arr) => arr,
        None => return false,
    };
    // Find the first entry matching both the task id and the WATERMARK type.
    let target = vals.iter_mut().find(|entry| {
        let id_matches = entry.get("task_id").and_then(Value::as_str) == Some(task_id);
        let type_matches = entry
            .get("task_type")
            .and_then(Value::as_str)
            .map(|ty| ty == TaskType::Watermark.as_db_str())
            .unwrap_or(false);
        id_matches && type_matches
    });
    match target {
        Some(task) => {
            task["task_result"] = task_result;
            task["task_status"] = json!("DONE");
            task["task_from"] = json!(TASK_FROM_TS);
            true
        }
        None => false,
    }
}
pub async fn run_video_compression_task(
rt: &Arc<ProcessorRuntime>,
sudo: &str,
@@ -758,3 +864,47 @@ fn build_req_hty_resource_for_audio(
}
})
}
#[cfg(test)]
mod tests {
    use super::update_ref_resource_watermark_task_result;
    use serde_json::json;

    // Regression test for the task-result patcher: only the WATERMARK
    // task whose task_id matches may be updated; sibling tasks (here a
    // VIDEO_COMPRESSION entry at index 0) must keep their original
    // task_result untouched.
    #[test]
    fn update_ref_resource_watermark_task_result_only_updates_target_task() {
        // Fixture: two tasks, one non-watermark (t0), one watermark (t1)
        // with an empty result awaiting the AI output.
        let mut ref_resource = json!({
            "id": "rr1",
            "tasks": {
                "vals": [
                    {
                        "task_id": "t0",
                        "task_type": "VIDEO_COMPRESSION",
                        "task_status": "DONE",
                        "task_result": {"compressed_file_url":"a.mp4"}
                    },
                    {
                        "task_id": "t1",
                        "task_type": "WATERMARK",
                        "task_status": "DONE",
                        "task_result": null
                    }
                ]
            }
        });
        let changed = update_ref_resource_watermark_task_result(
            &mut ref_resource,
            "t1",
            json!({"watermarked_video_url":"https://cdn/new.mp4"}),
        );
        // The watermark task must report as updated...
        assert!(changed);
        // ...and carry the new result plus DONE status.
        assert_eq!(
            ref_resource["tasks"]["vals"][1]["task_result"]["watermarked_video_url"],
            json!("https://cdn/new.mp4")
        );
        assert_eq!(ref_resource["tasks"]["vals"][1]["task_status"], json!("DONE"));
        // The unrelated compression task must be left untouched.
        assert_eq!(
            ref_resource["tasks"]["vals"][0]["task_result"]["compressed_file_url"],
            json!("a.mp4")
        );
    }
}
+51 -15
View File
@@ -71,6 +71,9 @@ pub async fn raw_create_course(
let user_id = hty_token
.hty_id
.ok_or_else(|| anyhow!("hty_id is required"))?;
let current_org_id = hty_token
.current_org_id
.ok_or_else(|| anyhow!("current_org_id is required"))?;
let req_course = in_req_course.clone();
if req_course.course_type.is_none() || req_course.course_name.is_none() {
@@ -89,6 +92,7 @@ pub async fn raw_create_course(
created_by: Some(user_id.clone()),
created_at: Some(current_local_datetime()),
is_delete: Some(false),
org_id: Some(current_org_id),
};
Course::create(&in_course, extract_conn(fetch_db_conn(&db_pool)?).deref_mut())?
.to_req(extract_conn(fetch_db_conn(&db_pool)?).deref_mut())
@@ -272,6 +276,7 @@ pub fn convert_course_sections_to_req_course_sections_for_jihua_daka_scenario(
updated_by: c_course_section.updated_by.clone(),
updated_by_realname: None,
teachers: c_course_section.teachers.clone(),
org_id: c_course_section.org_id.clone(),
};
Ok(out_section)
@@ -407,6 +412,7 @@ reason: Some("scenario must be DAKA or JIHUA".into()),
resources: out_resources,
course_category_key: c_course_section.course_category_key,
course_category_name: c_course_section.course_category_name,
org_id: c_course_section.org_id.clone(),
};
Ok(out_section)
@@ -669,6 +675,9 @@ pub async fn raw_create_course_section(
) -> anyhow::Result<ReqCourseSection> {
let token = HtyToken::from_jwt(auth.clone().deref())?;
let user_id = token.hty_id.ok_or_else(|| anyhow!("hty_id is required"))?;
let current_org_id = token
.current_org_id
.ok_or_else(|| anyhow!("current_org_id is required"))?;
let mut in_section = CourseSection::from(&req_course_section)?;
@@ -684,6 +693,7 @@ pub async fn raw_create_course_section(
in_section.created_by = user_id.clone();
in_section.updated_at = Some(current_local_datetime());
in_section.updated_by = Some(user_id.clone());
in_section.org_id = Some(current_org_id);
if let Some(course_id) = &some_in_course_id {
let inner_course =
@@ -754,11 +764,12 @@ pub async fn raw_create_course_section(
pub async fn update_piyue(
_root: HtySudoerTokenHeader,
auth: AuthorizationHeader,
State(db_pool): State<Arc<DbState>>,
Json(in_piyue): Json<ReqPiyue>,
) -> Json<HtyResponse<String>> {
debug!("update_piyue -> starts");
match raw_update_piyue(&in_piyue, db_pool).await {
match raw_update_piyue(&auth, &in_piyue, db_pool).await {
Ok(ok) => {
debug!("update_piyue -> success to update piyue, e: {}", ok);
wrap_json_ok_resp(ok)
@@ -771,10 +782,14 @@ pub async fn update_piyue(
}
pub async fn raw_update_piyue(
auth: &AuthorizationHeader,
req_piyue: &ReqPiyue,
db_pool: Arc<DbState>,
) -> anyhow::Result<String> {
let c_piyue = req_piyue.clone();
let current_org_id = jwt_decode_token(&(*auth).clone())?
.current_org_id
.ok_or_else(|| anyhow!("current_org_id is required"))?;
let in_piyue = Piyue {
id: c_piyue.id.ok_or_else(|| anyhow!("id is required"))?,
@@ -788,6 +803,7 @@ pub async fn raw_update_piyue(
rating: c_piyue.rating,
created_at: c_piyue.created_at,
is_delete: c_piyue.is_delete,
org_id: Some(current_org_id.clone()),
};
let req_piyue_infos = req_piyue
@@ -811,6 +827,7 @@ pub async fn raw_update_piyue(
serial: c_each_piyue_info.serial,
created_at: c_each_piyue_info.created_at,
is_delete: c_each_piyue_info.is_delete,
org_id: Some(current_org_id.clone()),
})
})
.collect();
@@ -1364,11 +1381,12 @@ pub async fn create_piyue(
// deprecated
pub async fn raw_create_piyue(
_root: HtySudoerTokenHeader,
root: HtySudoerTokenHeader,
_host: HtyHostHeader,
req_piyue: &ReqPiyue,
db_pool: Arc<DbState>,
) -> anyhow::Result<String> {
let current_org_id = required_current_org_id_from_token_str(&root.0)?;
let req_piyue_copy = req_piyue.clone();
let the_lianxi_id = req_piyue_copy
.lianxi_id
@@ -1384,6 +1402,7 @@ pub async fn raw_create_piyue(
rating: req_piyue_copy.rating,
created_at: Some(current_local_datetime()),
is_delete: Some(false),
org_id: Some(current_org_id.clone()),
};
let res_piyue = Piyue::create(
@@ -1391,8 +1410,9 @@ pub async fn raw_create_piyue(
extract_conn(fetch_db_conn(&db_pool)?).deref_mut(),
)?;
let mut the_lianxi = Lianxi::find_by_id(
let mut the_lianxi = Lianxi::find_by_id_in_org(
&the_lianxi_id,
&current_org_id,
extract_conn(fetch_db_conn(&db_pool)?).deref_mut(),
)?;
the_lianxi.has_piyue = Some(true);
@@ -1460,6 +1480,7 @@ pub async fn raw_create_piyue(
serial: c_req_piyue_info.serial.clone(),
created_at: Some(current_local_datetime()),
is_delete: Some(false),
org_id: Some(current_org_id.clone()),
};
let resp_piyue_info = PiyueInfo::create(&piyue_info, conn)?;
@@ -1528,11 +1549,12 @@ pub async fn create_piyue2(
}
pub fn raw_create_piyue2(
_root: HtySudoerTokenHeader,
root: HtySudoerTokenHeader,
_host: HtyHostHeader,
req_piyue: &ReqPiyue2,
db_pool: Arc<DbState>,
) -> anyhow::Result<String> {
let current_org_id = required_current_org_id_from_token_str(&root.0)?;
let req_piyue_copy = req_piyue.clone();
debug!("raw_create_piyue2 -> req_piyue {:?}", req_piyue);
@@ -1543,8 +1565,9 @@ pub fn raw_create_piyue2(
.ok_or_else(|| anyhow!("lianxi_id is required"))?;
// https://github.com/alchemy-studio/huiwing/issues/1290
let some_existing_piyue = Piyue::find_first_by_lianxi_id(
let some_existing_piyue = Piyue::find_first_by_lianxi_id_in_org(
&the_lianxi_id.clone(),
&current_org_id,
extract_conn(fetch_db_conn(&db_pool)?).deref_mut(),
)?;
@@ -1568,6 +1591,7 @@ pub fn raw_create_piyue2(
rating: req_piyue_copy.rating.clone(),
created_at: Some(current_local_datetime()),
is_delete: Some(false),
org_id: Some(current_org_id.clone()),
};
debug!("raw_create_piyue2 -> in_piyue {:?}", in_piyue);
@@ -1577,8 +1601,9 @@ pub fn raw_create_piyue2(
extract_conn(fetch_db_conn(&db_pool)?).deref_mut(),
)?;
let mut the_lianxi = Lianxi::find_by_id(
let mut the_lianxi = Lianxi::find_by_id_in_org(
&the_lianxi_id,
&current_org_id,
extract_conn(fetch_db_conn(&db_pool)?).deref_mut(),
)?;
the_lianxi.has_piyue = Some(true);
@@ -2029,6 +2054,7 @@ pub async fn raw_update_course(
created_by: token.hty_id,
created_at: Some(current_local_datetime()),
is_delete: req_course.is_delete.clone(),
org_id: token.current_org_id.clone(),
};
let updated_course = Course::update(
@@ -2392,6 +2418,7 @@ pub async fn raw_find_course_by_id(
created_at: resp_course.created_at,
task: None,
is_delete: resp_course.is_delete,
org_id: resp_course.org_id,
};
Ok(req_course)
@@ -3812,12 +3839,11 @@ pub async fn raw_find_all_courses(
auth: &AuthorizationHeader,
db_pool: Arc<DbState>,
) -> anyhow::Result<Vec<ReqCourseWithSections>> {
let current_user = jwt_decode_token(&(*auth).clone())?
.hty_id
.ok_or_else(|| anyhow!("hty_id is required"))?;
let courses = Course::find_all_active_by_created_by(
&current_user,
let current_org_id = jwt_decode_token(&(*auth).clone())?
.current_org_id
.ok_or_else(|| anyhow!("current_org_id is required"))?;
let courses = Course::find_all_course_in_org(
&current_org_id,
extract_conn(fetch_db_conn(&db_pool)?).deref_mut(),
)?;
@@ -3832,6 +3858,7 @@ pub async fn raw_find_all_courses(
created_at: course_item.created_at.clone(),
task: None,
is_delete: course_item.is_delete.clone(),
org_id: course_item.org_id.clone(),
})
.collect();
Ok(result)
@@ -3840,10 +3867,11 @@ pub async fn raw_find_all_courses(
// #[get("/find_all_course_sections")]
pub async fn find_all_course_sections(
_root: HtySudoerTokenHeader,
auth: AuthorizationHeader,
State(db_pool): State<Arc<DbState>>,
) -> Json<HtyResponse<Vec<ReqCourseSection>>> {
debug!("find_all_course_sections -> starts");
match raw_find_all_course_sections(db_pool).await {
match raw_find_all_course_sections(&auth, db_pool).await {
Ok(sections) => wrap_json_ok_resp(sections),
Err(e) => {
error!(
@@ -3856,10 +3884,16 @@ pub async fn find_all_course_sections(
}
pub async fn raw_find_all_course_sections(
auth: &AuthorizationHeader,
db_pool: Arc<DbState>,
) -> anyhow::Result<Vec<ReqCourseSection>> {
let sections =
CourseSection::find_all_active_sections(extract_conn(fetch_db_conn(&db_pool)?).deref_mut())?;
let current_org_id = jwt_decode_token(&(*auth).clone())?
.current_org_id
.ok_or_else(|| anyhow!("current_org_id is required"))?;
let sections = CourseSection::find_all_active_sections_in_org(
&current_org_id,
extract_conn(fetch_db_conn(&db_pool)?).deref_mut(),
)?;
let any_req_sections: Vec<anyhow::Result<ReqCourseSection>> = sections
.iter()
.map(|section| {
@@ -3912,6 +3946,7 @@ pub async fn raw_find_all_course_sections(
updated_by: section.updated_by.clone(),
updated_by_realname: None,
teachers: section.teachers.clone(),
org_id: section.org_id.clone(),
})
})
.collect();
@@ -4218,6 +4253,7 @@ pub async fn raw_find_all_course_sections_by_created_by_with_page(
updated_by: section.updated_by.clone(),
updated_by_realname: None,
teachers: section.teachers.clone(),
org_id: section.org_id.clone(),
})
})
.collect();
+73 -17
View File
@@ -14,6 +14,7 @@ use htycommons::common::{
HtyErrCode, HtyResponse,
};
use htycommons::db::{exec_read_write_task, extract_conn, fetch_db_conn, DbState};
use htycommons::jwt::jwt_decode_token;
use htycommons::uuid;
use htycommons::web::{
wrap_json_anyhow_err, wrap_json_ok_resp, AuthorizationHeader, HtyHostHeader,
@@ -30,6 +31,12 @@ use std::ops::DerefMut;
use std::sync::Arc;
use tracing::{debug, error};
/// Decodes a sudoer token string and extracts its `current_org_id`,
/// erroring when the token decodes but carries no organization.
fn required_current_org_id_from_sudoer_token_str(token_str: &String) -> anyhow::Result<String> {
    let decoded = jwt_decode_token(token_str)?;
    match decoded.current_org_id {
        Some(org_id) => Ok(org_id),
        None => Err(anyhow!("current_org_id is required")),
    }
}
pub async fn create_daka(
root: HtySudoerTokenHeader,
host: HtyHostHeader,
@@ -54,6 +61,9 @@ pub async fn raw_create_daka(
db_pool: Arc<DbState>,
in_req_daka: &ReqDakaWithCourseSectionIds,
) -> anyhow::Result<String> {
let current_org_id = jwt_decode_token(&(*_token).clone())?
.current_org_id
.ok_or_else(|| anyhow!("current_org_id is required"))?;
let req_daka = in_req_daka.clone();
if req_daka.start_date.is_none()
|| req_daka.duration_days.is_none()
@@ -120,6 +130,7 @@ pub async fn raw_create_daka(
students: req_daka.students.clone(),
course_sections: req_daka.course_sections.clone(),
is_yanqi: req_daka.is_yanqi.clone(),
org_id: Some(current_org_id),
};
let mut params = HashMap::new();
@@ -168,6 +179,7 @@ pub fn raw_create_daka_tx(
course_section_id: in_section_id,
meta: Some(meta_data.clone()),
is_delete: false,
org_id: result_daka.org_id.clone(),
};
debug!("START CREATE DAKA QUMU SECTION -> {:?}", &section);
let created_section = DakaCourseSection::create(&section, conn)?;
@@ -220,8 +232,12 @@ pub async fn raw_find_daka_by_id2(
db_pool: Arc<DbState>,
) -> anyhow::Result<ReqDakaWithCourseSections2> {
debug!("raw_find_daka_by_id2 -> {:?}", id);
let daka = Daka::find_by_id(&id, extract_conn(fetch_db_conn(&db_pool)?).deref_mut())?;
let current_org_id = required_current_org_id_from_sudoer_token_str(&root.0)?;
let daka = Daka::find_by_id_in_org(
&id,
&current_org_id,
extract_conn(fetch_db_conn(&db_pool)?).deref_mut(),
)?;
debug!("raw_find_daka_by_id2 -> daka: {:?}", id);
@@ -262,11 +278,17 @@ pub async fn raw_find_daka_by_id2(
let any_relations: Vec<anyhow::Result<ReqDakaCourseSection>> = the_course_sections
.iter()
.map(|course_section| {
let in_jihua_course_section = DakaCourseSection::find_by_daka_id_and_course_section_id(
&daka.id,
&course_section.id,
extract_conn(fetch_db_conn(&db_pool)?).deref_mut(),
)?;
let daka_org_id = daka
.org_id
.as_ref()
.ok_or_else(|| anyhow!("daka.org_id is required"))?;
let in_jihua_course_section =
DakaCourseSection::find_by_daka_id_and_course_section_id_in_org(
&daka.id,
&course_section.id,
daka_org_id,
extract_conn(fetch_db_conn(&db_pool)?).deref_mut(),
)?;
Ok(ReqDakaCourseSection {
id: Some(in_jihua_course_section.id.clone()),
daka_id: None,
@@ -348,7 +370,12 @@ pub async fn raw_find_daka_by_id(
id: &String,
db_pool: Arc<DbState>,
) -> anyhow::Result<ReqDakaWithCourseSections> {
let daka = Daka::find_by_id(&id, extract_conn(fetch_db_conn(&db_pool)?).deref_mut())?;
let current_org_id = required_current_org_id_from_sudoer_token_str(&root.0)?;
let daka = Daka::find_by_id_in_org(
&id,
&current_org_id,
extract_conn(fetch_db_conn(&db_pool)?).deref_mut(),
)?;
let all_lianxis =
daka.find_active_belonging_lianxis(extract_conn(fetch_db_conn(&db_pool)?).deref_mut())?;
@@ -368,11 +395,17 @@ pub async fn raw_find_daka_by_id(
let any_relations: Vec<anyhow::Result<ReqDakaCourseSection>> = the_course_sections
.iter()
.map(|course_section| {
let in_jihua_course_section = DakaCourseSection::find_by_daka_id_and_course_section_id(
&daka.id,
&course_section.id,
extract_conn(fetch_db_conn(&db_pool)?).deref_mut(),
)?;
let daka_org_id = daka
.org_id
.as_ref()
.ok_or_else(|| anyhow!("daka.org_id is required"))?;
let in_jihua_course_section =
DakaCourseSection::find_by_daka_id_and_course_section_id_in_org(
&daka.id,
&course_section.id,
daka_org_id,
extract_conn(fetch_db_conn(&db_pool)?).deref_mut(),
)?;
Ok(ReqDakaCourseSection {
id: Some(in_jihua_course_section.id.clone()),
daka_id: None,
@@ -471,6 +504,9 @@ pub async fn raw_update_daka(
db_pool: Arc<DbState>,
in_req_daka: &ReqDakaWithCourseSectionIds,
) -> anyhow::Result<String> {
let current_org_id = jwt_decode_token(&(*_token).clone())?
.current_org_id
.ok_or_else(|| anyhow!("current_org_id is required"))?;
let the_req_daka = in_req_daka.clone();
let id_daka = the_req_daka
.clone()
@@ -534,6 +570,7 @@ pub async fn raw_update_daka(
db_daka.updated_at = c_req_daka.updated_at.clone();
db_daka.course_sections = c_req_daka.course_sections.clone();
db_daka.is_yanqi = c_req_daka.is_yanqi.clone();
db_daka.org_id = Some(current_org_id.clone());
debug!("raw_update_daka -> update_daka -> {:?}", &db_daka);
@@ -596,9 +633,14 @@ pub fn raw_update_daka_course_section_relations(
// 首先做第一步
// todo: 改为逻辑删除后,这里会重复查出来`已删除`关系,后续优化。
let current_org_id = in_daka
.org_id
.clone()
.ok_or_else(|| anyhow!("daka.org_id is required"))?;
let mut to_delete_relation_ids: Vec<String> = Vec::new();
// 查找这个daka的所有course_section_ids,然后跟传入的做比对,把不存在于传入数据的ids放入`to_delete_relation_ids`,然后逻辑删除.
let existing_daka_course_sections = DakaCourseSection::find_by_daka_id(daka_id, conn)?;
let existing_daka_course_sections =
DakaCourseSection::find_by_daka_id_in_org(daka_id, &current_org_id, conn)?;
debug!(
"raw_update_daka_course_section_relations -> existing_daka_course_sections -> {:?}",
&existing_daka_course_sections
@@ -627,9 +669,10 @@ pub fn raw_update_daka_course_section_relations(
// 然后做第二步
for in_course_section_id in in_course_section_ids.clone() {
match DakaCourseSection::find_by_daka_id_and_course_section_id(
match DakaCourseSection::find_by_daka_id_and_course_section_id_in_org(
&in_daka.id,
&in_course_section_id,
&current_org_id,
conn,
) {
Ok(rel) => {
@@ -657,6 +700,7 @@ pub fn raw_update_daka_course_section_relations(
course_section_id: in_course_section_id.clone(),
meta: None,
is_delete: false,
org_id: in_daka.org_id.clone(),
};
debug!("raw_update_daka_course_section_relations -> ADDING THIS RELATION -> {:?}", & to_add_rel);
let _ = DakaCourseSection::create(&to_add_rel, conn)?;
@@ -674,12 +718,12 @@ pub fn raw_update_daka_course_section_relations(
}
pub async fn delete_daka_by_id(
_root: HtySudoerTokenHeader,
root: HtySudoerTokenHeader,
Path(id_delete): Path<String>,
State(db_pool): State<Arc<DbState>>,
) -> Json<HtyResponse<()>> {
debug!("delete_daka_by_id -> start here");
match raw_delete_daka_by_id(&id_delete, db_pool).await {
match raw_delete_daka_by_id(&root, &id_delete, db_pool).await {
Ok(ok) => wrap_json_ok_resp(ok),
Err(e) => {
error!("delete_daka_by_id -> failed to delete daka, e: {}", e);
@@ -689,9 +733,16 @@ pub async fn delete_daka_by_id(
}
pub async fn raw_delete_daka_by_id(
root: &HtySudoerTokenHeader,
id_delete: &String,
db_pool: Arc<DbState>,
) -> anyhow::Result<()> {
let current_org_id = required_current_org_id_from_sudoer_token_str(&root.0)?;
let _ = Daka::find_by_id_in_org(
id_delete,
&current_org_id,
extract_conn(fetch_db_conn(&db_pool)?).deref_mut(),
)?;
// 逻辑删除 daka
let _ = Daka::logic_delete_by_id(
id_delete,
@@ -778,6 +829,7 @@ pub async fn raw_find_dakas_with_sections_by_user_id(
) -> anyhow::Result<(Vec<ReqDakaWithCourseSections>, i64, i64)> {
debug!("raw_find_dakas_with_sections_by_user_id -> START");
debug!("raw_find_dakas_with_sections_by_user_id -> teacher_id: {:?} / student_id: {:?} / scope: {:?}", teacher_id, student_id, scope);
let current_org_id = required_current_org_id_from_sudoer_token_str(&_root.0)?;
if (teacher_id.is_some() && student_id.is_some())
|| (teacher_id.is_none() && student_id.is_none())
@@ -799,6 +851,7 @@ pub async fn raw_find_dakas_with_sections_by_user_id(
teacher_id
.as_ref()
.ok_or_else(|| anyhow!("teacher_id is required"))?,
&current_org_id,
page,
page_size,
extract_conn(fetch_db_conn(&db_pool)?).deref_mut(),
@@ -809,6 +862,7 @@ pub async fn raw_find_dakas_with_sections_by_user_id(
teacher_id
.as_ref()
.ok_or_else(|| anyhow!("teacher_id is required"))?,
&current_org_id,
page,
page_size,
start_from,
@@ -821,6 +875,7 @@ pub async fn raw_find_dakas_with_sections_by_user_id(
teacher_id
.as_ref()
.ok_or_else(|| anyhow!("teacher_id is required"))?,
&current_org_id,
page,
page_size,
extract_conn(fetch_db_conn(&db_pool)?).deref_mut(),
@@ -850,6 +905,7 @@ pub async fn raw_find_dakas_with_sections_by_user_id(
dakas_with_pages = Daka::find_active_dakas_by_student_id_with_pagination(
&id_student,
&current_org_id,
page,
page_size,
extract_conn(fetch_db_conn(&db_pool)?).deref_mut(),
+68 -17
View File
@@ -12,6 +12,7 @@ use htycommons::common::{
HtyErr, HtyErrCode, HtyResponse,
};
use htycommons::db::{exec_read_write_task, extract_conn, fetch_db_conn, DbState};
use htycommons::jwt::jwt_decode_token;
use htycommons::uuid;
use htycommons::web::{
wrap_json_anyhow_err, wrap_json_ok_resp, AuthorizationHeader, HtyHostHeader,
@@ -28,6 +29,12 @@ use std::ops::DerefMut;
use std::sync::Arc;
use tracing::{debug, error};
/// Decodes a sudoer token string and extracts its `current_org_id`,
/// erroring when the token decodes but carries no organization.
fn required_current_org_id_from_sudoer_token_str(token_str: &String) -> anyhow::Result<String> {
    let decoded = jwt_decode_token(token_str)?;
    match decoded.current_org_id {
        Some(org_id) => Ok(org_id),
        None => Err(anyhow!("current_org_id is required")),
    }
}
pub async fn raw_find_jihuas_by_course_section_id(
id: String,
db_pool: Arc<DbState>,
@@ -121,7 +128,12 @@ async fn raw_find_jihua_by_id2(
id: &String,
db_pool: Arc<DbState>,
) -> anyhow::Result<ReqJihua2> {
let jihua = Jihua::find_by_id(&id, extract_conn(fetch_db_conn(&db_pool)?).deref_mut())?;
let current_org_id = required_current_org_id_from_sudoer_token_str(&root.0)?;
let jihua = Jihua::find_by_id_in_org(
&id,
&current_org_id,
extract_conn(fetch_db_conn(&db_pool)?).deref_mut(),
)?;
debug!("raw_find_jihua_by_id2 -> {:?}", &jihua);
// unimplemented!()
@@ -148,11 +160,17 @@ async fn raw_find_jihua_by_id2(
let any_relations: Vec<anyhow::Result<ReqJihuaCourseSection>> = the_course_sections
.iter()
.map(|course_section| {
let in_jihua_course_section = JihuaCourseSection::find_by_jihua_id_and_course_section_id(
&jihua.id,
&course_section.id,
extract_conn(fetch_db_conn(&db_pool)?).deref_mut(),
)?;
let jihua_org_id = jihua
.org_id
.as_ref()
.ok_or_else(|| anyhow!("jihua.org_id is required"))?;
let in_jihua_course_section =
JihuaCourseSection::find_by_jihua_id_and_course_section_id_in_org(
&jihua.id,
&course_section.id,
jihua_org_id,
extract_conn(fetch_db_conn(&db_pool)?).deref_mut(),
)?;
Ok(ReqJihuaCourseSection {
id: Some(in_jihua_course_section.id.clone()),
@@ -204,7 +222,12 @@ async fn raw_find_jihua_by_id(
id: &String,
db_pool: Arc<DbState>,
) -> anyhow::Result<ReqJihua> {
let jihua = Jihua::find_by_id(&id, extract_conn(fetch_db_conn(&db_pool)?).deref_mut())?;
let current_org_id = required_current_org_id_from_sudoer_token_str(&root.0)?;
let jihua = Jihua::find_by_id_in_org(
&id,
&current_org_id,
extract_conn(fetch_db_conn(&db_pool)?).deref_mut(),
)?;
debug!("raw_find_jihua_by_id -> {:?}", &jihua);
// fix: 改为使用数据库字段
@@ -229,11 +252,17 @@ async fn raw_find_jihua_by_id(
let any_relations: Vec<anyhow::Result<ReqJihuaCourseSection>> = the_course_sections
.iter()
.map(|course_section| {
let in_jihua_course_section = JihuaCourseSection::find_by_jihua_id_and_course_section_id(
&jihua.id,
&course_section.id,
extract_conn(fetch_db_conn(&db_pool)?).deref_mut(),
)?;
let jihua_org_id = jihua
.org_id
.as_ref()
.ok_or_else(|| anyhow!("jihua.org_id is required"))?;
let in_jihua_course_section =
JihuaCourseSection::find_by_jihua_id_and_course_section_id_in_org(
&jihua.id,
&course_section.id,
jihua_org_id,
extract_conn(fetch_db_conn(&db_pool)?).deref_mut(),
)?;
Ok(ReqJihuaCourseSection {
id: Some(in_jihua_course_section.id.clone()),
@@ -312,6 +341,9 @@ pub async fn raw_create_jihua(
db_pool: Arc<DbState>,
in_req_jihua: &ReqJihuaWithCourseSectionIds,
) -> anyhow::Result<ReqJihua> {
let current_org_id = jwt_decode_token(&(*_token).clone())?
.current_org_id
.ok_or_else(|| anyhow!("current_org_id is required"))?;
let req_jihua = in_req_jihua.clone();
let insert_jihua = Jihua {
id: uuid(),
@@ -339,6 +371,7 @@ pub async fn raw_create_jihua(
course_sections: req_jihua.course_sections.clone(),
created_by: req_jihua.created_by.clone(),
created_at: Some(current_local_datetime()),
org_id: Some(current_org_id),
};
debug!("raw_create_jihua -> insert_jihua: {:?}", insert_jihua);
@@ -401,6 +434,7 @@ pub fn raw_create_jihua_tx(
meta: Some(meta_data.clone()),
created_at: Some(current_local_datetime()),
is_delete: Some(false),
org_id: result_jihua.org_id.clone(),
};
debug!("START CREATE JIHUA QUMU SECTION -> {:?}", &section);
let created_section = JihuaCourseSection::create(&section, conn)?;
@@ -412,12 +446,12 @@ pub fn raw_create_jihua_tx(
}
pub async fn delete_jihua_by_id(
_root: HtySudoerTokenHeader,
root: HtySudoerTokenHeader,
Path(id_delete): Path<String>,
State(db_pool): State<Arc<DbState>>,
) -> Json<HtyResponse<Jihua>> {
debug!("delete_jihua_by_id -> start here");
match raw_delete_jihua_by_id(&id_delete, db_pool).await {
match raw_delete_jihua_by_id(&root, &id_delete, db_pool).await {
Ok(ok) => wrap_json_ok_resp(ok),
Err(e) => {
error!("delete_jihua_by_id -> failed to delete jihua, e: {}", e);
@@ -427,9 +461,11 @@ pub async fn delete_jihua_by_id(
}
pub async fn raw_delete_jihua_by_id(
root: &HtySudoerTokenHeader,
id_delete: &String,
db_pool: Arc<DbState>,
) -> anyhow::Result<Jihua> {
let current_org_id = required_current_org_id_from_sudoer_token_str(&root.0)?;
// 因为是逻辑删除,这里就不检查了。
// let jihua_course_sections = JihuaCourseSection::find_by_jihua_id(id_delete, extract_conn(fetch_db_conn(&db_pool)?).deref_mut())?;
//
@@ -457,8 +493,9 @@ pub async fn raw_delete_jihua_by_id(
// let _ = Jihua::update(&child_jihua, extract_conn(fetch_db_conn(&db_pool)?).deref_mut())?;
// 需要做的逻辑是把parent jihua的`is_yanqi`给改为`false`
let to_delete_jihua = Jihua::find_by_id(
let to_delete_jihua = Jihua::find_by_id_in_org(
id_delete,
&current_org_id,
extract_conn(fetch_db_conn(&db_pool)?).deref_mut(),
)?;
@@ -503,6 +540,9 @@ pub async fn raw_update_jihua(
db_pool: Arc<DbState>,
in_req_jihua: &ReqJihuaWithCourseSectionIds,
) -> anyhow::Result<ReqJihua> {
let current_org_id = jwt_decode_token(&(*_token).clone())?
.current_org_id
.ok_or_else(|| anyhow!("current_org_id is required"))?;
let the_req_jihua = in_req_jihua.clone();
if the_req_jihua.id.is_none()
|| the_req_jihua.start_date.is_none()
@@ -579,6 +619,7 @@ pub async fn raw_update_jihua(
course_sections: c_req_jihua.course_sections.clone(),
created_by: c_req_jihua.created_by.clone(),
created_at: c_req_jihua.created_at.clone(),
org_id: Some(current_org_id.clone()),
};
debug!("raw_update_jihua -> update_jihua -> {:?}", &update_jihua);
@@ -675,6 +716,10 @@ pub fn raw_update_jihua_course_section_relations(
.ok_or_else(|| anyhow!("course_section_ids is required"))?; // 这些是传入给我们要保留/添加的关系.
let mut resp_jihua = in_jihua.to_req(); // 返回数据
let current_org_id = in_jihua
.org_id
.clone()
.ok_or_else(|| anyhow!("jihua.org_id is required"))?;
// *更新逻辑*
// 1.把不存在于传入的`in_course_section_ids`,且已有的关系数据删掉(逻辑删除)。
@@ -684,7 +729,8 @@ pub fn raw_update_jihua_course_section_relations(
// todo: 改为逻辑删除后,这里会重复查出来`已删除`关系,后续优化。
let mut to_delete_relation_ids: Vec<String> = Vec::new();
// 查找这个jihua的所有course_section_ids,然后跟传入的做比对,把不存在于传入数据的ids放入`to_delete_relation_ids`,然后逻辑删除.
let existing_jihua_course_sections = JihuaCourseSection::find_by_jihua_id(jihua_id, conn)?;
let existing_jihua_course_sections =
JihuaCourseSection::find_by_jihua_id_in_org(jihua_id, &current_org_id, conn)?;
debug!(
"raw_update_jihua_course_section_relations -> existing_jihua_course_sections -> {:?}",
&existing_jihua_course_sections
@@ -713,9 +759,10 @@ pub fn raw_update_jihua_course_section_relations(
// 然后做第二步
for in_course_section_id in in_course_section_ids.clone() {
match JihuaCourseSection::find_by_jihua_id_and_course_section_id(
match JihuaCourseSection::find_by_jihua_id_and_course_section_id_in_org(
&in_jihua.id,
&in_course_section_id,
&current_org_id,
conn,
) {
Ok(rel) => {
@@ -744,6 +791,7 @@ pub fn raw_update_jihua_course_section_relations(
meta: None,
created_at: Some(current_local_datetime()),
is_delete: Some(false),
org_id: in_jihua.org_id.clone(),
};
debug!("raw_update_jihua_course_section_relations -> ADDING THIS RELATION -> {:?}", & to_add_rel);
let _ = JihuaCourseSection::create(&to_add_rel, conn)?;
@@ -879,6 +927,7 @@ pub async fn raw_find_jihuas_with_sections_by_user_id(
db_pool: Arc<DbState>,
) -> anyhow::Result<(Vec<ReqJihua>, i64, i64)> {
debug!("raw_find_jihuas_with_sections_by_user_id -> START");
let current_org_id = required_current_org_id_from_sudoer_token_str(&root.0)?;
if (teacher_id.is_some() && student_id.is_some())
|| (teacher_id.is_none() && student_id.is_none())
@@ -898,6 +947,7 @@ pub async fn raw_find_jihuas_with_sections_by_user_id(
if let Some(t_id) = &teacher_id {
jihuas_with_pages = Jihua::find_active_jihuas_by_teacher_id_with_pagination(
t_id,
&current_org_id,
page,
page_size,
start_from,
@@ -911,6 +961,7 @@ pub async fn raw_find_jihuas_with_sections_by_user_id(
student_id
.as_ref()
.ok_or_else(|| anyhow!("student_id is required"))?,
&current_org_id,
page,
page_size,
start_from,
+18 -4
View File
@@ -4,6 +4,7 @@ use axum::Json;
use axum_macros::debug_handler;
use htycommons::common::{current_local_datetime, HtyErr, HtyErrCode, HtyResponse};
use htycommons::db::{extract_conn, fetch_db_conn, DbState};
use htycommons::jwt::jwt_decode_token;
use htycommons::uuid;
use htycommons::web::{
wrap_json_anyhow_err, wrap_json_ok_resp, HtyHostHeader, HtySudoerTokenHeader,
@@ -16,6 +17,12 @@ use std::ops::DerefMut;
use std::sync::Arc;
use tracing::{debug, error};
/// Decodes a sudoer token string and extracts its `current_org_id`,
/// erroring when the token decodes but carries no organization.
fn required_current_org_id_from_sudoer_token_str(token_str: &String) -> anyhow::Result<String> {
    let decoded = jwt_decode_token(token_str)?;
    match decoded.current_org_id {
        Some(org_id) => Ok(org_id),
        None => Err(anyhow!("current_org_id is required")),
    }
}
pub fn count_lianxi_and_piyue_on_the_fly(all_lianxis: &Vec<Lianxi>) -> (i32, i32) {
let count_lianxi = all_lianxis.clone().len() as i32;
let not_piyue_count = all_lianxis
@@ -53,10 +60,11 @@ pub async fn create_lianxi(
pub async fn raw_create_lianxi(
_host: &HtyHostHeader,
_root: &HtySudoerTokenHeader,
root: &HtySudoerTokenHeader,
in_req_lianxi: &ReqLianxi,
db_pool: Arc<DbState>,
) -> anyhow::Result<String> {
let current_org_id = required_current_org_id_from_sudoer_token_str(&root.0)?;
let req_lianxi = in_req_lianxi.clone();
let mut in_jihua_course_section_id = None;
@@ -88,6 +96,7 @@ pub async fn raw_create_lianxi(
is_delete: Some(false),
created_by: req_lianxi.created_by.clone(),
creator_name: req_lianxi.creator_name.clone(),
org_id: Some(current_org_id),
};
let res = Lianxi::create(
@@ -159,10 +168,11 @@ pub async fn create_lianxi2(
pub async fn raw_create_lianxi2(
_host: &HtyHostHeader,
_root: &HtySudoerTokenHeader,
root: &HtySudoerTokenHeader,
in_req_lianxi: &ReqLianxi2,
db_pool: Arc<DbState>,
) -> anyhow::Result<String> {
let current_org_id = required_current_org_id_from_sudoer_token_str(&root.0)?;
let req_lianxi = in_req_lianxi.clone();
let mut in_jihua_course_section_id = None;
@@ -196,6 +206,7 @@ pub async fn raw_create_lianxi2(
is_delete: Some(false),
created_by: req_lianxi.created_by.clone(),
creator_name: req_lianxi.creator_name.clone(),
org_id: Some(current_org_id),
};
let some_in_ref_resources = req_lianxi.ref_resources.clone();
@@ -429,12 +440,12 @@ pub async fn raw_delete_lianxi_by_id2(
// deprecated. NO USE
pub async fn update_lianxi_by_id(
_root: HtySudoerTokenHeader,
root: HtySudoerTokenHeader,
State(db_pool): State<Arc<DbState>>,
Json(in_lianxi): Json<ReqLianxi>,
) -> Json<HtyResponse<String>> {
debug!("update_lianxi_by_id -> start here");
match raw_update_lianxi_by_id(&in_lianxi, db_pool).await {
match raw_update_lianxi_by_id(&root, &in_lianxi, db_pool).await {
Ok(ok) => {
debug!("update_lianxi_by_id -> success to update lianxi {}", ok);
wrap_json_ok_resp(ok)
@@ -447,9 +458,11 @@ pub async fn update_lianxi_by_id(
}
pub async fn raw_update_lianxi_by_id(
root: &HtySudoerTokenHeader,
in_req_lianxi: &ReqLianxi,
db_pool: Arc<DbState>,
) -> anyhow::Result<String> {
let current_org_id = required_current_org_id_from_sudoer_token_str(&root.0)?;
let req_lianxi = in_req_lianxi.clone();
if req_lianxi.jihua_course_section_id.is_none() && req_lianxi.daka_course_section_id.is_none() {
return Err(anyhow!(HtyErr {
@@ -502,6 +515,7 @@ pub async fn raw_update_lianxi_by_id(
is_delete: req_lianxi.is_delete,
created_by: req_lianxi.created_by,
creator_name: req_lianxi.creator_name,
org_id: Some(current_org_id),
};
let res = Lianxi::update(
&update_lianxi,
@@ -0,0 +1,8 @@
-- Down migration: remove org scoping from course / course_section.
-- Indexes are dropped explicitly before the columns; IF EXISTS keeps the
-- script safe to re-run.
DROP INDEX IF EXISTS idx_course_section_org_id;
DROP INDEX IF EXISTS idx_course_org_id;
-- Reverse order of the up migration: course_section first, then course.
ALTER TABLE course_section
DROP COLUMN IF EXISTS org_id;
ALTER TABLE course
DROP COLUMN IF EXISTS org_id;
@@ -0,0 +1,8 @@
-- Up migration: add per-organization scoping to course / course_section.
-- org_id is a nullable varchar (no NOT NULL, no default), so pre-existing
-- rows remain valid without a backfill step.
ALTER TABLE course
ADD COLUMN IF NOT EXISTS org_id varchar;
ALTER TABLE course_section
ADD COLUMN IF NOT EXISTS org_id varchar;
-- Indexes support the service-layer queries that filter by org_id.
CREATE INDEX IF NOT EXISTS idx_course_org_id ON course (org_id);
CREATE INDEX IF NOT EXISTS idx_course_section_org_id ON course_section (org_id);
@@ -0,0 +1,12 @@
-- Down migration: remove org scoping from lianxi / piyue / piyue_info.
-- Drops are performed in reverse order of the up migration; IF EXISTS keeps
-- the script idempotent.
DROP INDEX IF EXISTS idx_piyue_info_org_id;
DROP INDEX IF EXISTS idx_piyue_org_id;
DROP INDEX IF EXISTS idx_lianxi_org_id;
ALTER TABLE piyue_info
DROP COLUMN IF EXISTS org_id;
ALTER TABLE piyue
DROP COLUMN IF EXISTS org_id;
ALTER TABLE lianxi
DROP COLUMN IF EXISTS org_id;
@@ -0,0 +1,12 @@
-- Up migration: add per-organization scoping to lianxi / piyue / piyue_info.
-- org_id is a nullable varchar, so existing rows are unaffected.
ALTER TABLE lianxi
ADD COLUMN IF NOT EXISTS org_id varchar;
ALTER TABLE piyue
ADD COLUMN IF NOT EXISTS org_id varchar;
ALTER TABLE piyue_info
ADD COLUMN IF NOT EXISTS org_id varchar;
-- Indexes support the org_id filters added in the ws service queries.
CREATE INDEX IF NOT EXISTS idx_lianxi_org_id ON lianxi (org_id);
CREATE INDEX IF NOT EXISTS idx_piyue_org_id ON piyue (org_id);
CREATE INDEX IF NOT EXISTS idx_piyue_info_org_id ON piyue_info (org_id);
@@ -0,0 +1,16 @@
-- Down migration: remove org scoping from jihua / daka and their
-- course-section join tables. Drops run in reverse order of the up
-- migration; IF EXISTS keeps the script safe to re-run.
DROP INDEX IF EXISTS idx_daka_course_section_org_id;
DROP INDEX IF EXISTS idx_jihua_course_section_org_id;
DROP INDEX IF EXISTS idx_daka_org_id;
DROP INDEX IF EXISTS idx_jihua_org_id;
ALTER TABLE daka_course_section
DROP COLUMN IF EXISTS org_id;
ALTER TABLE jihua_course_section
DROP COLUMN IF EXISTS org_id;
ALTER TABLE daka
DROP COLUMN IF EXISTS org_id;
ALTER TABLE jihua
DROP COLUMN IF EXISTS org_id;
@@ -0,0 +1,16 @@
-- Up migration: add per-organization scoping to jihua / daka and their
-- course-section join tables. org_id is a nullable varchar, so existing
-- rows remain valid without a backfill.
ALTER TABLE jihua
ADD COLUMN IF NOT EXISTS org_id varchar;
ALTER TABLE daka
ADD COLUMN IF NOT EXISTS org_id varchar;
ALTER TABLE jihua_course_section
ADD COLUMN IF NOT EXISTS org_id varchar;
ALTER TABLE daka_course_section
ADD COLUMN IF NOT EXISTS org_id varchar;
-- Indexes support the org_id filters used by the paginated service queries.
CREATE INDEX IF NOT EXISTS idx_jihua_org_id ON jihua (org_id);
CREATE INDEX IF NOT EXISTS idx_daka_org_id ON daka (org_id);
CREATE INDEX IF NOT EXISTS idx_jihua_course_section_org_id ON jihua_course_section (org_id);
CREATE INDEX IF NOT EXISTS idx_daka_course_section_org_id ON daka_course_section (org_id);
+234 -6
View File
@@ -48,6 +48,7 @@ pub struct Course {
pub created_by: Option<String>,
pub created_at: Option<NaiveDateTime>,
pub is_delete: Option<bool>,
pub org_id: Option<String>,
}
#[derive(
@@ -95,6 +96,7 @@ pub struct ReqCourseWithSections {
pub sections: Option<Vec<ReqCourseSection>>,
pub task: Option<UploadPictureTask>,
pub is_delete: Option<bool>,
pub org_id: Option<String>,
}
#[derive(Serialize, Deserialize, Debug, Clone)]
@@ -215,6 +217,20 @@ impl Course {
}
}
/// Returns every `course` row belonging to the given org.
///
/// No `is_delete` filtering is applied here — callers get all rows for the
/// org, deleted or not. Query failures are wrapped as `HtyErrCode::DbErr`.
pub fn find_all_course_in_org(
    in_org_id: &String,
    conn: &mut PgConnection,
) -> anyhow::Result<Vec<Course>> {
    use crate::schema::course::dsl::*;
    let scoped = course.filter(org_id.eq(in_org_id));
    match scoped.get_results(conn) {
        Ok(rows) => Ok(rows),
        Err(e) => Err(anyhow!(HtyErr {
            code: HtyErrCode::DbErr,
            reason: Some(e.to_string()),
        })),
    }
}
pub fn find_by_id(id_course: &String, conn: &mut PgConnection) -> anyhow::Result<Course> {
// use crate::schema::course::dsl::*;
match course::table.filter(course::id.eq(id_course)).first::<Course>(conn) {
@@ -355,6 +371,7 @@ impl Course {
sections: Some(out_req_sections),
task: None,
is_delete: self.is_delete.clone(),
org_id: self.org_id.clone(),
};
Ok(out_course)
@@ -484,6 +501,7 @@ pub struct CourseSection {
pub updated_at: Option<NaiveDateTime>,
pub updated_by: Option<String>,
pub teachers: Option<MultiVals<TeacherIdAndName>>,
pub org_id: Option<String>,
}
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq)]
@@ -515,6 +533,7 @@ pub struct ReqCourseSection {
pub lianxi_count: Option<i32>,
// count_lianxi_and_piyue_on_the_fly(&lianxis);
pub updated_by_realname: Option<String>,
pub org_id: Option<String>,
}
#[derive(Clone, Debug, Serialize, Deserialize)]
@@ -551,6 +570,7 @@ pub struct ReqCourseSection2 {
pub resources: Option<Vec<ReqRefResource>>,
pub course_category_key: Option<String>,
pub course_category_name: Option<String>,
pub org_id: Option<String>,
}
impl CourseSection {
@@ -736,6 +756,25 @@ impl CourseSection {
}
}
/// Returns the "active" course sections of one org.
///
/// Active means `is_delete = false` and the section is not a draft
/// (`is_draft` is `false` or NULL). Query failures are wrapped as
/// `HtyErrCode::DbErr`.
pub fn find_all_active_sections_in_org(
    in_org_id: &String,
    conn: &mut PgConnection,
) -> anyhow::Result<Vec<CourseSection>> {
    use crate::schema::course_section::dsl::*;
    // NULL is_draft counts as "published" — treat it the same as false.
    let published = is_draft.eq(Some(false)).or(is_draft.is_null());
    course_section
        .filter(org_id.eq(in_org_id))
        .filter(crate::schema::course_section::is_delete.eq(Some(false)))
        .filter(published)
        .get_results(conn)
        .map_err(|e| {
            anyhow!(HtyErr {
                code: HtyErrCode::DbErr,
                reason: Some(e.to_string()),
            })
        })
}
pub fn find_all_active_sections_by_teacher_id_and_course_id_and_keyword_with_page(
page: i64,
page_size: i64,
@@ -997,6 +1036,7 @@ impl CourseSection {
updated_at: c_in_req.updated_at,
updated_by: c_in_req.updated_by,
teachers: c_in_req.teachers,
org_id: c_in_req.org_id,
})
} else {
let c_in_req = in_req.clone();
@@ -1020,6 +1060,7 @@ impl CourseSection {
updated_at: c_in_req.updated_at,
updated_by: c_in_req.updated_by,
teachers: c_in_req.teachers,
org_id: c_in_req.org_id,
})
}
}
@@ -1075,6 +1116,7 @@ impl CourseSection {
updated_by: self.updated_by.clone(),
updated_by_realname: None,
teachers: self.teachers.clone(),
org_id: self.org_id.clone(),
}
}
pub fn to_req_with_ref_resource(
@@ -1146,6 +1188,7 @@ impl CourseSection {
updated_by: self.updated_by.clone(),
updated_by_realname: None,
teachers: self.teachers.clone(),
org_id: self.org_id.clone(),
};
debug!(
@@ -1241,6 +1284,7 @@ pub struct Jihua {
pub course_sections: Option<MultiVals<CourseSectionJsonData>>,
pub created_by: Option<String>,
pub created_at: Option<NaiveDateTime>,
pub org_id: Option<String>,
}
#[derive(AsExpression, FromSqlRow, Debug, Serialize, Deserialize, PartialEq, Clone)]
@@ -1704,6 +1748,23 @@ impl Jihua {
}
}
/// Looks up a single `jihua` row by primary key, additionally requiring
/// that the row belongs to the given org (`org_id = id_org`).
///
/// Every failure — including "no matching row" — is wrapped as
/// `HtyErrCode::DbErr`, matching the error style of the sibling finders,
/// so a jihua from another org reads as an error rather than a hit.
pub fn find_by_id_in_org(
    id_jihua: &String,
    id_org: &String,
    conn: &mut PgConnection,
) -> anyhow::Result<Jihua> {
    use crate::schema::jihua::dsl::*;
    jihua
        .filter(id.eq(id_jihua).and(org_id.eq(id_org)))
        .first::<Jihua>(conn)
        .map_err(|e| {
            anyhow!(HtyErr {
                code: HtyErrCode::DbErr,
                reason: Some(e.to_string()),
            })
        })
}
pub fn find_active_linked_course_sections(
&self,
conn: &mut PgConnection,
@@ -1802,6 +1863,7 @@ impl Jihua {
// return: (Vec<Jihua>, total_page: i64, total: i64)
pub fn find_active_jihuas_by_student_id_with_pagination(
id_student: &String,
id_org: &String,
page: i64,
page_size: i64,
start_from: &Option<NaiveDateTime>,
@@ -1817,6 +1879,7 @@ impl Jihua {
.into_boxed()
.order(jihua::start_date.desc())
.filter(student_id.eq(id_student))
.filter(org_id.eq(id_org))
.filter(jihua::is_delete.eq(Some(false)));
if is_compare_current_date.unwrap_or(false) {
@@ -1868,6 +1931,7 @@ impl Jihua {
// return: (Vec<Jihua>, total_page: i64, total: i64)
pub fn find_active_jihuas_by_teacher_id_with_pagination(
id_teacher: &String,
id_org: &String,
page: i64,
page_size: i64,
start_from: &Option<NaiveDateTime>,
@@ -1883,6 +1947,7 @@ impl Jihua {
.into_boxed()
.order(start_date.desc())
.filter(teacher_id.eq(id_teacher))
.filter(org_id.eq(id_org))
.filter(is_delete.eq(Some(false)));
if is_compare_current_date.unwrap_or(false) {
@@ -1991,6 +2056,7 @@ pub struct JihuaCourseSection {
pub meta: Option<JihuaCourseSectionMeta>,
pub created_at: Option<NaiveDateTime>,
pub is_delete: Option<bool>,
pub org_id: Option<String>,
}
impl JihuaCourseSection {
@@ -2122,6 +2188,24 @@ impl JihuaCourseSection {
}
}
/// Returns all `jihua_course_section` links for one course section,
/// scoped to the given org.
///
/// No `is_delete` filter is applied; callers receive every matching link.
/// Query failures are wrapped as `HtyErrCode::DbErr`.
pub fn find_all_by_course_section_id_in_org(
    id_course_section: &String,
    id_org: &String,
    conn: &mut PgConnection,
) -> anyhow::Result<Vec<JihuaCourseSection>> {
    // `dsl::*` already re-exports every column; the extra `columns::*`
    // glob import was redundant and has been dropped.
    use crate::schema::jihua_course_section::dsl::*;
    jihua_course_section
        .filter(course_section_id.eq(id_course_section).and(org_id.eq(id_org)))
        .load::<JihuaCourseSection>(conn)
        .map_err(|e| {
            anyhow!(HtyErr {
                code: HtyErrCode::DbErr,
                reason: Some(e.to_string()),
            })
        })
}
pub fn find_by_jihua_id(
id_jihua: &String,
conn: &mut PgConnection,
@@ -2141,6 +2225,24 @@ impl JihuaCourseSection {
result
}
/// Returns all `jihua_course_section` links for one jihua, scoped to the
/// given org.
///
/// No `is_delete` filter is applied. Query failures are wrapped as
/// `HtyErrCode::DbErr`.
pub fn find_by_jihua_id_in_org(
    id_jihua: &String,
    id_org: &String,
    conn: &mut PgConnection,
) -> anyhow::Result<Vec<JihuaCourseSection>> {
    // `dsl::*` already re-exports every column; the extra `columns::*`
    // glob import was redundant and has been dropped.
    use crate::schema::jihua_course_section::dsl::*;
    jihua_course_section
        .filter(jihua_id.eq(id_jihua).and(org_id.eq(id_org)))
        .get_results(conn)
        .map_err(|e| {
            anyhow!(HtyErr {
                code: HtyErrCode::DbErr,
                reason: Some(e.to_string()),
            })
        })
}
pub fn find_by_jihua_id_and_course_section_id(
id_jihua: &String,
id_course_section: &String,
@@ -2165,6 +2267,30 @@ impl JihuaCourseSection {
}
}
/// Fetches the unique `jihua_course_section` link identified by the
/// (jihua, course_section) pair, scoped to the given org.
///
/// Every failure — including "no matching row" — is wrapped as
/// `HtyErrCode::DbErr`, matching the sibling finders.
pub fn find_by_jihua_id_and_course_section_id_in_org(
    id_jihua: &String,
    id_course_section: &String,
    id_org: &String,
    conn: &mut PgConnection,
) -> anyhow::Result<JihuaCourseSection> {
    // `dsl::*` already re-exports every column; the extra `columns::*`
    // glob import was redundant and has been dropped.
    use crate::schema::jihua_course_section::dsl::*;
    jihua_course_section
        .filter(
            course_section_id
                .eq(id_course_section)
                .and(jihua_id.eq(id_jihua))
                .and(org_id.eq(id_org)),
        )
        .first::<JihuaCourseSection>(conn)
        .map_err(|e| {
            anyhow!(HtyErr {
                code: HtyErrCode::DbErr,
                reason: Some(e.to_string()),
            })
        })
}
// 这个方法不应该被使用
// DEPRECATED
pub fn delete_all_by_jihua_id(
@@ -2244,6 +2370,7 @@ pub struct Lianxi {
pub is_delete: Option<bool>,
pub created_by: Option<String>,
pub creator_name: Option<String>,
pub org_id: Option<String>,
}
impl Lianxi {
@@ -2315,6 +2442,21 @@ impl Lianxi {
}
}
/// Fetches one `lianxi` row by id, constrained to the given org.
///
/// A lianxi belonging to a different org behaves like a missing row and
/// surfaces as an `HtyErrCode::DbErr`, same as any other query failure.
pub fn find_by_id_in_org(
    id_lianxi: &String,
    in_org_id: &String,
    conn: &mut PgConnection,
) -> anyhow::Result<Lianxi> {
    use crate::schema::lianxi::dsl::*;
    // Successive .filter() calls combine with AND.
    let found = lianxi
        .filter(id.eq(id_lianxi))
        .filter(org_id.eq(in_org_id))
        .first::<Lianxi>(conn);
    found.map_err(|e| {
        anyhow!(HtyErr {
            code: HtyErrCode::DbErr,
            reason: Some(e.to_string()),
        })
    })
}
pub fn delete_by_id(delete_id: &String, conn: &mut PgConnection) -> anyhow::Result<usize> {
use crate::schema::lianxi::dsl::*;
let result = delete(lianxi.filter(id.eq(delete_id)))
@@ -2539,6 +2681,7 @@ pub struct Piyue {
pub created_at: Option<NaiveDateTime>,
pub teacher_name: Option<String>,
pub is_delete: Option<bool>,
pub org_id: Option<String>,
}
impl Piyue {
@@ -2582,6 +2725,22 @@ impl Piyue {
}
}
/// Returns the first `piyue` row linked to the given lianxi within the
/// given org, or `None` when no such row exists.
///
/// `.optional()` converts the "no row" case into `Ok(None)`; any other
/// query failure is wrapped as `HtyErrCode::DbErr`.
///
/// NOTE(review): there is no ORDER BY on this query, so when multiple
/// piyue rows reference the same lianxi, which one is returned is
/// unspecified — confirm whether callers rely on at-most-one piyue per
/// lianxi, or add an explicit ordering.
pub fn find_first_by_lianxi_id_in_org(
    id_lianxi: &String,
    in_org_id: &String,
    conn: &mut PgConnection,
) -> anyhow::Result<Option<Piyue>> {
    use crate::schema::piyue::dsl::*;
    piyue
        .filter(lianxi_id.eq(id_lianxi).and(org_id.eq(in_org_id)))
        .first::<Piyue>(conn)
        .optional()
        .map_err(|e| anyhow!(HtyErr {
            code: HtyErrCode::DbErr,
            reason: Some(e.to_string()),
        }))
}
pub fn update(in_piyue: &Piyue, conn: &mut PgConnection) -> anyhow::Result<Piyue> {
let result = update(piyue::table)
.filter(piyue::id.eq(in_piyue.clone().id))
@@ -2649,6 +2808,7 @@ pub struct PiyueInfo {
pub serial: Option<String>,
pub created_at: Option<NaiveDateTime>,
pub is_delete: Option<bool>,
pub org_id: Option<String>,
}
impl PiyueInfo {
@@ -3249,6 +3409,7 @@ pub struct Daka {
pub students: Option<SingleVal<ReqHtyUserGroup>>,
pub course_sections: Option<MultiVals<CourseSectionJsonData>>,
pub is_yanqi: Option<bool>,
pub org_id: Option<String>,
}
#[derive(AsExpression, FromSqlRow, Debug, Serialize, Deserialize, PartialEq, Clone)]
@@ -3272,6 +3433,7 @@ impl_jsonb_boilerplate!(TeacherIdAndName);
impl Daka {
pub fn find_active_dakas_by_student_id_with_pagination(
id_student: &String,
id_org: &String,
page: i64,
page_size: i64,
conn: &mut PgConnection,
@@ -3280,7 +3442,7 @@ impl Daka {
// 可以参考Daka::find_active_dakas_by_all_teachers_with_pagination
use diesel::prelude::*;
let q_total = format!("select count(*) as result from daka where is_delete is not True AND jsonb_path_exists(students, '$.val.users.vals[*].user_id ? (@ == \"{}\")')", id_student).to_string();
let q_total = format!("select count(*) as result from daka where is_delete is not True AND org_id='{}' AND jsonb_path_exists(students, '$.val.users.vals[*].user_id ? (@ == \"{}\")')", id_org, id_student).to_string();
debug!(
"find_active_dakas_by_student_id_with_pagination -> q_total: {:?}",
q_total
@@ -3291,7 +3453,7 @@ impl Daka {
.result;
let total_page = total_len.clone() / page_size;
let q = format!("select * from daka where is_delete is not True AND jsonb_path_exists(students, '$.val.users.vals[*].user_id ? (@ == \"{}\")') ORDER BY updated_at DESC, created_at DESC LIMIT {} OFFSET ({} - 1) * {}", id_student, page_size.to_string(), page.to_string(), page_size.to_string()).to_string();
let q = format!("select * from daka where is_delete is not True AND org_id='{}' AND jsonb_path_exists(students, '$.val.users.vals[*].user_id ? (@ == \"{}\")') ORDER BY updated_at DESC, created_at DESC LIMIT {} OFFSET ({} - 1) * {}", id_org, id_student, page_size.to_string(), page.to_string(), page_size.to_string()).to_string();
debug!(
"find_active_dakas_by_student_id_with_pagination -> q: {:?}",
q
@@ -3411,22 +3573,40 @@ impl Daka {
}
}
/// Looks up a single `daka` row by primary key, additionally requiring
/// that the row belongs to the given org (`org_id = id_org`).
///
/// Every failure — including "no matching row" — is wrapped as
/// `HtyErrCode::DbErr`, so a daka from another org reads as an error
/// rather than a hit.
pub fn find_by_id_in_org(
    id_daka: &String,
    id_org: &String,
    conn: &mut PgConnection,
) -> anyhow::Result<Daka> {
    use crate::schema::daka::dsl::*;
    daka
        .filter(id.eq(id_daka).and(org_id.eq(id_org)))
        .first::<Daka>(conn)
        .map_err(|e| {
            anyhow!(HtyErr {
                code: HtyErrCode::DbErr,
                reason: Some(e.to_string()),
            })
        })
}
pub fn find_active_dakas_by_all_teachers_with_pagination(
id_teacher: &String,
id_org: &String,
page: i64,
page_size: i64,
conn: &mut PgConnection,
) -> anyhow::Result<(Vec<Daka>, i64, i64)> {
use diesel::prelude::*;
let q_total = format!("select count(*) as result from daka where is_delete is not True AND (teacher_id = '{}' OR jsonb_path_exists(teachers, '$.vals[*].teacher_id ? (@ == \"{}\")'))", id_teacher, id_teacher).to_string();
let q_total = format!("select count(*) as result from daka where is_delete is not True AND org_id='{}' AND (teacher_id = '{}' OR jsonb_path_exists(teachers, '$.vals[*].teacher_id ? (@ == \"{}\")'))", id_org, id_teacher, id_teacher).to_string();
let total_len = sql_query(q_total.clone())
.get_result::<CountResult>(conn)?
.result;
let total_page = total_len.clone() / page_size;
let q = format!("select * from daka where is_delete is not True AND (teacher_id = '{}' OR jsonb_path_exists(teachers, '$.vals[*].teacher_id ? (@ == \"{}\")')) ORDER BY updated_at DESC, created_at DESC LIMIT {} OFFSET ({} - 1) * {}", id_teacher, id_teacher, page_size.to_string(), page.to_string(), page_size.to_string()).to_string();
let q = format!("select * from daka where is_delete is not True AND org_id='{}' AND (teacher_id = '{}' OR jsonb_path_exists(teachers, '$.vals[*].teacher_id ? (@ == \"{}\")')) ORDER BY updated_at DESC, created_at DESC LIMIT {} OFFSET ({} - 1) * {}", id_org, id_teacher, id_teacher, page_size.to_string(), page.to_string(), page_size.to_string()).to_string();
debug!(
"find_active_dakas_by_all_teachers_with_pagination -> Q: {:?}",
q
@@ -3443,20 +3623,21 @@ impl Daka {
pub fn find_active_dakas_by_assigned_teachers_with_pagination(
id_teacher: &String,
id_org: &String,
page: i64,
page_size: i64,
conn: &mut PgConnection,
) -> anyhow::Result<(Vec<Daka>, i64, i64)> {
use diesel::prelude::*;
let q_total = format!("select count(*) as result from daka where is_delete is not True AND jsonb_path_exists(teachers, '$.vals[*].teacher_id ? (@ == \"{}\")')", id_teacher).to_string();
let q_total = format!("select count(*) as result from daka where is_delete is not True AND org_id='{}' AND jsonb_path_exists(teachers, '$.vals[*].teacher_id ? (@ == \"{}\")')", id_org, id_teacher).to_string();
let total_len = sql_query(q_total.clone())
.get_result::<CountResult>(conn)?
.result;
let total_page = total_len.clone() / page_size;
let q = format!("select * from daka where is_delete is not True AND jsonb_path_exists(teachers, '$.vals[*].teacher_id ? (@ == \"{}\")') ORDER BY updated_at DESC, created_at DESC LIMIT {} OFFSET ({} - 1) * {}", id_teacher, page_size.to_string(), page.to_string(), page_size.to_string()).to_string();
let q = format!("select * from daka where is_delete is not True AND org_id='{}' AND jsonb_path_exists(teachers, '$.vals[*].teacher_id ? (@ == \"{}\")') ORDER BY updated_at DESC, created_at DESC LIMIT {} OFFSET ({} - 1) * {}", id_org, id_teacher, page_size.to_string(), page.to_string(), page_size.to_string()).to_string();
debug!(
"find_active_dakas_by_assigned_teachers_with_pagination -> Q: {:?}",
q
@@ -3473,6 +3654,7 @@ impl Daka {
pub fn find_active_dakas_by_owner_teacher_with_pagination(
id_teacher: &String,
id_org: &String,
page: i64,
page_size: i64,
start_from: &Option<NaiveDateTime>,
@@ -3485,6 +3667,7 @@ impl Daka {
.into_boxed()
.order(daka::start_date.desc())
.filter(teacher_id.eq(id_teacher))
.filter(org_id.eq(id_org))
.filter(crate::schema::daka::is_delete.eq(false));
if let Some(start) = start_from {
q = q.filter(daka::start_date.ge(start.clone()))
@@ -3509,6 +3692,7 @@ impl Daka {
pub fn find_active_dakas_by_group_ids_with_pagination(
group_ids: Vec<String>,
id_org: &String,
page: i64,
page_size: i64,
start_from: &Option<NaiveDateTime>,
@@ -3520,6 +3704,7 @@ impl Daka {
let mut q = daka::table
.into_boxed()
.order(daka::start_date.desc())
.filter(org_id.eq(id_org))
.filter(crate::schema::daka::is_delete.eq(false))
.filter(group_id.eq_any(group_ids));
if let Some(start) = start_from {
@@ -3590,6 +3775,7 @@ pub struct DakaCourseSection {
pub course_section_id: String,
pub meta: Option<JihuaCourseSectionMeta>,
pub is_delete: bool,
pub org_id: Option<String>,
}
impl DakaCourseSection {
@@ -3667,6 +3853,30 @@ impl DakaCourseSection {
}
}
/// Fetches the unique `daka_course_section` link identified by the
/// (daka, course_section) pair, scoped to the given org.
///
/// Every failure — including "no matching row" — is wrapped as
/// `HtyErrCode::DbErr`, matching the sibling finders.
pub fn find_by_daka_id_and_course_section_id_in_org(
    id_daka: &String,
    id_course_section: &String,
    id_org: &String,
    conn: &mut PgConnection,
) -> anyhow::Result<DakaCourseSection> {
    // `dsl::*` already re-exports every column; the extra `columns::*`
    // glob import was redundant and has been dropped.
    use crate::schema::daka_course_section::dsl::*;
    daka_course_section
        .filter(
            course_section_id
                .eq(id_course_section)
                .and(daka_id.eq(id_daka))
                .and(org_id.eq(id_org)),
        )
        .first::<DakaCourseSection>(conn)
        .map_err(|e| {
            anyhow!(HtyErr {
                code: HtyErrCode::DbErr,
                reason: Some(e.to_string()),
            })
        })
}
pub fn find_by_daka_id(
id_daka: &String,
conn: &mut PgConnection,
@@ -3686,6 +3896,24 @@ impl DakaCourseSection {
result
}
/// Returns all `daka_course_section` links for one daka, scoped to the
/// given org.
///
/// No `is_delete` filter is applied. Query failures are wrapped as
/// `HtyErrCode::DbErr`.
pub fn find_by_daka_id_in_org(
    id_daka: &String,
    id_org: &String,
    conn: &mut PgConnection,
) -> anyhow::Result<Vec<DakaCourseSection>> {
    // `dsl::*` already re-exports every column; the extra `columns::*`
    // glob import was redundant and has been dropped.
    use crate::schema::daka_course_section::dsl::*;
    daka_course_section
        .filter(daka_id.eq(id_daka).and(org_id.eq(id_org)))
        .get_results(conn)
        .map_err(|e| {
            anyhow!(HtyErr {
                code: HtyErrCode::DbErr,
                reason: Some(e.to_string()),
            })
        })
}
pub fn logic_delete_by_id(
id_delete: &String,
conn: &mut PgConnection,
+9
View File
@@ -42,6 +42,7 @@ diesel::table! {
created_by -> Nullable<Varchar>,
created_at -> Nullable<Timestamp>,
is_delete -> Nullable<Bool>,
org_id -> Nullable<Varchar>,
}
}
@@ -100,6 +101,7 @@ diesel::table! {
updated_at -> Nullable<Timestamp>,
updated_by -> Nullable<Varchar>,
teachers -> Nullable<Jsonb>,
org_id -> Nullable<Varchar>,
}
}
@@ -124,6 +126,7 @@ diesel::table! {
students -> Nullable<Jsonb>,
course_sections -> Nullable<Jsonb>,
is_yanqi -> Nullable<Bool>,
org_id -> Nullable<Varchar>,
}
}
@@ -134,6 +137,7 @@ diesel::table! {
course_section_id -> Varchar,
meta -> Nullable<Jsonb>,
is_delete -> Bool,
org_id -> Nullable<Varchar>,
}
}
@@ -152,6 +156,7 @@ diesel::table! {
course_sections -> Nullable<Jsonb>,
created_by -> Nullable<Varchar>,
created_at -> Nullable<Timestamp>,
org_id -> Nullable<Varchar>,
}
}
@@ -163,6 +168,7 @@ diesel::table! {
meta -> Nullable<Jsonb>,
created_at -> Nullable<Timestamp>,
is_delete -> Nullable<Bool>,
org_id -> Nullable<Varchar>,
}
}
@@ -185,6 +191,7 @@ diesel::table! {
is_delete -> Nullable<Bool>,
created_by -> Nullable<Varchar>,
creator_name -> Nullable<Varchar>,
org_id -> Nullable<Varchar>,
}
}
@@ -197,6 +204,7 @@ diesel::table! {
created_at -> Nullable<Timestamp>,
teacher_name -> Nullable<Varchar>,
is_delete -> Nullable<Bool>,
org_id -> Nullable<Varchar>,
}
}
@@ -209,6 +217,7 @@ diesel::table! {
serial -> Nullable<Varchar>,
created_at -> Nullable<Timestamp>,
is_delete -> Nullable<Bool>,
org_id -> Nullable<Varchar>,
}
}