chore: add core rust project files and diesel migrations

Track required workspace crates, scripts, and historical diesel migrations so the repository contains the complete runnable backend baseline.

Made-with: Cursor
This commit is contained in:
2026-04-23 17:20:01 +08:00
parent c843fecbce
commit 44c320d8fa
392 changed files with 11786 additions and 0 deletions
+307
View File
@@ -0,0 +1,307 @@
// #![recursion_limit = "4000"]
// #[macro_use]
// extern crate diesel;
// #[macro_use]
// extern crate serde_derive;
extern crate time;
use std::sync::Arc;
use axum::routing::{get, post};
use axum::{Json, Router};
use tracing::{debug, error};
use htycommons::common::HtyResponse;
use htycommons::db::{pool, DbState};
use htycommons::web::{get_uc_url, wrap_json_anyhow_err, wrap_json_ok_resp};
use tower_http::trace::TraceLayer;
use ws_all::*;
use ws_daka::*;
use ws_jihua::*;
use ws_lianxi::*;
mod notifications;
pub mod r_ws;
// mod schema;
mod ws_all;
mod ws_daka;
mod ws_jihua;
mod ws_lianxi;
async fn index() -> &'static str {
debug!("in server get_uc_url -> {}", get_uc_url());
"-=HTYWS=-"
}
/// GET handler that pings the HTYUC service and wraps the outcome in the
/// standard `HtyResponse` JSON envelope.
async fn req_uc() -> Json<HtyResponse<String>> {
    debug!("req_uc -> starts");
    match raw_req_uc().await {
        Ok(ok) => wrap_json_ok_resp(ok),
        Err(e) => {
            error!("req_uc -> failed to req uc, e: {}", e);
            wrap_json_anyhow_err(e)
        }
    }
}
/// Calls the HTYUC service's `index` endpoint and returns the response body
/// tagged with a `ws -> uc :::` prefix.
///
/// # Errors
/// Returns an error when the HTTP request fails or the body cannot be read.
async fn raw_req_uc() -> anyhow::Result<String> {
    let client = reqwest::Client::new();
    // `format!` already yields an owned `String`; the previous
    // `.as_str().to_string()` round-trip allocated a second copy for nothing.
    let msg = format!(
        "ws -> uc ::: {}",
        client
            .get(format!("{}/{}", get_uc_url(), "index"))
            .send()
            .await?
            .text()
            .await?
    );
    debug!("raw_req_uc -> {}", msg.as_str());
    Ok(msg)
}
/// Builds the axum `Router` for the WS service.
///
/// Wires every `/api/v1/ws/...` endpoint to its handler, attaches HTTP
/// tracing, and shares a single DB connection pool (`Arc<DbState>`) with all
/// handlers via axum state.
///
/// NOTE(review): a few state-mutating endpoints (`delete_course_category`,
/// `delete_versioned_data_by_id`) are routed via GET — confirm whether they
/// should be POST like the other deletes.
pub fn ws_rocket(db_url: &str) -> Router {
    // One pool for the whole router; handlers receive it through `State`.
    let db_state = DbState { pool: pool(db_url) };
    let shared_db_state = Arc::new(db_state);
    let app = Router::new()
        // --- read/query endpoints ---
        .route(
            "/api/v1/ws/find_all_course_sections",
            get(find_all_course_sections),
        )
        .route(
            "/api/v1/ws/find_all_course_sections_by_created_by_with_page",
            get(find_all_course_sections_by_created_by_with_page),
        )
        .route(
            "/api/v1/ws/find_all_course_sections_by_teacher_id_with_page",
            get(find_all_course_sections_by_teacher_id_with_page),
        )
        .route("/api/v1/ws/find_all_courses", get(find_all_courses))
        .route(
            "/api/v1/ws/find_all_course_categories",
            get(find_all_course_categories),
        )
        .route(
            "/api/v1/ws/find_all_students_by_teacher_id_and_status",
            post(find_all_students_by_teacher_id_and_status),
        )
        .route(
            "/api/v1/ws/find_all_teachers_by_student_id/{student_id}",
            get(find_all_teachers_by_student_id),
        )
        .route("/api/v1/ws/find_jihua_by_id/{id}", get(find_jihua_by_id))
        .route("/api/v1/ws/find_jihua_by_id2/{id}", get(find_jihua_by_id2))
        .route("/api/v1/ws/find_daka_by_id/{id}", get(find_daka_by_id))
        .route("/api/v1/ws/find_daka_by_id2/{id}", get(find_daka_by_id2))
        .route(
            "/api/v1/ws/find_jihuas_with_sections_by_user_id",
            get(find_jihuas_with_sections_by_user_id),
        )
        .route(
            "/api/v1/ws/find_dakas_with_sections_by_user_id",
            get(find_dakas_with_sections_by_user_id),
        )
        .route("/api/v1/ws/find_course_by_id/{id}", get(find_course_by_id))
        .route(
            "/api/v1/ws/find_course_section_by_id/{id}",
            get(find_course_section_by_id),
        )
        .route(
            "/api/v1/ws/find_course_sections_by_ids",
            post(find_course_sections_by_ids),
        )
        .route(
            "/api/v1/ws/find_jihuas_by_course_section_id/{id}",
            get(find_jihuas_by_course_section_id),
        )
        .route(
            "/api/v1/ws/get_ref_score_by_ref_id_and_task_id/{ref_id}/{task_id}",
            get(get_ref_score_by_ref_id_and_task_id),
        )
        .route(
            "/api/v1/ws/get_comments_by_ref_id/{ref_id}",
            get(get_comments_by_ref_id),
        )
        .route(
            "/api/v1/ws/find_scores_by_ref_id/{ref_id}",
            get(find_scores_by_ref_id),
        )
        .route(
            "/api/v1/ws/get_resource_note_group_by_ref_id/{ref_id}",
            get(get_resource_note_group_by_ref_id),
        )
        .route(
            "/api/v1/ws/find_section_by_coursename",
            get(find_section_by_coursename),
        )
        .route(
            "/api/v1/ws/get_unclaimed_students",
            get(get_unclaimed_students),
        )
        // --- service endpoints ---
        .route("/api/v1/ws/index", get(index))
        .route("/api/v1/ws/req_uc", get(req_uc))
        // --- create/update/delete endpoints ---
        .route(
            "/api/v1/ws/link_teacher_student",
            post(link_teacher_student),
        )
        .route("/api/v1/ws/claim_student", post(claim_student))
        .route("/api/v1/ws/create_jihua", post(create_jihua))
        .route("/api/v1/ws/create_daka", post(create_daka))
        .route(
            "/api/v1/ws/create_course_category",
            post(create_course_category),
        )
        .route(
            "/api/v1/ws/update_course_category",
            post(update_course_category),
        )
        .route("/api/v1/ws/create_comment", post(create_comment))
        .route("/api/v1/ws/create_score", post(create_score))
        .route("/api/v1/ws/update_score", post(update_score))
        .route("/api/v1/ws/update_comment", post(update_comment))
        .route("/api/v1/ws/create_lianxi", post(create_lianxi))
        .route("/api/v1/ws/create_lianxi2", post(create_lianxi2))
        .route("/api/v1/ws/create_piyue", post(create_piyue))
        .route(
            "/api/v1/ws/find_course_group_by_ids",
            post(find_course_group_by_ids),
        )
        .route(
            "/api/v1/ws/find_course_group_by_hty_id/{hty_id}",
            get(find_course_group_by_hty_id),
        )
        .route(
            "/api/v1/ws/create_course_group",
            post(create_course_group),
        )
        .route(
            "/api/v1/ws/update_course_group",
            post(update_course_group),
        )
        .route(
            "/api/v1/ws/delete_course_group/{id_delete}",
            post(delete_course_group),
        )
        .route("/api/v1/ws/update_piyue", post(update_piyue))
        .route("/api/v1/ws/create_piyue2", post(create_piyue2))
        .route("/api/v1/ws/create_course", post(create_course))
        .route("/api/v1/ws/create_course_section", post(create_course_section))
        .route("/api/v1/ws/notify", post(notify))
        .route(
            "/api/v1/ws/delete_jihua_by_id/{id_delete}",
            post(delete_jihua_by_id),
        )
        .route(
            "/api/v1/ws/delete_daka_by_id/{id_delete}",
            post(delete_daka_by_id),
        )
        .route(
            "/api/v1/ws/delete_lianxi_by_id/{id_delete}",
            post(delete_lianxi_by_id),
        )
        .route(
            "/api/v1/ws/delete_lianxi_by_id2/{id_delete}",
            post(delete_lianxi_by_id2),
        )
        .route(
            "/api/v1/ws/delete_course_by_id/{id_delete}",
            post(delete_course_by_id),
        )
        .route(
            "/api/v1/ws/delete_course_section_by_id/{id_delete}",
            post(delete_course_section_by_id),
        )
        .route("/api/v1/ws/disclaim_student", post(disclaim_student))
        // NOTE(review): delete routed via GET — confirm intentional.
        .route(
            "/api/v1/ws/delete_course_category/{id_delete}",
            get(delete_course_category),
        )
        .route("/api/v1/ws/update_jihua", post(update_jihua))
        .route("/api/v1/ws/update_daka", post(update_daka))
        .route(
            "/api/v1/ws/update_jihua_course_section_relations",
            post(update_jihua_course_section_relations),
        )
        .route(
            "/api/v1/ws/create_or_update_resource_note_group",
            post(create_or_update_resource_note_group),
        )
        .route("/api/v1/ws/update_lianxi_by_id", post(update_lianxi_by_id))
        // .route("/api/v1/ws/add_resource_notes", post(add_resource_notes))
        .route("/api/v1/ws/update_course", post(update_course))
        .route(
            "/api/v1/ws/find_ref_resource_by_id/{id}",
            get(find_ref_resource_by_id),
        )
        .route(
            "/api/v1/ws/set_ref_resource_compression_processed/{id}",
            post(set_ref_resource_compression_processed),
        )
        .route(
            "/api/v1/ws/set_ref_resource_synced_with_hty_resource",
            post(set_ref_resource_synced_with_hty_resource),
        )
        .route(
            "/api/v1/ws/set_ref_resource_not_synced_with_hty_resource",
            post(set_ref_resource_not_synced_with_hty_resource),
        )
        .route("/api/v1/ws/update_ref_resource", post(update_ref_resource))
        .route(
            "/api/v1/ws/find_teacher_students_by_ids",
            post(find_teacher_students_by_ids),
        )
        .route("/api/v1/ws/create_ref_resource", post(create_ref_resource))
        .route(
            "/api/v1/ws/create_versioned_data",
            post(create_versioned_data),
        )
        .route(
            "/api/v1/ws/get_versioned_data_by_ref_id/{ref_id}",
            get(get_versioned_data_by_ref_id),
        )
        // NOTE(review): delete routed via GET — confirm intentional.
        .route(
            "/api/v1/ws/delete_versioned_data_by_id/{id}",
            get(delete_versioned_data_by_id),
        )
        .route(
            "/api/v1/ws/find_ref_resources_by_hty_resource_id/{resource_id}",
            get(find_ref_resources_by_hty_resource_id),
        )
        .route(
            "/api/v1/ws/find_draft_course_sections_of_user",
            get(find_draft_course_sections_of_user),
        )
        .route(
            "/api/v1/ws/is_course_section_in_use/{course_section_id}",
            get(is_course_section_in_use),
        )
        .route(
            "/api/v1/ws/update_course_section_by_id",
            post(update_course_section_by_id),
        )
        .route(
            "/api/v1/ws/find_all_teacher_students",
            get(find_all_teacher_students),
        )
        .route(
            "/api/v1/ws/update_teacher_student",
            post(update_teacher_student),
        )
        .route(
            "/api/v1/ws/delete_teacher_student",
            post(delete_teacher_student),
        )
        // HTTP-level request/response tracing for every route above.
        .layer(TraceLayer::new_for_http())
        .with_state(shared_db_state);
    app
}
+24
View File
@@ -0,0 +1,24 @@
// use htycommons::logger::logger_init;
use dotenv::dotenv;
use htycommons::db::get_ws_db_url;
use htycommons::logger::logger_init;
use htycommons::web::{get_ws_port, launch_rocket};
use htyws;
use htyws::ws_rocket;
/// Binary entry point: load `.env`, initialize logging, resolve the listen
/// port, then serve until shutdown.
#[tokio::main]
async fn main() {
    dotenv().ok();
    logger_init();
    // Refuse to start with a missing/invalid WS_PORT.
    let port = match get_ws_port() {
        Ok(p) => p,
        Err(e) => {
            eprintln!("Failed to get WS_PORT: {}", e);
            std::process::exit(1);
        }
    };
    let server = launch_rocket(port, ws_rocket(&get_ws_db_url()));
    let _ = server.await;
    // Reached only after `Shutdown::notify()` or `Ctrl+C`.
    println!("Rocket: deorbit.");
}
+16
View File
@@ -0,0 +1,16 @@
use crate::ws_rocket;
use dotenv::dotenv;
use htycommons::db::get_ws_db_url;
use htycommons::web::{get_ws_port, launch_rocket};
/// Alternate entry point (crate-internal launcher): load `.env`, resolve the
/// port, and serve until shutdown. Does not initialize logging.
#[tokio::main]
pub async fn main() {
    dotenv().ok();
    // Exit up front when WS_PORT is missing or malformed.
    let port = match get_ws_port() {
        Ok(p) => p,
        Err(e) => {
            eprintln!("Failed to get WS_PORT: {}", e);
            std::process::exit(1);
        }
    };
    let _ = launch_rocket(port, ws_rocket(&get_ws_db_url())).await;
}
+511
View File
@@ -0,0 +1,511 @@
use anyhow::anyhow;
use axum::extract::{Path, State};
use axum::Json;
use axum_macros::debug_handler;
use htycommons::common::{current_local_datetime, HtyErr, HtyErrCode, HtyResponse};
use htycommons::db::{extract_conn, fetch_db_conn, DbState};
use htycommons::uuid;
use htycommons::web::{
wrap_json_anyhow_err, wrap_json_ok_resp, HtyHostHeader, HtySudoerTokenHeader,
};
use htyws_models::models::{
Daka, DakaCourseSection, Jihua, JihuaCourseSection, Lianxi, Piyue, PiyueInfo, RefResource,
ReqLianxi, ReqLianxi2,
};
use std::ops::DerefMut;
use std::sync::Arc;
use tracing::{debug, error};
/// Counts a lianxi list on the fly, returning `(total, not_yet_piyue)`.
///
/// `not_yet_piyue` counts entries whose `has_piyue` is `None` or
/// `Some(false)`. Takes a slice; existing `&Vec<Lianxi>` call sites still
/// work via deref coercion.
pub fn count_lianxi_and_piyue_on_the_fly(all_lianxis: &[Lianxi]) -> (i32, i32) {
    // The old code cloned the whole Vec just to read its length.
    let count_lianxi = all_lianxis.len() as i32;
    // `unwrap_or(false)` already maps `None` to "no piyue", so the previous
    // explicit `is_none()` check was redundant.
    let not_piyue_count = all_lianxis
        .iter()
        .filter(|lianxi| !lianxi.has_piyue.unwrap_or(false))
        .count() as i32;
    (count_lianxi, not_piyue_count)
}
/// POST handler: creates a lianxi from the request payload and returns the
/// new record's id in the standard `HtyResponse` envelope.
#[debug_handler]
pub async fn create_lianxi(
    root: HtySudoerTokenHeader,
    host: HtyHostHeader,
    State(db_pool): State<Arc<DbState>>,
    Json(in_lianxi): Json<ReqLianxi>,
) -> Json<HtyResponse<String>> {
    debug!("create_lianxi -> starts");
    let outcome = raw_create_lianxi(&host, &root, &in_lianxi, db_pool).await;
    match outcome {
        Ok(ok) => {
            debug!("create_lianxi -> success to create lianxi, e: {}", ok);
            wrap_json_ok_resp(ok)
        }
        Err(e) => {
            error!("create_lianxi -> failed to create lianxi, e: {}", e);
            wrap_json_anyhow_err(e)
        }
    }
}
/// Inserts a new `Lianxi` row and, when the record hangs off a jihua or daka
/// course section, refreshes that parent's lianxi counter.
///
/// Returns the id of the freshly created lianxi.
///
/// NOTE(review): the insert and the counter update are not in one DB
/// transaction — a failure between them leaves the counter stale.
///
/// # Errors
/// Fails when a DB connection cannot be obtained, a parent lookup fails, or
/// the insert itself fails (the latter wrapped as `HtyErrCode::WebErr`).
pub async fn raw_create_lianxi(
    _host: &HtyHostHeader,
    _root: &HtySudoerTokenHeader,
    in_req_lianxi: &ReqLianxi,
    db_pool: Arc<DbState>,
) -> anyhow::Result<String> {
    let req_lianxi = in_req_lianxi.clone();
    // Cloning the Options directly replaces the old mutable-None + `if let`
    // dance, which was just a long-hand `Option::clone`.
    let in_jihua_course_section_id = req_lianxi.jihua_course_section_id.clone();
    let in_daka_course_section_id = req_lianxi.daka_course_section_id.clone();
    let insert_lianxi = Lianxi {
        id: uuid(),
        video_url: req_lianxi.video_url.clone(), // todo: create ref_resource
        video_id: req_lianxi.id.clone(),
        created_at: Some(current_local_datetime()),
        jihua_course_section_id: in_jihua_course_section_id.clone(),
        daka_course_section_id: in_daka_course_section_id.clone(),
        audio_question_url: req_lianxi.audio_question_url.clone(),
        audio_question_id: req_lianxi.audio_question_id.clone(),
        text_question: req_lianxi.text_question.clone(),
        task: req_lianxi.task.clone(),
        qupu_id: req_lianxi.qupu_id.clone(), // todo: create ref_resource
        qupu_url: req_lianxi.qupu_url.clone(),
        lianxi_type: req_lianxi.lianxi_type.clone(),
        has_piyue: Some(false),
        is_delete: Some(false),
        created_by: req_lianxi.created_by.clone(),
        creator_name: req_lianxi.creator_name.clone(),
    };
    let res = Lianxi::create(
        &insert_lianxi,
        extract_conn(fetch_db_conn(&db_pool)?).deref_mut(),
    );
    match res {
        Ok(res_lianxi) => {
            // The new lianxi changes the parent's lianxi count; recompute it
            // on whichever parent (jihua or daka) this record is attached to.
            debug!("update lianxi_count");
            if let Some(section_id) = &in_jihua_course_section_id {
                let jihua_course_section = JihuaCourseSection::find_by_id(
                    section_id,
                    extract_conn(fetch_db_conn(&db_pool)?).deref_mut(),
                )?;
                let belonging_jihua = Jihua::find_by_id(
                    &jihua_course_section.jihua_id,
                    extract_conn(fetch_db_conn(&db_pool)?).deref_mut(),
                )?;
                let _ = belonging_jihua
                    .update_count(extract_conn(fetch_db_conn(&db_pool)?).deref_mut())?;
            } else if let Some(section_id) = &in_daka_course_section_id {
                let daka_course_section = DakaCourseSection::find_by_id(
                    section_id,
                    extract_conn(fetch_db_conn(&db_pool)?).deref_mut(),
                )?;
                let belonging_daka = Daka::find_by_id(
                    &daka_course_section.daka_id,
                    extract_conn(fetch_db_conn(&db_pool)?).deref_mut(),
                )?;
                let _ = belonging_daka
                    .update_count(extract_conn(fetch_db_conn(&db_pool)?).deref_mut())?;
            }
            Ok(res_lianxi.id)
        }
        Err(e) => Err(anyhow!(HtyErr {
            code: HtyErrCode::WebErr,
            reason: Some("Fail to create lianxi e: ".to_string() + &e.to_string()),
        })),
    }
}
/// POST handler (v2): creates a lianxi whose media arrives as a list of
/// ref-resources; returns the new record's id in the standard envelope.
#[debug_handler]
pub async fn create_lianxi2(
    root: HtySudoerTokenHeader,
    host: HtyHostHeader,
    State(db_pool): State<Arc<DbState>>,
    Json(in_lianxi): Json<ReqLianxi2>,
) -> Json<HtyResponse<String>> {
    debug!("create_lianxi2 -> starts");
    let outcome = raw_create_lianxi2(&host, &root, &in_lianxi, db_pool).await;
    match outcome {
        Ok(ok) => {
            debug!("create_lianxi2 -> success to create lianxi, e: {}", ok);
            wrap_json_ok_resp(ok)
        }
        Err(e) => {
            error!("create_lianxi2 -> failed to create lianxi, e: {}", e);
            wrap_json_anyhow_err(e)
        }
    }
}
/// Inserts a new `Lianxi` row (v2 payload) together with its attached
/// `RefResource` rows, then refreshes the lianxi counter on the parent
/// jihua or daka.
///
/// In the v2 flow, media (video / audio / qupu) arrives as a generic
/// ref-resource list instead of the dedicated columns used by v1; those
/// columns are therefore written as `None` here.
///
/// Returns the id of the created lianxi.
///
/// NOTE(review): the ref-resource inserts, the lianxi insert, and the
/// counter update are not wrapped in a single DB transaction; a mid-way
/// failure can leave ref-resources pointing at a lianxi that was never
/// created. Worth wrapping in a diesel transaction.
///
/// # Errors
/// Fails when a DB connection cannot be obtained, any ref-resource insert or
/// parent lookup fails, or the lianxi insert fails (wrapped as
/// `HtyErrCode::WebErr`).
pub async fn raw_create_lianxi2(
    _host: &HtyHostHeader,
    _root: &HtySudoerTokenHeader,
    in_req_lianxi: &ReqLianxi2,
    db_pool: Arc<DbState>,
) -> anyhow::Result<String> {
    let req_lianxi = in_req_lianxi.clone();
    // Cloning the Options directly replaces the old mutable-None + `if let`
    // dance, which was just a long-hand `Option::clone`.
    let in_jihua_course_section_id = req_lianxi.jihua_course_section_id.clone();
    let in_daka_course_section_id = req_lianxi.daka_course_section_id.clone();
    let id_lianxi = uuid();
    let insert_lianxi = Lianxi {
        id: id_lianxi.clone(),
        video_url: None,
        video_id: None,
        created_at: Some(current_local_datetime()),
        jihua_course_section_id: in_jihua_course_section_id.clone(),
        daka_course_section_id: in_daka_course_section_id.clone(),
        audio_question_url: None,
        audio_question_id: None,
        text_question: req_lianxi.text_question.clone(),
        task: None,
        qupu_id: None,
        qupu_url: None, // todo remove these `None` fields from database Lianxi table.
        lianxi_type: req_lianxi.lianxi_type.clone(),
        has_piyue: Some(false),
        is_delete: Some(false),
        created_by: req_lianxi.created_by.clone(),
        creator_name: req_lianxi.creator_name.clone(),
    };
    // Persist each incoming ref-resource, pointed back at the lianxi id.
    // (Removed ~50 lines of dead commented-out per-media-type inserts that
    // this loop superseded.)
    if let Some(ref_resources) = &req_lianxi.ref_resources {
        for in_ref_resource in ref_resources {
            let to_create_ref_resource = RefResource {
                id: uuid(),
                hty_resource_id: in_ref_resource.hty_resource_id.clone(),
                ref_id: Some(id_lianxi.clone()),
                ref_type: Some("Lianxi".to_string()),
                resource_url: in_ref_resource.resource_url.clone(),
                resource_type: in_ref_resource.resource_type.clone(),
                ref_name: in_ref_resource.ref_name.clone(),
                ref_desc: in_ref_resource.ref_desc.clone(),
                meta: None,
                tasks: in_ref_resource.tasks.clone(),
                is_shifan: in_ref_resource.is_shifan.clone(),
                created_at: in_ref_resource.created_at.clone(),
                updated_at: in_ref_resource.updated_at.clone(),
                compress_processed: in_ref_resource.compress_processed.clone(),
                created_by: in_ref_resource.created_by.clone(),
                synced_with_hty_resource: in_ref_resource.synced_with_hty_resource.clone(),
                updated_by: in_ref_resource.updated_by.clone(),
            };
            debug!(
                "raw_create_lianxi2 -> to_create_ref_resource / {:?}",
                to_create_ref_resource
            );
            let _ = RefResource::create(
                &to_create_ref_resource,
                extract_conn(fetch_db_conn(&db_pool)?).deref_mut(),
            )?;
        }
    }
    let res = Lianxi::create(
        &insert_lianxi,
        extract_conn(fetch_db_conn(&db_pool)?).deref_mut(),
    );
    match res {
        Ok(res_lianxi) => {
            // Recompute the parent's lianxi counter (same logic as
            // `raw_create_lianxi`).
            debug!("update lianxi_count");
            if let Some(section_id) = &in_jihua_course_section_id {
                let jihua_course_section = JihuaCourseSection::find_by_id(
                    section_id,
                    extract_conn(fetch_db_conn(&db_pool)?).deref_mut(),
                )?;
                let belonging_jihua = Jihua::find_by_id(
                    &jihua_course_section.jihua_id,
                    extract_conn(fetch_db_conn(&db_pool)?).deref_mut(),
                )?;
                let _ = belonging_jihua
                    .update_count(extract_conn(fetch_db_conn(&db_pool)?).deref_mut())?;
            } else if let Some(section_id) = &in_daka_course_section_id {
                let daka_course_section = DakaCourseSection::find_by_id(
                    section_id,
                    extract_conn(fetch_db_conn(&db_pool)?).deref_mut(),
                )?;
                let belonging_daka = Daka::find_by_id(
                    &daka_course_section.daka_id,
                    extract_conn(fetch_db_conn(&db_pool)?).deref_mut(),
                )?;
                let _ = belonging_daka
                    .update_count(extract_conn(fetch_db_conn(&db_pool)?).deref_mut())?;
            }
            Ok(res_lianxi.id)
        }
        Err(e) => Err(anyhow!(HtyErr {
            code: HtyErrCode::WebErr,
            reason: Some("Fail to create lianxi e: ".to_string() + &e.to_string()),
        })),
    }
}
/// POST handler: logically deletes a lianxi (and its dependents) by id and
/// returns the deleted record in the standard envelope.
pub async fn delete_lianxi_by_id(
    _root: HtySudoerTokenHeader,
    Path(id_delete): Path<String>,
    State(db_pool): State<Arc<DbState>>,
) -> Json<HtyResponse<Lianxi>> {
    debug!("delete_lianxi_by_id -> start here");
    let outcome = raw_delete_lianxi_by_id(&id_delete, db_pool).await;
    match outcome {
        Ok(ok) => wrap_json_ok_resp(ok),
        Err(e) => {
            error!("delete_lianxi_by_id -> failed to delete lianxi, e: {}", e);
            wrap_json_anyhow_err(e)
        }
    }
}
/// Logically deletes a lianxi plus its dependent piyue / piyue_info rows,
/// then refreshes the lianxi counter on the owning jihua (if any).
///
/// Returns the lianxi as returned by the logical delete.
///
/// NOTE(review): these steps are not in one DB transaction — a failure
/// part-way can leave the lianxi flagged deleted while some piyues are not.
pub async fn raw_delete_lianxi_by_id(
    id_delete: &String,
    db_pool: Arc<DbState>,
) -> anyhow::Result<Lianxi> {
    // Load the record first: its section link and linked piyues are needed
    // after the logical delete below.
    let to_delete_lianxi = Lianxi::find_by_id(
        id_delete,
        extract_conn(fetch_db_conn(&db_pool)?).deref_mut(),
    )?;
    let piyues =
        to_delete_lianxi.find_linked_piyues(extract_conn(fetch_db_conn(&db_pool)?).deref_mut())?;
    let resp = Lianxi::logic_delete_by_id(
        id_delete,
        extract_conn(fetch_db_conn(&db_pool)?).deref_mut(),
    )?;
    // Logically delete every piyue that belonged to this lianxi, together
    // with each piyue's piyue_infos.
    for mut to_delete_piyue in piyues {
        // todo: also logically delete the piyue's comments —
        // comments are not handled yet; only piyue and piyue_info are flagged.
        let piyue_infos = to_delete_piyue
            .find_linked_piyue_infos(extract_conn(fetch_db_conn(&db_pool)?).deref_mut())?;
        for mut to_delete_piyue_info in piyue_infos {
            to_delete_piyue_info.is_delete = Some(true);
            let _ = PiyueInfo::update(
                &to_delete_piyue_info,
                extract_conn(fetch_db_conn(&db_pool)?).deref_mut(),
            )?;
        }
        to_delete_piyue.is_delete = Some(true);
        let _ = Piyue::update(
            &to_delete_piyue,
            extract_conn(fetch_db_conn(&db_pool)?).deref_mut(),
        )?;
    }
    // Refresh the jihua's lianxi count after the delete.
    // NOTE(review): only the jihua-side counter is refreshed here — a
    // daka-linked lianxi gets no counter update in this path; confirm whether
    // that is intentional.
    if let Some(section_id) = &to_delete_lianxi.jihua_course_section_id {
        let relation = JihuaCourseSection::find_by_id(
            section_id,
            extract_conn(fetch_db_conn(&db_pool)?).deref_mut(),
        )?;
        let belonging_jihua = Jihua::find_by_id(
            &relation.jihua_id,
            extract_conn(fetch_db_conn(&db_pool)?).deref_mut(),
        )?;
        let _ = belonging_jihua.update_count(extract_conn(fetch_db_conn(&db_pool)?).deref_mut())?;
    }
    Ok(resp)
}
/// POST handler (v2): logically deletes a lianxi by id and returns it
/// converted to the v2 request shape.
pub async fn delete_lianxi_by_id2(
    _root: HtySudoerTokenHeader,
    Path(id_delete): Path<String>,
    State(db_pool): State<Arc<DbState>>,
) -> Json<HtyResponse<ReqLianxi2>> {
    debug!("delete_lianxi_by_id2 -> start here");
    let outcome = raw_delete_lianxi_by_id2(&id_delete, db_pool).await;
    match outcome {
        Ok(ok) => wrap_json_ok_resp(ok),
        Err(e) => {
            error!("delete_lianxi_by_id2 -> failed to delete lianxi, e: {}", e);
            wrap_json_anyhow_err(e)
        }
    }
}
/// Logically deletes a lianxi and returns it converted to `ReqLianxi2`.
///
/// Unlike the v1 delete, dependent piyue/piyue_info/comment rows are
/// intentionally left untouched: once the lianxi is flagged deleted the
/// front end no longer reaches them.
pub async fn raw_delete_lianxi_by_id2(
    id_delete: &String,
    db_pool: Arc<DbState>,
) -> anyhow::Result<ReqLianxi2> {
    let deleted_lianxi = Lianxi::logic_delete_by_id(
        id_delete,
        extract_conn(fetch_db_conn(&db_pool)?).deref_mut(),
    )?;
    Ok(deleted_lianxi.to_req2())
}
/// POST handler: updates an existing lianxi from the request payload.
///
/// Marked deprecated upstream ("NO USE") but still routed.
pub async fn update_lianxi_by_id(
    _root: HtySudoerTokenHeader,
    State(db_pool): State<Arc<DbState>>,
    Json(in_lianxi): Json<ReqLianxi>,
) -> Json<HtyResponse<String>> {
    debug!("update_lianxi_by_id -> start here");
    let outcome = raw_update_lianxi_by_id(&in_lianxi, db_pool).await;
    match outcome {
        Ok(ok) => {
            debug!("update_lianxi_by_id -> success to update lianxi {}", ok);
            wrap_json_ok_resp(ok)
        }
        Err(e) => {
            error!("update_lianxi_by_id -> failed to update lianxi, e: {}", e);
            wrap_json_anyhow_err(e)
        }
    }
}
/// Overwrites an existing lianxi row wholesale with the request payload.
///
/// # Errors
/// Fails when both course-section ids are absent, when the payload has no id,
/// when no row exists for the given id, or when the DB update itself fails.
pub async fn raw_update_lianxi_by_id(
    in_req_lianxi: &ReqLianxi,
    db_pool: Arc<DbState>,
) -> anyhow::Result<String> {
    let req_lianxi = in_req_lianxi.clone();
    // A lianxi must hang off either a jihua or a daka course section.
    if req_lianxi.jihua_course_section_id.is_none() && req_lianxi.daka_course_section_id.is_none() {
        return Err(anyhow!(HtyErr {
            code: HtyErrCode::WebErr,
            reason: Some(
                "jihua_course_section_id AND daka_course_section_id can not be null at same time"
                    .into()
            ),
        }));
    }
    let id_lianxi = req_lianxi.id.ok_or_else(|| anyhow!("id is required"))?;
    // Reject updates to rows that do not exist.
    let exist = Lianxi::verify_exist_by_id(
        &id_lianxi,
        extract_conn(fetch_db_conn(&db_pool)?).deref_mut(),
    )?;
    if !exist {
        return Err(anyhow!(HtyErr {
            code: HtyErrCode::WebErr,
            reason: Some("no exist lianxi".into()),
        }));
    }
    let some_id_req_jihua_course_section = req_lianxi.jihua_course_section_id.clone();
    let some_id_req_daka_course_section = req_lianxi.daka_course_section_id.clone();
    // NOTE(review): every field below — including `created_at` and the
    // section ids — is overwritten with the client-supplied value with no
    // cross-check against the existing record; confirm that is intended.
    let update_lianxi = Lianxi {
        id: id_lianxi,
        video_url: req_lianxi.video_url,
        video_id: req_lianxi.video_id,
        created_at: req_lianxi.created_at,
        jihua_course_section_id: some_id_req_jihua_course_section.clone(),
        audio_question_url: req_lianxi.audio_question_url,
        audio_question_id: req_lianxi.audio_question_id,
        text_question: req_lianxi.text_question,
        task: req_lianxi.task,
        qupu_id: req_lianxi.qupu_id,
        qupu_url: req_lianxi.qupu_url,
        lianxi_type: req_lianxi.lianxi_type,
        daka_course_section_id: some_id_req_daka_course_section.clone(),
        has_piyue: req_lianxi.has_piyue,
        is_delete: req_lianxi.is_delete,
        created_by: req_lianxi.created_by,
        creator_name: req_lianxi.creator_name,
    };
    let res = Lianxi::update(
        &update_lianxi,
        extract_conn(fetch_db_conn(&db_pool)?).deref_mut(),
    )?;
    Ok(res.id)
}