chore: add core rust project files and diesel migrations

Track required workspace crates, scripts, and historical diesel migrations so the repository contains the complete runnable backend baseline.

Made-with: Cursor
This commit is contained in:
2026-04-23 17:20:01 +08:00
parent c843fecbce
commit 44c320d8fa
392 changed files with 11786 additions and 0 deletions
+23
View File
@@ -0,0 +1,23 @@
[package]
name = "htykc"
version = { workspace = true }
authors = { workspace = true }
edition = { workspace = true }
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies]
htycommons = { workspace = true }
htyuc_models = { workspace = true }
htyuc_remote = { workspace = true }
htykc_models = { path = "../htykc_models" }
anyhow = { workspace = true }
axum = { workspace = true }
axum-macros = { workspace = true }
diesel = { workspace = true }
dotenv = { workspace = true }
serde = { workspace = true }
serde_json = { workspace = true }
tokio = { workspace = true }
tower-http = { workspace = true }
tracing = { workspace = true }
+96
View File
@@ -0,0 +1,96 @@
use crate::ws_kecheng::*;
use crate::ws_repeat_kecheng::{create_kecheng_repeat, update_kecheng_repeat};
use axum::extract::State;
use axum::routing::{get, post};
use axum::{Json, Router};
use axum_macros::debug_handler;
use htycommons::common::*;
use htycommons::db::*;
use htycommons::models::PushInfo;
use htycommons::web::{
wrap_json_anyhow_err, wrap_json_ok_resp, AuthorizationHeader, HtyHostHeader,
HtySudoerTokenHeader,
};
use std::sync::Arc;
use tower_http::trace::TraceLayer;
use tracing::{debug, error};
mod notifications;
mod ws_kecheng;
mod ws_repeat_kecheng;
#[debug_handler]
async fn notify(
root: HtySudoerTokenHeader,
auth: AuthorizationHeader,
host: HtyHostHeader,
State(db_pool): State<Arc<DbState>>,
push_info: Json<PushInfo>,
) -> Json<HtyResponse<()>> {
debug!("notify -> starts / push_info: {:?}", push_info);
let info_push = push_info.0;
match notifications::raw_notify(info_push, root, auth, host, db_pool).await {
Ok(res) => {
debug!("notify -> success to notify!");
wrap_json_ok_resp(res)
}
Err(e) => {
error!("notify -> failed to notify, e: {}", e);
wrap_json_anyhow_err(e)
}
}
}
/// Builds the KC service's axum `Router`: wires every `/api/v1/kc/*` route,
/// attaches HTTP tracing, and shares one DB pool (built from `db_url`) as state.
pub fn kc_rocket(db_url: &str) -> Router {
    // One pool for the whole router; handlers receive it via `State<Arc<DbState>>`.
    let shared_db_state = Arc::new(DbState { pool: pool(db_url) });
    Router::new()
        .route(
            "/api/v1/kc/create_kecheng_with_kecheng_repeat",
            post(create_kecheng_with_kecheng_repeat),
        )
        .route("/api/v1/kc/update_kecheng", post(update_kecheng))
        .route(
            "/api/v1/kc/create_kecheng_repeat",
            post(create_kecheng_repeat),
        )
        .route(
            "/api/v1/kc/update_kecheng_repeat",
            post(update_kecheng_repeat),
        )
        .route(
            "/api/v1/kc/find_kechengs_by_daka_ids",
            post(find_kechengs_by_daka_ids),
        )
        .route(
            "/api/v1/kc/find_kecheng_repeat_by_id/{id}",
            get(find_kecheng_repeat_by_id),
        )
        .route(
            "/api/v1/kc/find_kechengs_by_hty_id",
            get(find_kechengs_by_hty_id),
        )
        .route(
            "/api/v1/kc/find_all_non_repeatable_kechengs_within_date_range_by_hty_id",
            get(find_all_non_repeatable_kechengs_within_date_range_by_hty_id),
        )
        .route(
            "/api/v1/kc/find_all_non_repeatable_kechengs_within_date_range",
            get(find_all_non_repeatable_kechengs_within_date_range),
        )
        .route(
            "/api/v1/kc/find_all_repeatable_kechengs_within_date_range",
            get(find_all_repeatable_kechengs_within_date_range),
        )
        .route(
            "/api/v1/kc/find_all_repeatable_kechengs_within_date_range_by_hty_id",
            get(find_all_repeatable_kechengs_within_date_range_by_hty_id),
        )
        .route("/api/v1/kc/notify", post(notify))
        .layer(TraceLayer::new_for_http())
        .with_state(shared_db_state)
}
+22
View File
@@ -0,0 +1,22 @@
use dotenv::dotenv;
use htycommons::db::get_kc_db_url;
use htycommons::logger::logger_init;
use htycommons::web::{get_kc_port, launch_rocket};
use htykc::kc_rocket;
#[tokio::main]
async fn main() {
    // Load .env (when present) and set up logging before anything else runs.
    dotenv().ok();
    logger_init();
    // The KC port is mandatory; exit with a diagnostic when it is unavailable.
    let port = match get_kc_port() {
        Ok(p) => p,
        Err(e) => {
            eprintln!("Failed to get KC_PORT: {}", e);
            std::process::exit(1);
        }
    };
    // Serve until shutdown; the result of the server future is deliberately ignored.
    let _ = launch_rocket(port, kc_rocket(&get_kc_db_url())).await;
    // this is reachable only after `Shutdown::notify()` or `Ctrl+C`.
    println!("Rocket: deorbit.");
}
+172
View File
@@ -0,0 +1,172 @@
use anyhow::anyhow;
use axum::extract::State;
use axum::Json;
use htycommons::common::{HtyErr, HtyErrCode, HtyResponse};
use htycommons::db::{extract_conn, fetch_db_conn, DbState};
use htycommons::jwt::jwt_decode_token;
use htycommons::uuid;
use htycommons::web::{
wrap_json_anyhow_err, wrap_json_ok_resp, AuthorizationHeader, HtyHostHeader,
HtySudoerTokenHeader,
};
use htykc_models::models::{KechengRepeat, ReqKechengRepeat};
use std::ops::DerefMut;
use std::sync::Arc;
use tracing::{debug, error};
/// HTTP handler: creates a `KechengRepeat` from the posted payload and echoes
/// the stored row back in request form; errors are wrapped into the JSON envelope.
pub async fn create_kecheng_repeat(
    sudoer: HtySudoerTokenHeader,
    host: HtyHostHeader,
    auth: AuthorizationHeader,
    State(db_pool): State<Arc<DbState>>,
    Json(in_kecheng_repeat): Json<ReqKechengRepeat>,
) -> Json<HtyResponse<ReqKechengRepeat>> {
    debug!("create_kecheng_repeat -> starts");
    let outcome = raw_create_kecheng_repeat(auth, sudoer, host, db_pool, &in_kecheng_repeat).await;
    match outcome {
        Ok(created_kecheng) => wrap_json_ok_resp(created_kecheng.to_req()),
        Err(e) => {
            error!(
                "create_kecheng_repeat -> failed to create kecheng_repeat, e: {}",
                e
            );
            wrap_json_anyhow_err(e)
        }
    }
}
/// Validates the incoming payload, resolves the caller's `hty_id` from the
/// bearer token, and inserts a freshly-id'd `KechengRepeat` row.
///
/// # Errors
/// Returns an `anyhow`-wrapped `HtyErr` when a required field is missing, when
/// the JWT cannot be decoded or carries no `hty_id`, or when the insert fails.
async fn raw_create_kecheng_repeat(
    token: AuthorizationHeader,
    _sudoer: HtySudoerTokenHeader,
    _host: HtyHostHeader,
    db_pool: Arc<DbState>,
    in_kecheng_repeat: &ReqKechengRepeat,
) -> anyhow::Result<KechengRepeat> {
    // These four fields are mandatory for a repeat schedule.
    if in_kecheng_repeat.kecheng_id.is_none()
        || in_kecheng_repeat.start_from.is_none()
        || in_kecheng_repeat.end_by.is_none()
        || in_kecheng_repeat.repeat_cycle_days.is_none()
    {
        return Err(anyhow!(HtyErr {
            code: HtyErrCode::WebErr,
            // Fix: the previous message also named `name` and `duration`,
            // which this guard never checks; list only the validated fields.
            reason: Some(
                "kecheng_id or start_from or end_by or repeat_cycle_days is none".into()
            ),
        }));
    }
    // The request must be made on behalf of a concrete user.
    let id_user = jwt_decode_token(&token.0)?
        .hty_id
        .ok_or_else(|| anyhow!("hty_id is required"))?;
    debug!("raw_create_kecheng_repeat -> {:?}", id_user);
    // Server-side id generation: the client never supplies the primary key.
    let kecheng_repeat_id = uuid();
    let db_kecheng_repeat = KechengRepeat {
        id: kecheng_repeat_id.clone(),
        kecheng_id: in_kecheng_repeat.kecheng_id.clone(),
        start_from: in_kecheng_repeat.start_from.clone(),
        end_by: in_kecheng_repeat.end_by.clone(),
        repeat_start: in_kecheng_repeat.repeat_start.clone(),
        repeat_cycle_days: in_kecheng_repeat.repeat_cycle_days.clone(),
        repeat_end: in_kecheng_repeat.repeat_end.clone(),
        repeat_status: in_kecheng_repeat.repeat_status.clone(),
        latest_kc_created_at: in_kecheng_repeat.latest_kc_created_at.clone(),
    };
    let created_kecheng_repeat_result = KechengRepeat::create(
        &db_kecheng_repeat,
        extract_conn(fetch_db_conn(&db_pool)?).deref_mut(),
    );
    // Re-wrap the storage error into the project's error envelope.
    match created_kecheng_repeat_result {
        Ok(res) => Ok(res),
        Err(e) => Err(anyhow!(HtyErr {
            code: HtyErrCode::WebErr,
            reason: Some(format!("fail to create kecheng_repeat e: {}", e)),
        })),
    }
}
/// HTTP handler: updates an existing `KechengRepeat` and returns the stored
/// row; errors are wrapped into the JSON envelope.
pub async fn update_kecheng_repeat(
    sudoer: HtySudoerTokenHeader,
    host: HtyHostHeader,
    auth: AuthorizationHeader,
    State(db_pool): State<Arc<DbState>>,
    Json(in_kecheng_repeat): Json<ReqKechengRepeat>,
) -> Json<HtyResponse<KechengRepeat>> {
    debug!("update_kecheng_repeat -> starts");
    let outcome = raw_update_kecheng_repeat(auth, sudoer, host, db_pool, &in_kecheng_repeat).await;
    match outcome {
        Ok(ok) => wrap_json_ok_resp(ok),
        Err(e) => {
            error!(
                "update_kecheng_repeat -> failed to update kecheng, e: {}",
                e
            );
            wrap_json_anyhow_err(e)
        }
    }
}
/// Loads the `KechengRepeat` named by `in_kecheng_repeat.id`, overwrites its
/// mutable fields from the request, and persists the result.
///
/// Note: `latest_kc_created_at` is intentionally left untouched here — it is
/// maintained by the creation path, not by client updates.
///
/// # Errors
/// Returns an `anyhow`-wrapped `HtyErr` when the id is missing, the row does
/// not exist, or the DB read/write fails.
async fn raw_update_kecheng_repeat(
    _token: AuthorizationHeader,
    _sudoer: HtySudoerTokenHeader,
    _host: HtyHostHeader,
    db_pool: Arc<DbState>,
    in_kecheng_repeat: &ReqKechengRepeat,
) -> anyhow::Result<KechengRepeat> {
    debug!(
        "raw_update_kecheng_repeat -> in_kecheng_repeat: {:?}",
        in_kecheng_repeat
    );
    // Fix: the original checked `id.is_none()` and then re-checked the same
    // field with a second, unreachable `ok_or_else` error. A single let-else
    // guard keeps the one reachable error path.
    let Some(id_kecheng_repeat) = in_kecheng_repeat.id.as_ref() else {
        return Err(anyhow!(HtyErr {
            code: HtyErrCode::WebErr,
            reason: Some("kecheng_repeat_id is none".into()),
        }));
    };
    let some_db_kecheng_repeat = KechengRepeat::find_by_id(
        id_kecheng_repeat,
        extract_conn(fetch_db_conn(&db_pool)?).deref_mut(),
    )?;
    debug!(
        "raw_update_kecheng_repeat -> some_db_kecheng_repeat: {:?}",
        some_db_kecheng_repeat
    );
    if let Some(mut c_kecheng_repeat) = some_db_kecheng_repeat {
        // set new values of kecheng_repeat
        c_kecheng_repeat.kecheng_id = in_kecheng_repeat.kecheng_id.clone();
        c_kecheng_repeat.repeat_cycle_days = in_kecheng_repeat.repeat_cycle_days.clone();
        c_kecheng_repeat.repeat_start = in_kecheng_repeat.repeat_start.clone();
        c_kecheng_repeat.repeat_end = in_kecheng_repeat.repeat_end.clone();
        c_kecheng_repeat.repeat_status = in_kecheng_repeat.repeat_status.clone();
        c_kecheng_repeat.start_from = in_kecheng_repeat.start_from.clone();
        c_kecheng_repeat.end_by = in_kecheng_repeat.end_by.clone();
        debug!(
            "raw_update_kecheng_repeat -> updated kecheng_repeat: {:?}",
            c_kecheng_repeat
        );
        let res = KechengRepeat::update(
            &c_kecheng_repeat,
            extract_conn(fetch_db_conn(&db_pool)?).deref_mut(),
        )?;
        Ok(res)
    } else {
        Err(anyhow!(HtyErr {
            code: HtyErrCode::WebErr,
            reason: Some("this kecheng_repeat is not found".into()),
        }))
    }
}
Executable
+9
View File
@@ -0,0 +1,9 @@
#!/bin/sh
# Launch the htykc service in the background, appending a timestamped
# separator plus all build/run output to htykc.log.
set -x
echo "----------------------------" >> htykc.log
echo "$(date)" >> htykc.log
echo "----------------------------" >> htykc.log
# Fix: redirect stderr as well — cargo emits build diagnostics on stderr,
# which previously landed in nohup.out (or were lost) instead of htykc.log.
nohup cargo run >> htykc.log 2>&1 &