소스 검색

增加批次表

晋江
OCEAN 1 개월 전
부모
커밋
e5605c9e66
4개의 변경된 파일, 251줄 추가 및 2줄 삭제
  1. +116
    -2
      tcp_client/config.toml
  2. +135
    -0
      tcp_server/src/main.rs
  3. BIN
      tcp_server/target/debug/tcp_server.exe
  4. BIN
      tcp_server/target/debug/tcp_server.pdb

+ 116
- 2
tcp_client/config.toml 파일 보기

@@ -23,8 +23,122 @@ password = "Auseft@2025qwer"


[[tables]]
name = "erp_shipping_details"
query = "SELECT id, plan_id, vehicle_id, vehicle_number, gross_weight::text as gross_weight, tare_weight::text as tare_weight, net_weight::text as net_weight, card_number, appointment_number, appointment_time, appointment_weight::text as appointment_weight, is_cancel, is_end, driver_phone, remarks, create_time, create_by, update_time, update_by FROM public.erp_shipping_details"

# NOTE(review): the comm_bch_batch entry was appended without its own
# [[tables]] header, which put a second name/query pair inside the
# erp_shipping_details table — duplicate keys are a parse error in TOML.
# The missing header is added below.
[[tables]]
name = "comm_bch_batch"
# batching_weight is cast with ::text but carries no explicit alias, unlike
# the other queries in this file (`::text as ...`) — presumably the column
# still arrives named batching_weight; confirm, and consider spelling the
# alias out for consistency.
query = "SELECT id, tenant_id, supply_id, material_id, batch_record_id, sample_type, related_code, batching_start_time, batching_end_time, batching_start_ticket, batching_end_ticket, batching_quantity, batching_weight::text, batching_code, batching_code_creator, batching_code_creation_time, preparation_code, preparation_code_creator, preparation_code_creation_time, assay_code, assay_code_creator, assay_code_creation_time, creator_id, creation_time, last_modifier_id, last_modification_reason, last_modification_time, deleted, deleter_id, deletion_reason, deletion_time, source_type FROM comm_bch_batch"
incremental = false
# NOTE(review): key_field looks inert while incremental = false — confirm the
# client ignores it in full-refresh mode.
key_field = "UpdateTime"



# Batch-level delivering/sampling operations. Weight columns are cast to
# ::text so numerics travel as strings to the TCP client.
[[tables]]
name = "comm_bch_delivering_sampling"
query = "SELECT id, tenant_id, supply_id, material_id, sample_type, operation_type, business_type, delivering_type, delivering_code, delivering_quantity, delivering_unit, delivering_weight::text as delivering_weight, delivering_weights, delivering_remark, deliverer, delivery_time, sampling, sampling_type, sampling_code, sampling_quantity, sampling_unit, sampling_weight::text as sampling_weight, sampling_weights, sampling_remark, sampler, sample_time, creator_id, creation_time, last_modifier_id, last_modification_reason, last_modification_time, deleted, deleter_id, deletion_reason, deletion_time FROM comm_bch_delivering_sampling "
incremental = false
# NOTE(review): key_field appears unused while incremental = false — confirm.
key_field = "UpdateTime"


# Same column shape as the entry above, plus bch_ds_id referencing the
# batch-level delivering/sampling row.
[[tables]]
name = "comm_sp_delivering_sampling"
query = "SELECT id, tenant_id, supply_id, material_id, sample_type, bch_ds_id, operation_type, business_type, delivering_type, delivering_code, delivering_quantity, delivering_unit, delivering_weight::text as delivering_weight, delivering_weights, delivering_remark, deliverer, delivery_time, sampling, sampling_type, sampling_code, sampling_quantity, sampling_unit, sampling_weight::text as sampling_weight, sampling_weights, sampling_remark, sampler, sample_time, creator_id, creation_time, last_modifier_id, last_modification_reason, last_modification_time, deleted, deleter_id, deletion_reason, deletion_time FROM comm_sp_delivering_sampling "
incremental = false
key_field = "UpdateTime"



# Assay records: only unapproved rows (hy_approve=0) accepted within the last
# 10 days are synced; every measurement column is shipped as text.
# NOTE(review): "cad ::text" has a stray space before the cast — appears to
# parse in Postgres, but worth normalizing to match the other columns.
[[tables]]
name = "hy_record"
query = "SELECT id, hy_code, type, hy_check, hy_approve, check_time, approve_time, approve_user, check_user, hy_time, hy_values, accept_time, accept_user, mt::text as mt, mad::text as mad, aad::text as aad, ad::text as ad, vad::text as vad, vd::text as vd, var::text as var, vdaf::text as vdaf, fcad::text as fcad, st_ar::text as st_ar, st_ad::text as st_ad, st_d::text as st_d, had::text as had, hd::text as hd, qb_ad::text as qb_ad, qgr_ad::text as qgr_ad, qgr_d::text as qgr_d, qnet_ar_mj_kg::text as qnet_ar_mj_kg, qnet_ar_j_cal::text as qnet_ar_j_cal, v::text as v, aar::text as aar, qnet_ar::text as qnet_ar, qnet_ar1::text as qnet_ar1, crc::text as crc, st_daf::text as st_daf, cad ::text as cad, cd::text as cd, isauto, hy_type, isnormal FROM hy_record where hy_approve=0 and accept_time >= NOW() - INTERVAL '10 days' "
incremental = false
key_field = "UpdateTime"


# Detail rows belonging to the hy_record subset above — the same
# unapproved / last-10-days window, applied via a subquery on record_id.
[[tables]]
name = "hy_itemdetail"
query = "SELECT * FROM hy_itemdetail where record_id in (select id from hy_record where hy_approve=0 and accept_time >= NOW() - INTERVAL '10 days') "
incremental = false
key_field = "UpdateTime"

# Norm values joined back to the same record window. The "explain" column is
# double-quoted — presumably because EXPLAIN is a SQL keyword; confirm.
[[tables]]
name = "hy_norm"
query = " select id,norm_id,zbvalues::text as zbvalues,itemdetail_id,hy_user,checktime,\"explain\" from hy_norm where itemdetail_id in (SELECT t1.id FROM hy_itemdetail t1 inner join hy_record t2 on t1.record_id=t2.id where t2.hy_approve=0 and t2.accept_time >= NOW() - INTERVAL '10 days')"
incremental = false
key_field = "UpdateTime"


# Full-table refresh (SELECT *) of the instrument registry.
[[tables]]
name = "hy_instrument"
query = "SELECT * FROM \"hy_instrument\""
incremental = false
key_field = "UpdateTime"

# Full-table refresh of sample information.
[[tables]]
name = "hy_information"
query = "SELECT * FROM \"hy_information\""
incremental = false
key_field = "UpdateTime"



# Full-table refresh of allotment records.
[[tables]]
name = "hy_allot"
query = "SELECT * FROM \"hy_allot\""
incremental = false
# NOTE(review): key_field appears unused while incremental = false — confirm.
key_field = "UpdateTime"

# Full-table refresh of sampling tasks.
[[tables]]
name = "hy_cytask"
query = "SELECT * FROM \"hy_cytask\""
incremental = false
key_field = "UpdateTime"

# Full-water samples; tonnage and moisture (mt) are shipped as text.
[[tables]]
name = "hy_fullwatersample"
query = "select id,qs_code,qs_tonnage::text as qs_tonnage,mt::text as mt,remark,onecode,towcode,fx_code,fx_onecode,fx_twocode from hy_fullwatersample"
incremental = false
key_field = "UpdateTime"

# Full-table refresh of information/norm links.
[[tables]]
name = "hy_informationnorm"
query = "SELECT * FROM \"hy_informationnorm\""
incremental = false
key_field = "UpdateTime"







# Full-table refresh of laboratory/instrument assignments.
[[tables]]
name = "hy_laboratoryinstrument"
query = "SELECT * FROM \"hy_laboratoryinstrument\""
incremental = false
# NOTE(review): key_field appears unused while incremental = false — confirm.
key_field = "UpdateTime"

# Full-table refresh of material-analysis types.
[[tables]]
name = "hy_materialanalysis_type"
query = "SELECT * FROM \"hy_materialanalysis_type\""
incremental = false
key_field = "UpdateTime"

# Full-table refresh of material details.
[[tables]]
name = "hy_materialdetail"
query = "SELECT * FROM \"hy_materialdetail\""
incremental = false
key_field = "UpdateTime"



# Full-table refresh of manual weight inputs.
[[tables]]
name = "hy_weight_input"
query = "SELECT * FROM \"hy_weight_input\""
incremental = false
key_field = "UpdateTime"


# Temperature/humidity log; numerics shipped as text.
# NOTE(review): "humidity::text humidity" uses an implicit alias (no AS) —
# appears valid in Postgres, but the rest of the file writes "as ..."; worth
# normalizing.
[[tables]]
name = "hy_warmhumid"
query = "select id,laboratoryid,temperature::text as temperature,humidity::text humidity,begintime,endtime, username from hy_warmhumid"
incremental = false
key_field = "UpdateTime"


+ 135
- 0
tcp_server/src/main.rs 파일 보기

@@ -571,6 +571,44 @@ struct ErpShippingPlan {
warehouse_name: Option<String>
}

// Row model for `public.comm_bch_batch`, deserialized via serde from the
// JSON payload the TCP client builds out of the `comm_bch_batch` query in
// tcp_client/config.toml. Consumed by `insert_comm_bch_batch`.
//
// NOTE(review): the client query casts batching_weight to ::text, so the
// JSON carries a string — assumes the Decimal type's serde impl accepts
// string input; confirm crate feature flags.
#[derive(Debug, Deserialize)]
struct CommBchBatch {
    // Source-side identifier; `insert_comm_bch_batch` keys its lookup on it.
    id: i64,
    tenant_id: Option<i64>,
    supply_id: i64,
    material_id: i64,
    batch_record_id: Option<i32>,
    sample_type: i32,
    related_code: Option<String>,
    batching_start_time: Option<NaiveDateTime>,
    batching_end_time: Option<NaiveDateTime>,
    batching_start_ticket: Option<String>,
    batching_end_ticket: Option<String>,
    batching_quantity: Option<i32>,
    batching_weight: Option<Decimal>,
    // Required: deserialization fails if the payload omits it.
    batching_code: String,
    batching_code_creator: Option<String>,
    batching_code_creation_time: Option<NaiveDateTime>,
    preparation_code: Option<String>,
    preparation_code_creator: Option<String>,
    preparation_code_creation_time: Option<NaiveDateTime>,
    assay_code: Option<String>,
    assay_code_creator: Option<String>,
    assay_code_creation_time: Option<NaiveDateTime>,
    creator_id: Option<i64>,
    // Required timestamp — every payload must include creation_time.
    creation_time: NaiveDateTime,
    last_modifier_id: Option<i64>,
    last_modification_reason: Option<String>,
    last_modification_time: Option<NaiveDateTime>,
    // The wire value is a string; a project-local helper (defined elsewhere
    // in this file) converts it to bool.
    #[serde(deserialize_with = "deserialize_string_to_bool")]
    deleted: bool,
    deleter_id: Option<i64>,
    deletion_reason: Option<String>,
    deletion_time: Option<NaiveDateTime>,
    source_type: Option<i16>,
    // Not present in the configured SELECT list shown in config.toml —
    // missing Option fields deserialize to None, so this arrives as None
    // unless the client query is extended. TODO confirm intent.
    batching_date: Option<NaiveDateTime>
}

#[derive(Debug, Deserialize)]
struct WtWeightInfo {
id: i32,
@@ -1224,6 +1262,88 @@ where
}
}

/// Upserts one `CommBchBatch` row into `public.comm_bch_batch`, keyed by `id`.
///
/// The previous implementation issued `SELECT COUNT(*)` and then a separate
/// `INSERT` or `UPDATE`. That is two round trips per row and a check-then-act
/// race: two concurrent writers can both observe "missing" and then collide
/// on the insert. A single `INSERT ... ON CONFLICT (id) DO UPDATE` is atomic
/// and makes one trip, with identical end state for every input.
///
/// `OVERRIDING SYSTEM VALUE` is kept from the original so the client-supplied
/// `id` wins over the identity column's generated value.
///
/// NOTE(review): `ON CONFLICT (id)` requires a unique constraint on `id`.
/// The old code keyed every lookup on `id` alone and the column is a system
/// identity, so it is presumably the primary key — confirm against the schema.
///
/// # Errors
/// Propagates any `PgError` from the statement execution.
async fn insert_comm_bch_batch(client: &tokio_postgres::Client, info: &CommBchBatch) -> Result<(), PgError> {
    client
        .execute(
            "INSERT INTO public.comm_bch_batch (
                id, tenant_id, supply_id, material_id, batch_record_id,
                sample_type, related_code, batching_start_time,
                batching_end_time, batching_start_ticket, batching_end_ticket,
                batching_quantity, batching_weight, batching_code,
                batching_code_creator, batching_code_creation_time,
                preparation_code, preparation_code_creator,
                preparation_code_creation_time, assay_code,
                assay_code_creator, assay_code_creation_time,
                creator_id, creation_time, last_modifier_id,
                last_modification_reason, last_modification_time,
                deleted, deleter_id, deletion_reason,
                deletion_time, source_type, batching_date
            ) OVERRIDING SYSTEM VALUE VALUES (
                $1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12,
                $13, $14, $15, $16, $17, $18, $19, $20, $21, $22, $23,
                $24, $25, $26, $27, $28, $29, $30, $31, $32, $33
            )
            ON CONFLICT (id) DO UPDATE SET
                tenant_id = EXCLUDED.tenant_id,
                supply_id = EXCLUDED.supply_id,
                material_id = EXCLUDED.material_id,
                batch_record_id = EXCLUDED.batch_record_id,
                sample_type = EXCLUDED.sample_type,
                related_code = EXCLUDED.related_code,
                batching_start_time = EXCLUDED.batching_start_time,
                batching_end_time = EXCLUDED.batching_end_time,
                batching_start_ticket = EXCLUDED.batching_start_ticket,
                batching_end_ticket = EXCLUDED.batching_end_ticket,
                batching_quantity = EXCLUDED.batching_quantity,
                batching_weight = EXCLUDED.batching_weight,
                batching_code = EXCLUDED.batching_code,
                batching_code_creator = EXCLUDED.batching_code_creator,
                batching_code_creation_time = EXCLUDED.batching_code_creation_time,
                preparation_code = EXCLUDED.preparation_code,
                preparation_code_creator = EXCLUDED.preparation_code_creator,
                preparation_code_creation_time = EXCLUDED.preparation_code_creation_time,
                assay_code = EXCLUDED.assay_code,
                assay_code_creator = EXCLUDED.assay_code_creator,
                assay_code_creation_time = EXCLUDED.assay_code_creation_time,
                creator_id = EXCLUDED.creator_id,
                creation_time = EXCLUDED.creation_time,
                last_modifier_id = EXCLUDED.last_modifier_id,
                last_modification_reason = EXCLUDED.last_modification_reason,
                last_modification_time = EXCLUDED.last_modification_time,
                deleted = EXCLUDED.deleted,
                deleter_id = EXCLUDED.deleter_id,
                deletion_reason = EXCLUDED.deletion_reason,
                deletion_time = EXCLUDED.deletion_time,
                source_type = EXCLUDED.source_type,
                batching_date = EXCLUDED.batching_date",
            &[
                &info.id, &info.tenant_id, &info.supply_id, &info.material_id,
                &info.batch_record_id, &info.sample_type, &info.related_code,
                &info.batching_start_time, &info.batching_end_time,
                &info.batching_start_ticket, &info.batching_end_ticket,
                &info.batching_quantity, &info.batching_weight, &info.batching_code,
                &info.batching_code_creator, &info.batching_code_creation_time,
                &info.preparation_code, &info.preparation_code_creator,
                &info.preparation_code_creation_time, &info.assay_code,
                &info.assay_code_creator, &info.assay_code_creation_time,
                &info.creator_id, &info.creation_time, &info.last_modifier_id,
                &info.last_modification_reason, &info.last_modification_time,
                &info.deleted, &info.deleter_id, &info.deletion_reason,
                &info.deletion_time, &info.source_type, &info.batching_date,
            ],
        )
        .await?;
    Ok(())
}

async fn insert_wt_weight_info(client: &tokio_postgres::Client, info: &WtWeightInfo) -> Result<(), PgError> {
// Check if record exists using bill_no and vehicle_no
let exists = client
@@ -2789,6 +2909,21 @@ async fn handle_client(socket: &mut TcpStream, client: &tokio_postgres::Client)
false
}
},
"comm_bch_batch" => {
if let Ok(info) = serde_json::from_str::<CommBchBatch>(data_str) {
println!("接收到comm_bch_batch信息: {:?}", info);
match insert_comm_bch_batch(client, &info).await {
Ok(_) => true,
Err(e) => {
eprintln!("插入comm_bch_batch信息失败: {}", e);
false
}
}
} else {
eprintln!("解析comm_bch_batch信息失败");
false
}
},
_ => {
eprintln!("未知的表名: {}", table_name);
false


BIN
tcp_server/target/debug/tcp_server.exe 파일 보기


BIN
tcp_server/target/debug/tcp_server.pdb 파일 보기


불러오는 중...
취소
저장