Phase1 后端核心:
- 新增 fsgx_v1.sql 迁移脚本(is_queue_goods/frozen_points/available_points/no_assess)
- SystemConfigServices 返佣设置扩展(周期人数/分档比例/范围/时机)
- StoreOrderCreateServices 周期循环佣金计算
- StoreOrderTakeServices 佣金发放后同步冻结积分
- StoreProductServices/StoreProduct 保存 is_queue_goods
Phase2 后端接口:
- GET /api/hjf/brokerage/progress 佣金周期进度
- GET /api/hjf/assets/overview 资产总览
- HjfPointsServices 每日 frozen_points 0.4‰ 释放定时任务
- PUT /adminapi/hjf/member/{uid}/no_assess 不考核接口
- GET /adminapi/hjf/points/release_log 积分日志接口
Phase3 前端清理:
- hjfCustom.js 路由精简(仅保留 points/log)
- hjfQueue.js/hjfMember.js API 清理/重定向至 CRMEB 原生接口
- pages.json 公排→推荐佣金/佣金记录/佣金规则
Phase4-5 前端改造:
- queue/status.vue 推荐佣金进度页整体重写
- 商品详情/订单确认/支付结果页文案与逻辑改造
- 个人中心/资产页/引导页/规则页文案改造
- HjfQueueProgress/HjfRefundNotice/HjfAssetCard 组件改造
- 推广中心嵌入佣金进度摘要
- hjfMockData.js 全量更新(公排字段→佣金字段)
Phase6 Admin 增强:
- 用户列表新增 frozen_points/available_points 列及不考核操作按钮
- hjfPoints.js USE_MOCK=false 对接真实积分日志接口
Phase7 配置文档:
- docs/fsgx-phase7-config-checklist.md 后台配置与全链路验收清单
Made-with: Cursor
187 lines
4.5 KiB
PHP
187 lines
4.5 KiB
PHP
<?php
|
|
|
|
/*
|
|
* This file is part of the Symfony package.
|
|
*
|
|
* (c) Fabien Potencier <fabien@symfony.com>
|
|
*
|
|
* For the full copyright and license information, please view the LICENSE
|
|
* file that was distributed with this source code.
|
|
*/
|
|
|
|
namespace Symfony\Component\VarDumper\Caster;
|
|
|
|
use RdKafka\Conf;
|
|
use RdKafka\Exception as RdKafkaException;
|
|
use RdKafka\KafkaConsumer;
|
|
use RdKafka\Message;
|
|
use RdKafka\Metadata\Broker as BrokerMetadata;
|
|
use RdKafka\Metadata\Collection as CollectionMetadata;
|
|
use RdKafka\Metadata\Partition as PartitionMetadata;
|
|
use RdKafka\Metadata\Topic as TopicMetadata;
|
|
use RdKafka\Topic;
|
|
use RdKafka\TopicConf;
|
|
use RdKafka\TopicPartition;
|
|
use Symfony\Component\VarDumper\Cloner\Stub;
|
|
|
|
/**
|
|
* Casts RdKafka related classes to array representation.
|
|
*
|
|
* @author Romain Neutron <imprec@gmail.com>
|
|
*/
|
|
class RdKafkaCaster
{
    /**
     * Exposes the consumer's current subscription and partition assignment,
     * plus broker/topic metadata, as virtual properties.
     */
    public static function castKafkaConsumer(KafkaConsumer $c, array $a, Stub $stub, bool $isNested)
    {
        $prefix = Caster::PREFIX_VIRTUAL;

        try {
            $assignment = $c->getAssignment();
        } catch (RdKafkaException) {
            // No assignment may exist yet (e.g. before the first rebalance);
            // fall back to an empty list rather than letting the dump fail.
            $assignment = [];
        }

        $a += [
            $prefix.'subscription' => $c->getSubscription(),
            $prefix.'assignment' => $assignment,
        ];

        $a += self::extractMetadata($c);

        return $a;
    }

    /**
     * Exposes the topic name as a virtual property.
     */
    public static function castTopic(Topic $c, array $a, Stub $stub, bool $isNested)
    {
        $prefix = Caster::PREFIX_VIRTUAL;

        $a += [
            $prefix.'name' => $c->getName(),
        ];

        return $a;
    }

    /**
     * Exposes offset, partition id and topic name as virtual properties.
     */
    public static function castTopicPartition(TopicPartition $c, array $a)
    {
        $prefix = Caster::PREFIX_VIRTUAL;

        $a += [
            $prefix.'offset' => $c->getOffset(),
            $prefix.'partition' => $c->getPartition(),
            $prefix.'topic' => $c->getTopic(),
        ];

        return $a;
    }

    /**
     * Exposes the message's error string as a virtual property.
     */
    public static function castMessage(Message $c, array $a, Stub $stub, bool $isNested)
    {
        $prefix = Caster::PREFIX_VIRTUAL;

        $a += [
            $prefix.'errstr' => $c->errstr(),
        ];

        return $a;
    }

    /**
     * Exposes every configured key/value pair of the Conf as virtual properties.
     */
    public static function castConf(Conf $c, array $a, Stub $stub, bool $isNested)
    {
        $prefix = Caster::PREFIX_VIRTUAL;

        foreach ($c->dump() as $key => $value) {
            $a[$prefix.$key] = $value;
        }

        return $a;
    }

    /**
     * Exposes every configured key/value pair of the TopicConf as virtual properties.
     */
    public static function castTopicConf(TopicConf $c, array $a, Stub $stub, bool $isNested)
    {
        $prefix = Caster::PREFIX_VIRTUAL;

        foreach ($c->dump() as $key => $value) {
            $a[$prefix.$key] = $value;
        }

        return $a;
    }

    /**
     * Exposes the out-queue length and broker/topic metadata as virtual properties.
     */
    public static function castRdKafka(\RdKafka $c, array $a, Stub $stub, bool $isNested)
    {
        $prefix = Caster::PREFIX_VIRTUAL;

        $a += [
            $prefix.'out_q_len' => $c->getOutQLen(),
        ];

        $a += self::extractMetadata($c);

        return $a;
    }

    /**
     * Flattens a metadata collection into its items.
     */
    public static function castCollectionMetadata(CollectionMetadata $c, array $a, Stub $stub, bool $isNested)
    {
        $a += iterator_to_array($c);

        return $a;
    }

    /**
     * Exposes the topic name and its partitions as virtual properties.
     */
    public static function castTopicMetadata(TopicMetadata $c, array $a, Stub $stub, bool $isNested)
    {
        $prefix = Caster::PREFIX_VIRTUAL;

        $a += [
            $prefix.'name' => $c->getTopic(),
            $prefix.'partitions' => $c->getPartitions(),
        ];

        return $a;
    }

    /**
     * Exposes partition id, error code and leader broker id as virtual properties.
     */
    public static function castPartitionMetadata(PartitionMetadata $c, array $a, Stub $stub, bool $isNested)
    {
        $prefix = Caster::PREFIX_VIRTUAL;

        $a += [
            $prefix.'id' => $c->getId(),
            $prefix.'err' => $c->getErr(),
            $prefix.'leader' => $c->getLeader(),
        ];

        return $a;
    }

    /**
     * Exposes broker id, host and port as virtual properties.
     */
    public static function castBrokerMetadata(BrokerMetadata $c, array $a, Stub $stub, bool $isNested)
    {
        $prefix = Caster::PREFIX_VIRTUAL;

        $a += [
            $prefix.'id' => $c->getId(),
            $prefix.'host' => $c->getHost(),
            $prefix.'port' => $c->getPort(),
        ];

        return $a;
    }

    /**
     * Fetches cluster metadata (origin broker, brokers, topics) with a short
     * timeout, returning an empty array when the broker is unreachable so
     * dumping never blocks or throws.
     */
    private static function extractMetadata(KafkaConsumer|\RdKafka $c)
    {
        $prefix = Caster::PREFIX_VIRTUAL;

        try {
            // all_topics=true, no specific topic, 500ms timeout: keep the dump fast.
            $m = $c->getMetadata(true, null, 500);
        } catch (RdKafkaException) {
            // Metadata is best-effort; an unreachable cluster must not break the dump.
            return [];
        }

        return [
            $prefix.'orig_broker_id' => $m->getOrigBrokerId(),
            $prefix.'orig_broker_name' => $m->getOrigBrokerName(),
            $prefix.'brokers' => $m->getBrokers(),
            $prefix.'topics' => $m->getTopics(),
        ];
    }
}
|